#!/usr/bin/env python3
from utilities import filesFromList, vtkWriteDataStructured2d, vtkWriteHeaderAndGridStructured2d
from utilities import vtkWritePointDataHeader, vtkWritePointDataStructured2D
from utilities import writeLog
from plotTools import addContourf, extractFromCSV
from footprintTools import *
from mapTools import readNumpyZTile, farFieldIds, farFieldMean
import sys
import argparse
import numpy as np
import matplotlib.pyplot as plt
'''
Author: Mikko Auvinen
[email protected]
University of Helsinki &
Finnish Meteorological Institute
'''
# = # = # = # Function definitions # = # = # = # = # = # = #
def centralValue( a , tol, nitv=40):
res = 1.
ia = np.ones( np.shape(a) , bool )
ac_old = 0.
icheck = 0
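# Trim 'nitv' extreme (max/min) pairs per pass and take the midpoint of the
# remaining range; stop once that midpoint changes by less than 'tol' or about
# 0.1% of the samples have been removed.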
while (res > tol and icheck < int(0.001*len(a))):
for i in range(nitv):
imax = np.argmax( a[ia] ); ia[imax] = False
imin = np.argmin( a[ia] ); ia[imin] = False
icheck += 2
ac = 0.5*(np.max(a[ia]) + np.min(a[ia]))
res= abs( ac - ac_old )/max( abs(ac), abs(ac_old) )
ac_old = ac
#print('res={:e}, i={}'.format(res,icheck))
return ac
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def polarPercentileAvg(dat, vleft, vright ):
return 0.5*(np.percentile(dat,vleft) + np.percentile(dat,vright))
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def externalGridIds( ce, cl ):
cbmin = max( np.min(cl) , np.min(ce) )
cbmax = min( np.max(cl) , np.max(ce) )
#if( cbmin > cbmax ): cbmin = cbmax + 1e-5
jg = (ce>=cbmin); jl = (ce<=cbmax)
# Check if 'greater than' and 'smaller than' are complements
jcompl = all(~jl==jg) and not all(jl==True) and not all(jl==False)
# Check resolutions
dce = ce[1]-ce[0] # delta of external grid
dcl = cl[1]-cl[0] # delta of local box
finer_dcl = dcl < dce
if( finer_dcl and jcompl ):
idN = np.arange(len(jl))
id2 = np.array([ idN[jl][-1], idN[jg][0] ])
je = np.zeros( len(jl) , bool )
je[id2] = True
else:
je = jg * jl
return je
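# Illustrative example (hypothetical values): for ce = [0, 10, 20, 30] and a local
# box cl spanning 12..18 with finer spacing, the returned mask selects the two
# external cells that bracket the box (indices 1 and 2).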
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
def meanFromExternal( dat, xe, ye, ze, xl, yl, zl):
# Define the vertical mean flow from the external data.
# <#>e: external, <#>l: local
ke = externalGridIds( ze, zl )
je = externalGridIds( ye, yl )
ie = externalGridIds( xe, xl )
kz, jy, ix = np.meshgrid(ke, je, ie, indexing='ij', sparse=True)
idw = (kz * jy * ix).astype(bool)
id0 = ~(dat == 0) # Indices of nonzero values.
idw *= id0 # Take away the zero values.
#print(' ke = {}'.format(ke) ); print(' je = {}'.format(je) ); print(' ie = {}'.format(ie) )
return np.mean(dat[idw]), ke, je, ie
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - #
# = # = # = # End Function definitions # = # = # = # = # = #
#========================================================== #
parser = argparse.ArgumentParser(prog='footprint2Mesh.py')
parser.add_argument("fileKey", help="Search string for collecting (.npz) files.",\
nargs='?', default="npz")
parser.add_argument("-a", "--allfiles", help="Select all files automatically.",\
action="store_true", default=False)
parser.add_argument("-fo", "--fileout", type=str, default='FP',\
help="Brief prefix for the footprint output file. (npz format)")
parser.add_argument("-ft", "--filetopo", type=str,\
help="File containing the topography data. (npz format)")
parser.add_argument("-fm", "--filemean", type=str,\
help="File containing the mean velocity data. (npz format)", default=None)
parser.add_argument("-N","--NxG", type=int,nargs=2,\
help="Number of points [Nx, Ny] in the 2D Palm grid.")
parser.add_argument("-dx","--dxG", type=float,nargs=2,\
help="Resolution [dx, dy] of the 2D Palm grid.")
#parser.add_argument("-fm", "--filemean", type=str,\
# help="Name of the mean velocity .csv file.", default=None)
parser.add_argument("-b","--hybrid", help="Hybrid approach with far field correction.",\
action="store_true", default=False)
parser.add_argument("--vtk", help="Write VTK-files.",\
action="store_true", default=False)
parser.add_argument("-i", "--ijk", help="Files contain ijk info.",\
action="store_true", default=False)
parser.add_argument("-cw", "--coefwm", type=float, default=1.,\
help="Coefficient for scaling <w> for mean correction.")
help_px ='''Percentage of first x-coords where the footprint is set to zero (fp=0).
If not specified (default: None), the far field correction is not performed.'''
parser.add_argument("-px","--pxzero", type=float, default=None, help=help_px)
parser.add_argument("-p", "--printOn", help="Print the extracted tile.",\
action="store_true", default=False)
parser.add_argument("-pp", "--printOnly", help="Only print the extracted tile. Don't save.",\
action="store_true", default=False)
parser.add_argument("-v", "--verbose", help="Print all information on screen.",\
action="store_true", default=False)
args = parser.parse_args()
writeLog( parser, args )
#========================================================== #
# Rename ... that's all.
fileKey = args.fileKey
fileout = args.fileout
filetopo = args.filetopo
filemean = args.filemean
NxG = args.NxG
dxG = args.dxG
cw_init = args.coefwm
pxz = args.pxzero
allFiles = args.allfiles
hybridOn = args.hybrid
ijkOn = args.ijk
vtkOn = args.vtk
printOn = args.printOn
printOnly = args.printOnly
verbose = args.verbose
# For writing the header once.
writeHeader = True
# Gather raw footprint data files:
fileNos, fileList = filesFromList( fileKey+"*", allFiles )
if( filemean ):
dat = np.load(filemean)
wm = dat['w']; xm = dat['x']; ym = dat['y']; zm = dat['z']
dat = None
else:
sys.exit(' Error. File for the mean values was not provided.')
# = = = = = = = = = = = = = = = = = = = = = = = = = = = = #
# xO := origin coords. # xt := target coords. # ut := target speed
for fn in fileNos:
if( verbose ): print(' Processing file: {}'.format(fileList[fn]))
xO, yO, zO,\
xt, yt, zt,\
ut, vt, wt = readNumpyZFootprintRaw( fileList[fn] )
# = = = Positive/Negative contributions = = = = = = = = = = #
'''
Now we make a dangerous assumption that the mean value of vertical
velocity w_mean can be obtained from the particle data. This is strictly
not true and its effect must be carefully examined.
'''
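# Two candidate estimates for the mean vertical velocity:
#   wtm_1: plain average of the particle data (the assumption discussed above)
#   wtm_2: average of the external mean-flow field sampled over the local box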
wtm_1 = np.mean(wt)
wtm_2, km, jm, im = meanFromExternal( wm, xm, ym, zm, xt, yt, zt )
# Choose the reference vertical velocity.
if( np.sum(km)>0 and np.sum(jm)>0 and np.sum(im)>0 ):
wtm_ref = wtm_2
meanFromExt = True # mean from external
else:
wtm_ref = wtm_1
meanFromExt = False
#wtm_3 = centralValue( wt, 1.e-6, 50 ) # Approximation for mean value.
#wtm_4 = polarPercentileAvg( wt, 5, 95 )
#wtm_5 = farFieldMean( wt, xO, 25. ) # Exclude the first 25%.
'''
- - - - - - - - - - - - -
Location: x={0}, y={1}, z={2}\n
Directly from particle data: w_mean = {3:5.2f}
Central (skimmed) value: w_mean = {4:5.2f}
Polar percentile (5,95): w_mean = {5:5.2f}
Far field mean (25% excluded): w_mean = {6:5.2f}
Mean from external: w_mean = {7:5.2f}
Selected: w_mean = {8:6.3f}
- - - - - - - - - - - - -
'''
# Store the mean values just for printing.
xim = int(np.mean(xt)); yim = int(np.mean(yt)); zim = int(np.mean(zt))
# Boolean to determine whether far field correction is performed.
farFieldCorrOn = (pxz is not None)
if( hybridOn ):
farFieldCorrOn = farFieldCorrOn and (not meanFromExt)
if( farFieldCorrOn ):
print(' Far Field Correction! ')
idx = farFieldIds( xO, pxz ) # Consider only the first pxz percent of the x-range.
cw = cw_init
count = 0; ro = None
dr = -1000.
dc = 0.005
count_max = 200
r_lim = 100
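# Simple 1-D search: nudge the scaling coefficient cw by dc each pass, reverse the
# direction whenever the imbalance r grows, and stop when r < r_lim or count_max
# passes have been taken.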
infoStr = '''
#- # - # - # - # - # - # - # - # - #
Location: x={0}, y={1}, z={2}
Directly from particle data: wtm_1 = {3:5.2f}
Mean from external: wtm_2 = {4:5.2f}
- - - - - - - - - - - - -
'''.format(xim,yim,zim, wtm_1, wtm_2)
if( verbose ): print(infoStr)
while( 1 ):
if( count == 50 ):
dc *= 2.
wt_mean = cw * wtm_ref
ipos = ( (wt-wt_mean) > 0.) # Boolean array for positive values.
ineg = ~ipos # Boolean array for negative values.
# Form a loop that aims to equalize sum(ipos) and sum(ineg) when x < x_lim.
# Function evaluation
r = abs( np.sum(ipos[idx]) - np.sum(ineg[idx]) )
if( ro ):
dr = (r - ro )
if( dr > 0 ):
dc *= -1. # Bad direction, must change.
cw += 1.5*dc # Do not return to cwo
else:
cw += dc
ro = r
count += 1
if( (r < r_lim) or (count > count_max)):
break
# end while
infoItr = '''
Iteration = {0}
w_mean = {1:6.3f}\t vs. w_mean_orig = {2:6.3f}
cw = {3}
r = {4}\t dr = {5}
- - - - - - - - - - - - -
'''.format(count, wt_mean, wtm_ref, cw, r, dr)
if( verbose ): print(infoItr)
elif( meanFromExt ): # no farfield correction
ipos = ( (wt-wtm_ref) > 0.) # Boolean array for positive values.
ineg = ~ipos # Boolean array for negative values.
else:
continue
# Clear memory
xt = None; yt = None; zt = None
ut = None; vt = None; wt = None
# = = = = 2d footprint evaluation. = = = = = = = = = = = = #
# Determine grid coordinates for the footprint domain:
xD, yD = coordsFootprintGrid( NxG, dxG, xO, yO, verbose )
Nt = len(xO); Ntp = len(xO[ipos]); Ntn = len(xO[ineg])
print(' > Nt, Nt(pos), Nt(neg), <xO>+, <xO>-, xt, yt, zt = {}\t{}\t{}\t{}\t{}\t{}\t{}\t{}'\
.format( Nt, Ntp, Ntn, np.mean(xO[ipos]), np.mean(xO[ineg]), xim, yim, zim ))
#print(' Number of w>0 / w<0 hits: {} / {} '.format( Ntp, Ntn))
if( verbose ): print(' Processing positive flux contributions ...')
FMpos, XM, YM, ZMpos = fp2mshIJ( xO[ipos], yO[ipos], zO[ipos], xD, yD, dxG[0], dxG[1] )
#print(' mean( FMpos ) = {}'.format(np.mean(FMpos)))
if( verbose ): print(' Processing negative flux contributions ...')
FMneg, XM, YM, ZMneg = fp2mshIJ( xO[ineg], yO[ineg], zO[ineg], xD, yD, dxG[0], dxG[1] )
#print(' mean( FMneg ) = {}'.format(np.mean(FMneg)))
if( verbose ): print(' ... done!')
# Clear memory
xO = None; yO = None; zO = None
# Gather all the recorded z-coordinates.
ZM = np.maximum( ZMpos , ZMneg )
ZMpos = None; ZMneg = None
if( verbose ): print(' Gathering and Normalizing the footprint array ...')
Cnorm = (Nt*dxG[0]*dxG[1]) # Coefficient for normalization.
FM = (FMpos - FMneg)/Cnorm; FMpos = None; FMneg = None
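# FM is the net footprint: (positive hits - negative hits) per released particle
# and per unit cell area.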
if( verbose ): print(' ... done!')
# = = = = Output procedure = = = = = = = = = = = = = = = #
fileId, varId = idAppendices(fileList[fn], ijkOn )
writeNumpyZFootprint(fileout+fileId, FM, XM, YM, ZM, Cnorm )
if( vtkOn ):
if( writeHeader ):
Rdict = readNumpyZTile( filetopo )
R = Rdict['R']
Rdims = np.array(np.shape(R))
ROrig = Rdict['GlobOrig']
dPx = Rdict['dPx']
Rdict = None
if( all(Rdims != np.shape(XM)) ):
print(' Error! Mismatch Topo_dims={} vs. fp_dims={}'.format(Rdims,np.shape(XM)))
sys.exit(1)
f_vtk = vtkWriteHeaderAndGridStructured2d( XM, YM, R[::-1,:], fileout, 'Footprints')
f_vtk = vtkWritePointDataHeader( f_vtk, FM, len(fileNos) )
writeHeader = False; R=None
f_vtk = vtkWritePointDataStructured2D( f_vtk, FM , XM, 'fp_'+varId )
'''
if( printOn or printOnly ):
CfD = dict()
CfD['title']='F(x,y)'; CfD['label']=fileout; CfD['N']=16
Cfp = addContourf( XM, YM, FM , CfD )
CfD['title']='F_neg(x,y)'; CfD['label']=fileout+'_neg'; CfD['N']=16
Cfm = addContourf( XM, YM, FMneg, CfD )
CfD['title']='Z(x,y)'; CfD['label']=' Topography Height (m) '; CfD['N']=16
Cz = addContourf( XM, YM, ZM, CfD )
plt.show()
'''
FM = ZM = XM = YM = None
# Close the file at the end.
if( vtkOn ): f_vtk.close()
"""Support for Proxmox VE."""
from datetime import timedelta
import logging
from proxmoxer import ProxmoxAPI
from proxmoxer.backends.https import AuthenticationError
from proxmoxer.core import ResourceException
from requests.exceptions import SSLError
import voluptuous as vol
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
PLATFORMS = ["binary_sensor"]
DOMAIN = "proxmoxve"
PROXMOX_CLIENTS = "proxmox_clients"
CONF_REALM = "realm"
CONF_NODE = "node"
CONF_NODES = "nodes"
CONF_VMS = "vms"
CONF_CONTAINERS = "containers"
COORDINATOR = "coordinator"
API_DATA = "api_data"
DEFAULT_PORT = 8006
DEFAULT_REALM = "pam"
DEFAULT_VERIFY_SSL = True
TYPE_VM = 0
TYPE_CONTAINER = 1
UPDATE_INTERVAL = 60
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_REALM, default=DEFAULT_REALM): cv.string,
vol.Optional(
CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL
): cv.boolean,
vol.Required(CONF_NODES): vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required(CONF_NODE): cv.string,
vol.Optional(CONF_VMS, default=[]): [
cv.positive_int
],
vol.Optional(CONF_CONTAINERS, default=[]): [
cv.positive_int
],
}
)
],
),
}
)
],
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass: HomeAssistant, config: dict):
"""Set up the platform."""
hass.data.setdefault(DOMAIN, {})
def build_client() -> ProxmoxAPI:
"""Build the Proxmox client connection."""
hass.data[PROXMOX_CLIENTS] = {}
for entry in config[DOMAIN]:
host = entry[CONF_HOST]
port = entry[CONF_PORT]
user = entry[CONF_USERNAME]
realm = entry[CONF_REALM]
password = entry[CONF_PASSWORD]
verify_ssl = entry[CONF_VERIFY_SSL]
try:
# Construct an API client with the given data for the given host
proxmox_client = ProxmoxClient(
host, port, user, realm, password, verify_ssl
)
proxmox_client.build_client()
except AuthenticationError:
_LOGGER.warning(
"Invalid credentials for proxmox instance %s:%d", host, port
)
continue
except SSLError:
_LOGGER.error(
'Unable to verify proxmox server SSL. Try using "verify_ssl: false"'
)
continue
return proxmox_client
proxmox_client = await hass.async_add_executor_job(build_client)
async def async_update_data() -> dict:
"""Fetch data from API endpoint."""
proxmox = proxmox_client.get_api_client()
def poll_api() -> dict:
data = {}
for host_config in config[DOMAIN]:
host_name = host_config["host"]
data[host_name] = {}
for node_config in host_config["nodes"]:
node_name = node_config["node"]
data[host_name][node_name] = {}
for vm_id in node_config["vms"]:
data[host_name][node_name][vm_id] = {}
vm_status = call_api_container_vm(
proxmox, node_name, vm_id, TYPE_VM
)
if vm_status is None:
_LOGGER.warning("Vm/Container %s unable to be found", vm_id)
data[host_name][node_name][vm_id] = None
continue
data[host_name][node_name][vm_id] = parse_api_container_vm(
vm_status
)
for container_id in node_config["containers"]:
data[host_name][node_name][container_id] = {}
container_status = call_api_container_vm(
proxmox, node_name, container_id, TYPE_CONTAINER
)
if container_status is None:
_LOGGER.error(
"Vm/Container %s unable to be found", container_id
)
data[host_name][node_name][container_id] = None
continue
data[host_name][node_name][
container_id
] = parse_api_container_vm(container_status)
return data
return await hass.async_add_executor_job(poll_api)
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="proxmox_coordinator",
update_method=async_update_data,
update_interval=timedelta(seconds=UPDATE_INTERVAL),
)
hass.data[DOMAIN][COORDINATOR] = coordinator
# Fetch initial data
await coordinator.async_config_entry_first_refresh()
for platform in PLATFORMS:
await hass.async_create_task(
hass.helpers.discovery.async_load_platform(
platform, DOMAIN, {"config": config}, config
)
)
return True
def parse_api_container_vm(status):
"""Get the container or vm api data and return it formatted in a dictionary.
It is implemented in this way to allow for more data to be added for sensors
in the future.
"""
return {"status": status["status"], "name": status["name"]}
def call_api_container_vm(proxmox, node_name, vm_id, machine_type):
"""Make proper api calls."""
status = None
try:
if machine_type == TYPE_VM:
status = proxmox.nodes(node_name).qemu(vm_id).status.current.get()
elif machine_type == TYPE_CONTAINER:
status = proxmox.nodes(node_name).lxc(vm_id).status.current.get()
except ResourceException:
return None
return status
class ProxmoxEntity(CoordinatorEntity):
"""Represents any entity created for the Proxmox VE platform."""
def __init__(
self,
coordinator: DataUpdateCoordinator,
unique_id,
name,
icon,
host_name,
node_name,
vm_id=None,
):
"""Initialize the Proxmox entity."""
super().__init__(coordinator)
self.coordinator = coordinator
self._unique_id = unique_id
self._name = name
self._host_name = host_name
self._icon = icon
self._available = True
self._node_name = node_name
self._vm_id = vm_id
self._state = None
@property
def unique_id(self) -> str:
"""Return the unique ID for this sensor."""
return self._unique_id
@property
def name(self) -> str:
"""Return the name of the entity."""
return self._name
@property
def icon(self) -> str:
"""Return the mdi icon of the entity."""
return self._icon
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self.coordinator.last_update_success and self._available
class ProxmoxClient:
"""A wrapper for the proxmoxer ProxmoxAPI client."""
def __init__(self, host, port, user, realm, password, verify_ssl):
"""Initialize the ProxmoxClient."""
self._host = host
self._port = port
self._user = user
self._realm = realm
self._password = password
self._verify_ssl = verify_ssl
self._proxmox = None
self._connection_start_time = None
def build_client(self):
"""Construct the ProxmoxAPI client. Allows inserting the realm within the `user` value."""
if "@" in self._user:
user_id = self._user
else:
user_id = f"{self._user}@{self._realm}"
self._proxmox = ProxmoxAPI(
self._host,
port=self._port,
user=user_id,
password=self._password,
verify_ssl=self._verify_ssl,
)
def get_api_client(self):
"""Return the ProxmoxAPI client."""
return self._proxmox
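# Illustrative usage (host, credentials and IDs below are placeholders):
#   client = ProxmoxClient("proxmox.example.com", 8006, "homeassistant", "pam", "secret", True)
#   client.build_client()
#   api = client.get_api_client()
#   vm_status = call_api_container_vm(api, "pve", 100, TYPE_VM)
#   print(parse_api_container_vm(vm_status))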
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.cloud.appengine_admin_v1.services.firewall import pagers
from google.cloud.appengine_admin_v1.types import appengine
from google.cloud.appengine_admin_v1.types import firewall
from .transports.base import FirewallTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import FirewallGrpcAsyncIOTransport
from .client import FirewallClient
class FirewallAsyncClient:
"""Firewall resources are used to define a collection of access
control rules for an Application. Each rule is defined with a
position which specifies the rule's order in the sequence of
rules, an IP range to be matched against requests, and an action
to take upon matching requests.
Every request is evaluated against the Firewall rules in
priority order. Processing stops at the first rule which
matches the request's IP address. A final rule always specifies
an action that applies to all remaining IP addresses. The
default final rule for a newly-created application will be set
to "allow" if not otherwise specified by the user.
"""
_client: FirewallClient
DEFAULT_ENDPOINT = FirewallClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = FirewallClient.DEFAULT_MTLS_ENDPOINT
common_billing_account_path = staticmethod(
FirewallClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
FirewallClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(FirewallClient.common_folder_path)
parse_common_folder_path = staticmethod(FirewallClient.parse_common_folder_path)
common_organization_path = staticmethod(FirewallClient.common_organization_path)
parse_common_organization_path = staticmethod(
FirewallClient.parse_common_organization_path
)
common_project_path = staticmethod(FirewallClient.common_project_path)
parse_common_project_path = staticmethod(FirewallClient.parse_common_project_path)
common_location_path = staticmethod(FirewallClient.common_location_path)
parse_common_location_path = staticmethod(FirewallClient.parse_common_location_path)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
FirewallAsyncClient: The constructed client.
"""
return FirewallClient.from_service_account_info.__func__(FirewallAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
FirewallAsyncClient: The constructed client.
"""
return FirewallClient.from_service_account_file.__func__(FirewallAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
(1) if `client_options.api_endpoint` is provided, use the provided one.
(2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
default mTLS endpoint; if the environment variable is "never", use the default API
endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
return FirewallClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore
@property
def transport(self) -> FirewallTransport:
"""Returns the transport used by the client instance.
Returns:
FirewallTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(FirewallClient).get_transport_class, type(FirewallClient)
)
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, FirewallTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the firewall client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.FirewallTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = FirewallClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def list_ingress_rules(
self,
request: Union[appengine.ListIngressRulesRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListIngressRulesAsyncPager:
r"""Lists the firewall rules of an application.
.. code-block:: python
from google.cloud import appengine_admin_v1
def sample_list_ingress_rules():
# Create a client
client = appengine_admin_v1.FirewallClient()
# Initialize request argument(s)
request = appengine_admin_v1.ListIngressRulesRequest(
)
# Make the request
page_result = client.list_ingress_rules(request=request)
# Handle the response
for response in page_result:
print(response)
Args:
request (Union[google.cloud.appengine_admin_v1.types.ListIngressRulesRequest, dict]):
The request object. Request message for
`Firewall.ListIngressRules`.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.appengine_admin_v1.services.firewall.pagers.ListIngressRulesAsyncPager:
Response message for Firewall.ListIngressRules.
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
request = appengine.ListIngressRulesRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_ingress_rules,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__aiter__` convenience method.
response = pagers.ListIngressRulesAsyncPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response
async def batch_update_ingress_rules(
self,
request: Union[appengine.BatchUpdateIngressRulesRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> appengine.BatchUpdateIngressRulesResponse:
r"""Replaces the entire firewall ruleset in one bulk operation. This
overrides and replaces the rules of an existing firewall with
the new rules.
If the final rule does not match traffic with the '*' wildcard
IP range, then an "allow all" rule is explicitly added to the
end of the list.
.. code-block:: python
from google.cloud import appengine_admin_v1
def sample_batch_update_ingress_rules():
# Create a client
client = appengine_admin_v1.FirewallClient()
# Initialize request argument(s)
request = appengine_admin_v1.BatchUpdateIngressRulesRequest(
)
# Make the request
response = client.batch_update_ingress_rules(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.appengine_admin_v1.types.BatchUpdateIngressRulesRequest, dict]):
The request object. Request message for
`Firewall.BatchUpdateIngressRules`.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.appengine_admin_v1.types.BatchUpdateIngressRulesResponse:
Response message for Firewall.UpdateAllIngressRules.
"""
# Create or coerce a protobuf request object.
request = appengine.BatchUpdateIngressRulesRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.batch_update_ingress_rules,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def create_ingress_rule(
self,
request: Union[appengine.CreateIngressRuleRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> firewall.FirewallRule:
r"""Creates a firewall rule for the application.
.. code-block:: python
from google.cloud import appengine_admin_v1
def sample_create_ingress_rule():
# Create a client
client = appengine_admin_v1.FirewallClient()
# Initialize request argument(s)
request = appengine_admin_v1.CreateIngressRuleRequest(
)
# Make the request
response = client.create_ingress_rule(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.appengine_admin_v1.types.CreateIngressRuleRequest, dict]):
The request object. Request message for
`Firewall.CreateIngressRule`.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.appengine_admin_v1.types.FirewallRule:
A single firewall rule that is
evaluated against incoming traffic and
provides an action to take on matched
requests.
"""
# Create or coerce a protobuf request object.
request = appengine.CreateIngressRuleRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_ingress_rule,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def get_ingress_rule(
self,
request: Union[appengine.GetIngressRuleRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> firewall.FirewallRule:
r"""Gets the specified firewall rule.
.. code-block:: python
from google.cloud import appengine_admin_v1
def sample_get_ingress_rule():
# Create a client
client = appengine_admin_v1.FirewallClient()
# Initialize request argument(s)
request = appengine_admin_v1.GetIngressRuleRequest(
)
# Make the request
response = client.get_ingress_rule(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.appengine_admin_v1.types.GetIngressRuleRequest, dict]):
The request object. Request message for
`Firewall.GetIngressRule`.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.appengine_admin_v1.types.FirewallRule:
A single firewall rule that is
evaluated against incoming traffic and
provides an action to take on matched
requests.
"""
# Create or coerce a protobuf request object.
request = appengine.GetIngressRuleRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_ingress_rule,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def update_ingress_rule(
self,
request: Union[appengine.UpdateIngressRuleRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> firewall.FirewallRule:
r"""Updates the specified firewall rule.
.. code-block:: python
from google.cloud import appengine_admin_v1
def sample_update_ingress_rule():
# Create a client
client = appengine_admin_v1.FirewallClient()
# Initialize request argument(s)
request = appengine_admin_v1.UpdateIngressRuleRequest(
)
# Make the request
response = client.update_ingress_rule(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.appengine_admin_v1.types.UpdateIngressRuleRequest, dict]):
The request object. Request message for
`Firewall.UpdateIngressRule`.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.appengine_admin_v1.types.FirewallRule:
A single firewall rule that is
evaluated against incoming traffic and
provides an action to take on matched
requests.
"""
# Create or coerce a protobuf request object.
request = appengine.UpdateIngressRuleRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_ingress_rule,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def delete_ingress_rule(
self,
request: Union[appengine.DeleteIngressRuleRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Deletes the specified firewall rule.
.. code-block:: python
from google.cloud import appengine_admin_v1
def sample_delete_ingress_rule():
# Create a client
client = appengine_admin_v1.FirewallClient()
# Initialize request argument(s)
request = appengine_admin_v1.DeleteIngressRuleRequest(
)
# Make the request
client.delete_ingress_rule(request=request)
Args:
request (Union[google.cloud.appengine_admin_v1.types.DeleteIngressRuleRequest, dict]):
The request object. Request message for
`Firewall.DeleteIngressRule`.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
request = appengine.DeleteIngressRuleRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_ingress_rule,
default_timeout=None,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
await rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
async def __aenter__(self):
return self
async def __aexit__(self, exc_type, exc, tb):
await self.transport.close()
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-appengine-admin",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("FirewallAsyncClient",)
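# Illustrative async usage (the parent path is a placeholder; assumes the
# appengine_admin_v1 package exports FirewallAsyncClient and the request types):
#   import asyncio
#   from google.cloud import appengine_admin_v1
#
#   async def main():
#       async with appengine_admin_v1.FirewallAsyncClient() as client:
#           request = appengine_admin_v1.ListIngressRulesRequest(parent="apps/my-app")
#           async for rule in await client.list_ingress_rules(request=request):
#               print(rule)
#
#   asyncio.run(main())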
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Helper code for the iSCSI volume driver.
"""
import contextlib
import os
import re
from oslo.config import cfg
from cinder import exception
from cinder.openstack.common import fileutils
from cinder.openstack.common import log as logging
from cinder.openstack.common import processutils as putils
from cinder.volume import utils as volume_utils
LOG = logging.getLogger(__name__)
iscsi_helper_opt = [cfg.StrOpt('iscsi_helper',
default='tgtadm',
help='iscsi target user-land tool to use'),
cfg.StrOpt('volumes_dir',
default='$state_path/volumes',
help='Volume configuration file storage '
'directory'),
cfg.StrOpt('iet_conf',
default='/etc/iet/ietd.conf',
help='IET configuration file'),
cfg.StrOpt('lio_initiator_iqns',
default='',
help=('Comma-separated list of initiator IQNs '
'allowed to connect to the '
'iSCSI target. (From Nova compute nodes.)'
)
),
cfg.StrOpt('iscsi_iotype',
default='fileio',
help=('Sets the behavior of the iSCSI target '
'to either perform blockio or fileio '
'optionally, auto can be set and Cinder '
'will autodetect type of backing device')
)
]
CONF = cfg.CONF
CONF.register_opts(iscsi_helper_opt)
CONF.import_opt('volume_name_template', 'cinder.db')
class TargetAdmin(object):
"""iSCSI target administration.
Base class for iSCSI target admin helpers.
"""
def __init__(self, cmd, execute):
self._cmd = cmd
self.set_execute(execute)
def set_execute(self, execute):
"""Set the function to be used to execute commands."""
self._execute = execute
def _run(self, *args, **kwargs):
self._execute(self._cmd, *args, run_as_root=True, **kwargs)
def create_iscsi_target(self, name, tid, lun, path,
chap_auth=None, **kwargs):
"""Create a iSCSI target and logical unit"""
raise NotImplementedError()
def remove_iscsi_target(self, tid, lun, vol_id, **kwargs):
"""Remove a iSCSI target and logical unit"""
raise NotImplementedError()
def _new_target(self, name, tid, **kwargs):
"""Create a new iSCSI target."""
raise NotImplementedError()
def _delete_target(self, tid, **kwargs):
"""Delete a target."""
raise NotImplementedError()
def show_target(self, tid, iqn=None, **kwargs):
"""Query the given target ID."""
raise NotImplementedError()
def _new_logicalunit(self, tid, lun, path, **kwargs):
"""Create a new LUN on a target using the supplied path."""
raise NotImplementedError()
def _delete_logicalunit(self, tid, lun, **kwargs):
"""Delete a logical unit from a target."""
raise NotImplementedError()
class TgtAdm(TargetAdmin):
"""iSCSI target administration using tgtadm."""
def __init__(self, execute=putils.execute):
super(TgtAdm, self).__init__('tgtadm', execute)
def _get_target(self, iqn):
(out, err) = self._execute('tgt-admin', '--show', run_as_root=True)
lines = out.split('\n')
for line in lines:
if iqn in line:
parsed = line.split()
tid = parsed[1]
return tid[:-1]
return None
def create_iscsi_target(self, name, tid, lun, path,
chap_auth=None, **kwargs):
# Note(jdg) tid and lun aren't used by TgtAdm but remain for
# compatibility
fileutils.ensure_tree(CONF.volumes_dir)
vol_id = name.split(':')[1]
if chap_auth is None:
volume_conf = """
<target %s>
backing-store %s
</target>
""" % (name, path)
else:
volume_conf = """
<target %s>
backing-store %s
%s
</target>
""" % (name, path, chap_auth)
LOG.info(_('Creating iscsi_target for: %s') % vol_id)
volumes_dir = CONF.volumes_dir
volume_path = os.path.join(volumes_dir, vol_id)
f = open(volume_path, 'w+')
f.write(volume_conf)
f.close()
old_persist_file = None
old_name = kwargs.get('old_name', None)
if old_name is not None:
old_persist_file = os.path.join(volumes_dir, old_name)
try:
(out, err) = self._execute('tgt-admin',
'--update',
name,
run_as_root=True)
except exception.ProcessExecutionError as e:
LOG.error(_("Failed to create iscsi target for volume "
"id:%(vol_id)s: %(e)s")
% {'vol_id': vol_id, 'e': str(e)})
#Don't forget to remove the persistent file we created
os.unlink(volume_path)
raise exception.ISCSITargetCreateFailed(volume_id=vol_id)
iqn = '%s%s' % (CONF.iscsi_target_prefix, vol_id)
tid = self._get_target(iqn)
if tid is None:
LOG.error(_("Failed to create iscsi target for volume "
"id:%(vol_id)s. Please ensure your tgtd config file "
"contains 'include %(volumes_dir)s/*'") % {
'vol_id': vol_id,
'volumes_dir': volumes_dir,
})
raise exception.NotFound()
if old_persist_file is not None and os.path.exists(old_persist_file):
os.unlink(old_persist_file)
return tid
def remove_iscsi_target(self, tid, lun, vol_id, **kwargs):
LOG.info(_('Removing iscsi_target for: %s') % vol_id)
vol_uuid_file = CONF.volume_name_template % vol_id
volume_path = os.path.join(CONF.volumes_dir, vol_uuid_file)
if os.path.isfile(volume_path):
iqn = '%s%s' % (CONF.iscsi_target_prefix,
vol_uuid_file)
else:
raise exception.ISCSITargetRemoveFailed(volume_id=vol_id)
try:
# NOTE(vish): --force is a workaround for bug:
# https://bugs.launchpad.net/cinder/+bug/1159948
self._execute('tgt-admin',
'--force',
'--delete',
iqn,
run_as_root=True)
except exception.ProcessExecutionError as e:
LOG.error(_("Failed to remove iscsi target for volume "
"id:%(vol_id)s: %(e)s")
% {'vol_id': vol_id, 'e': str(e)})
raise exception.ISCSITargetRemoveFailed(volume_id=vol_id)
os.unlink(volume_path)
def show_target(self, tid, iqn=None, **kwargs):
if iqn is None:
raise exception.InvalidParameterValue(
err=_('valid iqn needed for show_target'))
tid = self._get_target(iqn)
if tid is None:
raise exception.NotFound()
class IetAdm(TargetAdmin):
"""iSCSI target administration using ietadm."""
def __init__(self, execute=putils.execute):
super(IetAdm, self).__init__('ietadm', execute)
def _iotype(self, path):
if CONF.iscsi_iotype == 'auto':
return 'blockio' if volume_utils.is_block(path) else 'fileio'
else:
return CONF.iscsi_iotype
@contextlib.contextmanager
def temporary_chown(self, path, owner_uid=None):
"""Temporarily chown a path.
:params path: The path to chown
:params owner_uid: UID of temporary owner (defaults to current user)
"""
if owner_uid is None:
owner_uid = os.getuid()
orig_uid = os.stat(path).st_uid
if orig_uid != owner_uid:
putils.execute('chown', owner_uid, path, run_as_root=True)
try:
yield
finally:
if orig_uid != owner_uid:
putils.execute('chown', orig_uid, path, run_as_root=True)
def create_iscsi_target(self, name, tid, lun, path,
chap_auth=None, **kwargs):
# NOTE (jdg): Address bug: 1175207
kwargs.pop('old_name', None)
self._new_target(name, tid, **kwargs)
self._new_logicalunit(tid, lun, path, **kwargs)
if chap_auth is not None:
(type, username, password) = chap_auth.split()
self._new_auth(tid, type, username, password, **kwargs)
conf_file = CONF.iet_conf
if os.path.exists(conf_file):
try:
volume_conf = """
Target %s
%s
Lun 0 Path=%s,Type=%s
""" % (name, chap_auth, path, self._iotype(path))
with self.temporary_chown(conf_file):
f = open(conf_file, 'a+')
f.write(volume_conf)
f.close()
except exception.ProcessExecutionError as e:
vol_id = name.split(':')[1]
LOG.error(_("Failed to create iscsi target for volume "
"id:%(vol_id)s: %(e)s")
% {'vol_id': vol_id, 'e': str(e)})
raise exception.ISCSITargetCreateFailed(volume_id=vol_id)
return tid
def remove_iscsi_target(self, tid, lun, vol_id, **kwargs):
LOG.info(_('Removing iscsi_target for volume: %s') % vol_id)
self._delete_logicalunit(tid, lun, **kwargs)
self._delete_target(tid, **kwargs)
vol_uuid_file = CONF.volume_name_template % vol_id
conf_file = CONF.iet_conf
if os.path.exists(conf_file):
with self.temporary_chown(conf_file):
try:
iet_conf_text = open(conf_file, 'r+')
full_txt = iet_conf_text.readlines()
new_iet_conf_txt = []
count = 0
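# Drop this volume's Target block from the config: when the matching line is
# found, skip it plus the following two lines.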
for line in full_txt:
if count > 0:
count -= 1
continue
elif re.search(vol_uuid_file, line):
count = 2
continue
else:
new_iet_conf_txt.append(line)
iet_conf_text.seek(0)
iet_conf_text.truncate(0)
iet_conf_text.writelines(new_iet_conf_txt)
finally:
iet_conf_text.close()
def _new_target(self, name, tid, **kwargs):
self._run('--op', 'new',
'--tid=%s' % tid,
'--params', 'Name=%s' % name,
**kwargs)
def _delete_target(self, tid, **kwargs):
self._run('--op', 'delete',
'--tid=%s' % tid,
**kwargs)
def show_target(self, tid, iqn=None, **kwargs):
self._run('--op', 'show',
'--tid=%s' % tid,
**kwargs)
def _new_logicalunit(self, tid, lun, path, **kwargs):
self._run('--op', 'new',
'--tid=%s' % tid,
'--lun=%d' % lun,
'--params', 'Path=%s,Type=%s' % (path, self._iotype(path)),
**kwargs)
def _delete_logicalunit(self, tid, lun, **kwargs):
self._run('--op', 'delete',
'--tid=%s' % tid,
'--lun=%d' % lun,
**kwargs)
def _new_auth(self, tid, type, username, password, **kwargs):
self._run('--op', 'new',
'--tid=%s' % tid,
'--user',
'--params=%s=%s,Password=%s' % (type, username, password),
**kwargs)
class FakeIscsiHelper(object):
def __init__(self):
self.tid = 1
def set_execute(self, execute):
self._execute = execute
def create_iscsi_target(self, *args, **kwargs):
self.tid += 1
return self.tid
class LioAdm(TargetAdmin):
"""iSCSI target administration for LIO using python-rtslib."""
def __init__(self, execute=putils.execute):
super(LioAdm, self).__init__('rtstool', execute)
try:
self._execute('rtstool', 'verify')
except (OSError, exception.ProcessExecutionError):
LOG.error(_('rtstool is not installed correctly'))
raise
def _get_target(self, iqn):
(out, err) = self._execute('rtstool',
'get-targets',
run_as_root=True)
lines = out.split('\n')
for line in lines:
if iqn in line:
return line
return None
def create_iscsi_target(self, name, tid, lun, path,
chap_auth=None, **kwargs):
# tid and lun are not used
vol_id = name.split(':')[1]
LOG.info(_('Creating iscsi_target for volume: %s') % vol_id)
# rtstool requires chap_auth, but unit tests don't provide it
chap_auth_userid = 'test_id'
chap_auth_password = 'test_pass'
if chap_auth is not None:
(chap_auth_userid, chap_auth_password) = chap_auth.split(' ')[1:]
extra_args = []
if CONF.lio_initiator_iqns:
extra_args.append(CONF.lio_initiator_iqns)
try:
command_args = ['rtstool',
'create',
path,
name,
chap_auth_userid,
chap_auth_password]
if extra_args != []:
command_args += extra_args
self._execute(*command_args, run_as_root=True)
except exception.ProcessExecutionError as e:
LOG.error(_("Failed to create iscsi target for volume "
"id:%s.") % vol_id)
LOG.error("%s" % str(e))
raise exception.ISCSITargetCreateFailed(volume_id=vol_id)
iqn = '%s%s' % (CONF.iscsi_target_prefix, vol_id)
tid = self._get_target(iqn)
if tid is None:
LOG.error(_("Failed to create iscsi target for volume "
"id:%s.") % vol_id)
raise exception.NotFound()
return tid
def remove_iscsi_target(self, tid, lun, vol_id, **kwargs):
LOG.info(_('Removing iscsi_target: %s') % vol_id)
vol_uuid_name = 'volume-%s' % vol_id
iqn = '%s%s' % (CONF.iscsi_target_prefix, vol_uuid_name)
try:
self._execute('rtstool',
'delete',
iqn,
run_as_root=True)
except exception.ProcessExecutionError as e:
LOG.error(_("Failed to remove iscsi target for volume "
"id:%s.") % vol_id)
LOG.error("%s" % str(e))
raise exception.ISCSITargetRemoveFailed(volume_id=vol_id)
def show_target(self, tid, iqn=None, **kwargs):
if iqn is None:
raise exception.InvalidParameterValue(
err=_('valid iqn needed for show_target'))
tid = self._get_target(iqn)
if tid is None:
raise exception.NotFound()
def initialize_connection(self, volume, connector):
volume_iqn = volume['provider_location'].split(' ')[1]
(auth_method, auth_user, auth_pass) = \
volume['provider_auth'].split(' ', 3)
# Add initiator iqns to target ACL
try:
self._execute('rtstool', 'add-initiator',
volume_iqn,
auth_user,
auth_pass,
connector['initiator'],
run_as_root=True)
except exception.ProcessExecutionError as e:
LOG.error(_("Failed to add initiator iqn %s to target") %
connector['initiator'])
raise exception.ISCSITargetAttachFailed(volume_id=volume['id'])
def get_target_admin():
if CONF.iscsi_helper == 'tgtadm':
return TgtAdm()
elif CONF.iscsi_helper == 'fake':
return FakeIscsiHelper()
elif CONF.iscsi_helper == 'lioadm':
return LioAdm()
else:
return IetAdm()
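# The helper is chosen through the 'iscsi_helper' option: 'tgtadm' -> TgtAdm,
# 'fake' -> FakeIscsiHelper, 'lioadm' -> LioAdm, anything else -> IetAdm.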
# -*- coding: utf-8 -*-
"""
libfb2.sb
~~~~~~~~~
Reads frostbite2 sb and toc files. Thanks to gibbed for the original
analysis of the XOR trick for the obfuscation.
:copyright: (c) Copyright 2011 by Armin Ronacher, Richard Lacharite, Pilate.
:license: BSD, see LICENSE for more details.
"""
import os
import shutil
from StringIO import StringIO
from uuid import UUID
from itertools import izip, chain
from .utils import TypeReader, DecryptingTypeReader, \
open_fp_or_filename
from .types import Blob, SHA1, Unknown
CAS_CAT_HEADER = 'Nyan' * 4
CAS_HEADER = '\xfa\xce\x0f\xf0'
def generate_one(item):
yield item
class SBException(Exception):
pass
class CASException(Exception):
pass
class CommonFileAccessMethodsMixin(object):
"""Assumes that give accsess to a file returned by the :meth:`open`
method of the class.
"""
_parsed_contents = None
def get_raw_contents(self):
with self.open() as f:
return f.read()
def iter_parse_contents(self, selector):
with self.open() as f:
for obj in iterload(f, selector):
yield obj
def get_parsed_contents(self, cache=True):
if self._parsed_contents is not None:
return self._parsed_contents
with self.open() as f:
rv = load(f)
if cache:
self._parsed_contents = rv
return rv
class BundleFile(CommonFileAccessMethodsMixin):
def __init__(self, bundle, id, offset, size):
self.bundle = bundle
self.id = id
self.offset = offset
self.size = size
def iter_chunk_files(self):
if self.bundle.cat is None:
raise RuntimeError('Catalog not loaded')
meta = self.get_parsed_contents()
for chunk in meta['chunks']:
yield chunk['id'], self.bundle.cat.get_file(chunk['sha1'].hex)
def open(self):
f = open(self.bundle.basename + '.sb', 'rb')
f.seek(self.offset)
return TypeReader(f, self.size)
def __repr__(self):
return '<BundleFile %r>' % self.id
class SBParser(object):
"""Parses SB/Superbundle files. Each value read is put on on a stack
temporarily until something else consumes it. Even things such as
dictionary keys end up on there temporarily to aid debugging.
Instead of using this use :meth:`load`, :meth:`loads`, :meth:`iterload`
and :meth:`iterloads`.
"""
def __init__(self, reader):
self.reader = reader
def parse(self):
"""Parse a single object from the reader."""
gen = self.read_object()
rv = self.make_object(gen)
try:
gen.next()
except StopIteration:
return rv
raise RuntimeError('Garbage left in stream')
def iterparse(self, selector=None):
"""Parses objects that are below one of the selector."""
if not callable(selector):
selector = self.make_selector_function(selector)
iterator = self.read_object()
stack = []
for event in iterator:
event_type, event_value = event
if event_type in ('list_start', 'dict_start'):
if selector(stack):
yield self.make_object(chain([event], iterator))
else:
stack.append(None)
elif event_type in ('list_item', 'dict_key'):
stack[-1] = event_value
elif event_type in ('dict_end', 'list_end'):
stack.pop()
elif selector(stack):
yield self.make_object(chain([event], iterator))
def make_selector_function(self, selector):
if isinstance(selector, basestring):
selector = [x.strip() for x in selector.split(',')]
selectors = [self.parse_selector(x) for x in selector]
def selector_func(stack):
for selector in selectors:
if self.selector_matches(selector, stack):
return True
return False
return selector_func
def selector_matches(self, selector, stack):
if len(stack) != len(selector):
return False
for stack_part, selector_part in izip(stack, selector):
if selector_part is not None and \
selector_part != stack_part:
return False
return True
def parse_selector(self, selector):
test_selector = []
for part in selector.split('.'):
if part == '*':
test_selector.append(None)
elif part.isdigit():
test_selector.append(int(part))
else:
test_selector.append(part)
return test_selector
def make_object(self, iterator):
event_type, event_value = iterator.next()
if event_type == 'value':
return event_value
elif event_type == 'list_start':
rv = []
for event in iterator:
if event[0] == 'list_end':
break
assert event[0] == 'list_item', 'expected list item'
rv.append(self.make_object(iterator))
return rv
elif event_type == 'dict_start':
rv = {}
for event in iterator:
if event[0] == 'dict_end':
break
assert event[0] == 'dict_key', 'expected dict key'
key = event[1]
value = self.make_object(iterator)
rv[key] = value
return rv
elif event_type == 'blob_start':
rv = []
for event in iterator:
if event[0] == 'blob_end':
break
assert event[0] == 'blob_chunk', 'expected blob chunk'
rv.append(event[1])
return Blob(''.join(rv))
else:
raise RuntimeError('Unexpected event %r' % event_type)
def read_object(self, typecode=None):
if typecode is None:
typecode = self.reader.read_byte()
raw_typecode = typecode
flags = typecode >> 5
typecode = typecode & 0x1f
if typecode == 0:
yield 'value', None
elif typecode == 1:
for event in self.read_list():
yield event
elif typecode == 2:
for event in self.read_dict():
yield event
elif typecode == 5:
yield 'value', Unknown(5, self.reader.read(8))
elif typecode == 6:
yield 'value', bool(self.reader.read_byte())
elif typecode == 7:
yield 'value', self.reader.read_bstring()
elif typecode == 8:
yield 'value', self.reader.read_sst('l')
elif typecode == 9:
yield 'value', self.reader.read_sst('q')
elif typecode == 15:
yield 'value', UUID(bytes=self.reader.read(16))
elif typecode == 16:
yield 'value', SHA1(self.reader.read(20))
elif typecode == 19:
for event in self.read_blob():
yield event
else:
raise SBException('Unknown type marker %x (type=%d)' %
(raw_typecode, typecode))
def read_list(self):
size_info = self.reader.read_varint()
# We don't need the size_info since the collection is delimited
yield 'list_start', None
idx = 0
while 1:
typecode = self.reader.read_byte()
if typecode == 0:
break
yield 'list_item', idx
for event in self.read_object(typecode):
yield event
idx += 1
yield 'list_end', None
def read_dict(self):
size_info = self.reader.read_varint()
# We don't need the size_info since the collection is delimited
yield 'dict_start', None
while 1:
typecode = self.reader.read_byte()
if typecode == 0:
break
yield 'dict_key', self.reader.read_cstring()
for event in self.read_object(typecode=typecode):
yield event
yield 'dict_end', None
def read_blob(self):
to_read = self.reader.read_varint()
yield 'blob_start', to_read
while to_read > 0:
read_now = min(to_read, 4096)
yield 'blob_chunk', self.reader.read(read_now)
to_read -= read_now
yield 'blob_end', None
class Bundle(object):
"""Gives access to a SB and SB bundle. Pass it the basename
(for instance UI, Weapons etc.) and it will add .toc for the SB
and .sb for the actual contents.
    :attr:`bundle_files` gives access to all files by id in a sanish way.
The contents of those files are not yet parsed.
"""
def __init__(self, basename, cat=None):
self.basename = basename
self.cat = cat
self.bundle_files = {}
self.root = load(basename + '.toc')
for bundle in self.root['bundles']:
if 'size' in bundle and 'offset' in bundle:
self.bundle_files[bundle['id']] = BundleFile(self, **bundle)
def list_files(self):
"""Lists all files in the bundle."""
return self.bundle_files.values()
def iter_files(self):
"""Iterates oveo all files in the bundle."""
return self.bundle_files.itervalues()
def get_file(self, id):
"""Opens a file by id."""
return self.bundle_files.get(id)
def iter_cas_file(fp_or_filename):
    """Iterates over all files in a CAS."""
    with open_fp_or_filename(fp_or_filename) as f:
        reader = TypeReader(f)
        while 1:
            header = reader.read(4)
            if not header:
                break
            if header != CAS_HEADER:
                raise ValueError('Expected cas header, got %r' % header)
            sha1 = SHA1(reader.read(20))
            data_length = reader.read_sst('i')
            padding = reader.read(4)
            rv = CASFile(sha1, f.tell(), data_length, fp=f)
            f.seek(data_length, 1)
            yield rv
class CASFile(CommonFileAccessMethodsMixin):
"""A single file from a CAS."""
def __init__(self, sha1, offset, size, cas_num=-1,
cat=None, fp=None):
self.sha1 = sha1
self.fp = fp
self.offset = offset
self.size = size
self.cas_num = cas_num
self.cat = cat
def open(self):
if self.fp is not None:
f = os.fdopen(os.dup(self.fp.fileno()))
else:
f = self.cat.open_cas(self.cas_num)
f.seek(self.offset)
return TypeReader(f, self.size)
def __repr__(self):
return '<CASFile %r>' % self.sha1.hex
class CASCatalog(object):
"""Reads CAT files."""
def __init__(self, filename):
self.filename = os.path.abspath(filename)
self.files = {}
with open(filename, 'rb') as f:
reader = DecryptingTypeReader(f)
header = reader.read(len(CAS_CAT_HEADER))
if header != CAS_CAT_HEADER:
raise ValueError('Not a cas cat file')
while not reader.eof:
sha1 = SHA1(reader.read(20))
offset = reader.read_sst('i')
size = reader.read_sst('i')
cas_num = reader.read_sst('i')
self.files[sha1.hex] = CASFile(sha1, offset, size, cas_num,
cat=self)
def get_file(self, sha1):
"""Returns a file by its sha1 checksum."""
if hasattr(sha1, 'hex'):
sha1 = sha1.hex
return self.files.get(sha1)
def open_cas(self, num):
"""Opens a CAS by number. This is usually not needed to use directly
since :meth:`get_file` opens the CAS as necessary.
"""
directory, base = os.path.split(self.filename)
filename = '%s_%02d.cas' % (os.path.splitext(base)[0], num)
full_filename = os.path.join(directory, filename)
if os.path.isfile(full_filename):
return open(full_filename, 'rb')
def open_superbundle(self, name):
"""Opens a superbundle that is relative to the CAS catalog. This bundle
has to have a .toc and a .sb file.
"""
directory = os.path.dirname(self.filename)
basename = os.path.join(directory, name)
if os.path.isfile(basename + '.toc'):
return Bundle(basename, cat=self)
def decrypt(filename, new_filename=None):
"""Decrypts a file for debugging."""
if new_filename is None:
new_filename = filename + '.decrypt'
with open(new_filename, 'wb') as f:
with DecryptingTypeReader(filename) as reader:
shutil.copyfileobj(reader, f)
def loads(string):
"""Loads an SB object from a string."""
return load(StringIO(string))
def load(fp_or_filename):
"""Loads an SB object from a file."""
with open_fp_or_filename(fp_or_filename) as f:
reader = DecryptingTypeReader(f)
return SBParser(reader).parse()
def iterloads(string, selector):
"""Loads SB objects iteratively from from a string that match a selector."""
return iterload(StringIO(string), selector)
def iterload(fp_or_filename, selector):
"""Loads SB objects iteratively from from a file that match a selector."""
with open_fp_or_filename(fp_or_filename) as f:
reader = DecryptingTypeReader(f)
for obj in SBParser(reader).iterparse(selector):
yield obj
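
# --- Minimal usage sketch (not part of the original module). The file name and
# --- the selector below are hypothetical; any superbundle .toc file works.
if __name__ == '__main__':
    # Parse a whole superbundle eagerly ...
    root = load('UI.toc')
    # ... or stream only the entries matching a selector. A selector is a
    # comma-separated list of dotted paths in which '*' matches any key or
    # index, so 'bundles.*' yields each item of the top-level 'bundles' list.
    for bundle_info in iterload('UI.toc', 'bundles.*'):
        print(bundle_info.get('id'))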
|
|
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test behavior of -maxuploadtarget.
* Verify that getdata requests for old blocks (>1week) are dropped
if uploadtarget has been reached.
* Verify that getdata requests for recent blocks are respected even
if uploadtarget has been reached.
* Verify that the upload counters are reset after 24 hours.
"""
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time
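# Rough accounting used by this test (a sketch; the numbers come from run_test):
#   daily upload target = 800 * 1024 * 1024 bytes  (-maxuploadtarget=800)
#   reserved buffer     = 144 blocks * 4,000,000 bytes, kept free for recent blocks
#   old-block budget    = target - buffer, so roughly that many bytes' worth of
#                         getdata requests for the big old block should succeed
#                         before the peer is disconnected.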
# TestNode: bare-bones "peer". Used mostly as a conduit for a test to send
# p2p messages to a node, generating the messages in the main testing logic.
class TestNode(NodeConnCB):
def __init__(self):
super().__init__()
self.connection = None
self.ping_counter = 1
self.last_pong = msg_pong()
self.block_receive_map = {}
def add_connection(self, conn):
self.connection = conn
self.peer_disconnected = False
def on_inv(self, conn, message):
pass
# Track the last getdata message we receive (used in the test)
def on_getdata(self, conn, message):
self.last_getdata = message
def on_block(self, conn, message):
message.block.calc_sha256()
try:
self.block_receive_map[message.block.sha256] += 1
except KeyError as e:
self.block_receive_map[message.block.sha256] = 1
# Spin until verack message is received from the node.
# We use this to signal that our test can begin. This
# is called from the testing thread, so it needs to acquire
# the global lock.
def wait_for_verack(self):
def veracked():
return self.verack_received
return wait_until(veracked, timeout=10)
def wait_for_disconnect(self):
def disconnected():
return self.peer_disconnected
return wait_until(disconnected, timeout=10)
# Wrapper for the NodeConn's send_message function
def send_message(self, message):
self.connection.send_message(message)
def on_pong(self, conn, message):
self.last_pong = message
def on_close(self, conn):
self.peer_disconnected = True
class MaxUploadTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 1
# Cache for utxos, as the listunspent may take a long time later in the test
self.utxo_cache = []
def setup_network(self):
        # Start a node with maxuploadtarget of 800 MB (/24h)
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, ["-maxuploadtarget=800", "-blockmaxsize=999000"]))
def run_test(self):
# Before we connect anything, we first set the time on the node
# to be in the past, otherwise things break because the CNode
# time counters can't be reset backward after initialization
old_time = int(time.time() - 2*60*60*24*7)
self.nodes[0].setmocktime(old_time)
# Generate some old blocks
self.nodes[0].generate(130)
# test_nodes[0] will only request old blocks
# test_nodes[1] will only request new blocks
# test_nodes[2] will test resetting the counters
test_nodes = []
connections = []
for i in range(3):
test_nodes.append(TestNode())
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))
test_nodes[i].add_connection(connections[i])
NetworkThread().start() # Start up network handling in another thread
[x.wait_for_verack() for x in test_nodes]
# Test logic begins here
# Now mine a big block
mine_large_block(self.nodes[0], self.utxo_cache)
# Store the hash; we'll request this later
big_old_block = self.nodes[0].getbestblockhash()
old_block_size = self.nodes[0].getblock(big_old_block, True)['size']
big_old_block = int(big_old_block, 16)
# Advance to two days ago
self.nodes[0].setmocktime(int(time.time()) - 2*60*60*24)
# Mine one more block, so that the prior block looks old
mine_large_block(self.nodes[0], self.utxo_cache)
# We'll be requesting this new block too
big_new_block = self.nodes[0].getbestblockhash()
big_new_block = int(big_new_block, 16)
# test_nodes[0] will test what happens if we just keep requesting the
# the same big old block too many times (expect: disconnect)
getdata_request = msg_getdata()
getdata_request.inv.append(CInv(2, big_old_block))
max_bytes_per_day = 800*1024*1024
daily_buffer = 144 * 4000000
max_bytes_available = max_bytes_per_day - daily_buffer
success_count = max_bytes_available // old_block_size
# 576MB will be reserved for relaying new blocks, so expect this to
# succeed for ~235 tries.
for i in range(success_count):
test_nodes[0].send_message(getdata_request)
test_nodes[0].sync_with_ping()
assert_equal(test_nodes[0].block_receive_map[big_old_block], i+1)
assert_equal(len(self.nodes[0].getpeerinfo()), 3)
# At most a couple more tries should succeed (depending on how long
# the test has been running so far).
for i in range(3):
test_nodes[0].send_message(getdata_request)
test_nodes[0].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 2)
self.log.info("Peer 0 disconnected after downloading old block too many times")
# Requesting the current block on test_nodes[1] should succeed indefinitely,
# even when over the max upload target.
# We'll try 800 times
getdata_request.inv = [CInv(2, big_new_block)]
for i in range(800):
test_nodes[1].send_message(getdata_request)
test_nodes[1].sync_with_ping()
assert_equal(test_nodes[1].block_receive_map[big_new_block], i+1)
self.log.info("Peer 1 able to repeatedly download new block")
# But if test_nodes[1] tries for an old block, it gets disconnected too.
getdata_request.inv = [CInv(2, big_old_block)]
test_nodes[1].send_message(getdata_request)
test_nodes[1].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 1)
self.log.info("Peer 1 disconnected after trying to download old block")
self.log.info("Advancing system time on node to clear counters...")
# If we advance the time by 24 hours, then the counters should reset,
# and test_nodes[2] should be able to retrieve the old block.
self.nodes[0].setmocktime(int(time.time()))
test_nodes[2].sync_with_ping()
test_nodes[2].send_message(getdata_request)
test_nodes[2].sync_with_ping()
assert_equal(test_nodes[2].block_receive_map[big_old_block], 1)
self.log.info("Peer 2 able to download old block")
[c.disconnect_node() for c in connections]
#stop and start node 0 with 1MB maxuploadtarget, whitelist 127.0.0.1
self.log.info("Restarting nodes with -whitelist=127.0.0.1")
stop_node(self.nodes[0], 0)
self.nodes[0] = start_node(0, self.options.tmpdir, ["-whitelist=127.0.0.1", "-maxuploadtarget=1", "-blockmaxsize=999000"])
#recreate/reconnect 3 test nodes
test_nodes = []
connections = []
for i in range(3):
test_nodes.append(TestNode())
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_nodes[i]))
test_nodes[i].add_connection(connections[i])
NetworkThread().start() # Start up network handling in another thread
[x.wait_for_verack() for x in test_nodes]
#retrieve 20 blocks which should be enough to break the 1MB limit
getdata_request.inv = [CInv(2, big_new_block)]
for i in range(20):
test_nodes[1].send_message(getdata_request)
test_nodes[1].sync_with_ping()
assert_equal(test_nodes[1].block_receive_map[big_new_block], i+1)
getdata_request.inv = [CInv(2, big_old_block)]
test_nodes[1].send_message(getdata_request)
test_nodes[1].wait_for_disconnect()
assert_equal(len(self.nodes[0].getpeerinfo()), 3) #node is still connected because of the whitelist
self.log.info("Peer 1 still connected after trying to download old block (whitelisted)")
[c.disconnect_node() for c in connections]
if __name__ == '__main__':
MaxUploadTest().main()
|
|
import numpy as np
import accpm
import scipy.optimize as opt
import config
def linear_predictor(X, w):
"""
Returns a vector of predictions (+1 or -1) given input data X and
parameter vector w.
"""
predictions = np.sign(np.dot(X, w))
return predictions
def initial_polyhedron(X):
"""
Returns the initial polyhedron defined by Ax <= b, a unit hypercube
centered at the origin.
"""
dimension = X.shape[1]
A = []
b = []
for i in range(dimension):
a_upper = [0]*dimension
a_lower = [0]*dimension
a_upper[i] = 1
a_lower[i] = -1
A.append(a_upper)
A.append(a_lower)
b.append(0.5)
b.append(0.5)
A = np.array(A)
b = np.array(b)
return (A, b)
def chebyshev_center(A, b):
"""
Computes the Chebyshev center of a polyhedron defined by Ax <= b.
"""
dimension = A.shape[1] + 1
bounds = []
for i in range(dimension):
bounds.append((None, None))
c = np.zeros(dimension)
c[-1] = -1
norms = []
for a_i in A:
norm_a_i = np.linalg.norm(a_i)
norms.append([norm_a_i])
norms = np.asarray(norms)
A = np.hstack((A, norms))
result = opt.linprog(c, A, b, bounds=bounds)
cc = result.x[:-1]
return cc
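# For reference, the linear program solved in chebyshev_center() above is
#   maximize   r
#   subject to a_i^T x + ||a_i||_2 * r <= b_i   for every row a_i of A,
# with decision variables (x, r); linprog minimizes c = (0, ..., 0, -1), and the
# last component of the solution (the radius r) is dropped before returning.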
def random_vector(A, b):
"""
Generates a random vector satisfying Ax <= b through rejection
sampling.
"""
dimension = A.shape[1]
not_feasible = True
    while not_feasible:
config.reject_counter = config.reject_counter + 1
if config.reject_counter == config.milestone:
config.milestone = config.milestone * 10
print(config.reject_counter, 'random vectors have been generated so far')
rand_vec = np.random.uniform(-0.5, 0.5, dimension)
        if np.all(np.dot(A, rand_vec) <= b):
not_feasible = False
return rand_vec
def query(A, b, X, Y, M, sample=1, w_best=None):
"""
Chooses a training pattern to have its label examined.
Parameters
----------------
A : ndarray
b : ndarray
Specifies the polyhedron defined by Ax <= b.
X : ndarray
Training data that the pattern to have its label examined is
chosen from.
Y : ndarray
Labels of the training data to be used for labelling.
M : int, optional
Specifies the number of points to sample from the polyhedron.
By default this is taken to be the number of features squared,
which is passed from the active function.
sample : 0, 1, optional
Specifies how the center of the polyhedron will be
approximated.
0 - w_best is used for this purpose.
1 (default) - M points are uniformly sampled from the
polyhedron and averaged.
w_best : ndarray, optional
        If sample = 0, then w_best must be specified.
Returns
----------------
(x_chosen, y_chosen) : tuple
The training pattern chosen and its label.
(X, Y) : tuple
The data set X and Y with x_chosen and y_chosen removed,
respectively.
"""
if sample == 1:
dimension = X.shape[1]
sum = np.zeros(dimension)
for i in range(M):
rand_vec = random_vector(A, b)
sum = sum + rand_vec
g = sum/M
if sample == 0:
g = w_best
min_val = np.inf
ind = 0
for i in range(X.shape[0]):
current_val = np.dot(g, X[i])
if current_val < min_val:
ind = i
min_val = current_val
x_chosen = X[ind]
y_chosen = Y[ind]
X = np.delete(X, ind, axis=0)
Y = np.delete(Y, ind, axis=0)
return ((x_chosen, y_chosen), (X, Y))
def active(X, Y, iterations, center='ac', sample=1, testing=1, M=None):
"""
Computes the parameter vector for linear_predictor using a cutting
plane active learning procedure.
Parameters
----------------
X : ndarray
Training data. If iterations = n, then the active learning
procedure will choose n training patterns to be labelled.
Y : ndarray
Labels of training data. If iterations = n, then only n labels
will be used.
iterations : int
The number of points chosen to have their label examined. Must
be less than or equal to the number of training patterns.
center : 'ac', 'cc', 'random', optional
Specifies how, at each iteration, the center of the polyhedron
is to be computed.
'ac' (default) - analytic center
'cc' - Chebyshev center
'random' - random center
sample : 0, 1 (default), optional
Specifies how the center of the polyhedron will be
approximated in the query function.
testing : 0, 1 (default), 2, 3, optional
Specifies the information to be returned and to be printed as
the procedure runs.
0 - returns w_best only.
1 - returns w_best only and prints success summary.
2 - returns w_best only, prints success summary and
prints information at each iteration.
3 - returns w_best, the number j of cutting planes
generated and the array of iterations = n parameter
vectors generated.
M : int, optional
Specifies the number of points to sample from the polyhedron in
the query function. By default this is taken to be the number
of features squared.
Returns
----------------
w_best : ndarray
The parameter computed on the final iteration.
(Only when testing = 3) j : int
The number of cutting planes generated.
(Only when testing = 3) weights : list
List containing the iterations = n parameters computed.
"""
if center == 'ac':
center = accpm.analytic_center
if center == 'cc':
center = chebyshev_center
if center == 'random':
center = random_vector
(A, b) = initial_polyhedron(X)
weights = []
i = 0
j = 0
    if M is None:
M = A.shape[1]*A.shape[1]
while i < iterations:
if testing == 2:
print('\nEntering iteration', i)
w_best = center(A, b)
weights.append(w_best)
query_outcome = query(A, b, X, Y, M,
sample=sample, w_best=w_best)
(x_chosen, y_chosen) = query_outcome[0]
(X, Y) = query_outcome[1]
if testing == 2:
print(' (x_chosen, y_chosen) is', (x_chosen, y_chosen))
print(' y_chosen * np.dot(w_best, x_chosen) gives',
y_chosen * np.dot(w_best, x_chosen))
if y_chosen * np.dot(w_best, x_chosen) <= 0:
a_cp = (-1 * y_chosen) * x_chosen
norm_a_cp = np.linalg.norm(a_cp)
a_cp = a_cp/norm_a_cp
b_cp = 0
A = np.vstack((A, a_cp))
b = np.hstack((b, b_cp))
j = j + 1
if testing == 2:
print('\n******** Cutting plane', i+1, 'added ********')
print(' w_best was', w_best)
print(' w_best updated to', accpm.analytic_center(A, b))
print(' a_cp is', a_cp)
i = i + 1
if testing == 3:
return (w_best, j, weights)
if testing == 1 or testing == 2:
print('******** Desired number of points queried ********')
print(' ', j, 'cutting plane(s) generated over', i, 'iterations')
return w_best
if testing == 0:
return w_best
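
# --- Minimal usage sketch (not part of the original module). It builds a tiny
# --- linearly separable data set and uses the Chebyshev center with sample=0,
# --- so neither the external accpm module nor the config counters are
# --- exercised; the "true" separator below is purely hypothetical.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    X_toy = rng.uniform(-0.5, 0.5, size=(100, 2))   # patterns inside the unit box
    w_true = np.array([1.0, -0.5])                  # hypothetical separating direction
    Y_toy = np.sign(X_toy.dot(w_true))              # labels in {-1, +1}
    w_hat = active(X_toy, Y_toy, iterations=10, center='cc', sample=0, testing=0)
    acc = np.mean(linear_predictor(X_toy, w_hat) == Y_toy)
    print('training accuracy of the actively learned separator:', acc)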
|
|
# All fields except for BlobField written by Jonas Haag <[email protected]>
from django.db import models
from django.core.exceptions import ValidationError
from django.utils.importlib import import_module
__all__ = ('RawField', 'ListField', 'DictField', 'SetField',
'BlobField', 'EmbeddedModelField')
class _HandleAssignment(object):
"""
A placeholder class that provides a way to set the attribute on the model.
"""
def __init__(self, field):
self.field = field
def __get__(self, obj, type=None):
if obj is None:
raise AttributeError('Can only be accessed via an instance.')
return obj.__dict__[self.field.name]
def __set__(self, obj, value):
obj.__dict__[self.field.name] = self.field.to_python(value)
class RawField(models.Field):
""" Generic field to store anything your database backend allows you to. """
def get_internal_type(self):
return 'RawField'
class AbstractIterableField(models.Field):
"""
Abstract field for fields for storing iterable data type like ``list``,
``set`` and ``dict``.
You can pass an instance of a field as the first argument.
If you do, the iterable items will be piped through the passed field's
validation and conversion routines, converting the items to the
appropriate data type.
"""
def __init__(self, item_field=None, *args, **kwargs):
if item_field is None:
item_field = RawField()
self.item_field = item_field
default = kwargs.get('default', None if kwargs.get('null') else ())
if default is not None and not callable(default):
# ensure a new object is created every time the default is accessed
kwargs['default'] = lambda: self._type(default)
super(AbstractIterableField, self).__init__(*args, **kwargs)
def contribute_to_class(self, cls, name):
self.item_field.model = cls
self.item_field.name = name
super(AbstractIterableField, self).contribute_to_class(cls, name)
metaclass = getattr(self.item_field, '__metaclass__', None)
        if metaclass and issubclass(metaclass, models.SubfieldBase):
setattr(cls, self.name, _HandleAssignment(self))
def db_type(self, connection):
item_db_type = self.item_field.db_type(connection=connection)
return '%s:%s' % (self.__class__.__name__, item_db_type)
def _convert(self, func, values, *args, **kwargs):
if isinstance(values, (list, tuple, set)):
return self._type(func(value, *args, **kwargs) for value in values)
return values
def to_python(self, value):
return self._convert(self.item_field.to_python, value)
def pre_save(self, model_instance, add):
class fake_instance(object):
pass
fake_instance = fake_instance()
def wrapper(value):
assert not hasattr(self.item_field, 'attname')
fake_instance.value = value
self.item_field.attname = 'value'
try:
return self.item_field.pre_save(fake_instance, add)
finally:
del self.item_field.attname
return self._convert(wrapper, getattr(model_instance, self.attname))
def get_db_prep_value(self, value, connection, prepared=False):
return self._convert(self.item_field.get_db_prep_value, value,
connection=connection, prepared=prepared)
def get_db_prep_save(self, value, connection):
return self._convert(self.item_field.get_db_prep_save,
value, connection=connection)
def validate(self, values, model_instance):
try:
iter(values)
except TypeError:
raise ValidationError('Value of type %r is not iterable' % type(values))
def formfield(self, **kwargs):
raise NotImplementedError('No form field implemented for %r' % type(self))
class ListField(AbstractIterableField):
"""
Field representing a Python ``list``.
If the optional keyword argument `ordering` is given, it must be a callable
that is passed to :meth:`list.sort` as `key` argument. If `ordering` is
given, the items in the list will be sorted before sending them to the
database.
"""
_type = list
def __init__(self, *args, **kwargs):
self.ordering = kwargs.pop('ordering', None)
if self.ordering is not None and not callable(self.ordering):
raise TypeError("'ordering' has to be a callable or None, "
"not of type %r" % type(self.ordering))
super(ListField, self).__init__(*args, **kwargs)
def _convert(self, func, values, *args, **kwargs):
values = super(ListField, self)._convert(func, values, *args, **kwargs)
if values is not None and self.ordering is not None:
values.sort(key=self.ordering)
return values
class SetField(AbstractIterableField):
"""
Field representing a Python ``set``.
"""
_type = set
class DictField(AbstractIterableField):
"""
Field representing a Python ``dict``.
The field type conversions described in :class:`AbstractIterableField`
only affect values of the dictionary, not keys.
Depending on the backend, keys that aren't strings might not be allowed.
"""
_type = dict
def _convert(self, func, values, *args, **kwargs):
if values is None:
return None
return dict((key, func(value, *args, **kwargs))
for key, value in values.iteritems())
def validate(self, values, model_instance):
if not isinstance(values, dict):
raise ValidationError('Value is of type %r. Should be a dict.' % type(values))
class BlobField(models.Field):
"""
A field for storing blobs of binary data.
The value might either be a string (or something that can be converted to
a string), or a file-like object.
In the latter case, the object has to provide a ``read`` method from which
the blob is read.
"""
def get_internal_type(self):
return 'BlobField'
def formfield(self, **kwargs):
# A file widget is provided, but use model FileField or ImageField
# for storing specific files most of the time
from .widgets import BlobWidget
from django.forms import FileField
defaults = {'form_class': FileField, 'widget': BlobWidget}
defaults.update(kwargs)
return super(BlobField, self).formfield(**defaults)
def get_db_prep_value(self, value, connection, prepared=False):
if hasattr(value, 'read'):
return value.read()
else:
return str(value)
def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
raise TypeError("BlobFields do not support lookups")
def value_to_string(self, obj):
return str(self._get_val_from_obj(obj))
class EmbeddedModelField(models.Field):
"""
Field that allows you to embed a model instance.
:param model: The (optional) model class that shall be embedded
"""
__metaclass__ = models.SubfieldBase
def __init__(self, embedded_model=None, *args, **kwargs):
self.embedded_model = embedded_model
kwargs.setdefault('default', None)
super(EmbeddedModelField, self).__init__(*args, **kwargs)
def db_type(self, connection):
return 'DictField:RawField'
def pre_save(self, model_instance, add):
embedded_instance = super(EmbeddedModelField, self).pre_save(model_instance, add)
if embedded_instance is None:
return None, None
if self.embedded_model is not None and \
not isinstance(embedded_instance, self.embedded_model):
raise TypeError("Expected instance of type %r, not %r"
% (type(self.embedded_model), type(embedded_instance)))
data = dict((field.name, field.pre_save(embedded_instance, add))
for field in embedded_instance._meta.fields)
return embedded_instance, data
def get_db_prep_value(self, (embedded_instance, embedded_dict), **kwargs):
if embedded_dict is None:
return None
values = dict()
for name, value in embedded_dict.iteritems():
field = embedded_instance._meta.get_field(name)
values[name] = field.get_db_prep_value(value, **kwargs)
if self.embedded_model is None:
values.update({'_module' : embedded_instance.__class__.__module__,
'_model' : embedded_instance.__class__.__name__})
return values
def to_python(self, values):
if not isinstance(values, dict):
return values
module, model = values.pop('_module', None), values.pop('_model', None)
if module is not None:
return getattr(import_module(module), model)(**values)
return self.embedded_model(**values)
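
# --- Minimal usage sketch (not part of the original module). It assumes a
# --- django-nonrel style backend that understands the db_type strings emitted
# --- above; the model and field names are purely illustrative.
#
#     from django.db import models
#
#     class Post(models.Model):
#         title = models.CharField(max_length=100)
#         tags = SetField(models.CharField(max_length=30))   # items validated as CharField
#         comments = ListField(DictField(),
#                              ordering=lambda c: c.get('created'))
#         author = EmbeddedModelField()                       # stores any model instance
#         attachment = BlobField(null=True)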
|
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for modules/dashboard/."""
__author__ = 'Glenn De Jonghe ([email protected])'
import cgi
import itertools
import time
import json
import actions
from common import crypto
from common.utils import Namespace
from models import courses
from models import models
from models import resources_display
from models import transforms
from models.custom_modules import Module
from models.roles import Permission
from models.roles import Roles
from modules.dashboard import dashboard
from common import menus
from modules.dashboard.dashboard import DashboardHandler
from modules.dashboard.question_group_editor import QuestionGroupRESTHandler
from modules.dashboard.role_editor import RoleRESTHandler
from google.appengine.api import namespace_manager
class QuestionDashboardTestCase(actions.TestBase):
"""Tests Assets > Questions."""
COURSE_NAME = 'question_dashboard'
ADMIN_EMAIL = '[email protected]'
URL = 'dashboard?action=edit_questions'
def setUp(self):
super(QuestionDashboardTestCase, self).setUp()
actions.login(self.ADMIN_EMAIL, is_admin=True)
self.base = '/' + self.COURSE_NAME
context = actions.simple_add_course(
self.COURSE_NAME, self.ADMIN_EMAIL, 'Questions Dashboard')
self.old_namespace = namespace_manager.get_namespace()
namespace_manager.set_namespace('ns_%s' % self.COURSE_NAME)
self.course = courses.Course(None, context)
def tearDown(self):
namespace_manager.set_namespace(self.old_namespace)
super(QuestionDashboardTestCase, self).tearDown()
def test_unused_question(self):
# Create an unused question
unused_question_dto = models.QuestionDTO(None, {
'description': 'unused',
'type': 0
})
unused_question_id = models.QuestionDAO.save(unused_question_dto)
self.course.save()
dom = self.parse_html_string(self.get(self.URL).body)
question_row = dom.find('.//tr[@data-quid=\'{}\']'.format(
unused_question_id))
filter_data = json.loads(question_row.get('data-filter'))
self.assertEqual(filter_data['unused'], 1)
def test_table_entries(self):
# Create a question
mc_question_description = 'Test MC Question'
mc_question_dto = models.QuestionDTO(None, {
'description': mc_question_description,
'type': 0 # MC
})
mc_question_id = models.QuestionDAO.save(mc_question_dto)
# Create an assessment and add the question to the content.
# Also include a broken question ref to the assessment (and expect this
# doesn't break anything).
assessment_one = self.course.add_assessment()
assessment_one.title = 'Test Assessment One'
assessment_one.html_content = """
<question quid="%s" weight="1" instanceid="1"></question>
<question quid="broken" weight="1" instanceid="broken"></question>
""" % mc_question_id
# Create a second question
sa_question_description = 'Test SA Question'
sa_question_dto = models.QuestionDTO(None, {
'description': sa_question_description,
'type': 1 # SA
})
sa_question_id = models.QuestionDAO.save(sa_question_dto)
# Create a question group and add the second question
qg_description = 'Question Group'
qg_dto = models.QuestionGroupDTO(None, {
'description': qg_description,
'items': [{'question': str(sa_question_id)}]
})
qg_id = models.QuestionGroupDAO.save(qg_dto)
# Create a second assessment and add the question group to the content
assessment_two = self.course.add_assessment()
assessment_two.title = 'Test Assessment'
assessment_two.html_content = """
<question-group qgid="%s" instanceid="QG"></question-group>
""" % qg_id
self.course.save()
# Get the Assets > Question tab
dom = self.parse_html_string(self.get(self.URL).body)
asset_tables = dom.findall('.//table[@class="assets-table"]')
self.assertEquals(len(asset_tables), 2)
# First check Question Bank table
questions_table = asset_tables[0]
question_rows = questions_table.findall('./tbody/tr[@data-filter]')
self.assertEquals(len(question_rows), 2)
# Check edit link and description of the first question
first_row = list(question_rows[0])
first_cell = first_row[0]
self.assertEquals(first_cell.findall('a')[1].tail,
mc_question_description)
self.assertEquals(first_cell.find('a').get('href'), (
'dashboard?action=edit_question&key=%s' % mc_question_id))
# Check if the assessment is listed
location_link = first_row[2].find('ul/li/a')
self.assertEquals(location_link.get('href'), (
'assessment?name=%s' % assessment_one.unit_id))
self.assertEquals(location_link.text, assessment_one.title)
# Check second question (=row)
second_row = list(question_rows[1])
self.assertEquals(
second_row[0].findall('a')[1].tail, sa_question_description)
# Check whether the containing Question Group is listed
self.assertEquals(second_row[1].find('ul/li').text, qg_description)
# Now check Question Group table
question_groups_table = asset_tables[1]
row = question_groups_table.find('./tbody/tr')
# Check edit link and description
edit_link = row[0].find('a')
self.assertEquals(edit_link.tail, qg_description)
self.assertEquals(edit_link.get('href'), (
'dashboard?action=edit_question_group&key=%s' % qg_id))
# The question that is part of this group, should be listed
self.assertEquals(row[1].find('ul/li').text, sa_question_description)
# Assessment where this Question Group is located, should be linked
location_link = row[2].find('ul/li/a')
self.assertEquals(location_link.get('href'), (
'assessment?name=%s' % assessment_two.unit_id))
self.assertEquals(location_link.text, assessment_two.title)
def _load_tables(self):
asset_tables = self.parse_html_string(self.get(self.URL).body).findall(
'.//table[@class="assets-table"]')
self.assertEquals(len(asset_tables), 2)
return asset_tables
def test_no_questions_and_question_groups(self):
asset_tables = self._load_tables()
self.assertEquals(
asset_tables[0].find('./tfoot/tr/td').text, 'No questions available'
)
self.assertEquals(
asset_tables[1].find('./tfoot/tr/td').text,
'No question groups available'
)
def test_no_question_groups(self):
description = 'Question description'
models.QuestionDAO.save(models.QuestionDTO(None, {
'description': description
}))
asset_tables = self._load_tables()
self.assertEquals(
asset_tables[0].findall('./tbody/tr/td/a')[1].tail, description
)
self.assertEquals(
asset_tables[1].find('./tfoot/tr/td').text,
'No question groups available'
)
def test_no_questions(self):
description = 'Group description'
models.QuestionGroupDAO.save(models.QuestionGroupDTO(None, {
'description': description
}))
asset_tables = self._load_tables()
self.assertEquals(
asset_tables[0].find('./tfoot/tr/td').text, 'No questions available'
)
self.assertEquals(
asset_tables[1].find('./tbody/tr/td/a').tail, description
)
def test_if_buttons_are_present(self):
"""Tests if all buttons are present.
In the past it wasn't allowed to add a question group when there
were no questions yet.
"""
body = self.get(self.URL).body
self.assertIn('Add Short Answer', body)
self.assertIn('Add Multiple Choice', body)
self.assertIn('Add Question Group', body)
def test_adding_empty_question_group(self):
QG_URL = '/%s%s' % (self.COURSE_NAME, QuestionGroupRESTHandler.URI)
xsrf_token = crypto.XsrfTokenManager.create_xsrf_token(
QuestionGroupRESTHandler.XSRF_TOKEN)
description = 'Question Group'
payload = {
'description': description,
'version': QuestionGroupRESTHandler.SCHEMA_VERSIONS[0],
'introduction': '',
'items': []
}
response = self.put(QG_URL, {'request': transforms.dumps({
'xsrf_token': cgi.escape(xsrf_token),
'payload': transforms.dumps(payload)})})
self.assertEquals(response.status_int, 200)
payload = transforms.loads(response.body)
self.assertEquals(payload['status'], 200)
self.assertEquals(payload['message'], 'Saved.')
asset_tables = self._load_tables()
self.assertEquals(
asset_tables[1].find('./tbody/tr/td/a').tail, description
)
def test_last_modified_timestamp(self):
begin_time = time.time()
question_dto = models.QuestionDTO(None, {})
models.QuestionDAO.save(question_dto)
self.assertTrue((begin_time <= question_dto.last_modified) and (
question_dto.last_modified <= time.time()))
qg_dto = models.QuestionGroupDTO(None, {})
models.QuestionGroupDAO.save(qg_dto)
        self.assertTrue((begin_time <= qg_dto.last_modified) and (
            qg_dto.last_modified <= time.time()))
asset_tables = self._load_tables()
self.assertEquals(
asset_tables[0].find('./tbody/tr/td[@data-timestamp]').get(
'data-timestamp', ''),
str(question_dto.last_modified)
)
self.assertEquals(
asset_tables[1].find('./tbody/tr/td[@data-timestamp]').get(
'data-timestamp', ''),
str(qg_dto.last_modified)
)
def test_question_clone(self):
# Add a question by just nailing it in to the datastore.
mc_question_description = 'Test MC Question'
mc_question_dto = models.QuestionDTO(None, {
'description': mc_question_description,
'type': 0 # MC
})
models.QuestionDAO.save(mc_question_dto)
# On the assets -> questions page, clone the question.
response = self.get(self.URL)
dom = self.parse_html_string(self.get(self.URL).body)
clone_link = dom.find('.//a[@class="icon md md-content-copy"]')
question_key = clone_link.get('data-key')
xsrf_token = dom.find('.//table[@id="question-table"]'
).get('data-clone-question-token')
self.post(
'dashboard?action=clone_question',
{
'key': question_key,
'xsrf_token': xsrf_token
})
response = self.get(self.URL)
self.assertIn(mc_question_description + ' (clone)', response.body)
def _call_add_to_question_group(self, qu_id, qg_id, weight, xsrf_token):
return self.post('dashboard', {
'action': 'add_to_question_group',
'question_id': qu_id,
'group_id': qg_id,
'weight': weight,
'xsrf_token': xsrf_token,
}, True)
def test_add_to_question_group(self):
# Create a question
question_description = 'Question'
question_dto = models.QuestionDTO(None, {
'description': question_description,
'type': 0 # MC
})
question_id = models.QuestionDAO.save(question_dto)
# No groups are present so no add_to_group icon should be present
        self.assertIsNone(self._load_tables()[0].find('./tbody/tr/td[ul]/div'))
# Create a group
qg_description = 'Question Group'
qg_dto = models.QuestionGroupDTO(None, {
'description': qg_description,
'items': []
})
qg_id = models.QuestionGroupDAO.save(qg_dto)
# Since we now have a group, the add_to_group icon should be visible
self.assertIsNotNone(
self._load_tables()[0].find('./tbody/tr/td[ul]/div'))
# Add Question to Question Group via post_add_to_question_group
asset_tables = self._load_tables()
xsrf_token = asset_tables[0].get('data-qg-xsrf-token', '')
response = self._call_add_to_question_group(
question_id, qg_id, 1, xsrf_token)
# Check if operation was successful
self.assertEquals(response.status_int, 200)
asset_tables = self._load_tables()
self.assertEquals(
asset_tables[0].find('./tbody/tr/td/ul/li').text,
qg_description
)
self.assertEquals(
asset_tables[1].find('./tbody/tr/td/ul/li').text,
question_description
)
# Check a bunch of calls that should fail
response = self._call_add_to_question_group(question_id, qg_id, 1, 'a')
self.assertEquals(response.status_int, 403)
response = transforms.loads(self._call_add_to_question_group(
-1, qg_id, 1, xsrf_token).body)
self.assertEquals(response['status'], 500)
response = transforms.loads(self._call_add_to_question_group(
question_id, -1, 1, xsrf_token).body)
self.assertEquals(response['status'], 500)
response = transforms.loads(self._call_add_to_question_group(
'a', qg_id, 1, xsrf_token).body)
self.assertEquals(response['status'], 500)
response = transforms.loads(self._call_add_to_question_group(
question_id, qg_id, 'a', xsrf_token).body)
self.assertEquals(response['status'], 500)
class CourseOutlineTestCase(actions.TestBase):
"""Tests the Course Outline."""
COURSE_NAME = 'outline'
ADMIN_EMAIL = '[email protected]'
STUDENT_EMAIL = '[email protected]'
URL = 'dashboard'
def setUp(self):
super(CourseOutlineTestCase, self).setUp()
actions.login(self.ADMIN_EMAIL, is_admin=True)
self.base = '/' + self.COURSE_NAME
context = actions.simple_add_course(
self.COURSE_NAME, self.ADMIN_EMAIL, 'Outline Testing')
self.course = courses.Course(None, context)
self.assessment = self.course.add_assessment()
self.assessment.title = 'Test Assessment'
self.link = self.course.add_link()
self.link.title = 'Test Link'
self.unit = self.course.add_unit()
self.unit.title = 'Test Unit'
self.lesson = self.course.add_lesson(self.unit)
self.lesson.title = 'Test Lesson'
self.course.save()
def _check_private_setting(self, li, ctype, key, is_private):
padlock = li.find('./div/div/div[2]')
self.assertEquals(padlock.get('data-component-type', ''), ctype)
self.assertEquals(padlock.get('data-key', ''), str(key))
lock_class = 'md-lock' if is_private else 'md-lock-open'
self.assertIn(lock_class, padlock.get('class', ''))
def _get_item_for(self, get_what):
dom = self.parse_html_string(self.get(self.URL).body)
course_outline = dom.find('.//div[@class="course-outline editable"]')
lis = course_outline.findall('.//ol[@class="course"]/li')
self.assertEquals(len(lis), 3)
if get_what == 'assessment':
return lis[0]
elif get_what == 'link':
return lis[1]
elif get_what == 'unit':
return lis[2]
elif get_what == 'lesson':
return lis[2].find('ol/li')
else:
self.fail('Test trying to find item we do not have')
def _check_syllabus_for_admin(self, private, title):
response = self.get('/%s/course' % self.COURSE_NAME)
dom = self.parse_html_string(response.body)
units = dom.findall('.//div[@id="gcb-main"]//li')
for unit in units:
text = ' '.join(''.join(unit.itertext()).split())
if title in text:
if private:
self.assertIn('(Private)', text)
else:
self.assertNotIn('(Private)', text)
def _check_syllabus_for_student(self, private, shown, title):
actions.login(self.STUDENT_EMAIL, is_admin=False)
response = self.get('/%s/course' % self.COURSE_NAME)
dom = self.parse_html_string(response.body)
units = dom.findall('.//div[@id="gcb-main"]//li')
found = False
for unit in units:
text = ' '.join(''.join(unit.itertext()).split())
if title in text:
found = True
if private:
if shown:
self.assertIsNone(unit.find('.//a'))
else:
self.fail('private hidden items should not be found.')
else:
self.assertIsNotNone(unit.find('.//a'))
if private and not shown:
self.assertFalse(found)
actions.login(self.ADMIN_EMAIL, is_admin=True)
def test_setting_combinations(self):
cases = ((self.unit, 'unit',),
(self.link, 'link'),
(self.assessment, 'assessment'))
for unit, kind in cases:
for private, shown in itertools.product([True, False], repeat=2):
unit.now_available = not private
unit.shown_when_unavailable = shown
self.course.save()
item = self._get_item_for(kind)
self._check_private_setting(item, 'unit', unit.unit_id, private)
self._check_syllabus_for_admin(private, unit.title)
self._check_syllabus_for_student(private, shown, unit.title)
def test_lesson_public_private(self):
self.lesson.now_available = True
self.course.save()
item = self._get_item_for('lesson')
self._check_private_setting(
item, 'lesson', self.lesson.lesson_id, False)
self.lesson.now_available = False
self.course.save()
item = self._get_item_for('lesson')
self._check_private_setting(
item, 'lesson', self.lesson.lesson_id, True)
def _check_item_label(self, li, href, title):
a = li.find('./div/div/div[@class="name"]/a')
self.assertEquals(a.get('href', ''), href)
self.assertEquals(a.text, title)
def test_title(self):
item = self._get_item_for('link')
self._check_item_label(item, '', self.link.title)
item = self._get_item_for('assessment')
self._check_item_label(
item, 'assessment?name=%s' % self.assessment.unit_id,
self.assessment.title)
item = self._get_item_for('unit')
self._check_item_label(
item, 'unit?unit=%s' % self.unit.unit_id, self.unit.title)
item = self._get_item_for('lesson')
self._check_item_label(
item, 'unit?unit=%s&lesson=%s' % (
self.unit.unit_id, self.lesson.lesson_id),
self.lesson.title)
class RoleEditorTestCase(actions.TestBase):
"""Tests the Roles tab and Role Editor."""
COURSE_NAME = 'role_editor'
ADMIN_EMAIL = '[email protected]'
URL = 'dashboard?action=edit_roles'
def setUp(self):
super(RoleEditorTestCase, self).setUp()
actions.login(self.ADMIN_EMAIL, is_admin=True)
self.base = '/' + self.COURSE_NAME
context = actions.simple_add_course(
self.COURSE_NAME, self.ADMIN_EMAIL, 'Roles Testing')
self.course = courses.Course(None, context)
self.old_namespace = namespace_manager.get_namespace()
namespace_manager.set_namespace('ns_%s' % self.COURSE_NAME)
self.old_registered_permission = Roles._REGISTERED_PERMISSIONS
Roles._REGISTERED_PERMISSIONS = {}
def tearDown(self):
Roles._REGISTERED_PERMISSIONS = self.old_registered_permission
namespace_manager.set_namespace(self.old_namespace)
super(RoleEditorTestCase, self).tearDown()
def _create_role(self, role):
role_dto = models.RoleDTO(None, {
'name': role,
})
return models.RoleDAO.save(role_dto)
def test_roles_tab(self):
role_name = 'Test Role'
role_id = self._create_role(role_name)
li = self.parse_html_string(self.get(self.URL).body).find('.//ul/li')
self.assertEquals(li.text, role_name)
self.assertEquals(li.find('a').get('href'), (
'dashboard?action=edit_role&key=%s' % role_id))
def test_editor_hooks(self):
module1 = Module('module1', '', [], [])
module2 = Module('module2', '', [], [])
module3 = Module('module3', '', [], [])
module4 = Module('module4', '', [], [])
Roles.register_permissions(module1, lambda unused: [
Permission('permissiona', 'a'), Permission('permissionb', 'b')])
Roles.register_permissions(module2, lambda unused: [
Permission('permissionc', 'c'), Permission('permissiond', 'd')])
        Roles.register_permissions(module4, lambda unused: [
            Permission('permissiong', 'g'), Permission('permissionh', 'h')])
handler = RoleRESTHandler()
handler.course = self.course
datastore_permissions = {
module1.name: ['permission', 'permissiona', 'permissionb'],
module2.name: ['permissionc', 'permissiond'],
module3.name: ['permissione', 'permissionf']
}
datastore_dict = {
'name': 'Role Name',
'users': ['[email protected]', '[email protected]'],
'permissions': datastore_permissions
}
editor_dict = handler.transform_for_editor_hook(datastore_dict)
self.assertEquals(editor_dict['name'], 'Role Name')
self.assertEquals(editor_dict['users'], '[email protected], [email protected]')
modules = editor_dict['modules']
# Test registered assigned permission
permissionc = modules[module2.name][0]
self.assertEquals(permissionc['assigned'], True)
self.assertEquals(permissionc['name'], 'permissionc')
self.assertEquals(permissionc['description'], 'c')
# Test unregistered module with assigned permission
permissionsf = modules[RoleRESTHandler.INACTIVE_MODULES][1]
self.assertEquals(permissionsf['assigned'], True)
self.assertEquals(permissionsf['name'], 'permissionf')
self.assertEquals(
permissionsf['description'],
'This permission was set by the module "module3" which is '
'currently not registered.'
)
# Test registered module with assigned unregistered permission
permission = modules[module1.name][2]
self.assertEquals(permission['assigned'], True)
self.assertEquals(permission['name'], 'permission')
self.assertEquals(
permission['description'],
'This permission is currently not registered.'
)
# Test registered unassigned permissions
permissiong = editor_dict['modules'][module4.name][0]
self.assertEquals(permissiong['assigned'], False)
self.assertEquals(permissiong['name'], 'permissiong')
self.assertEquals(permissiong['description'], 'g')
# Call the hook which gets called when saving
new_datastore_dict = handler.transform_after_editor_hook(datastore_dict)
# If original dict matches new dict then both hooks work correctly
self.assertEquals(datastore_dict, new_datastore_dict)
def test_not_unique_role_name(self):
role_name = 'Test Role'
role_id = self._create_role(role_name)
handler = RoleRESTHandler()
handler.course = self.course
editor_dict = {
'name': role_name
}
errors = []
handler.validate(editor_dict, role_id + 1, None, errors)
self.assertEquals(
errors[0], 'The role must have a unique non-empty name.')
class DashboardAccessTestCase(actions.TestBase):
ACCESS_COURSE_NAME = 'dashboard_access_yes'
NO_ACCESS_COURSE_NAME = 'dashboard_access_no'
ADMIN_EMAIL = '[email protected]'
USER_EMAIL = '[email protected]'
ROLE = 'test_role'
ACTION = 'test_action'
PERMISSION = 'can_access_dashboard'
PERMISSION_DESCRIPTION = 'Can Access Dashboard.'
def setUp(self):
super(DashboardAccessTestCase, self).setUp()
actions.login(self.ADMIN_EMAIL, is_admin=True)
context = actions.simple_add_course(
self.ACCESS_COURSE_NAME, self.ADMIN_EMAIL, 'Course with access')
self.course_with_access = courses.Course(None, context)
with Namespace(self.course_with_access.app_context.namespace):
role_dto = models.RoleDTO(None, {
'name': self.ROLE,
'users': [self.USER_EMAIL],
'permissions': {dashboard.custom_module.name: [self.PERMISSION]}
})
models.RoleDAO.save(role_dto)
context = actions.simple_add_course(
self.NO_ACCESS_COURSE_NAME, self.ADMIN_EMAIL,
'Course with no access'
)
self.course_without_access = courses.Course(None, context)
def test_content(self):
return self.render_page(
{'main_content': 'test', 'page_title': 'test'})
# save properties
self.old_menu_group = DashboardHandler.root_menu_group
# pylint: disable=W0212
        self.old_get_actions = DashboardHandler._custom_get_actions
# pylint: enable=W0212
# put a dummy method in
menu_group = menus.MenuGroup('test', 'Test Dashboard')
DashboardHandler.root_menu_group = menu_group
DashboardHandler.default_action = self.ACTION
DashboardHandler.add_nav_mapping(self.ACTION, self.ACTION)
DashboardHandler.add_sub_nav_mapping(self.ACTION, self.ACTION,
self.ACTION, action=self.ACTION, contents=test_content)
DashboardHandler.map_action_to_permission(
'get_%s' % self.ACTION, self.PERMISSION)
actions.logout()
def tearDown(self):
# restore properties
# pylint: disable=W0212
DashboardHandler.root_menu_group = self.old_menu_group
        DashboardHandler._custom_get_actions = self.old_get_actions
# pylint: enable=W0212
super(DashboardAccessTestCase, self).tearDown()
def test_dashboard_access_method(self):
with Namespace(self.course_with_access.app_context.namespace):
self.assertFalse(DashboardHandler.current_user_has_access(
self.course_with_access.app_context))
with Namespace(self.course_without_access.app_context.namespace):
self.assertFalse(DashboardHandler.current_user_has_access(
self.course_without_access.app_context))
actions.login(self.USER_EMAIL, is_admin=False)
with Namespace(self.course_with_access.app_context.namespace):
self.assertTrue(DashboardHandler.current_user_has_access(
self.course_with_access.app_context))
with Namespace(self.course_without_access.app_context.namespace):
self.assertFalse(DashboardHandler.current_user_has_access(
self.course_without_access.app_context))
actions.logout()
def _get_all_picker_options(self):
return self.parse_html_string(
self.get('/%s/dashboard' % self.ACCESS_COURSE_NAME).body
).findall('.//*[@id="gcb-course-picker-menu"]//a')
def test_course_picker(self):
actions.login(self.USER_EMAIL, is_admin=False)
picker_options = self._get_all_picker_options()
self.assertEquals(len(list(picker_options)), 0)
actions.logout()
actions.login(self.ADMIN_EMAIL, is_admin=True)
picker_options = self._get_all_picker_options()
# Expect 3 courses, as the default one is also considered for the picker
self.assertEquals(len(picker_options), 2)
actions.logout()
def _get_right_nav_links(self):
return self.parse_html_string(
self.get('/%s/' % self.ACCESS_COURSE_NAME).body
).findall(
'.//div[@id="gcb-nav-x"]/div/ul/li[@class="gcb-pull-right"]')
def test_dashboard_link(self):
# Not signed in => no dashboard or admin link visible
self.assertEquals(len(self._get_right_nav_links()), 0)
# Sign in user with dashboard permissions => dashboard link visible
actions.login(self.USER_EMAIL, is_admin=False)
links = self._get_right_nav_links()
self.assertEquals(len(links), 1)
self.assertEquals(links[0].find('a').get('href'), 'dashboard')
self.assertEquals(links[0].find('a').text, 'Dashboard')
# Sign in course admin => dashboard link visible
actions.login(self.ADMIN_EMAIL, is_admin=False)
links = self._get_right_nav_links()
self.assertEquals(len(links), 1)
self.assertEquals(links[0].find('a').get('href'), 'dashboard')
self.assertEquals(links[0].find('a').text, 'Dashboard')
class DashboardCustomNavTestCase(actions.TestBase):
"""Tests Assets > Questions."""
COURSE_NAME = 'custom_dashboard'
ADMIN_EMAIL = '[email protected]'
URL = 'dashboard?action=custom_mod'
ACTION = 'custom_mod'
CONTENT_PATH = './/div[@id="gcb-main-area"]/div[@id="gcb-main-content"]'
def setUp(self):
super(DashboardCustomNavTestCase, self).setUp()
actions.login(self.ADMIN_EMAIL, is_admin=True)
self.base = '/' + self.COURSE_NAME
context = actions.simple_add_course(
self.COURSE_NAME, self.ADMIN_EMAIL, 'Custom Dashboard')
self.old_namespace = namespace_manager.get_namespace()
namespace_manager.set_namespace('ns_%s' % self.COURSE_NAME)
self.course = courses.Course(None, context)
def tearDown(self):
namespace_manager.set_namespace(self.old_namespace)
super(DashboardCustomNavTestCase, self).tearDown()
def test_custom_top_nav(self):
# Add a new top level navigation action
DashboardHandler.add_nav_mapping(self.ACTION, 'CUSTOM_MOD')
class CustomNavHandler(object):
@classmethod
def show_page(cls, dashboard_handler):
dashboard_handler.render_page({
'page_title': dashboard_handler.format_title('CustomNav'),
'main_content': 'MainContent'})
DashboardHandler.add_custom_get_action(
self.ACTION, CustomNavHandler.show_page)
dom = self.parse_html_string(self.get('dashboard').body)
selected_nav_path = ('.//tr[@class="gcb-nav-bar-level-1"]'
'//a[@class="selected"]')
self.assertEquals('Edit', dom.find(selected_nav_path).text)
dom = self.parse_html_string(self.get(self.URL).body)
self.assertEquals('CUSTOM_MOD', dom.find(selected_nav_path).text)
self.assertEquals(
'MainContent', dom.find(self.CONTENT_PATH).text.strip())
DashboardHandler.remove_custom_get_action(self.ACTION)
# Add a new tab under the new navigation action
class CustomTabHandler(object):
@classmethod
def display_html(cls, unused_dashboard_handler):
return 'MainTabContent'
dashboard.DashboardHandler.add_sub_nav_mapping(
self.ACTION, 'cu_tab', 'CustomTab', action=self.ACTION,
contents=CustomTabHandler)
dom = self.parse_html_string(self.get(self.URL).body)
self.assertEquals('CUSTOM_MOD', dom.find(selected_nav_path).text)
self.assertEquals(
'MainTabContent', dom.find(self.CONTENT_PATH).text.strip())
selected_tab_path = ('.//*[@class="gcb-nav-bar-level-2"]'
'//a[@class="selected"]')
self.assertEquals('CustomTab', dom.find(selected_tab_path).text)
def test_first_tab(self):
url = 'dashboard?action=analytics_students'
dom = self.parse_html_string(self.get(url).body)
selected_tab_path = ('.//*[@class="gcb-nav-bar-level-2"]'
'//a[@class="selected"]')
self.assertEquals('Students', dom.find(selected_tab_path).text)
class TestLessonSchema(actions.TestBase):
COURSE_NAME = 'lesson_dashboard'
ADMIN_EMAIL = '[email protected]'
def setUp(self):
super(TestLessonSchema, self).setUp()
actions.login(self.ADMIN_EMAIL, is_admin=True)
context = actions.simple_add_course(
self.COURSE_NAME, self.ADMIN_EMAIL, 'Lesson Course')
self.old_namespace = namespace_manager.get_namespace()
namespace_manager.set_namespace('ns_%s' % self.COURSE_NAME)
self.course = courses.Course(None, context)
self.unit = self.course.add_unit()
self.course.save()
def tearDown(self):
namespace_manager.set_namespace(self.old_namespace)
super(TestLessonSchema, self).tearDown()
def test_video_field_hidden_in_new_lessons(self):
lesson = self.course.add_lesson(self.unit)
self.course.save()
schema = get_lesson_schema(self.course, lesson)
video_options = find_schema_field(schema, ['properties', 'video',
'_inputex'])
self.assertEqual(video_options['_type'], 'hidden')
def test_video_field_not_hidden_in_lessons_with_field_set(self):
lesson = self.course.add_lesson(self.unit)
lesson.video = 'oHg5SJYRHA0'
self.course.save()
schema = get_lesson_schema(self.course, lesson)
video_options = find_schema_field(schema, ['properties', 'video',
'_inputex'])
self.assertNotEqual(video_options.get('_type'), 'hidden')
def get_lesson_schema(course, lesson):
return resources_display.ResourceLesson.get_schema(
course, lesson.lesson_id).get_schema_dict()
def find_schema_field(schema, key):
for field, options in schema:
if field == key:
return options
|
|
from collections import OrderedDict
import pytest
import numpy as np
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from astropy.modeling.functional_models import (
Gaussian1D,
Sersic1D, Sine1D, Linear1D,
Lorentz1D, Voigt1D, Const1D,
Box1D, Trapezoid1D, RickerWavelet1D,
Moffat1D, Gaussian2D, Const2D, Ellipse2D,
Disk2D, Ring2D, Box2D, TrapezoidDisk2D,
RickerWavelet2D, AiryDisk2D, Moffat2D, Sersic2D,
KingProjectedAnalytic1D)
from astropy.modeling.powerlaws import (
PowerLaw1D, BrokenPowerLaw1D, SmoothlyBrokenPowerLaw1D,
ExponentialCutoffPowerLaw1D, LogParabola1D)
from astropy.modeling.polynomial import Polynomial1D, Polynomial2D
from astropy.modeling.fitting import LevMarLSQFitter
try:
from scipy import optimize # noqa
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
FUNC_MODELS_1D = [
{'class': Gaussian1D,
'parameters': {'amplitude': 3 * u.Jy, 'mean': 2 * u.m, 'stddev': 30 * u.cm},
'evaluation': [(2600 * u.mm, 3 * u.Jy * np.exp(-2))],
'bounding_box': [0.35, 3.65] * u.m},
{'class': Sersic1D,
'parameters': {'amplitude': 3 * u.MJy / u.sr, 'r_eff': 2 * u.arcsec, 'n': 4},
'evaluation': [(3 * u.arcsec, 1.3237148119468918 * u.MJy/u.sr)],
'bounding_box': False},
{'class': Sine1D,
'parameters': {'amplitude': 3 * u.km / u.s, 'frequency': 0.25 * u.Hz, 'phase': 0.5},
'evaluation': [(1 * u.s, -3 * u.km / u.s)],
'bounding_box': False},
{'class': Linear1D,
'parameters': {'slope': 3 * u.km / u.s, 'intercept': 5000 * u.m},
'evaluation': [(6000 * u.ms, 23 * u.km)],
'bounding_box': False},
{'class': Lorentz1D,
'parameters': {'amplitude': 2 * u.Jy, 'x_0': 505 * u.nm, 'fwhm': 100 * u.AA},
'evaluation': [(0.51 * u.micron, 1 * u.Jy)],
'bounding_box': [255, 755] * u.nm},
{'class': Voigt1D,
'parameters': {'amplitude_L': 2 * u.Jy, 'x_0': 505 * u.nm,
'fwhm_L': 100 * u.AA, 'fwhm_G': 50 * u.AA},
'evaluation': [(0.51 * u.micron, 1.06264568 * u.Jy)],
'bounding_box': False},
{'class': Const1D,
'parameters': {'amplitude': 3 * u.Jy},
'evaluation': [(0.6 * u.micron, 3 * u.Jy)],
'bounding_box': False},
{'class': Box1D,
'parameters': {'amplitude': 3 * u.Jy, 'x_0': 4.4 * u.um, 'width': 1 * u.um},
'evaluation': [(4200 * u.nm, 3 * u.Jy), (1 * u.m, 0 * u.Jy)],
'bounding_box': [3.9, 4.9] * u.um},
{'class': Trapezoid1D,
'parameters': {'amplitude': 3 * u.Jy, 'x_0': 4.4 * u.um, 'width': 1 * u.um, 'slope': 5 * u.Jy / u.um},
'evaluation': [(4200 * u.nm, 3 * u.Jy), (1 * u.m, 0 * u.Jy)],
'bounding_box': [3.3, 5.5] * u.um},
{'class': RickerWavelet1D,
'parameters': {'amplitude': 3 * u.Jy, 'x_0': 4.4 * u.um, 'sigma': 1e-3 * u.mm},
'evaluation': [(1000 * u.nm, -0.09785050 * u.Jy)],
'bounding_box': [-5.6, 14.4] * u.um},
{'class': Moffat1D,
'parameters': {'amplitude': 3 * u.Jy, 'x_0': 4.4 * u.um, 'gamma': 1e-3 * u.mm, 'alpha': 1},
'evaluation': [(1000 * u.nm, 0.238853503 * u.Jy)],
'bounding_box': False},
{'class': KingProjectedAnalytic1D,
'parameters': {'amplitude': 1. * u.Msun/u.pc**2, 'r_core': 1. * u.pc, 'r_tide': 2. * u.pc},
'evaluation': [(0.5 * u.pc, 0.2 * u.Msun/u.pc**2)],
'bounding_box': [0. * u.pc, 2. * u.pc]}
]
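# A minimal sketch (not part of the original test module) of how one entry of a
# table like FUNC_MODELS_1D above is meant to be exercised: build the model
# from its 'parameters', evaluate it at each input listed under 'evaluation'
# and compare against the expected quantity.
def _check_model_entry(entry):
    model = entry['class'](**entry['parameters'])
    for row in entry['evaluation']:
        *inputs, expected = row
        assert_quantity_allclose(model(*inputs), expected)
# e.g. _check_model_entry(FUNC_MODELS_1D[0])  # Gaussian1D evaluated with units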
FUNC_MODELS_2D = [
{'class': Gaussian2D,
'parameters': {'amplitude': 3 * u.Jy, 'x_mean': 2 * u.m, 'y_mean': 1 * u.m,
'x_stddev': 3 * u.m, 'y_stddev': 2 * u.m, 'theta': 45 * u.deg},
'evaluation': [(412.1320343 * u.cm, 3.121320343 * u.m, 3 * u.Jy * np.exp(-0.5))],
'bounding_box': [[-14.18257445, 16.18257445], [-10.75693665, 14.75693665]] * u.m},
{'class': Const2D,
'parameters': {'amplitude': 3 * u.Jy},
'evaluation': [(0.6 * u.micron, 0.2 * u.m, 3 * u.Jy)],
'bounding_box': False},
{'class': Disk2D,
'parameters': {'amplitude': 3 * u.Jy, 'x_0': 3 * u.m, 'y_0': 2 * u.m,
'R_0': 300 * u.cm},
'evaluation': [(5.8 * u.m, 201 * u.cm, 3 * u.Jy)],
'bounding_box': [[-1, 5], [0, 6]] * u.m},
{'class': TrapezoidDisk2D,
'parameters': {'amplitude': 3 * u.Jy, 'x_0': 1 * u.m, 'y_0': 2 * u.m,
'R_0': 100 * u.cm, 'slope': 1 * u.Jy / u.m},
'evaluation': [(3.5 * u.m, 2 * u.m, 1.5 * u.Jy)],
'bounding_box': [[-2, 6], [-3, 5]] * u.m},
{'class': Ellipse2D,
'parameters': {'amplitude': 3 * u.Jy, 'x_0': 3 * u.m, 'y_0': 2 * u.m,
'a': 300 * u.cm, 'b': 200 * u.cm, 'theta': 45 * u.deg},
'evaluation': [(4 * u.m, 300 * u.cm, 3 * u.Jy)],
'bounding_box': [[-0.76046808, 4.76046808], [0.68055697, 5.31944302]] * u.m},
{'class': Ring2D,
'parameters': {'amplitude': 3 * u.Jy, 'x_0': 3 * u.m, 'y_0': 2 * u.m,
'r_in': 2 * u.cm, 'r_out': 2.1 * u.cm},
'evaluation': [(302.05 * u.cm, 2 * u.m + 10 * u.um, 3 * u.Jy)],
'bounding_box': [[1.979, 2.021], [2.979, 3.021]] * u.m},
{'class': Box2D,
'parameters': {'amplitude': 3 * u.Jy, 'x_0': 3 * u.m, 'y_0': 2 * u.s,
'x_width': 4 * u.cm, 'y_width': 3 * u.s},
'evaluation': [(301 * u.cm, 3 * u.s, 3 * u.Jy)],
'bounding_box': [[0.5 * u.s, 3.5 * u.s], [2.98 * u.m, 3.02 * u.m]]},
{'class': RickerWavelet2D,
'parameters': {'amplitude': 3 * u.Jy, 'x_0': 3 * u.m, 'y_0': 2 * u.m,
'sigma': 1 * u.m},
'evaluation': [(4 * u.m, 2.5 * u.m, 0.602169107 * u.Jy)],
'bounding_box': False},
{'class': AiryDisk2D,
'parameters': {'amplitude': 3 * u.Jy, 'x_0': 3 * u.m, 'y_0': 2 * u.m,
'radius': 1 * u.m},
'evaluation': [(4 * u.m, 2.1 * u.m, 4.76998480e-05 * u.Jy)],
'bounding_box': False},
{'class': Moffat2D,
'parameters': {'amplitude': 3 * u.Jy, 'x_0': 4.4 * u.um, 'y_0': 3.5 * u.um,
'gamma': 1e-3 * u.mm, 'alpha': 1},
'evaluation': [(1000 * u.nm, 2 * u.um, 0.202565833 * u.Jy)],
'bounding_box': False},
{'class': Sersic2D,
'parameters': {'amplitude': 3 * u.MJy / u.sr, 'x_0': 1 * u.arcsec,
'y_0': 2 * u.arcsec, 'r_eff': 2 * u.arcsec, 'n': 4,
'ellip': 0, 'theta': 0},
'evaluation': [(3 * u.arcsec, 2.5 * u.arcsec, 2.829990489 * u.MJy/u.sr)],
'bounding_box': False},
]
POWERLAW_MODELS = [
{'class': PowerLaw1D,
'parameters': {'amplitude': 5 * u.kg, 'x_0': 10 * u.cm, 'alpha': 1},
'evaluation': [(1 * u.m, 500 * u.g)],
'bounding_box': False},
{'class': BrokenPowerLaw1D,
'parameters': {'amplitude': 5 * u.kg, 'x_break': 10 * u.cm, 'alpha_1': 1, 'alpha_2': -1},
'evaluation': [(1 * u.m, 50 * u.kg), (1 * u.cm, 50 * u.kg)],
'bounding_box': False},
{'class': SmoothlyBrokenPowerLaw1D,
'parameters': {'amplitude': 5 * u.kg, 'x_break': 10 * u.cm, 'alpha_1': 1, 'alpha_2': -1, 'delta': 1},
'evaluation': [(1 * u.m, 15.125 * u.kg), (1 * u.cm, 15.125 * u.kg)],
'bounding_box': False},
{'class': ExponentialCutoffPowerLaw1D,
'parameters': {'amplitude': 5 * u.kg, 'x_0': 10 * u.cm, 'alpha': 1, 'x_cutoff': 1 * u.m},
'evaluation': [(1 * u.um, 499999.5 * u.kg), (10 * u.m, 50 * np.exp(-10) * u.g)],
'bounding_box': False},
{'class': LogParabola1D,
'parameters': {'amplitude': 5 * u.kg, 'x_0': 10 * u.cm, 'alpha': 1, 'beta': 2},
'evaluation': [(1 * u.cm, 5 * 0.1 ** (-1 - 2 * np.log(0.1)) * u.kg)],
'bounding_box': False}
]
POLY_MODELS = [
{'class': Polynomial1D,
'parameters': {'degree': 2, 'c0': 3 * u.one, 'c1': 2 / u.m, 'c2': 3 / u.m**2},
'evaluation': [(3 * u.m, 36 * u.one)],
'bounding_box': False},
{'class': Polynomial1D,
'parameters': {'degree': 2, 'c0': 3 * u.kg, 'c1': 2 * u.kg / u.m, 'c2': 3 * u.kg / u.m**2},
'evaluation': [(3 * u.m, 36 * u.kg)],
'bounding_box': False},
{'class': Polynomial1D,
'parameters': {'degree': 2, 'c0': 3 * u.kg, 'c1': 2 * u.kg, 'c2': 3 * u.kg},
'evaluation': [(3 * u.one, 36 * u.kg)],
'bounding_box': False},
{'class': Polynomial2D,
'parameters': {'degree': 2, 'c0_0': 3 * u.one, 'c1_0': 2 / u.m, 'c2_0': 3 / u.m**2,
'c0_1': 3 / u.s, 'c0_2': -2 / u.s**2, 'c1_1': 5 / u.m / u.s},
'evaluation': [(3 * u.m, 2 * u.s, 64 * u.one)],
'bounding_box': False},
{'class': Polynomial2D,
'parameters': {'degree': 2, 'c0_0': 3 * u.kg, 'c1_0': 2 * u.kg / u.m, 'c2_0': 3 * u.kg / u.m**2,
'c0_1': 3 * u.kg / u.s, 'c0_2': -2 * u.kg / u.s**2, 'c1_1': 5 * u.kg / u.m / u.s},
'evaluation': [(3 * u.m, 2 * u.s, 64 * u.kg)],
'bounding_box': False},
{'class': Polynomial2D,
'parameters': {'degree': 2, 'c0_0': 3 * u.kg, 'c1_0': 2 * u.kg, 'c2_0': 3 * u.kg,
'c0_1': 3 * u.kg, 'c0_2': -2 * u.kg, 'c1_1': 5 * u.kg},
'evaluation': [(3 * u.one, 2 * u.one, 64 * u.kg)],
'bounding_box': False},
]
MODELS = FUNC_MODELS_1D + FUNC_MODELS_2D + POWERLAW_MODELS
SCIPY_MODELS = set([Sersic1D, Sersic2D, AiryDisk2D])
@pytest.mark.parametrize('model', MODELS)
def test_models_evaluate_without_units(model):
if not HAS_SCIPY and model['class'] in SCIPY_MODELS:
pytest.skip()
m = model['class'](**model['parameters'])
for args in model['evaluation']:
if len(args) == 2:
kwargs = OrderedDict(zip(('x', 'y'), args))
else:
kwargs = OrderedDict(zip(('x', 'y', 'z'), args))
if kwargs['x'].unit.is_equivalent(kwargs['y'].unit):
kwargs['x'] = kwargs['x'].to(kwargs['y'].unit)
mnu = m.without_units_for_data(**kwargs)
args = [x.value for x in kwargs.values()]
assert_quantity_allclose(mnu(*args[:-1]), args[-1])
@pytest.mark.parametrize('model', MODELS)
def test_models_evaluate_with_units(model):
if not HAS_SCIPY and model['class'] in SCIPY_MODELS:
pytest.skip()
m = model['class'](**model['parameters'])
for args in model['evaluation']:
assert_quantity_allclose(m(*args[:-1]), args[-1])
@pytest.mark.parametrize('model', MODELS)
def test_models_evaluate_with_units_x_array(model):
if not HAS_SCIPY and model['class'] in SCIPY_MODELS:
pytest.skip()
m = model['class'](**model['parameters'])
for args in model['evaluation']:
if len(args) == 2:
x, y = args
x_arr = u.Quantity([x, x])
result = m(x_arr)
assert_quantity_allclose(result, u.Quantity([y, y]))
else:
x, y, z = args
x_arr = u.Quantity([x, x])
y_arr = u.Quantity([y, y])
result = m(x_arr, y_arr)
assert_quantity_allclose(result, u.Quantity([z, z]))
@pytest.mark.parametrize('model', MODELS)
def test_models_evaluate_with_units_param_array(model):
if not HAS_SCIPY and model['class'] in SCIPY_MODELS:
pytest.skip()
params = {}
for key, value in model['parameters'].items():
if value is None or key == 'degree':
params[key] = value
else:
params[key] = np.repeat(value, 2)
params['n_models'] = 2
m = model['class'](**params)
for args in model['evaluation']:
if len(args) == 2:
x, y = args
x_arr = u.Quantity([x, x])
result = m(x_arr)
assert_quantity_allclose(result, u.Quantity([y, y]))
else:
x, y, z = args
x_arr = u.Quantity([x, x])
y_arr = u.Quantity([y, y])
result = m(x_arr, y_arr)
assert_quantity_allclose(result, u.Quantity([z, z]))
@pytest.mark.parametrize('model', MODELS)
def test_models_bounding_box(model):
# In some cases, having units in parameters caused bounding_box to break,
# so this is to ensure that it works correctly.
if not HAS_SCIPY and model['class'] in SCIPY_MODELS:
pytest.skip()
m = model['class'](**model['parameters'])
# In the following we need to explicitly test that the value is False
    # since Quantities no longer evaluate as True
if model['bounding_box'] is False:
# Check that NotImplementedError is raised, so that if bounding_box is
# implemented we remember to set bounding_box=True in the list of models
# above
with pytest.raises(NotImplementedError):
m.bounding_box
else:
# A bounding box may have inhomogeneous units so we need to check the
# values one by one.
for i in range(len(model['bounding_box'])):
bbox = m.bounding_box
assert_quantity_allclose(bbox[i], model['bounding_box'][i])
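# A concrete illustration of the bounding-box case (values taken from the
# Gaussian1D entry in FUNC_MODELS_1D above):
#   g = Gaussian1D(amplitude=3 * u.Jy, mean=2 * u.m, stddev=30 * u.cm)
#   g.bounding_box  # roughly (0.35 m, 3.65 m), i.e. the listed [0.35, 3.65] * u.m
# assert_quantity_allclose then compares the limits element by element, so the
# units on the parameters carry through to the box.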
@pytest.mark.skipif('not HAS_SCIPY')
@pytest.mark.filterwarnings(r'ignore:.*:RuntimeWarning')
@pytest.mark.filterwarnings(r'ignore:Model is linear in parameters.*')
@pytest.mark.filterwarnings(r'ignore:The fit may be unsuccessful.*')
@pytest.mark.parametrize('model', MODELS)
def test_models_fitting(model):
m = model['class'](**model['parameters'])
if len(model['evaluation'][0]) == 2:
x = np.linspace(1, 3, 100) * model['evaluation'][0][0].unit
y = np.exp(-x.value ** 2) * model['evaluation'][0][1].unit
args = [x, y]
else:
x = np.linspace(1, 3, 100) * model['evaluation'][0][0].unit
y = np.linspace(1, 3, 100) * model['evaluation'][0][1].unit
z = np.exp(-x.value**2 - y.value**2) * model['evaluation'][0][2].unit
args = [x, y, z]
# Test that the model fits even if it has units on parameters
fitter = LevMarLSQFitter()
m_new = fitter(m, *args)
# Check that units have been put back correctly
for param_name in m.param_names:
par_bef = getattr(m, param_name)
par_aft = getattr(m_new, param_name)
if par_bef.unit is None:
            # If the parameter had no unit before the fit, allow it to come
            # back either without a unit or with a radian unit (e.g. angles)
assert par_aft.unit is None or par_aft.unit is u.rad
else:
assert par_aft.unit.is_equivalent(par_bef.unit)
|
|
'''
Hackpack Configure
A script to configure your TwiML apps and Twilio phone numbers to use your
hackpack's Heroku app.
Usage:
Auto-configure using your local_settings.py:
python configure.py
Deploy to new Twilio number and App Sid:
python configure.py --new
Deploy to specific App Sid:
python configure.py --app APxxxxxxxxxxxxxx
Deploy to specific Twilio number:
python configure.py --number +15556667777
Deploy to custom domain:
python configure.py --domain example.com
'''
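# The Configure class below pulls its defaults from hackpack.local_settings.
# A minimal sketch of that module (illustrative placeholder values only):
#
#   TWILIO_ACCOUNT_SID = 'ACxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
#   TWILIO_AUTH_TOKEN = 'your-auth-token'
#   TWILIO_APP_SID = 'APxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
#   TWILIO_CALLER_ID = '+15556667777'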
from optparse import OptionParser
import subprocess
import logging
from twilio.rest import TwilioRestClient
from twilio import TwilioRestException
from hackpack import local_settings
class Configure(object):
def __init__(self, account_sid=local_settings.TWILIO_ACCOUNT_SID,
auth_token=local_settings.TWILIO_AUTH_TOKEN,
app_sid=local_settings.TWILIO_APP_SID,
phone_number=local_settings.TWILIO_CALLER_ID,
voice_url='/voice',
sms_url='/sms',
host=None):
self.account_sid = account_sid
self.auth_token = auth_token
self.app_sid = app_sid
self.phone_number = phone_number
self.host = host
self.voice_url = voice_url
self.sms_url = sms_url
self.friendly_phone_number = None
def start(self):
logging.info("Configuring your Twilio hackpack...")
logging.debug("Checking if credentials are set...")
if not self.account_sid:
raise ConfigurationError("ACCOUNT_SID is not set in " \
"local_settings.")
if not self.auth_token:
raise ConfigurationError("AUTH_TOKEN is not set in " \
"local_settings.")
logging.debug("Creating Twilio client...")
self.client = TwilioRestClient(self.account_sid, self.auth_token)
logging.debug("Checking if host is set.")
if not self.host:
logging.debug("Hostname is not set...")
self.host = self.getHerokuHostname()
# Check if urls are set.
logging.debug("Checking if all urls are set.")
if "http://" not in self.voice_url:
self.voice_url = self.host + self.voice_url
logging.debug("Setting voice_url with host: %s" % self.voice_url)
if "http://" not in self.sms_url:
self.sms_url = self.host + self.sms_url
logging.debug("Setting sms_url with host: %s" % self.sms_url)
if self.configureHackpack(self.voice_url, self.sms_url,
self.app_sid, self.phone_number):
# Configure Heroku environment variables.
self.setHerokuEnvironmentVariables(
TWILIO_ACCOUNT_SID=self.account_sid,
TWILIO_AUTH_TOKEN=self.auth_token,
TWILIO_APP_SID=self.app_sid,
TWILIO_CALLER_ID=self.phone_number)
# Ensure local environment variables are set.
self.printLocalEnvironmentVariableCommands(
TWILIO_ACCOUNT_SID=self.account_sid,
TWILIO_AUTH_TOKEN=self.auth_token,
TWILIO_APP_SID=self.app_sid,
TWILIO_CALLER_ID=self.phone_number)
logging.info("Hackpack is now configured. Call %s to test!"
% self.friendly_phone_number)
else:
logging.error("There was an error configuring your hackpack. " \
"Weak sauce.")
def configureHackpack(self, voice_url, sms_url, app_sid,
phone_number, *args):
# Check if app sid is configured and available.
if not app_sid:
app = self.createNewTwiMLApp(voice_url, sms_url)
else:
app = self.setAppRequestUrls(app_sid, voice_url, sms_url)
# Check if phone_number is set.
if not phone_number:
number = self.purchasePhoneNumber()
else:
number = self.retrievePhoneNumber(phone_number)
# Configure phone number to use App Sid.
logging.info("Setting %s to use application sid: %s" %
(number.friendly_name, app.sid))
try:
self.client.phone_numbers.update(number.sid,
voice_application_sid=app.sid,
sms_application_sid=app.sid)
logging.debug("Number set.")
except TwilioRestException, e:
raise ConfigurationError("An error occurred setting the " \
"application sid for %s: %s" % (number.friendly_name,
e))
# We're done!
if number:
return number
else:
raise ConfigurationError("An unknown error occurred configuring " \
"request urls for this hackpack.")
def createNewTwiMLApp(self, voice_url, sms_url):
logging.debug("Asking user to create new app sid...")
i = 0
while True:
i = i + 1
choice = raw_input("Your APP_SID is not configured in your " \
"local_settings. Create a new one? [y/n]").lower()
if choice == "y":
try:
logging.info("Creating new application...")
app = self.client.applications.create(voice_url=voice_url,
sms_url=sms_url,
friendly_name="Hackpack for Heroku and Flask")
break
except TwilioRestException, e:
raise ConfigurationError("Your Twilio app couldn't " \
"be created: %s" % e)
elif choice == "n" or i >= 3:
raise ConfigurationError("Your APP_SID setting must be " \
"set in local_settings.")
else:
logging.error("Please choose yes or no with a 'y' or 'n'")
if app:
logging.info("Application created: %s" % app.sid)
self.app_sid = app.sid
return app
else:
raise ConfigurationError("There was an unknown error " \
"creating your TwiML application.")
def setAppRequestUrls(self, app_sid, voice_url, sms_url):
logging.info("Setting request urls for application sid: %s" \
% app_sid)
try:
app = self.client.applications.update(app_sid, voice_url=voice_url,
sms_url=sms_url,
friendly_name="Hackpack for Heroku and Flask")
except TwilioRestException, e:
if "HTTP ERROR 404" in str(e):
raise ConfigurationError("This application sid was not " \
"found: %s" % app_sid)
else:
raise ConfigurationError("An error setting the request URLs " \
"occured: %s" % e)
if app:
logging.debug("Updated application sid: %s " % app.sid)
return app
else:
raise ConfigurationError("An unknown error occuring "\
"configuring request URLs for app sid.")
def retrievePhoneNumber(self, phone_number):
logging.debug("Retrieving phone number: %s" % phone_number)
try:
logging.debug("Getting sid for phone number: %s" % phone_number)
number = self.client.phone_numbers.list(
phone_number=phone_number)
except TwilioRestException, e:
raise ConfigurationError("An error setting the request URLs " \
"occured: %s" % e)
if number:
logging.debug("Retrieved sid: %s" % number[0].sid)
self.friendly_phone_number = number[0].friendly_name
return number[0]
else:
raise ConfigurationError("An unknown error occurred retrieving " \
"number: %s" % phone_number)
def purchasePhoneNumber(self):
logging.debug("Asking user to purchase phone number...")
i = 0
while True:
i = i + 1
# Find number to purchase
choice = raw_input("Your CALLER_ID is not configured in your " \
"local_settings. Purchase a new one? [y/n]").lower()
if choice == "y":
break
elif choice == "n" or i >= 3:
raise ConfigurationError("To configure this " \
"hackpack CALLER_ID must set in local_settings or " \
"a phone number must be purchased.")
else:
logging.error("Please choose yes or no with a 'y' or 'n'")
logging.debug("Confirming purchase...")
i = 0
while True:
i = i + 1
# Confirm phone number purchase.
choice = raw_input("Are you sure you want to purchase? " \
"Your Twilio account will be charged $1. [y/n]").lower()
if choice == "y":
try:
logging.debug("Purchasing phone number...")
number = self.client.phone_numbers.purchase(
area_code="646")
logging.debug("Phone number purchased: %s" %
number.friendly_name)
break
except TwilioRestException, e:
raise ConfigurationError("Your Twilio app couldn't " \
"be created: %s" % e)
elif choice == "n" or i >= 3:
raise ConfigurationError("To configure this " \
"hackpack CALLER_ID must set in local_settings or " \
"a phone number must be purchased.")
else:
logging.error("Please choose yes or no with a 'y' or 'n'")
# Return number or error out.
if number:
logging.debug("Returning phone number: %s " % number.friendly_name)
self.phone_number = number.phone_number
self.friendly_phone_number = number.friendly_name
return number
else:
raise ConfigurationError("There was an unknown error purchasing " \
"your phone number.")
def getHerokuHostname(self, git_config_path='./.git/config'):
logging.debug("Getting hostname from git configuration file: %s" \
% git_config_path)
# Load git configuration
try:
logging.debug("Loading git config...")
git_config = file(git_config_path).readlines()
except IOError, e:
raise ConfigurationError("Could not find .git config. Does it " \
"still exist? Failed path: %s" % e)
logging.debug("Finding Heroku remote in git configuration...")
subdomain = None
for line in git_config:
if "[email protected]" in line:
s = line.split(":")
subdomain = s[1].replace('.git', '')
logging.debug("Heroku remote found: %s" % subdomain)
if subdomain:
host = "http://%s.herokuapp.com" % subdomain.strip()
logging.debug("Returning full host: %s" % host)
return host
else:
raise ConfigurationError("Could not find Heroku remote in " \
"your .git config. Have you created the Heroku app?")
def printLocalEnvironmentVariableCommands(self, **kwargs):
logging.info("Copy/paste these commands to set your local " \
"environment to use this hackpack...")
print "\n"
for k, v in kwargs.iteritems():
if v:
print "export %s=%s" % (k, v)
print "\n"
def setHerokuEnvironmentVariables(self, **kwargs):
logging.info("Setting Heroku environment variables...")
envvars = ["%s=%s" % (k, v) for k, v in kwargs.iteritems() if v]
envvars.insert(0, "heroku")
envvars.insert(1, "config:add")
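        # For example, with TWILIO_ACCOUNT_SID='ACxxx' this shells out to:
        #   heroku config:add TWILIO_ACCOUNT_SID=ACxxx ...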
return subprocess.call(envvars)
class ConfigurationError(Exception):
    def __init__(self, message):
        Exception.__init__(self, message)
        logging.error(message)
# Logging configuration
logging.basicConfig(level=logging.INFO, format='%(message)s')
# Parser configuration
usage = "Twilio Hackpack Configurator - an easy way to configure " \
"configure your hackpack!\n%prog [options] arg1 arg2"
parser = OptionParser(usage=usage, version="Twilio Hackpack Configurator 1.0")
parser.add_option("-S", "--account_sid", default=None,
help="Use a specific Twilio ACCOUNT_SID.")
parser.add_option("-K", "--auth_token", default=None,
help="Use a specific Twilio AUTH_TOKEN.")
parser.add_option("-n", "--new", default=False, action="store_true",
help="Purchase new Twilio phone number and configure app to use " \
"your hackpack.")
parser.add_option("-N", "--new_app", default=False, action="store_true",
help="Create a new TwiML application sid to use for your " \
"hackpack.")
parser.add_option("-a", "--app_sid", default=None,
help="Configure specific AppSid to use your hackpack.")
parser.add_option("-#", "--phone-number", default=None,
help="Configure specific Twilio number to use your hackpack.")
parser.add_option("-v", "--voice_url", default=None,
help="Set the route for your Voice Request URL: (e.g. '/voice').")
parser.add_option("-s", "--sms_url", default=None,
help="Set the route for your SMS Request URL: (e.g. '/sms').")
parser.add_option("-d", "--domain", default=None,
help="Set a custom domain.")
parser.add_option("-D", "--debug", default=False,
action="store_true", help="Turn on debug output.")
def main():
(options, args) = parser.parse_args()
# Configurator configuration :)
configure = Configure()
# Options tree
if options.account_sid:
configure.account_sid = options.account_sid
if options.auth_token:
configure.auth_token = options.auth_token
if options.new:
configure.phone_number = None
if options.new_app:
configure.app_sid = None
if options.app_sid:
configure.app_sid = options.app_sid
if options.phone_number:
configure.phone_number = options.phone_number
if options.voice_url:
configure.voice_url = options.voice_url
if options.sms_url:
configure.sms_url = options.sms_url
if options.domain:
configure.host = options.domain
if options.debug:
logging.basicConfig(level=logging.DEBUG,
format='%(levelname)s - %(message)s')
configure.start()
if __name__ == "__main__":
main()
|
|
# Copyright 2011 OpenStack Foundation
# Copyright 2012 Justin Santa Barbara
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from lxml import etree
import mock
import mox
from oslo.config import cfg
from oslo.serialization import jsonutils
import webob
from nova.api.openstack.compute.contrib import security_groups as secgroups_v2
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import compute
from nova.compute import power_state
from nova import context as context_maker
import nova.db
from nova import exception
from nova import objects
from nova.objects import instance as instance_obj
from nova import quota
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_instance
from nova.tests import utils
CONF = cfg.CONF
FAKE_UUID1 = 'a47ae74e-ab08-447f-8eee-ffd43fc46c16'
FAKE_UUID2 = 'c6e6430a-6563-4efa-9542-5e93c9e97d18'
class AttrDict(dict):
def __getattr__(self, k):
return self[k]
def security_group_template(**kwargs):
sg = kwargs.copy()
sg.setdefault('tenant_id', '123')
sg.setdefault('name', 'test')
sg.setdefault('description', 'test-description')
return sg
def security_group_db(security_group, id=None):
attrs = security_group.copy()
if 'tenant_id' in attrs:
attrs['project_id'] = attrs.pop('tenant_id')
if id is not None:
attrs['id'] = id
attrs.setdefault('rules', [])
attrs.setdefault('instances', [])
return AttrDict(attrs)
def security_group_rule_template(**kwargs):
rule = kwargs.copy()
rule.setdefault('ip_protocol', 'tcp')
rule.setdefault('from_port', 22)
rule.setdefault('to_port', 22)
rule.setdefault('parent_group_id', 2)
return rule
def security_group_rule_db(rule, id=None):
attrs = rule.copy()
if 'ip_protocol' in attrs:
attrs['protocol'] = attrs.pop('ip_protocol')
return AttrDict(attrs)
def return_server(context, server_id,
columns_to_join=None, use_slave=False):
return fake_instance.fake_db_instance(
**{'id': int(server_id),
'power_state': 0x01,
'host': "localhost",
'uuid': FAKE_UUID1,
'name': 'asdf'})
def return_server_by_uuid(context, server_uuid,
columns_to_join=None,
use_slave=False):
return fake_instance.fake_db_instance(
**{'id': 1,
'power_state': 0x01,
'host': "localhost",
'uuid': server_uuid,
'name': 'asdf'})
def return_non_running_server(context, server_id, columns_to_join=None):
return fake_instance.fake_db_instance(
**{'id': server_id, 'power_state': power_state.SHUTDOWN,
'uuid': FAKE_UUID1, 'host': "localhost", 'name': 'asdf'})
def return_security_group_by_name(context, project_id, group_name):
return {'id': 1, 'name': group_name,
"instances": [{'id': 1, 'uuid': FAKE_UUID1}]}
def return_security_group_without_instances(context, project_id, group_name):
return {'id': 1, 'name': group_name}
def return_server_nonexistent(context, server_id, columns_to_join=None):
raise exception.InstanceNotFound(instance_id=server_id)
# NOTE(oomichi): v2.1 API does not support security group management (create/
# update/delete a security group). We don't need to test this class against
# v2.1 API.
class TestSecurityGroups(test.TestCase):
def setUp(self):
super(TestSecurityGroups, self).setUp()
self.controller = secgroups_v2.SecurityGroupController()
self.server_controller = (
secgroups_v2.ServerSecurityGroupController())
self.manager = secgroups_v2.SecurityGroupActionController()
        # fake_id has to be set here: a derived class that wants to install a
        # different 'security_group_api' must do so before this setUp method
        # runs, since fake_id depends on that API's id format.
if self.controller.security_group_api.id_is_uuid:
self.fake_id = '11111111-1111-1111-1111-111111111111'
else:
self.fake_id = '11111111'
def _assert_no_security_groups_reserved(self, context):
"""Check that no reservations are leaked during tests."""
result = quota.QUOTAS.get_project_quotas(context, context.project_id)
self.assertEqual(result['security_groups']['reserved'], 0)
def _assert_security_groups_in_use(self, project_id, user_id, in_use):
context = context_maker.get_admin_context()
result = quota.QUOTAS.get_user_quotas(context, project_id, user_id)
self.assertEqual(result['security_groups']['in_use'], in_use)
def test_create_security_group(self):
sg = security_group_template()
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
res_dict = self.controller.create(req, {'security_group': sg})
self.assertEqual(res_dict['security_group']['name'], 'test')
self.assertEqual(res_dict['security_group']['description'],
'test-description')
def test_create_security_group_with_no_name(self):
sg = security_group_template()
del sg['name']
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, sg)
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_with_no_description(self):
sg = security_group_template()
del sg['description']
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group': sg})
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_with_empty_description(self):
sg = security_group_template()
sg['description'] = ""
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
try:
self.controller.create(req, {'security_group': sg})
self.fail('Should have raised BadRequest exception')
except webob.exc.HTTPBadRequest as exc:
self.assertEqual('description has a minimum character requirement'
' of 1.', exc.explanation)
        except exception.InvalidInput:
            self.fail('Should have raised BadRequest exception instead of '
                      'InvalidInput')
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_with_blank_name(self):
sg = security_group_template(name='')
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group': sg})
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_with_whitespace_name(self):
sg = security_group_template(name=' ')
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group': sg})
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_with_blank_description(self):
sg = security_group_template(description='')
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group': sg})
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_with_whitespace_description(self):
sg = security_group_template(description=' ')
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group': sg})
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_with_duplicate_name(self):
sg = security_group_template()
# FIXME: Stub out _get instead of creating twice
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.controller.create(req, {'security_group': sg})
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group': sg})
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_with_no_body(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, None)
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_with_no_security_group(self):
body = {'no-securityGroup': None}
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, body)
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_above_255_characters_name(self):
sg = security_group_template(name='1234567890' * 26)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group': sg})
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_above_255_characters_description(self):
sg = security_group_template(description='1234567890' * 26)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group': sg})
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_non_string_name(self):
sg = security_group_template(name=12)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group': sg})
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_non_string_description(self):
sg = security_group_template(description=12)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group': sg})
self._assert_no_security_groups_reserved(req.environ['nova.context'])
def test_create_security_group_quota_limit(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
for num in range(1, CONF.quota_security_groups):
name = 'test%s' % num
sg = security_group_template(name=name)
res_dict = self.controller.create(req, {'security_group': sg})
self.assertEqual(res_dict['security_group']['name'], name)
sg = security_group_template()
self.assertRaises(webob.exc.HTTPForbidden, self.controller.create,
req, {'security_group': sg})
def test_get_security_group_list(self):
groups = []
for i, name in enumerate(['default', 'test']):
sg = security_group_template(id=i + 1,
name=name,
description=name + '-desc',
rules=[])
groups.append(sg)
expected = {'security_groups': groups}
def return_security_groups(context, project_id):
return [security_group_db(sg) for sg in groups]
self.stubs.Set(nova.db, 'security_group_get_by_project',
return_security_groups)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
res_dict = self.controller.index(req)
self.assertEqual(res_dict, expected)
def test_get_security_group_list_missing_group_id_rule(self):
groups = []
rule1 = security_group_rule_template(cidr='10.2.3.124/24',
parent_group_id=1,
group_id={}, id=88,
protocol='TCP')
rule2 = security_group_rule_template(cidr='10.2.3.125/24',
parent_group_id=1,
id=99, protocol=88,
group_id='HAS_BEEN_DELETED')
sg = security_group_template(id=1,
name='test',
description='test-desc',
rules=[rule1, rule2])
groups.append(sg)
        # An expected rule has to be built here because the API response uses
        # different attribute names than the request. For example:
        # "cidr": "0.0.0.0/0" -> "ip_range": {"cidr": "0.0.0.0/0"}
expected_rule = security_group_rule_template(
ip_range={'cidr': '10.2.3.124/24'}, parent_group_id=1,
group={}, id=88, ip_protocol='TCP')
expected = security_group_template(id=1,
name='test',
description='test-desc',
rules=[expected_rule])
expected = {'security_groups': [expected]}
def return_security_groups(context, project, search_opts):
return [security_group_db(sg) for sg in groups]
self.stubs.Set(self.controller.security_group_api, 'list',
return_security_groups)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
res_dict = self.controller.index(req)
self.assertEqual(res_dict, expected)
def test_get_security_group_list_all_tenants(self):
all_groups = []
tenant_groups = []
for i, name in enumerate(['default', 'test']):
sg = security_group_template(id=i + 1,
name=name,
description=name + '-desc',
rules=[])
all_groups.append(sg)
if name == 'default':
tenant_groups.append(sg)
all = {'security_groups': all_groups}
tenant_specific = {'security_groups': tenant_groups}
def return_all_security_groups(context):
return [security_group_db(sg) for sg in all_groups]
self.stubs.Set(nova.db, 'security_group_get_all',
return_all_security_groups)
def return_tenant_security_groups(context, project_id):
return [security_group_db(sg) for sg in tenant_groups]
self.stubs.Set(nova.db, 'security_group_get_by_project',
return_tenant_security_groups)
path = '/v2/fake/os-security-groups'
req = fakes.HTTPRequest.blank(path, use_admin_context=True)
res_dict = self.controller.index(req)
self.assertEqual(res_dict, tenant_specific)
req = fakes.HTTPRequest.blank('%s?all_tenants=1' % path,
use_admin_context=True)
res_dict = self.controller.index(req)
self.assertEqual(res_dict, all)
def test_get_security_group_by_instance(self):
groups = []
for i, name in enumerate(['default', 'test']):
sg = security_group_template(id=i + 1,
name=name,
description=name + '-desc',
rules=[])
groups.append(sg)
expected = {'security_groups': groups}
def return_instance(context, server_id,
columns_to_join=None, use_slave=False):
self.assertEqual(server_id, FAKE_UUID1)
return return_server_by_uuid(context, server_id)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_instance)
def return_security_groups(context, instance_uuid):
self.assertEqual(instance_uuid, FAKE_UUID1)
return [security_group_db(sg) for sg in groups]
self.stubs.Set(nova.db, 'security_group_get_by_instance',
return_security_groups)
req = fakes.HTTPRequest.blank('/v2/%s/servers/%s/os-security-groups' %
('fake', FAKE_UUID1))
res_dict = self.server_controller.index(req, FAKE_UUID1)
self.assertEqual(res_dict, expected)
@mock.patch('nova.db.instance_get_by_uuid')
@mock.patch('nova.db.security_group_get_by_instance', return_value=[])
def test_get_security_group_empty_for_instance(self, mock_sec_group,
mock_db_get_ins):
expected = {'security_groups': []}
def return_instance(context, server_id,
columns_to_join=None, use_slave=False):
self.assertEqual(server_id, FAKE_UUID1)
return return_server_by_uuid(context, server_id)
mock_db_get_ins.side_effect = return_instance
req = fakes.HTTPRequest.blank('/v2/%s/servers/%s/os-security-groups' %
('fake', FAKE_UUID1))
res_dict = self.server_controller.index(req, FAKE_UUID1)
self.assertEqual(expected, res_dict)
mock_sec_group.assert_called_once_with(req.environ['nova.context'],
FAKE_UUID1)
def test_get_security_group_by_instance_non_existing(self):
self.stubs.Set(nova.db, 'instance_get', return_server_nonexistent)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_server_nonexistent)
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/os-security-groups')
self.assertRaises(webob.exc.HTTPNotFound,
self.server_controller.index, req, '1')
def test_get_security_group_by_instance_invalid_id(self):
req = fakes.HTTPRequest.blank(
'/v2/fake/servers/invalid/os-security-groups')
self.assertRaises(webob.exc.HTTPNotFound,
self.server_controller.index, req, 'invalid')
def test_get_security_group_by_id(self):
sg = security_group_template(id=2, rules=[])
def return_security_group(context, group_id):
self.assertEqual(sg['id'], group_id)
return security_group_db(sg)
self.stubs.Set(nova.db, 'security_group_get',
return_security_group)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/2')
res_dict = self.controller.show(req, '2')
expected = {'security_group': sg}
self.assertEqual(res_dict, expected)
def test_get_security_group_by_invalid_id(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/invalid')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
req, 'invalid')
def test_get_security_group_by_non_existing_id(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/%s' %
self.fake_id)
self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
req, self.fake_id)
def test_update_security_group(self):
sg = security_group_template(id=2, rules=[])
sg_update = security_group_template(id=2, rules=[],
name='update_name', description='update_desc')
def return_security_group(context, group_id):
self.assertEqual(sg['id'], group_id)
return security_group_db(sg)
def return_update_security_group(context, group_id, values,
columns_to_join=None):
self.assertEqual(sg_update['id'], group_id)
self.assertEqual(sg_update['name'], values['name'])
self.assertEqual(sg_update['description'], values['description'])
return security_group_db(sg_update)
self.stubs.Set(nova.db, 'security_group_update',
return_update_security_group)
self.stubs.Set(nova.db, 'security_group_get',
return_security_group)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/2')
res_dict = self.controller.update(req, '2',
{'security_group': sg_update})
expected = {'security_group': sg_update}
self.assertEqual(res_dict, expected)
def test_update_security_group_name_to_default(self):
sg = security_group_template(id=2, rules=[], name='default')
def return_security_group(context, group_id):
self.assertEqual(sg['id'], group_id)
return security_group_db(sg)
self.stubs.Set(nova.db, 'security_group_get',
return_security_group)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/2')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
req, '2', {'security_group': sg})
def test_update_default_security_group_fail(self):
sg = security_group_template()
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/1')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
req, '1', {'security_group': sg})
def test_delete_security_group_by_id(self):
sg = security_group_template(id=1, project_id='fake_project',
user_id='fake_user', rules=[])
self.called = False
def security_group_destroy(context, id):
self.called = True
def return_security_group(context, group_id):
self.assertEqual(sg['id'], group_id)
return security_group_db(sg)
self.stubs.Set(nova.db, 'security_group_destroy',
security_group_destroy)
self.stubs.Set(nova.db, 'security_group_get',
return_security_group)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/1')
self.controller.delete(req, '1')
self.assertTrue(self.called)
def test_delete_security_group_by_admin(self):
sg = security_group_template(id=2, rules=[])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups')
self.controller.create(req, {'security_group': sg})
context = req.environ['nova.context']
# Ensure quota usage for security group is correct.
self._assert_security_groups_in_use(context.project_id,
context.user_id, 2)
# Delete the security group by admin.
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/2',
use_admin_context=True)
self.controller.delete(req, '2')
# Ensure quota for security group in use is released.
self._assert_security_groups_in_use(context.project_id,
context.user_id, 1)
def test_delete_security_group_by_invalid_id(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/invalid')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
req, 'invalid')
def test_delete_security_group_by_non_existing_id(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/%s'
% self.fake_id)
self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
req, self.fake_id)
def test_delete_security_group_in_use(self):
sg = security_group_template(id=1, rules=[])
def security_group_in_use(context, id):
return True
def return_security_group(context, group_id):
self.assertEqual(sg['id'], group_id)
return security_group_db(sg)
self.stubs.Set(nova.db, 'security_group_in_use',
security_group_in_use)
self.stubs.Set(nova.db, 'security_group_get',
return_security_group)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-groups/1')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
req, '1')
def test_associate_by_non_existing_security_group_name(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
self.assertEqual(return_server(None, '1'),
nova.db.instance_get(None, '1'))
body = dict(addSecurityGroup=dict(name='non-existing'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPNotFound,
self.manager._addSecurityGroup, req, '1', body)
def test_associate_by_invalid_server_id(self):
body = dict(addSecurityGroup=dict(name='test'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/invalid/action')
self.assertRaises(webob.exc.HTTPNotFound,
self.manager._addSecurityGroup, req, 'invalid', body)
def test_associate_without_body(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
body = dict(addSecurityGroup=None)
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._addSecurityGroup, req, '1', body)
def test_associate_no_security_group_name(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
body = dict(addSecurityGroup=dict())
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._addSecurityGroup, req, '1', body)
def test_associate_security_group_name_with_whitespaces(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
body = dict(addSecurityGroup=dict(name=" "))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._addSecurityGroup, req, '1', body)
def test_associate_non_existing_instance(self):
self.stubs.Set(nova.db, 'instance_get', return_server_nonexistent)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_server_nonexistent)
body = dict(addSecurityGroup=dict(name="test"))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPNotFound,
self.manager._addSecurityGroup, req, '1', body)
def test_associate_non_running_instance(self):
self.stubs.Set(nova.db, 'instance_get', return_non_running_server)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_non_running_server)
self.stubs.Set(nova.db, 'security_group_get_by_name',
return_security_group_without_instances)
body = dict(addSecurityGroup=dict(name="test"))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.manager._addSecurityGroup(req, '1', body)
def test_associate_already_associated_security_group_to_instance(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_server_by_uuid)
self.stubs.Set(nova.db, 'security_group_get_by_name',
return_security_group_by_name)
body = dict(addSecurityGroup=dict(name="test"))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._addSecurityGroup, req, '1', body)
def test_associate(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_server_by_uuid)
self.mox.StubOutWithMock(nova.db, 'instance_add_security_group')
nova.db.instance_add_security_group(mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg())
self.stubs.Set(nova.db, 'security_group_get_by_name',
return_security_group_without_instances)
self.mox.ReplayAll()
body = dict(addSecurityGroup=dict(name="test"))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.manager._addSecurityGroup(req, '1', body)
def test_disassociate_by_non_existing_security_group_name(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
self.assertEqual(return_server(None, '1'),
nova.db.instance_get(None, '1'))
body = dict(removeSecurityGroup=dict(name='non-existing'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPNotFound,
self.manager._removeSecurityGroup, req, '1', body)
def test_disassociate_by_invalid_server_id(self):
self.stubs.Set(nova.db, 'security_group_get_by_name',
return_security_group_by_name)
body = dict(removeSecurityGroup=dict(name='test'))
req = fakes.HTTPRequest.blank('/v2/fake/servers/invalid/action')
self.assertRaises(webob.exc.HTTPNotFound,
self.manager._removeSecurityGroup, req, 'invalid',
body)
def test_disassociate_without_body(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
body = dict(removeSecurityGroup=None)
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._removeSecurityGroup, req, '1', body)
def test_disassociate_no_security_group_name(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
body = dict(removeSecurityGroup=dict())
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._removeSecurityGroup, req, '1', body)
def test_disassociate_security_group_name_with_whitespaces(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
body = dict(removeSecurityGroup=dict(name=" "))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._removeSecurityGroup, req, '1', body)
def test_disassociate_non_existing_instance(self):
self.stubs.Set(nova.db, 'instance_get', return_server_nonexistent)
self.stubs.Set(nova.db, 'security_group_get_by_name',
return_security_group_by_name)
body = dict(removeSecurityGroup=dict(name="test"))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPNotFound,
self.manager._removeSecurityGroup, req, '1', body)
def test_disassociate_non_running_instance(self):
self.stubs.Set(nova.db, 'instance_get', return_non_running_server)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_non_running_server)
self.stubs.Set(nova.db, 'security_group_get_by_name',
return_security_group_by_name)
body = dict(removeSecurityGroup=dict(name="test"))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.manager._removeSecurityGroup(req, '1', body)
def test_disassociate_already_associated_security_group_to_instance(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_server_by_uuid)
self.stubs.Set(nova.db, 'security_group_get_by_name',
return_security_group_without_instances)
body = dict(removeSecurityGroup=dict(name="test"))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.assertRaises(webob.exc.HTTPBadRequest,
self.manager._removeSecurityGroup, req, '1', body)
def test_disassociate(self):
self.stubs.Set(nova.db, 'instance_get', return_server)
self.stubs.Set(nova.db, 'instance_get_by_uuid',
return_server_by_uuid)
self.mox.StubOutWithMock(nova.db, 'instance_remove_security_group')
nova.db.instance_remove_security_group(mox.IgnoreArg(),
mox.IgnoreArg(),
mox.IgnoreArg())
self.stubs.Set(nova.db, 'security_group_get_by_name',
return_security_group_by_name)
self.mox.ReplayAll()
body = dict(removeSecurityGroup=dict(name="test"))
req = fakes.HTTPRequest.blank('/v2/fake/servers/1/action')
self.manager._removeSecurityGroup(req, '1', body)
# NOTE(oomichi): v2.1 API does not support security group management (create/
# update/delete a security group). We don't need to test this class against
# v2.1 API.
class TestSecurityGroupRules(test.TestCase):
def setUp(self):
super(TestSecurityGroupRules, self).setUp()
self.controller = secgroups_v2.SecurityGroupController()
if self.controller.security_group_api.id_is_uuid:
id1 = '11111111-1111-1111-1111-111111111111'
id2 = '22222222-2222-2222-2222-222222222222'
self.invalid_id = '33333333-3333-3333-3333-333333333333'
else:
id1 = 1
id2 = 2
self.invalid_id = '33333333'
self.sg1 = security_group_template(id=id1)
self.sg2 = security_group_template(
id=id2, name='authorize_revoke',
description='authorize-revoke testing')
db1 = security_group_db(self.sg1)
db2 = security_group_db(self.sg2)
def return_security_group(context, group_id, columns_to_join=None):
if group_id == db1['id']:
return db1
if group_id == db2['id']:
return db2
raise exception.NotFound()
self.stubs.Set(nova.db, 'security_group_get',
return_security_group)
self.parent_security_group = db2
self.controller = secgroups_v2.SecurityGroupRulesController()
def test_create_by_cidr(self):
rule = security_group_rule_template(cidr='10.2.3.124/24',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
self.assertNotEqual(security_group_rule['id'], 0)
self.assertEqual(security_group_rule['parent_group_id'],
self.sg2['id'])
self.assertEqual(security_group_rule['ip_range']['cidr'],
"10.2.3.124/24")
def test_create_by_group_id(self):
rule = security_group_rule_template(group_id=self.sg1['id'],
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
self.assertNotEqual(security_group_rule['id'], 0)
self.assertEqual(security_group_rule['parent_group_id'],
self.sg2['id'])
def test_create_by_same_group_id(self):
rule1 = security_group_rule_template(group_id=self.sg1['id'],
from_port=80, to_port=80,
parent_group_id=self.sg2['id'])
self.parent_security_group['rules'] = [security_group_rule_db(rule1)]
rule2 = security_group_rule_template(group_id=self.sg1['id'],
from_port=81, to_port=81,
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule2})
security_group_rule = res_dict['security_group_rule']
self.assertNotEqual(security_group_rule['id'], 0)
self.assertEqual(security_group_rule['parent_group_id'],
self.sg2['id'])
self.assertEqual(security_group_rule['from_port'], 81)
self.assertEqual(security_group_rule['to_port'], 81)
def test_create_none_value_from_to_port(self):
rule = {'parent_group_id': self.sg1['id'],
'group_id': self.sg1['id']}
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
self.assertIsNone(security_group_rule['from_port'])
self.assertIsNone(security_group_rule['to_port'])
self.assertEqual(security_group_rule['group']['name'], 'test')
self.assertEqual(security_group_rule['parent_group_id'],
self.sg1['id'])
def test_create_none_value_from_to_port_icmp(self):
rule = {'parent_group_id': self.sg1['id'],
'group_id': self.sg1['id'],
'ip_protocol': 'ICMP'}
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
self.assertEqual(security_group_rule['ip_protocol'], 'ICMP')
self.assertEqual(security_group_rule['from_port'], -1)
self.assertEqual(security_group_rule['to_port'], -1)
self.assertEqual(security_group_rule['group']['name'], 'test')
self.assertEqual(security_group_rule['parent_group_id'],
self.sg1['id'])
def test_create_none_value_from_to_port_tcp(self):
rule = {'parent_group_id': self.sg1['id'],
'group_id': self.sg1['id'],
'ip_protocol': 'TCP'}
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
self.assertEqual(security_group_rule['ip_protocol'], 'TCP')
self.assertEqual(security_group_rule['from_port'], 1)
self.assertEqual(security_group_rule['to_port'], 65535)
self.assertEqual(security_group_rule['group']['name'], 'test')
self.assertEqual(security_group_rule['parent_group_id'],
self.sg1['id'])
def test_create_by_invalid_cidr_json(self):
rule = security_group_rule_template(
ip_protocol="tcp",
from_port=22,
to_port=22,
parent_group_id=self.sg2['id'],
cidr="10.2.3.124/2433")
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_by_invalid_tcp_port_json(self):
rule = security_group_rule_template(
ip_protocol="tcp",
from_port=75534,
to_port=22,
parent_group_id=self.sg2['id'],
cidr="10.2.3.124/24")
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_by_invalid_icmp_port_json(self):
rule = security_group_rule_template(
ip_protocol="icmp",
from_port=1,
to_port=256,
parent_group_id=self.sg2['id'],
cidr="10.2.3.124/24")
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_add_existing_rules_by_cidr(self):
rule = security_group_rule_template(cidr='10.0.0.0/24',
parent_group_id=self.sg2['id'])
self.parent_security_group['rules'] = [security_group_rule_db(rule)]
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_add_existing_rules_by_group_id(self):
rule = security_group_rule_template(group_id=1)
self.parent_security_group['rules'] = [security_group_rule_db(rule)]
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_no_body(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, None)
def test_create_with_no_security_group_rule_in_body(self):
rules = {'test': 'test'}
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, req, rules)
def test_create_with_invalid_parent_group_id(self):
rule = security_group_rule_template(parent_group_id='invalid')
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_non_existing_parent_group_id(self):
rule = security_group_rule_template(group_id=None,
parent_group_id=self.invalid_id)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPNotFound, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_non_existing_group_id(self):
rule = security_group_rule_template(group_id='invalid',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_invalid_protocol(self):
rule = security_group_rule_template(ip_protocol='invalid-protocol',
cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_no_protocol(self):
rule = security_group_rule_template(cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
del rule['ip_protocol']
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_invalid_from_port(self):
rule = security_group_rule_template(from_port='666666',
cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_invalid_to_port(self):
rule = security_group_rule_template(to_port='666666',
cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_non_numerical_from_port(self):
rule = security_group_rule_template(from_port='invalid',
cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_non_numerical_to_port(self):
rule = security_group_rule_template(to_port='invalid',
cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_no_from_port(self):
rule = security_group_rule_template(cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
del rule['from_port']
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_no_to_port(self):
rule = security_group_rule_template(cidr='10.2.2.0/24',
parent_group_id=self.sg2['id'])
del rule['to_port']
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_invalid_cidr(self):
rule = security_group_rule_template(cidr='10.2.2222.0/24',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_no_cidr_group(self):
rule = security_group_rule_template(parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
self.assertNotEqual(security_group_rule['id'], 0)
self.assertEqual(security_group_rule['parent_group_id'],
self.parent_security_group['id'])
self.assertEqual(security_group_rule['ip_range']['cidr'],
"0.0.0.0/0")
def test_create_with_invalid_group_id(self):
rule = security_group_rule_template(group_id='invalid',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_empty_group_id(self):
rule = security_group_rule_template(group_id='',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_nonexist_group_id(self):
rule = security_group_rule_template(group_id=self.invalid_id,
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def test_create_with_same_group_parent_id_and_group_id(self):
rule = security_group_rule_template(group_id=self.sg1['id'],
parent_group_id=self.sg1['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
self.assertNotEqual(security_group_rule['id'], 0)
self.assertEqual(security_group_rule['parent_group_id'],
self.sg1['id'])
self.assertEqual(security_group_rule['group']['name'],
self.sg1['name'])
def _test_create_with_no_ports_and_no_group(self, proto):
rule = {'ip_protocol': proto, 'parent_group_id': self.sg2['id']}
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
def _test_create_with_no_ports(self, proto):
rule = {'ip_protocol': proto, 'parent_group_id': self.sg2['id'],
'group_id': self.sg1['id']}
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
expected_rule = {
'from_port': 1, 'group': {'tenant_id': '123', 'name': 'test'},
'ip_protocol': proto, 'to_port': 65535, 'parent_group_id':
self.sg2['id'], 'ip_range': {}, 'id': security_group_rule['id']
}
if proto == 'icmp':
expected_rule['to_port'] = -1
expected_rule['from_port'] = -1
self.assertEqual(expected_rule, security_group_rule)
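    # Note (added for clarity, inferred from the expected_rule dicts above):
    # when no ports are supplied, tcp/udp rules default to the full range
    # (from_port=1, to_port=65535), while icmp rules get -1 for both fields,
    # which conventionally means "all ICMP types/codes".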
def test_create_with_no_ports_icmp(self):
self._test_create_with_no_ports_and_no_group('icmp')
self._test_create_with_no_ports('icmp')
def test_create_with_no_ports_tcp(self):
self._test_create_with_no_ports_and_no_group('tcp')
self._test_create_with_no_ports('tcp')
def test_create_with_no_ports_udp(self):
self._test_create_with_no_ports_and_no_group('udp')
self._test_create_with_no_ports('udp')
def _test_create_with_ports(self, proto, from_port, to_port):
rule = {
'ip_protocol': proto, 'from_port': from_port, 'to_port': to_port,
'parent_group_id': self.sg2['id'], 'group_id': self.sg1['id']
}
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
expected_rule = {
'from_port': from_port,
'group': {'tenant_id': '123', 'name': 'test'},
'ip_protocol': proto, 'to_port': to_port, 'parent_group_id':
self.sg2['id'], 'ip_range': {}, 'id': security_group_rule['id']
}
self.assertEqual(proto, security_group_rule['ip_protocol'])
self.assertEqual(from_port, security_group_rule['from_port'])
self.assertEqual(to_port, security_group_rule['to_port'])
self.assertEqual(expected_rule, security_group_rule)
def test_create_with_ports_icmp(self):
self._test_create_with_ports('icmp', 0, 1)
self._test_create_with_ports('icmp', 0, 0)
self._test_create_with_ports('icmp', 1, 0)
def test_create_with_ports_tcp(self):
self._test_create_with_ports('tcp', 1, 1)
self._test_create_with_ports('tcp', 1, 65535)
self._test_create_with_ports('tcp', 65535, 65535)
def test_create_with_ports_udp(self):
self._test_create_with_ports('udp', 1, 1)
self._test_create_with_ports('udp', 1, 65535)
self._test_create_with_ports('udp', 65535, 65535)
def test_delete(self):
rule = security_group_rule_template(id=self.sg2['id'],
parent_group_id=self.sg2['id'])
def security_group_rule_get(context, id):
return security_group_rule_db(rule)
def security_group_rule_destroy(context, id):
pass
self.stubs.Set(nova.db, 'security_group_rule_get',
security_group_rule_get)
self.stubs.Set(nova.db, 'security_group_rule_destroy',
security_group_rule_destroy)
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules/%s'
% self.sg2['id'])
self.controller.delete(req, self.sg2['id'])
def test_delete_invalid_rule_id(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules' +
'/invalid')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.delete,
req, 'invalid')
def test_delete_non_existing_rule_id(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules/%s'
% self.invalid_id)
self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
req, self.invalid_id)
def test_create_rule_quota_limit(self):
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
for num in range(100, 100 + CONF.quota_security_group_rules):
rule = {
'ip_protocol': 'tcp', 'from_port': num,
'to_port': num, 'parent_group_id': self.sg2['id'],
'group_id': self.sg1['id']
}
self.controller.create(req, {'security_group_rule': rule})
rule = {
'ip_protocol': 'tcp', 'from_port': '121', 'to_port': '121',
'parent_group_id': self.sg2['id'], 'group_id': self.sg1['id']
}
self.assertRaises(webob.exc.HTTPForbidden, self.controller.create,
req, {'security_group_rule': rule})
def test_create_rule_cidr_allow_all(self):
rule = security_group_rule_template(cidr='0.0.0.0/0',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
self.assertNotEqual(security_group_rule['id'], 0)
self.assertEqual(security_group_rule['parent_group_id'],
self.parent_security_group['id'])
self.assertEqual(security_group_rule['ip_range']['cidr'],
"0.0.0.0/0")
def test_create_rule_cidr_ipv6_allow_all(self):
rule = security_group_rule_template(cidr='::/0',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
self.assertNotEqual(security_group_rule['id'], 0)
self.assertEqual(security_group_rule['parent_group_id'],
self.parent_security_group['id'])
self.assertEqual(security_group_rule['ip_range']['cidr'],
"::/0")
def test_create_rule_cidr_allow_some(self):
rule = security_group_rule_template(cidr='15.0.0.0/8',
parent_group_id=self.sg2['id'])
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
res_dict = self.controller.create(req, {'security_group_rule': rule})
security_group_rule = res_dict['security_group_rule']
self.assertNotEqual(security_group_rule['id'], 0)
self.assertEqual(security_group_rule['parent_group_id'],
self.parent_security_group['id'])
self.assertEqual(security_group_rule['ip_range']['cidr'],
"15.0.0.0/8")
def test_create_rule_cidr_bad_netmask(self):
rule = security_group_rule_template(cidr='15.0.0.0/0')
req = fakes.HTTPRequest.blank('/v2/fake/os-security-group-rules')
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
req, {'security_group_rule': rule})
class TestSecurityGroupRulesXMLDeserializer(test.TestCase):
def setUp(self):
super(TestSecurityGroupRulesXMLDeserializer, self).setUp()
self.deserializer = secgroups_v2.SecurityGroupRulesXMLDeserializer()
def test_create_request(self):
serial_request = """
<security_group_rule>
<parent_group_id>12</parent_group_id>
<from_port>22</from_port>
<to_port>22</to_port>
<group_id></group_id>
<ip_protocol>tcp</ip_protocol>
<cidr>10.0.0.0/24</cidr>
</security_group_rule>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"security_group_rule": {
"parent_group_id": "12",
"from_port": "22",
"to_port": "22",
"ip_protocol": "tcp",
"group_id": "",
"cidr": "10.0.0.0/24",
},
}
self.assertEqual(request['body'], expected)
def test_create_no_protocol_request(self):
serial_request = """
<security_group_rule>
<parent_group_id>12</parent_group_id>
<from_port>22</from_port>
<to_port>22</to_port>
<group_id></group_id>
<cidr>10.0.0.0/24</cidr>
</security_group_rule>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"security_group_rule": {
"parent_group_id": "12",
"from_port": "22",
"to_port": "22",
"group_id": "",
"cidr": "10.0.0.0/24",
},
}
self.assertEqual(request['body'], expected)
def test_corrupt_xml(self):
"""Should throw a 400 error on corrupt xml."""
self.assertRaises(
exception.MalformedRequestBody,
self.deserializer.deserialize,
utils.killer_xml_body())
class TestSecurityGroupXMLDeserializer(test.TestCase):
def setUp(self):
super(TestSecurityGroupXMLDeserializer, self).setUp()
self.deserializer = secgroups_v2.SecurityGroupXMLDeserializer()
def test_create_request(self):
serial_request = """
<security_group name="test">
<description>test</description>
</security_group>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"security_group": {
"name": "test",
"description": "test",
},
}
self.assertEqual(request['body'], expected)
def test_create_no_description_request(self):
serial_request = """
<security_group name="test">
</security_group>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"security_group": {
"name": "test",
},
}
self.assertEqual(request['body'], expected)
def test_create_no_name_request(self):
serial_request = """
<security_group>
<description>test</description>
</security_group>"""
request = self.deserializer.deserialize(serial_request)
expected = {
"security_group": {
"description": "test",
},
}
self.assertEqual(request['body'], expected)
def test_corrupt_xml(self):
"""Should throw a 400 error on corrupt xml."""
self.assertRaises(
exception.MalformedRequestBody,
self.deserializer.deserialize,
utils.killer_xml_body())
class TestSecurityGroupXMLSerializer(test.TestCase):
def setUp(self):
super(TestSecurityGroupXMLSerializer, self).setUp()
self.namespace = wsgi.XMLNS_V11
self.rule_serializer = secgroups_v2.SecurityGroupRuleTemplate()
self.index_serializer = secgroups_v2.SecurityGroupsTemplate()
self.default_serializer = secgroups_v2.SecurityGroupTemplate()
def _tag(self, elem):
tagname = elem.tag
self.assertEqual(tagname[0], '{')
tmp = tagname.partition('}')
namespace = tmp[0][1:]
self.assertEqual(namespace, self.namespace)
return tmp[2]
def _verify_security_group_rule(self, raw_rule, tree):
self.assertEqual(raw_rule['id'], tree.get('id'))
self.assertEqual(raw_rule['parent_group_id'],
tree.get('parent_group_id'))
seen = set()
expected = set(['ip_protocol', 'from_port', 'to_port',
'group', 'group/name', 'group/tenant_id',
'ip_range', 'ip_range/cidr'])
for child in tree:
child_tag = self._tag(child)
self.assertIn(child_tag, raw_rule)
seen.add(child_tag)
if child_tag in ('group', 'ip_range'):
for gr_child in child:
gr_child_tag = self._tag(gr_child)
self.assertIn(gr_child_tag, raw_rule[child_tag])
seen.add('%s/%s' % (child_tag, gr_child_tag))
self.assertEqual(gr_child.text,
raw_rule[child_tag][gr_child_tag])
else:
self.assertEqual(child.text, raw_rule[child_tag])
self.assertEqual(seen, expected)
def _verify_security_group(self, raw_group, tree):
rules = raw_group['rules']
self.assertEqual('security_group', self._tag(tree))
self.assertEqual(raw_group['id'], tree.get('id'))
self.assertEqual(raw_group['tenant_id'], tree.get('tenant_id'))
self.assertEqual(raw_group['name'], tree.get('name'))
self.assertEqual(2, len(tree))
for child in tree:
child_tag = self._tag(child)
if child_tag == 'rules':
self.assertEqual(2, len(child))
for idx, gr_child in enumerate(child):
self.assertEqual(self._tag(gr_child), 'rule')
self._verify_security_group_rule(rules[idx], gr_child)
else:
self.assertEqual('description', child_tag)
self.assertEqual(raw_group['description'], child.text)
def test_rule_serializer(self):
raw_rule = dict(
id='123',
parent_group_id='456',
ip_protocol='tcp',
from_port='789',
to_port='987',
group=dict(name='group', tenant_id='tenant'),
ip_range=dict(cidr='10.0.0.0/8'))
rule = dict(security_group_rule=raw_rule)
text = self.rule_serializer.serialize(rule)
tree = etree.fromstring(text)
self.assertEqual('security_group_rule', self._tag(tree))
self._verify_security_group_rule(raw_rule, tree)
def test_group_serializer(self):
rules = [dict(
id='123',
parent_group_id='456',
ip_protocol='tcp',
from_port='789',
to_port='987',
group=dict(name='group1', tenant_id='tenant1'),
ip_range=dict(cidr='10.55.44.0/24')),
dict(
id='654',
parent_group_id='321',
ip_protocol='udp',
from_port='234',
to_port='567',
group=dict(name='group2', tenant_id='tenant2'),
ip_range=dict(cidr='10.44.55.0/24'))]
raw_group = dict(
id='890',
description='description',
name='name',
tenant_id='tenant',
rules=rules)
sg_group = dict(security_group=raw_group)
text = self.default_serializer.serialize(sg_group)
tree = etree.fromstring(text)
self._verify_security_group(raw_group, tree)
def test_groups_serializer(self):
rules = [dict(
id='123',
parent_group_id='1234',
ip_protocol='tcp',
from_port='12345',
to_port='123456',
group=dict(name='group1', tenant_id='tenant1'),
ip_range=dict(cidr='10.123.0.0/24')),
dict(
id='234',
parent_group_id='2345',
ip_protocol='udp',
from_port='23456',
to_port='234567',
group=dict(name='group2', tenant_id='tenant2'),
ip_range=dict(cidr='10.234.0.0/24')),
dict(
id='345',
parent_group_id='3456',
ip_protocol='tcp',
from_port='34567',
to_port='345678',
group=dict(name='group3', tenant_id='tenant3'),
ip_range=dict(cidr='10.345.0.0/24')),
dict(
id='456',
parent_group_id='4567',
ip_protocol='udp',
from_port='45678',
to_port='456789',
group=dict(name='group4', tenant_id='tenant4'),
ip_range=dict(cidr='10.456.0.0/24'))]
groups = [dict(
id='567',
description='description1',
name='name1',
tenant_id='tenant1',
rules=rules[0:2]),
dict(
id='678',
description='description2',
name='name2',
tenant_id='tenant2',
rules=rules[2:4])]
sg_groups = dict(security_groups=groups)
text = self.index_serializer.serialize(sg_groups)
tree = etree.fromstring(text)
self.assertEqual('security_groups', self._tag(tree))
self.assertEqual(len(groups), len(tree))
for idx, child in enumerate(tree):
self._verify_security_group(groups[idx], child)
UUID1 = '00000000-0000-0000-0000-000000000001'
UUID2 = '00000000-0000-0000-0000-000000000002'
UUID3 = '00000000-0000-0000-0000-000000000003'
def fake_compute_get_all(*args, **kwargs):
base = {'id': 1, 'description': 'foo', 'user_id': 'bar',
'project_id': 'baz', 'deleted': False, 'deleted_at': None,
'updated_at': None, 'created_at': None}
db_list = [
fakes.stub_instance(
1, uuid=UUID1,
security_groups=[dict(base, **{'name': 'fake-0-0'}),
dict(base, **{'name': 'fake-0-1'})]),
fakes.stub_instance(
2, uuid=UUID2,
security_groups=[dict(base, **{'name': 'fake-1-0'}),
dict(base, **{'name': 'fake-1-1'})])
]
return instance_obj._make_instance_list(args[1],
objects.InstanceList(),
db_list,
['metadata', 'system_metadata',
'security_groups', 'info_cache'])
def fake_compute_get(*args, **kwargs):
inst = fakes.stub_instance(1, uuid=UUID3,
security_groups=[{'name': 'fake-2-0'},
{'name': 'fake-2-1'}])
return fake_instance.fake_instance_obj(args[1],
expected_attrs=instance_obj.INSTANCE_DEFAULT_FIELDS, **inst)
def fake_compute_create(*args, **kwargs):
return ([fake_compute_get(*args, **kwargs)], '')
def fake_get_instances_security_groups_bindings(inst, context, servers):
groups = {UUID1: [{'name': 'fake-0-0'}, {'name': 'fake-0-1'}],
UUID2: [{'name': 'fake-1-0'}, {'name': 'fake-1-1'}],
UUID3: [{'name': 'fake-2-0'}, {'name': 'fake-2-1'}]}
result = {}
for server in servers:
result[server['id']] = groups.get(server['id'])
return result
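# Orientation comment (added): the fakes above replace the compute API and the
# security-group binding lookup for the output tests below; UUID1/UUID2 back
# the list/detail views and UUID3 backs the show/create paths.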
class SecurityGroupsOutputTestV21(test.TestCase):
base_url = '/v2/fake/servers'
content_type = 'application/json'
def setUp(self):
super(SecurityGroupsOutputTestV21, self).setUp()
fakes.stub_out_nw_api(self.stubs)
self.stubs.Set(compute.api.API, 'get', fake_compute_get)
self.stubs.Set(compute.api.API, 'get_all', fake_compute_get_all)
self.stubs.Set(compute.api.API, 'create', fake_compute_create)
self.flags(
osapi_compute_extension=[
'nova.api.openstack.compute.contrib.select_extensions'],
osapi_compute_ext_list=['Security_groups'])
self.app = self._setup_app()
def _setup_app(self):
return fakes.wsgi_app_v21(init_only=('os-security-groups', 'servers'))
def _make_request(self, url, body=None):
req = webob.Request.blank(url)
if body:
req.method = 'POST'
req.body = self._encode_body(body)
req.content_type = self.content_type
req.headers['Accept'] = self.content_type
res = req.get_response(self.app)
return res
def _encode_body(self, body):
return jsonutils.dumps(body)
def _get_server(self, body):
return jsonutils.loads(body).get('server')
def _get_servers(self, body):
return jsonutils.loads(body).get('servers')
def _get_groups(self, server):
return server.get('security_groups')
def test_create(self):
image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
server = dict(name='server_test', imageRef=image_uuid, flavorRef=2)
res = self._make_request(self.base_url, {'server': server})
self.assertEqual(res.status_int, 202)
server = self._get_server(res.body)
for i, group in enumerate(self._get_groups(server)):
name = 'fake-2-%s' % i
self.assertEqual(group.get('name'), name)
def test_show(self):
url = self.base_url + '/' + UUID3
res = self._make_request(url)
self.assertEqual(res.status_int, 200)
server = self._get_server(res.body)
for i, group in enumerate(self._get_groups(server)):
name = 'fake-2-%s' % i
self.assertEqual(group.get('name'), name)
def test_detail(self):
url = self.base_url + '/detail'
res = self._make_request(url)
self.assertEqual(res.status_int, 200)
for i, server in enumerate(self._get_servers(res.body)):
for j, group in enumerate(self._get_groups(server)):
name = 'fake-%s-%s' % (i, j)
self.assertEqual(group.get('name'), name)
def test_no_instance_passthrough_404(self):
def fake_compute_get(*args, **kwargs):
raise exception.InstanceNotFound(instance_id='fake')
self.stubs.Set(compute.api.API, 'get', fake_compute_get)
url = self.base_url + '/70f6db34-de8d-4fbd-aafb-4065bdfa6115'
res = self._make_request(url)
self.assertEqual(res.status_int, 404)
class SecurityGroupsOutputTestV2(SecurityGroupsOutputTestV21):
def _setup_app(self):
return fakes.wsgi_app(init_only=('servers',))
class SecurityGroupsOutputXmlTest(SecurityGroupsOutputTestV2):
content_type = 'application/xml'
class MinimalCreateServerTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('server', selector='server')
root.set('name')
root.set('id')
root.set('imageRef')
root.set('flavorRef')
return xmlutil.MasterTemplate(root, 1,
nsmap={None: xmlutil.XMLNS_V11})
def _encode_body(self, body):
serializer = self.MinimalCreateServerTemplate()
return serializer.serialize(body)
def _get_server(self, body):
return etree.XML(body)
def _get_servers(self, body):
return etree.XML(body).getchildren()
def _get_groups(self, server):
# NOTE(vish): we are adding security groups without an extension
# namespace so we don't break people using the existing
# functionality, but that means we need to use find with
# the existing server namespace.
namespace = server.nsmap[None]
return server.find('{%s}security_groups' % namespace).getchildren()
|
|
from __future__ import absolute_import
from __future__ import with_statement
import socket
import types
from anyjson import dumps
from itertools import count
from Queue import Empty, Queue as _Queue
from kombu.connection import BrokerConnection
from kombu.entity import Exchange, Queue
from kombu.exceptions import InconsistencyError, VersionMismatch
from kombu.messaging import Consumer, Producer
from kombu.utils import eventio # patch poll
from kombu.tests.utils import TestCase
from kombu.tests.utils import Mock, module_exists, skip_if_not_module
class _poll(eventio._select):
def poll(self, timeout):
events = []
for fd in self._rfd:
if fd.data:
events.append((fd.fileno(), eventio.POLL_READ))
return events
eventio.poll = _poll
from kombu.transport import redis # must import after poller patch
class ResponseError(Exception):
pass
class Client(object):
queues = {}
sets = {}
shard_hint = None
def __init__(self, db=None, port=None, **kwargs):
self.port = port
self.db = db
self._called = []
self._connection = None
self.bgsave_raises_ResponseError = False
self.connection = self._sconnection(self)
def bgsave(self):
self._called.append("BGSAVE")
if self.bgsave_raises_ResponseError:
raise ResponseError()
def delete(self, key):
self.queues.pop(key, None)
def sadd(self, key, member):
print("SADD %r: %r" % (key, member))
if key not in self.sets:
self.sets[key] = set()
self.sets[key].add(member)
def exists(self, key):
return key in self.queues or key in self.sets
def smembers(self, key):
return self.sets.get(key, set())
def srem(self, key):
self.sets.pop(key, None)
def llen(self, key):
try:
return self.queues[key].qsize()
except KeyError:
return 0
def lpush(self, key, value):
self.queues[key].put_nowait(value)
def parse_response(self, connection, type, **options):
cmd, queues = self.connection._sock.data.pop()
assert cmd == type
self.connection._sock.data = []
if type == "BRPOP":
item = self.brpop(queues, 0.001)
if item:
return item
raise Empty()
def brpop(self, keys, timeout=None):
key = keys[0]
try:
item = self.queues[key].get(timeout=timeout)
except Empty:
pass
else:
return key, item
def rpop(self, key):
try:
return self.queues[key].get_nowait()
except KeyError:
pass
def __contains__(self, k):
return k in self._called
def pipeline(self):
return Pipeline(self)
def encode(self, value):
return str(value)
def _new_queue(self, key):
self.queues[key] = _Queue()
class _sconnection(object):
disconnected = False
class _socket(object):
blocking = True
next_fileno = count(30).next
def __init__(self, *args):
self._fileno = self.next_fileno()
self.data = []
def fileno(self):
return self._fileno
def setblocking(self, blocking):
self.blocking = blocking
def __init__(self, client):
self.client = client
self._sock = self._socket()
def disconnect(self):
self.disconnected = True
def send_command(self, cmd, *args):
self._sock.data.append((cmd, args))
def info(self):
return {"foo": 1}
def pubsub(self, *args, **kwargs):
connection = self.connection
class ConnectionPool(object):
def get_connection(self, *args, **kwargs):
return connection
self.connection_pool = ConnectionPool()
return self
class Pipeline(object):
def __init__(self, client):
self.client = client
self.stack = []
def __getattr__(self, key):
if key not in self.__dict__:
def _add(*args, **kwargs):
self.stack.append((getattr(self.client, key), args, kwargs))
return self
return _add
return self.__dict__[key]
def execute(self):
stack = list(self.stack)
self.stack[:] = []
return [fun(*args, **kwargs) for fun, args, kwargs in stack]
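# Illustrative sketch (added; not part of the original test module): the
# Pipeline fake above records attribute calls on a stack and only invokes them
# against the wrapped Client when execute() is called, mirroring how a real
# redis-py pipeline batches commands. The helper below is hypothetical and is
# never called by the tests.
def _pipeline_usage_example():
    client = Client()
    pipe = client.pipeline()
    # Nothing hits the client yet; each call just returns the pipeline.
    pipe.sadd("queues", "celery").delete("stale-key")
    # Both recorded calls run now; their return values come back in order.
    return pipe.execute()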
class Channel(redis.Channel):
def _get_client(self):
return Client
def _get_response_error(self):
return ResponseError
def _new_queue(self, queue, **kwargs):
self.client._new_queue(queue)
def pipeline(self):
return Pipeline(Client())
class Transport(redis.Transport):
Channel = Channel
def _get_errors(self):
return ((KeyError, ), (IndexError, ))
class test_Channel(TestCase):
def setUp(self):
self.connection = BrokerConnection(transport=Transport)
self.channel = self.connection.channel()
def test_basic_consume_when_fanout_queue(self):
self.channel.exchange_declare(exchange="txconfan", type="fanout")
self.channel.queue_declare(queue="txconfanq")
self.channel.queue_bind(queue="txconfanq", exchange="txconfan")
self.assertIn("txconfanq", self.channel._fanout_queues)
self.channel.basic_consume("txconfanq", False, None, 1)
self.assertIn("txconfanq", self.channel.active_fanout_queues)
self.assertEqual(self.channel._fanout_to_queue.get("txconfan"),
"txconfanq")
def test_basic_cancel_unknown_delivery_tag(self):
self.assertIsNone(self.channel.basic_cancel("txaseqwewq"))
def test_subscribe_no_queues(self):
self.channel.subclient = Mock()
self.channel.active_fanout_queues.clear()
self.channel._subscribe()
self.assertFalse(self.channel.subclient.subscribe.called)
def test_subscribe(self):
self.channel.subclient = Mock()
self.channel.active_fanout_queues.add("a")
self.channel.active_fanout_queues.add("b")
self.channel._fanout_queues.update(a="a", b="b")
self.channel._subscribe()
self.assertTrue(self.channel.subclient.subscribe.called)
s_args, _ = self.channel.subclient.subscribe.call_args
self.assertItemsEqual(s_args[0], ["a", "b"])
self.channel.subclient.connection._sock = None
self.channel._subscribe()
self.channel.subclient.connection.connect.assert_called_with()
def test_handle_unsubscribe_message(self):
s = self.channel.subclient
s.subscribed = True
self.channel._handle_message(s, ["unsubscribe", "a", 0])
self.assertFalse(s.subscribed)
def test_handle_pmessage_message(self):
self.assertDictEqual(self.channel._handle_message(
self.channel.subclient,
["pmessage", "pattern", "channel", "data"]),
{"type": "pmessage",
"pattern": "pattern",
"channel": "channel",
"data": "data"})
def test_handle_message(self):
self.assertDictEqual(self.channel._handle_message(
self.channel.subclient,
["type", "channel", "data"]),
{"type": "type",
"pattern": None,
"channel": "channel",
"data": "data"})
def test_brpop_start_but_no_queues(self):
self.channel.active_queues.clear()
self.assertIsNone(self.channel._brpop_start())
def test_receive(self):
s = self.channel.subclient = Mock()
self.channel._fanout_to_queue["a"] = "b"
s.parse_response.return_value = ["message", "a",
dumps({"hello": "world"})]
payload, queue = self.channel._receive()
self.assertDictEqual(payload, {"hello": "world"})
self.assertEqual(queue, "b")
def test_receive_raises(self):
self.channel._in_listen = True
s = self.channel.subclient = Mock()
s.parse_response.side_effect = KeyError("foo")
with self.assertRaises(redis.Empty):
self.channel._receive()
self.assertFalse(self.channel._in_listen)
def test_receive_empty(self):
s = self.channel.subclient = Mock()
s.parse_response.return_value = None
with self.assertRaises(redis.Empty):
self.channel._receive()
    def test_receive_different_message_type(self):
s = self.channel.subclient = Mock()
s.parse_response.return_value = ["pmessage", "/foo/", 0, "data"]
with self.assertRaises(redis.Empty):
self.channel._receive()
def test_brpop_read_raises(self):
c = self.channel.client = Mock()
c.parse_response.side_effect = KeyError("foo")
with self.assertRaises(redis.Empty):
self.channel._brpop_read()
c.connection.disconnect.assert_called_with()
def test_brpop_read_gives_None(self):
c = self.channel.client = Mock()
c.parse_response.return_value = None
with self.assertRaises(redis.Empty):
self.channel._brpop_read()
def test_poll_error(self):
c = self.channel.client = Mock()
c.parse_response = Mock()
self.channel._poll_error("BRPOP")
c.parse_response.assert_called_with("BRPOP")
c.parse_response.side_effect = KeyError("foo")
self.assertIsNone(self.channel._poll_error("BRPOP"))
def test_put_fanout(self):
self.channel._in_poll = False
c = self.channel.client = Mock()
body = {"hello": "world"}
self.channel._put_fanout("exchange", body)
c.publish.assert_called_with("exchange", dumps(body))
def test_delete(self):
x = self.channel
self.channel._in_poll = False
delete = x.client.delete = Mock()
srem = x.client.srem = Mock()
x._delete("queue", "exchange", "routing_key", None)
        delete.assert_any_call("queue")
        srem.assert_any_call(x.keyprefix_queue % ("exchange", ),
                             x.sep.join(["routing_key", "", "queue"]))
def test_has_queue(self):
self.channel._in_poll = False
exists = self.channel.client.exists = Mock()
exists.return_value = True
self.assertTrue(self.channel._has_queue("foo"))
        exists.assert_any_call("foo")
exists.return_value = False
self.assertFalse(self.channel._has_queue("foo"))
def test_close_when_closed(self):
self.channel.closed = True
self.channel.close()
def test_close_client_close_raises(self):
c = self.channel.client = Mock()
c.connection.disconnect.side_effect = self.channel.ResponseError()
self.channel.close()
c.connection.disconnect.assert_called_with()
def test_invalid_database_raises_ValueError(self):
self.channel.connection.client.virtual_host = "xfeqwewkfk"
with self.assertRaises(ValueError):
self.channel._create_client()
@skip_if_not_module("redis")
def test_get_client(self):
import redis as R
KombuRedis = redis.Channel._get_client(self.channel)
self.assertTrue(KombuRedis)
        Rv = getattr(R, "__version__", None)
try:
R.__version__ = "2.4.0"
with self.assertRaises(VersionMismatch):
redis.Channel._get_client(self.channel)
finally:
if Rv is not None:
R.__version__ = Rv
@skip_if_not_module("redis")
def test_get_response_error(self):
from redis.exceptions import ResponseError
self.assertIs(redis.Channel._get_response_error(self.channel),
ResponseError)
def test_avail_client_when_not_in_poll(self):
self.channel._in_poll = False
c = self.channel.client = Mock()
self.assertIs(self.channel._avail_client, c)
def test_avail_client_when_in_poll(self):
self.channel._in_poll = True
cc = self.channel._create_client = Mock()
self.assertTrue(self.channel._avail_client)
cc.assert_called_with()
@skip_if_not_module("redis")
def test_transport_get_errors(self):
self.assertTrue(redis.Transport._get_errors(self.connection.transport))
@skip_if_not_module("redis")
def test_transport_get_errors_when_InvalidData_used(self):
from redis import exceptions
class ID(Exception):
pass
DataError = getattr(exceptions, "DataError", None)
InvalidData = getattr(exceptions, "InvalidData", None)
exceptions.InvalidData = ID
exceptions.DataError = None
try:
errors = redis.Transport._get_errors(self.connection.transport)
self.assertTrue(errors)
self.assertIn(ID, errors[1])
finally:
if DataError is not None:
exceptions.DataError = DataError
if InvalidData is not None:
exceptions.InvalidData = InvalidData
def test_empty_queues_key(self):
channel = self.channel
channel._in_poll = False
key = channel.keyprefix_queue % 'celery'
# Everything is fine, there is a list of queues.
channel.client.sadd(key, 'celery\x06\x16\x06\x16celery')
self.assertListEqual(channel.get_table('celery'),
[('celery', '', 'celery')])
# ... then for some reason, the _kombu.binding.celery key gets lost
channel.client.srem(key)
# which raises a channel error so that the consumer/publisher
# can recover by redeclaring the required entities.
with self.assertRaises(InconsistencyError):
self.channel.get_table("celery")
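# Recovery sketch (added; illustrative only): when the _kombu.binding.<exchange>
# set disappears, get_table() raises InconsistencyError, and a consumer or
# publisher can recover by redeclaring its entities so the binding set is
# rebuilt. The helper below uses hypothetical arguments and is never called.
def _recover_bindings_example(connection, queue):
    channel = connection.channel()
    try:
        channel.get_table(queue.exchange.name)
    except InconsistencyError:
        # Redeclares the exchange, the queue and its binding on this channel.
        queue(channel).declare()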
class test_Redis(TestCase):
def setUp(self):
self.connection = BrokerConnection(transport=Transport)
self.exchange = Exchange("test_Redis", type="direct")
self.queue = Queue("test_Redis", self.exchange, "test_Redis")
def tearDown(self):
self.connection.close()
def test_publish__get(self):
channel = self.connection.channel()
producer = Producer(channel, self.exchange, routing_key="test_Redis")
self.queue(channel).declare()
producer.publish({"hello": "world"})
self.assertDictEqual(self.queue(channel).get().payload,
{"hello": "world"})
self.assertIsNone(self.queue(channel).get())
self.assertIsNone(self.queue(channel).get())
self.assertIsNone(self.queue(channel).get())
def test_publish__consume(self):
connection = BrokerConnection(transport=Transport)
channel = connection.channel()
producer = Producer(channel, self.exchange, routing_key="test_Redis")
consumer = Consumer(channel, self.queue)
producer.publish({"hello2": "world2"})
_received = []
def callback(message_data, message):
_received.append(message_data)
message.ack()
consumer.register_callback(callback)
consumer.consume()
self.assertIn(channel, channel.connection.cycle._channels)
try:
connection.drain_events(timeout=1)
self.assertTrue(_received)
with self.assertRaises(socket.timeout):
connection.drain_events(timeout=0.01)
finally:
channel.close()
def test_purge(self):
channel = self.connection.channel()
producer = Producer(channel, self.exchange, routing_key="test_Redis")
self.queue(channel).declare()
for i in range(10):
producer.publish({"hello": "world-%s" % (i, )})
self.assertEqual(channel._size("test_Redis"), 10)
self.assertEqual(self.queue(channel).purge(), 10)
channel.close()
def test_db_values(self):
c1 = BrokerConnection(virtual_host=1,
transport=Transport).channel()
self.assertEqual(c1.client.db, 1)
c2 = BrokerConnection(virtual_host="1",
transport=Transport).channel()
self.assertEqual(c2.client.db, 1)
c3 = BrokerConnection(virtual_host="/1",
transport=Transport).channel()
self.assertEqual(c3.client.db, 1)
with self.assertRaises(Exception):
BrokerConnection(virtual_host="/foo",
transport=Transport).channel()
def test_db_port(self):
c1 = BrokerConnection(port=None, transport=Transport).channel()
self.assertEqual(c1.client.port, Transport.default_port)
c1.close()
c2 = BrokerConnection(port=9999, transport=Transport).channel()
self.assertEqual(c2.client.port, 9999)
c2.close()
def test_close_poller_not_active(self):
c = BrokerConnection(transport=Transport).channel()
cycle = c.connection.cycle
c.client.connection
c.close()
self.assertNotIn(c, cycle._channels)
def test_close_ResponseError(self):
c = BrokerConnection(transport=Transport).channel()
c.client.bgsave_raises_ResponseError = True
c.close()
def test_close_disconnects(self):
c = BrokerConnection(transport=Transport).channel()
conn1 = c.client.connection
conn2 = c.subclient.connection
c.close()
self.assertTrue(conn1.disconnected)
self.assertTrue(conn2.disconnected)
def test_get__Empty(self):
channel = self.connection.channel()
with self.assertRaises(Empty):
channel._get("does-not-exist")
channel.close()
def test_get_client(self):
myredis, exceptions = _redis_modules()
@module_exists(myredis, exceptions)
def _do_test():
conn = BrokerConnection(transport=Transport)
chan = conn.channel()
self.assertTrue(chan.Client)
self.assertTrue(chan.ResponseError)
self.assertTrue(conn.transport.connection_errors)
self.assertTrue(conn.transport.channel_errors)
_do_test()
def _redis_modules():
class ConnectionError(Exception):
pass
class AuthenticationError(Exception):
pass
class InvalidData(Exception):
pass
class InvalidResponse(Exception):
pass
class ResponseError(Exception):
pass
exceptions = types.ModuleType("redis.exceptions")
exceptions.ConnectionError = ConnectionError
exceptions.AuthenticationError = AuthenticationError
exceptions.InvalidData = InvalidData
exceptions.InvalidResponse = InvalidResponse
exceptions.ResponseError = ResponseError
class Redis(object):
pass
myredis = types.ModuleType("redis")
myredis.exceptions = exceptions
myredis.Redis = Redis
return myredis, exceptions
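# _redis_modules above builds throwaway ``redis`` and ``redis.exceptions``
# module objects so test_get_client can exercise the import path through the
# module_exists decorator without a real redis-py installation.
# (Orientation comment added.)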
class test_MultiChannelPoller(TestCase):
Poller = redis.MultiChannelPoller
def test_close_unregisters_fds(self):
p = self.Poller()
poller = p._poller = Mock()
p._chan_to_sock.update({1: 1, 2: 2, 3: 3})
p.close()
self.assertEqual(poller.unregister.call_count, 3)
u_args = poller.unregister.call_args_list
self.assertItemsEqual(u_args, [((1, ), {}),
((2, ), {}),
((3, ), {})])
def test_close_when_unregister_raises_KeyError(self):
p = self.Poller()
p._poller = Mock()
p._chan_to_sock.update({1: 1})
p._poller.unregister.side_effect = KeyError(1)
p.close()
def test_close_resets_state(self):
p = self.Poller()
p._poller = Mock()
p._channels = Mock()
p._fd_to_chan = Mock()
p._chan_to_sock = Mock()
p._chan_to_sock.itervalues.return_value = []
p._chan_to_sock.values.return_value = [] # py3k
p.close()
p._channels.clear.assert_called_with()
p._fd_to_chan.clear.assert_called_with()
p._chan_to_sock.clear.assert_called_with()
self.assertIsNone(p._poller)
def test_register_when_registered_reregisters(self):
p = self.Poller()
p._poller = Mock()
channel, client, type = Mock(), Mock(), Mock()
sock = client.connection._sock = Mock()
sock.fileno.return_value = 10
p._chan_to_sock = {(channel, client, type): 6}
p._register(channel, client, type)
p._poller.unregister.assert_called_with(6)
self.assertTupleEqual(p._fd_to_chan[10], (channel, type))
self.assertEqual(p._chan_to_sock[(channel, client, type)], sock)
p._poller.register.assert_called_with(sock, p.eventflags)
# when client not connected yet
client.connection._sock = None
def after_connected():
client.connection._sock = Mock()
client.connection.connect.side_effect = after_connected
p._register(channel, client, type)
client.connection.connect.assert_called_with()
def test_register_BRPOP(self):
p = self.Poller()
channel = Mock()
channel.client.connection._sock = None
p._register = Mock()
channel._in_poll = False
p._register_BRPOP(channel)
self.assertEqual(channel._brpop_start.call_count, 1)
self.assertEqual(p._register.call_count, 1)
channel.client.connection._sock = Mock()
p._chan_to_sock[(channel, channel.client, "BRPOP")] = True
channel._in_poll = True
p._register_BRPOP(channel)
self.assertEqual(channel._brpop_start.call_count, 1)
self.assertEqual(p._register.call_count, 1)
def test_register_LISTEN(self):
p = self.Poller()
channel = Mock()
channel.subclient.connection._sock = None
channel._in_listen = False
p._register = Mock()
p._register_LISTEN(channel)
p._register.assert_called_with(channel, channel.subclient, "LISTEN")
self.assertEqual(p._register.call_count, 1)
self.assertEqual(channel._subscribe.call_count, 1)
channel._in_listen = True
channel.subclient.connection._sock = Mock()
p._register_LISTEN(channel)
self.assertEqual(p._register.call_count, 1)
self.assertEqual(channel._subscribe.call_count, 1)
def create_get(self, events=None, queues=None,
fanouts=None):
_pr = [] if events is None else events
_aq = [] if queues is None else queues
_af = [] if fanouts is None else fanouts
p = self.Poller()
p._poller = Mock()
p._poller.poll.return_value = _pr
p._register_BRPOP = Mock()
p._register_LISTEN = Mock()
channel = Mock()
p._channels = [channel]
channel.active_queues = _aq
channel.active_fanout_queues = _af
return p, channel
def test_get_no_actions(self):
p, channel = self.create_get()
with self.assertRaises(redis.Empty):
p.get()
def test_get_brpop_qos_allow(self):
p, channel = self.create_get(queues=["a_queue"])
channel.qos.can_consume.return_value = True
with self.assertRaises(redis.Empty):
p.get()
p._register_BRPOP.assert_called_with(channel)
def test_get_brpop_qos_disallow(self):
p, channel = self.create_get(queues=["a_queue"])
channel.qos.can_consume.return_value = False
with self.assertRaises(redis.Empty):
p.get()
self.assertFalse(p._register_BRPOP.called)
def test_get_listen(self):
p, channel = self.create_get(fanouts=["f_queue"])
with self.assertRaises(redis.Empty):
p.get()
p._register_LISTEN.assert_called_with(channel)
def test_get_receives_POLL_ERR(self):
p, channel = self.create_get(events=[(1, eventio.POLL_ERR)])
p._fd_to_chan[1] = (channel, "BRPOP")
with self.assertRaises(redis.Empty):
p.get()
channel._poll_error.assert_called_with("BRPOP")
def test_get_receives_multiple(self):
p, channel = self.create_get(events=[(1, eventio.POLL_ERR),
(1, eventio.POLL_ERR)])
p._fd_to_chan[1] = (channel, "BRPOP")
with self.assertRaises(redis.Empty):
p.get()
channel._poll_error.assert_called_with("BRPOP")
|
|
from __future__ import absolute_import
import re
from collections import deque
import six
class Token:
def __init__(self, **kwds):
self.buffer = None
self.__dict__.update(kwds)
def __str__(self):
return self.__dict__.__str__()
def regexec(regex, input):
matches = regex.match(input)
if matches:
return (input[matches.start():matches.end()],) + matches.groups()
return None
def detect_closing_bracket(string):
count = 0
pos = string.find('[')
while True:
if string[pos] == '[':
count += 1
if string[pos] == ']':
count -= 1
pos += 1
if count == 0:
return pos
def replace_string_brackets(splitted_string):
sval_replaced = []
old_delim = None
for i in splitted_string:
if old_delim is None:
sval_replaced.append(i)
if i in ('"', "'"):
old_delim = i
continue
if i in ('"', "'"):
if i == old_delim:
old_delim = None
sval_replaced.append(i)
continue
sval_replaced.append(re.sub(r'\[|\]', '*', i))
return ''.join(sval_replaced)
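# Behaviour sketch for the two helpers above (added; derived from the code, not
# from separate documentation): STRING_SPLITS splits a line around quoted
# segments, replace_string_brackets masks any '[' or ']' that sit inside those
# quotes with '*', and detect_closing_bracket returns the index just past the
# ']' matching the first '['. For example,
#     replace_string_brackets(['a #[em x] b ', '"', 'c [d]', '"', ''])
# yields 'a #[em x] b "c *d*"', so the later bracket search only sees the
# inline #[...] construct and not brackets embedded in attribute strings.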
class Lexer(object):
RE_INPUT = re.compile(r'\r\n|\r')
RE_COMMENT = re.compile(r'^ *\/\/(-)?([^\n]*)')
RE_TAG = re.compile(r'^(\w[-:\w]*|#\{.*?\})')
RE_DOT_BLOCK_START = re.compile(r'^\.\n')
RE_FILTER = re.compile(r'^:(\w+)')
RE_DOCTYPE = re.compile(r'^(?:!!!|doctype) *([^\n]+)?')
RE_ID = re.compile(r'^#([\w-]+)')
RE_CLASS = re.compile(r'^\.([\w-]+)')
RE_STRING = re.compile(r'^(?:\| ?)([^\n]+)')
RE_TEXT = re.compile(r'^([^\n]+)')
RE_EXTENDS = re.compile(r'^extends? +([^\n]+)')
RE_PREPEND = re.compile(r'^prepend +([^\n]+)')
RE_APPEND = re.compile(r'^append +([^\n]+)')
RE_BLOCK = re.compile(r'''^block(( +(?:(prepend|append) +)?([^\n]*))|\n)''')
RE_YIELD = re.compile(r'^yield *')
RE_INCLUDE = re.compile(r'^include +([^\n]+)')
RE_ASSIGNMENT = re.compile(r'^(-\s+var\s+)?(\w+) += *([^;\n]+)( *;? *)')
RE_MIXIN = re.compile(r'^mixin +([-\w]+)(?: *\((.*)\))?')
RE_CALL = re.compile(r'^\+\s*([-.\w]+)(?: *\((.*)\))?')
RE_CONDITIONAL = re.compile(r'^(?:- *)?(if|unless|else if|elif|else)\b([^\n]*)')
RE_BLANK = re.compile(r'^\n *\n')
# RE_WHILE = re.compile(r'^while +([^\n]+)')
RE_EACH = re.compile(r'^(?:- *)?(?:each|for) +([\w, ]+) +in +([^\n]+)')
RE_CODE = re.compile(r'^(!?=|-)([^\n]+)')
RE_ATTR_INTERPOLATE = re.compile(r'#\{([^}]+)\}')
RE_ATTR_PARSE = re.compile(r'''^['"]|['"]$''')
RE_INDENT_TABS = re.compile(r'^\n(\t*) *')
RE_INDENT_SPACES = re.compile(r'^\n( *)')
RE_COLON = re.compile(r'^: *')
RE_INLINE = re.compile(r'(?<!\\)#\[')
RE_INLINE_ESCAPE = re.compile(r'\\#\[')
STRING_SPLITS = re.compile(r'([\'"])(.*?)(?<!\\)(\1)')
def __init__(self, string, **options):
if isinstance(string, six.binary_type):
string = six.text_type(string, 'utf8')
self.options = options
self.input = self.RE_INPUT.sub('\n', string)
self.colons = self.options.get('colons', False)
self.deferredTokens = deque()
self.lastIndents = 0
self.lineno = 1
self.stash = deque()
self.indentStack = deque()
self.indentRe = None
self.pipeless = False
self.isTextBlock = False
def tok(self, type, val=None):
return Token(type=type, line=self.lineno, val=val, inline_level=self.options.get('inline_level', 0))
def consume(self, len):
self.input = self.input[len:]
def scan(self, regexp, type):
captures = regexec(regexp, self.input)
# print regexp,type, self.input, captures
if captures:
# print captures
self.consume(len(captures[0]))
# print 'a',self.input
if len(captures) == 1:
return self.tok(type, None)
return self.tok(type, captures[1])
def defer(self, tok):
self.deferredTokens.append(tok)
def lookahead(self, n):
# print self.stash
fetch = n - len(self.stash)
while True:
fetch -= 1
if not fetch >= 0:
break
self.stash.append(self.next())
return self.stash[n - 1]
def indexOfDelimiters(self, start, end):
str, nstart, nend, pos = self.input, 0, 0, 0
for i, s in enumerate(str):
if start == s:
nstart += 1
elif end == s:
nend += 1
if nend == nstart:
pos = i
break
return pos
def stashed(self):
# print self.stash
return len(self.stash) and self.stash.popleft()
def deferred(self):
return len(self.deferredTokens) and self.deferredTokens.popleft()
def eos(self):
# print 'eos',bool(self.input)
if self.input:
return
if self.indentStack:
self.indentStack.popleft()
return self.tok('outdent')
else:
return self.tok('eos')
def consumeBlank(self):
captures = regexec(self.RE_BLANK, self.input)
if not captures:
return
self.lineno += 1
self.consume(len(captures[0]) - 1)
return captures
def blank(self):
if self.pipeless:
return
if self.consumeBlank():
return self.next()
def comment(self):
captures = regexec(self.RE_COMMENT, self.input)
if captures:
self.consume(len(captures[0]))
tok = self.tok('comment', captures[2])
tok.buffer = '-' != captures[1]
return tok
def tag(self):
captures = regexec(self.RE_TAG, self.input)
# print self.input,captures,re.match('^(\w[-:\w]*)',self.input)
if captures:
self.consume(len(captures[0]))
name = captures[1]
if name.endswith(':'):
name = name[:-1]
tok = self.tok('tag', name)
self.defer(self.tok(':'))
while self.input[0] == ' ':
self.input = self.input[1:]
else:
tok = self.tok('tag', name)
return tok
def textBlockStart(self):
captures = regexec(self.RE_DOT_BLOCK_START, self.input)
if captures is None:
return
if len(self.indentStack) > 0:
self.textBlockTagIndent = self.indentStack[0]
else:
self.textBlockTagIndent = 0
self.consume(1)
self.isTextBlock = True
return self.textBlockContinue(isStart=True)
def textBlockContinue(self, isStart=False):
if not self.isTextBlock:
return
tokens = deque()
while True:
if self.consumeBlank():
if not isStart:
tokens.append(self.tok('string', ''))
continue
eos = self.eos()
if eos is not None:
if isStart:
return eos
tokens.append(eos)
break
nextIndent = self.captureIndent()
if nextIndent is None or len(nextIndent[1]) <= self.textBlockTagIndent:
self.isTextBlock = False
if isStart:
return self.tok('newline')
break
padding = 0
if not isStart and len(nextIndent[1]) > self.textBlockIndent:
padding = len(nextIndent[1]) - self.textBlockIndent
self.consume(1 + padding)
self.input = '\n' + self.input
indent = self.indent()
if isStart:
self.textBlockIndent = indent.val
padding = 0
itoks = self.scanInline(self.RE_TEXT, 'string')
indentChar = self.indentRe == self.RE_INDENT_TABS and '\t' or ' '
if itoks:
itoks[0].val = (indentChar * padding) + itoks[0].val
if isStart:
for tok in itoks or []:
self.defer(tok)
return indent
tokens.extend(itoks)
if not tokens:
firstTok = None
else:
firstTok = tokens.popleft()
while tokens:
if tokens[-1].type == 'string' and not tokens[-1].val:
tokens.pop()
continue
self.defer(tokens.popleft())
self.isTextBlock = False
return firstTok
def filter(self):
return self.scan(self.RE_FILTER, 'filter')
def doctype(self):
# print self.scan(self.RE_DOCTYPE, 'doctype')
return self.scan(self.RE_DOCTYPE, 'doctype')
def id(self):
return self.scan(self.RE_ID, 'id')
def className(self):
return self.scan(self.RE_CLASS, 'class')
def processInline(self, val):
sval = self.STRING_SPLITS.split(val)
sval_stripped = [i.strip() for i in sval]
if sval_stripped.count('"') % 2 != 0 or sval_stripped.count("'") % 2 != 0:
raise Exception('Unbalanced quotes found inside inline jade at line %s.' % self.lineno)
sval_replaced = replace_string_brackets(sval)
start_inline = self.RE_INLINE.search(sval_replaced).start()
try:
closing = start_inline + detect_closing_bracket(sval_replaced[start_inline:])
except IndexError:
raise Exception('The end of the string was reached with no closing bracket found at line %s.' % self.lineno)
textl = val[:start_inline]
code = val[start_inline:closing][2:-1]
textr = val[closing:]
toks = deque()
toks.append(self.tok('string', self.RE_INLINE_ESCAPE.sub('#[', textl)))
ilexer = InlineLexer(code, inline_level=self.options.get('inline_level', 0) + 1)
while True:
tok = ilexer.advance()
if tok.type == 'eos':
break
toks.append(tok)
if self.RE_INLINE.search(textr):
toks.extend(self.processInline(textr))
else:
toks.append(self.tok('string', self.RE_INLINE_ESCAPE.sub('#[', textr)))
return toks
def scanInline(self, regexp, type):
ret = self.scan(regexp, type)
if ret is None:
return ret
if self.RE_INLINE.search(ret.val):
ret = self.processInline(ret.val)
if ret:
ret[0].val = ret[0].val.lstrip()
else:
ret.val = self.RE_INLINE_ESCAPE.sub('#[', ret.val)
ret = deque([ret])
return ret
def scanInlineProcess(self, regexp, type_):
toks = self.scanInline(regexp, type_)
if not toks:
return None
firstTok = toks.popleft()
for tok in toks:
self.defer(tok)
return firstTok
def string(self):
return self.scanInlineProcess(self.RE_STRING, 'string')
def text(self):
return self.scanInlineProcess(self.RE_TEXT, 'text')
def extends(self):
return self.scan(self.RE_EXTENDS, 'extends')
def prepend(self):
captures = regexec(self.RE_PREPEND, self.input)
if captures:
self.consume(len(captures[0]))
mode, name = 'prepend', captures[1]
tok = self.tok('block', name)
tok.mode = mode
return tok
def append(self):
captures = regexec(self.RE_APPEND, self.input)
if captures:
self.consume(len(captures[0]))
mode, name = 'append', captures[1]
tok = self.tok('block', name)
tok.mode = mode
return tok
def block(self):
captures = regexec(self.RE_BLOCK, self.input)
if captures:
self.consume(len(captures[0]))
mode = captures[3] or 'replace'
name = captures[4] or ''
tok = self.tok('block', name)
tok.mode = mode
return tok
def _yield(self):
return self.scan(self.RE_YIELD, 'yield')
def include(self):
return self.scan(self.RE_INCLUDE, 'include')
def assignment(self):
captures = regexec(self.RE_ASSIGNMENT, self.input)
if captures:
self.consume(len(captures[0]))
name, val = captures[2:4]
tok = self.tok('assignment')
tok.name = name
tok.val = val
return tok
def mixin(self):
captures = regexec(self.RE_MIXIN, self.input)
if captures:
self.consume(len(captures[0]))
tok = self.tok('mixin', captures[1])
tok.args = captures[2]
return tok
def call(self):
captures = regexec(self.RE_CALL, self.input)
if captures:
self.consume(len(captures[0]))
tok = self.tok('call', captures[1])
tok.args = captures[2]
return tok
def conditional(self):
captures = regexec(self.RE_CONDITIONAL, self.input)
if captures:
self.consume(len(captures[0]))
type, sentence = captures[1:]
tok = self.tok('conditional', type)
tok.sentence = sentence
return tok
# def _while(self):
# captures = regexec(self.RE_WHILE,self.input)
# if captures:
# self.consume(len(captures[0]))
# return self.tok('code','while(%s)'%captures[1])
def each(self):
captures = regexec(self.RE_EACH, self.input)
if captures:
self.consume(len(captures[0]))
tok = self.tok('each', None)
tok.keys = [x.strip() for x in captures[1].split(',')]
tok.code = captures[2]
return tok
def code(self):
captures = regexec(self.RE_CODE, self.input)
if captures:
self.consume(len(captures[0]))
flags, name = captures[1:]
tok = self.tok('code', name)
tok.escape = flags.startswith('=')
#print captures
tok.buffer = '=' in flags
# print tok.buffer
return tok
def attrs(self):
if '(' == self.input[0]:
index = self.indexOfDelimiters('(', ')')
string = self.input[1:index]
tok = self.tok('attrs')
l = len(string)
colons = self.colons
states = ['key']
class Namespace:
key = u''
val = u''
quote = u''
literal = True
def reset(self):
self.key = self.val = self.quote = u''
self.literal = True
def __str__(self):
return dict(key=self.key, val=self.val, quote=self.quote,
literal=self.literal).__str__()
ns = Namespace()
def state():
return states[-1]
def interpolate(attr):
attr, num = self.RE_ATTR_INTERPOLATE.subn(lambda matchobj: '%s+"{}".format(%s)+%s' % (ns.quote, matchobj.group(1), ns.quote), attr)
return attr, (num > 0)
self.consume(index + 1)
from .utils import odict
tok.attrs = odict()
tok.static_attrs = set()
str_nums = list(map(str, range(10)))
# print '------'
def parse(c):
real = c
if colons and ':' == c:
c = '='
ns.literal = ns.literal and (state() not in ('object', 'array',
'expr'))
# print ns, c, states
if c in (',', '\n') or (c == ' ' and state() == 'val' and len(states) == 2 and ns.val.strip()):
s = state()
if s in ('expr', 'array', 'string', 'object'):
ns.val += c
else:
states.append('key')
ns.val = ns.val.strip()
ns.key = ns.key.strip()
if not ns.key:
return
# ns.literal = ns.quote
if not ns.literal:
if '!' == ns.key[-1]:
ns.literal = True
ns.key = ns.key[:-1]
ns.key = ns.key.strip("'\"")
if not ns.val:
tok.attrs[ns.key] = True
else:
tok.attrs[ns.key], is_interpolated = interpolate(ns.val)
ns.literal = ns.literal and not is_interpolated
if ns.literal:
tok.static_attrs.add(ns.key)
ns.reset()
elif '=' == c:
s = state()
if s == 'key char':
ns.key += real
elif s in ('val', 'expr', 'array', 'string', 'object'):
ns.val += real
else:
states.append('val')
elif '(' == c:
if state() in ('val', 'expr'):
states.append('expr')
ns.val += c
elif ')' == c:
if state() in ('val', 'expr'):
states.pop()
ns.val += c
elif '{' == c:
if 'val' == state():
states.append('object')
ns.val += c
elif '}' == c:
if 'object' == state():
states.pop()
ns.val += c
elif '[' == c:
if 'val' == state():
states.append('array')
ns.val += c
elif ']' == c:
if 'array' == state():
states.pop()
ns.val += c
elif c in ('"', "'"):
s = state()
if 'key' == s:
states.append('key char')
elif 'key char' == s:
states.pop()
elif 'string' == s:
if c == ns.quote:
states.pop()
ns.val += c
else:
states.append('string')
ns.val += c
ns.quote = c
elif '' == c:
pass
else:
s = state()
ns.literal = ns.literal and (s in ('key', 'string') or c in str_nums)
# print c, s, ns.literal
if s in ('key', 'key char'):
ns.key += c
else:
ns.val += c
for char in string:
parse(char)
parse(',')
return tok
def captureIndent(self):
if self.indentRe:
captures = regexec(self.indentRe, self.input)
else:
regex = self.RE_INDENT_TABS
captures = regexec(regex, self.input)
if captures and not captures[1]:
regex = self.RE_INDENT_SPACES
captures = regexec(regex, self.input)
if captures and captures[1]:
self.indentRe = regex
return captures
def indent(self):
captures = self.captureIndent()
if captures:
indents = len(captures[1])
self.lineno += 1
self.consume(indents + 1)
if not self.input:
return self.tok('newline')
if self.input[0] in (' ', '\t'):
raise Exception('Invalid indentation, you can use tabs or spaces but not both')
if '\n' == self.input[0]:
return self.tok('newline')
if self.indentStack and indents < self.indentStack[0]:
while self.indentStack and self.indentStack[0] > indents:
self.stash.append(self.tok('outdent'))
self.indentStack.popleft()
tok = self.stash.pop()
elif indents and (not self.indentStack or indents != self.indentStack[0]):
self.indentStack.appendleft(indents)
tok = self.tok('indent', indents)
else:
tok = self.tok('newline')
return tok
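    # Indentation handling in brief (comment added; derived from indent()
    # above): a deeper indent pushes its width onto indentStack and emits an
    # 'indent' token, a shallower line pops every larger width and stashes one
    # 'outdent' per popped level, and an unchanged depth yields 'newline'.
    # A space or tab left over after the consumed indent is rejected as mixed
    # indentation.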
def pipelessText(self):
if self.pipeless:
if '\n' == self.input[0]:
return
i = self.input.find('\n')
if -1 == i:
i = len(self.input)
str = self.input[:i]
self.consume(len(str))
return self.tok('text', str)
def colon(self):
return self.scan(self.RE_COLON, ':')
def advance(self):
return self.stashed() or self.next()
def next(self):
return self.deferred() \
or self.textBlockContinue() \
or self.blank() \
or self.eos() \
or self.pipelessText() \
or self._yield() \
or self.doctype() \
or self.extends() \
or self.append() \
or self.prepend() \
or self.block() \
or self.include() \
or self.mixin() \
or self.call() \
or self.conditional() \
or self.each() \
or self.assignment() \
or self.tag() \
or self.textBlockStart() \
or self.filter() \
or self.code() \
or self.id() \
or self.className() \
or self.attrs() \
or self.indent() \
or self.comment() \
or self.colon() \
or self.string() \
or self.text()
##or self._while() \
class InlineLexer(Lexer):
def next(self):
return self.deferred() \
or self.blank() \
or self.eos() \
or self.pipelessText() \
or self.mixin() \
or self.call() \
or self.assignment() \
or self.tag() \
or self.code() \
or self.id() \
or self.className() \
or self.attrs() \
or self.colon() \
or self.string() \
or self.text()
|
|
# -*- coding: utf-8 -*-
"""
Internal module for formatting output data in csv, html,
and latex files. This module also applies to display formatting.
"""
from __future__ import print_function
# pylint: disable=W0141
from pandas.core.dtypes.missing import isna, notna
from pandas.core.dtypes.common import (
is_categorical_dtype,
is_float_dtype,
is_period_arraylike,
is_integer_dtype,
is_interval_dtype,
is_datetimetz,
is_integer,
is_float,
is_scalar,
is_numeric_dtype,
is_datetime64_dtype,
is_timedelta64_dtype,
is_list_like)
from pandas.core.dtypes.generic import ABCSparseArray
from pandas.core.base import PandasObject
import pandas.core.common as com
from pandas.core.index import Index, MultiIndex, _ensure_index
from pandas import compat
from pandas.compat import (StringIO, lzip, map, zip, u)
from pandas.io.formats.terminal import get_terminal_size
from pandas.core.config import get_option, set_option
from pandas.io.common import (_expand_user, _stringify_path)
from pandas.io.formats.printing import adjoin, justify, pprint_thing
from pandas._libs import lib
from pandas._libs.tslib import (iNaT, Timestamp, Timedelta,
format_array_from_datetime)
from pandas.core.indexes.datetimes import DatetimeIndex
from pandas.core.indexes.period import PeriodIndex
import pandas as pd
import numpy as np
from functools import partial
common_docstring = """
Parameters
----------
buf : StringIO-like, optional
buffer to write to
columns : sequence, optional
the subset of columns to write; default None writes all columns
col_space : int, optional
the minimum width of each column
header : bool, optional
%(header)s
index : bool, optional
whether to print index (row) labels, default True
na_rep : string, optional
string representation of NAN to use, default 'NaN'
formatters : list or dict of one-parameter functions, optional
formatter functions to apply to columns' elements by position or name,
default None. The result of each function must be a unicode string.
List must be of length equal to the number of columns.
float_format : one-parameter function, optional
formatter function to apply to columns' elements if they are floats,
default None. The result of this function must be a unicode string.
sparsify : bool, optional
Set to False for a DataFrame with a hierarchical index to print every
multiindex key at each row, default True
index_names : bool, optional
Prints the names of the indexes, default True
line_width : int, optional
Width to wrap a line in characters, default no wrap
table_id : str, optional
id for the <table> element create by to_html
.. versionadded:: 0.23.0"""
_VALID_JUSTIFY_PARAMETERS = ("left", "right", "center", "justify",
"justify-all", "start", "end", "inherit",
"match-parent", "initial", "unset")
justify_docstring = """
justify : str, default None
How to justify the column labels. If None uses the option from
the print configuration (controlled by set_option), 'right' out
of the box. Valid values are
* left
* right
* center
* justify
* justify-all
* start
* end
* inherit
* match-parent
* initial
* unset
"""
return_docstring = """
Returns
-------
formatted : string (or unicode, depending on data and options)"""
docstring_to_string = common_docstring + justify_docstring + return_docstring
class CategoricalFormatter(object):
def __init__(self, categorical, buf=None, length=True, na_rep='NaN',
footer=True):
self.categorical = categorical
self.buf = buf if buf is not None else StringIO(u(""))
self.na_rep = na_rep
self.length = length
self.footer = footer
def _get_footer(self):
footer = ''
if self.length:
if footer:
footer += ', '
footer += "Length: {length}".format(length=len(self.categorical))
level_info = self.categorical._repr_categories_info()
# Levels are added in a newline
if footer:
footer += '\n'
footer += level_info
return compat.text_type(footer)
def _get_formatted_values(self):
return format_array(self.categorical.get_values(), None,
float_format=None, na_rep=self.na_rep)
def to_string(self):
categorical = self.categorical
if len(categorical) == 0:
if self.footer:
return self._get_footer()
else:
return u('')
fmt_values = self._get_formatted_values()
result = [u('{i}').format(i=i) for i in fmt_values]
result = [i.strip() for i in result]
result = u(', ').join(result)
result = [u('[') + result + u(']')]
if self.footer:
footer = self._get_footer()
if footer:
result.append(footer)
return compat.text_type(u('\n').join(result))
class SeriesFormatter(object):
def __init__(self, series, buf=None, length=True, header=True, index=True,
na_rep='NaN', name=False, float_format=None, dtype=True,
max_rows=None):
self.series = series
self.buf = buf if buf is not None else StringIO()
self.name = name
self.na_rep = na_rep
self.header = header
self.length = length
self.index = index
self.max_rows = max_rows
if float_format is None:
float_format = get_option("display.float_format")
self.float_format = float_format
self.dtype = dtype
self.adj = _get_adjustment()
self._chk_truncate()
def _chk_truncate(self):
from pandas.core.reshape.concat import concat
max_rows = self.max_rows
truncate_v = max_rows and (len(self.series) > max_rows)
series = self.series
if truncate_v:
if max_rows == 1:
row_num = max_rows
series = series.iloc[:max_rows]
else:
row_num = max_rows // 2
series = concat((series.iloc[:row_num],
series.iloc[-row_num:]))
self.tr_row_num = row_num
self.tr_series = series
self.truncate_v = truncate_v
def _get_footer(self):
name = self.series.name
footer = u('')
if getattr(self.series.index, 'freq', None) is not None:
footer += 'Freq: {freq}'.format(freq=self.series.index.freqstr)
if self.name is not False and name is not None:
if footer:
footer += ', '
series_name = pprint_thing(name,
escape_chars=('\t', '\r', '\n'))
footer += ((u"Name: {sname}".format(sname=series_name))
if name is not None else "")
if (self.length is True or
(self.length == 'truncate' and self.truncate_v)):
if footer:
footer += ', '
footer += 'Length: {length}'.format(length=len(self.series))
if self.dtype is not False and self.dtype is not None:
name = getattr(self.tr_series.dtype, 'name', None)
if name:
if footer:
footer += ', '
footer += u'dtype: {typ}'.format(typ=pprint_thing(name))
# level infos are added to the end and in a new line, like it is done
# for Categoricals
if is_categorical_dtype(self.tr_series.dtype):
level_info = self.tr_series._values._repr_categories_info()
if footer:
footer += "\n"
footer += level_info
return compat.text_type(footer)
def _get_formatted_index(self):
index = self.tr_series.index
is_multi = isinstance(index, MultiIndex)
if is_multi:
have_header = any(name for name in index.names)
fmt_index = index.format(names=True)
else:
have_header = index.name is not None
fmt_index = index.format(name=True)
return fmt_index, have_header
def _get_formatted_values(self):
values_to_format = self.tr_series._formatting_values()
return format_array(values_to_format, None,
float_format=self.float_format, na_rep=self.na_rep)
def to_string(self):
series = self.tr_series
footer = self._get_footer()
if len(series) == 0:
return 'Series([], ' + footer + ')'
fmt_index, have_header = self._get_formatted_index()
fmt_values = self._get_formatted_values()
if self.truncate_v:
n_header_rows = 0
row_num = self.tr_row_num
width = self.adj.len(fmt_values[row_num - 1])
if width > 3:
dot_str = '...'
else:
dot_str = '..'
# Series uses mode=center because it has single value columns
# DataFrame uses mode=left
dot_str = self.adj.justify([dot_str], width, mode='center')[0]
fmt_values.insert(row_num + n_header_rows, dot_str)
fmt_index.insert(row_num + 1, '')
if self.index:
result = self.adj.adjoin(3, *[fmt_index[1:], fmt_values])
else:
result = self.adj.adjoin(3, fmt_values).replace('\n ',
'\n').strip()
if self.header and have_header:
result = fmt_index[0] + '\n' + result
if footer:
result += '\n' + footer
return compat.text_type(u('').join(result))
class TextAdjustment(object):
def __init__(self):
self.encoding = get_option("display.encoding")
def len(self, text):
return compat.strlen(text, encoding=self.encoding)
def justify(self, texts, max_len, mode='right'):
return justify(texts, max_len, mode=mode)
def adjoin(self, space, *lists, **kwargs):
return adjoin(space, *lists, strlen=self.len,
justfunc=self.justify, **kwargs)
class EastAsianTextAdjustment(TextAdjustment):
def __init__(self):
super(EastAsianTextAdjustment, self).__init__()
if get_option("display.unicode.ambiguous_as_wide"):
self.ambiguous_width = 2
else:
self.ambiguous_width = 1
def len(self, text):
return compat.east_asian_len(text, encoding=self.encoding,
ambiguous_width=self.ambiguous_width)
def justify(self, texts, max_len, mode='right'):
# re-calculate padding space per str considering East Asian Width
def _get_pad(t):
return max_len - self.len(t) + len(t)
if mode == 'left':
return [x.ljust(_get_pad(x)) for x in texts]
elif mode == 'center':
return [x.center(_get_pad(x)) for x in texts]
else:
return [x.rjust(_get_pad(x)) for x in texts]
def _get_adjustment():
use_east_asian_width = get_option("display.unicode.east_asian_width")
if use_east_asian_width:
return EastAsianTextAdjustment()
else:
return TextAdjustment()
class TableFormatter(object):
is_truncated = False
show_dimensions = None
@property
def should_show_dimensions(self):
return (self.show_dimensions is True or
(self.show_dimensions == 'truncate' and self.is_truncated))
def _get_formatter(self, i):
if isinstance(self.formatters, (list, tuple)):
if is_integer(i):
return self.formatters[i]
else:
return None
else:
if is_integer(i) and i not in self.columns:
i = self.columns[i]
return self.formatters.get(i, None)
class DataFrameFormatter(TableFormatter):
"""
Render a DataFrame
self.to_string() : console-friendly tabular output
self.to_html() : html table
self.to_latex() : LaTeX tabular environment table
"""
__doc__ = __doc__ if __doc__ else ''
__doc__ += common_docstring + justify_docstring + return_docstring
def __init__(self, frame, buf=None, columns=None, col_space=None,
header=True, index=True, na_rep='NaN', formatters=None,
justify=None, float_format=None, sparsify=None,
index_names=True, line_width=None, max_rows=None,
max_cols=None, show_dimensions=False, decimal='.',
table_id=None, **kwds):
self.frame = frame
if buf is not None:
self.buf = _expand_user(_stringify_path(buf))
else:
self.buf = StringIO()
self.show_index_names = index_names
if sparsify is None:
sparsify = get_option("display.multi_sparse")
self.sparsify = sparsify
self.float_format = float_format
self.formatters = formatters if formatters is not None else {}
self.na_rep = na_rep
self.decimal = decimal
self.col_space = col_space
self.header = header
self.index = index
self.line_width = line_width
self.max_rows = max_rows
self.max_cols = max_cols
self.max_rows_displayed = min(max_rows or len(self.frame),
len(self.frame))
self.show_dimensions = show_dimensions
self.table_id = table_id
if justify is None:
self.justify = get_option("display.colheader_justify")
else:
self.justify = justify
self.kwds = kwds
if columns is not None:
self.columns = _ensure_index(columns)
self.frame = self.frame[self.columns]
else:
self.columns = frame.columns
self._chk_truncate()
self.adj = _get_adjustment()
def _chk_truncate(self):
"""
Checks whether the frame should be truncated. If so, slices
the frame up.
"""
from pandas.core.reshape.concat import concat
# Column of which first element is used to determine width of a dot col
self.tr_size_col = -1
# Cut the data to the information actually printed
max_cols = self.max_cols
max_rows = self.max_rows
if max_cols == 0 or max_rows == 0: # assume we are in the terminal
# (why else = 0)
(w, h) = get_terminal_size()
self.w = w
self.h = h
if self.max_rows == 0:
dot_row = 1
prompt_row = 1
if self.show_dimensions:
show_dimension_rows = 3
n_add_rows = (self.header + dot_row + show_dimension_rows +
prompt_row)
# rows available to fill with actual data
max_rows_adj = self.h - n_add_rows
self.max_rows_adj = max_rows_adj
# Format only rows and columns that could potentially fit the
# screen
if max_cols == 0 and len(self.frame.columns) > w:
max_cols = w
if max_rows == 0 and len(self.frame) > h:
max_rows = h
if not hasattr(self, 'max_rows_adj'):
self.max_rows_adj = max_rows
if not hasattr(self, 'max_cols_adj'):
self.max_cols_adj = max_cols
max_cols_adj = self.max_cols_adj
max_rows_adj = self.max_rows_adj
truncate_h = max_cols_adj and (len(self.columns) > max_cols_adj)
truncate_v = max_rows_adj and (len(self.frame) > max_rows_adj)
frame = self.frame
if truncate_h:
if max_cols_adj == 0:
col_num = len(frame.columns)
elif max_cols_adj == 1:
frame = frame.iloc[:, :max_cols]
col_num = max_cols
else:
col_num = (max_cols_adj // 2)
frame = concat((frame.iloc[:, :col_num],
frame.iloc[:, -col_num:]), axis=1)
self.tr_col_num = col_num
if truncate_v:
if max_rows_adj == 0:
row_num = len(frame)
if max_rows_adj == 1:
row_num = max_rows
frame = frame.iloc[:max_rows, :]
else:
row_num = max_rows_adj // 2
frame = concat((frame.iloc[:row_num, :],
frame.iloc[-row_num:, :]))
self.tr_row_num = row_num
self.tr_frame = frame
self.truncate_h = truncate_h
self.truncate_v = truncate_v
self.is_truncated = self.truncate_h or self.truncate_v
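    # Illustrative sketch, not part of the original module: how the truncation
    # above slices a frame. Assuming a plain 10-row frame and max_rows=4:
    #
    #   >>> import pandas as pd
    #   >>> df = pd.DataFrame({'a': range(10)})
    #   >>> fmt = DataFrameFormatter(df, max_rows=4)
    #   >>> len(fmt.tr_frame), fmt.tr_row_num, fmt.truncate_v
    #   (4, 2, True)
    #
    # i.e. the truncated frame keeps the first two and last two rows, and
    # _to_str_columns later inserts a '...' row at tr_row_num to mark the gap.
    # Horizontal truncation works the same way on columns (axis=1).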
def _to_str_columns(self):
"""
Render a DataFrame to a list of columns (as lists of strings).
"""
frame = self.tr_frame
# may include levels names also
str_index = self._get_formatted_index(frame)
if not is_list_like(self.header) and not self.header:
stringified = []
for i, c in enumerate(frame):
fmt_values = self._format_col(i)
fmt_values = _make_fixed_width(fmt_values, self.justify,
minimum=(self.col_space or 0),
adj=self.adj)
stringified.append(fmt_values)
else:
if is_list_like(self.header):
if len(self.header) != len(self.columns):
raise ValueError(('Writing {ncols} cols but got {nalias} '
'aliases'
.format(ncols=len(self.columns),
nalias=len(self.header))))
str_columns = [[label] for label in self.header]
else:
str_columns = self._get_formatted_column_labels(frame)
stringified = []
for i, c in enumerate(frame):
cheader = str_columns[i]
header_colwidth = max(self.col_space or 0,
*(self.adj.len(x) for x in cheader))
fmt_values = self._format_col(i)
fmt_values = _make_fixed_width(fmt_values, self.justify,
minimum=header_colwidth,
adj=self.adj)
max_len = max(max(self.adj.len(x) for x in fmt_values),
header_colwidth)
cheader = self.adj.justify(cheader, max_len, mode=self.justify)
stringified.append(cheader + fmt_values)
strcols = stringified
if self.index:
strcols.insert(0, str_index)
# Add ... to signal truncated
truncate_h = self.truncate_h
truncate_v = self.truncate_v
if truncate_h:
col_num = self.tr_col_num
# infer from column header
col_width = self.adj.len(strcols[self.tr_size_col][0])
strcols.insert(self.tr_col_num + 1, ['...'.center(col_width)] *
(len(str_index)))
if truncate_v:
n_header_rows = len(str_index) - len(frame)
row_num = self.tr_row_num
for ix, col in enumerate(strcols):
# infer from above row
cwidth = self.adj.len(strcols[ix][row_num])
is_dot_col = False
if truncate_h:
is_dot_col = ix == col_num + 1
if cwidth > 3 or is_dot_col:
my_str = '...'
else:
my_str = '..'
if ix == 0:
dot_mode = 'left'
elif is_dot_col:
cwidth = self.adj.len(strcols[self.tr_size_col][0])
dot_mode = 'center'
else:
dot_mode = 'right'
dot_str = self.adj.justify([my_str], cwidth, mode=dot_mode)[0]
strcols[ix].insert(row_num + n_header_rows, dot_str)
return strcols
def to_string(self):
"""
Render a DataFrame to a console-friendly tabular output.
"""
from pandas import Series
frame = self.frame
if len(frame.columns) == 0 or len(frame.index) == 0:
info_line = (u('Empty {name}\nColumns: {col}\nIndex: {idx}')
.format(name=type(self.frame).__name__,
col=pprint_thing(frame.columns),
idx=pprint_thing(frame.index)))
text = info_line
else:
strcols = self._to_str_columns()
if self.line_width is None: # no need to wrap around just print
# the whole frame
text = self.adj.adjoin(1, *strcols)
elif (not isinstance(self.max_cols, int) or
self.max_cols > 0): # need to wrap around
text = self._join_multiline(*strcols)
else: # max_cols == 0. Try to fit frame to terminal
text = self.adj.adjoin(1, *strcols).split('\n')
max_len = Series(text).str.len().max()
headers = [ele[0] for ele in strcols]
# Size of last col determines dot col size. See
# `self._to_str_columns
size_tr_col = len(headers[self.tr_size_col])
max_len += size_tr_col # Need to make space for largest row
# plus truncate dot col
dif = max_len - self.w
# '+ 1' to avoid too wide repr (GH PR #17023)
adj_dif = dif + 1
col_lens = Series([Series(ele).apply(len).max()
for ele in strcols])
n_cols = len(col_lens)
counter = 0
while adj_dif > 0 and n_cols > 1:
counter += 1
mid = int(round(n_cols / 2.))
mid_ix = col_lens.index[mid]
col_len = col_lens[mid_ix]
adj_dif -= (col_len + 1) # adjoin adds one
col_lens = col_lens.drop(mid_ix)
n_cols = len(col_lens)
max_cols_adj = n_cols - self.index # subtract index column
self.max_cols_adj = max_cols_adj
# Call again _chk_truncate to cut frame appropriately
# and then generate string representation
self._chk_truncate()
strcols = self._to_str_columns()
text = self.adj.adjoin(1, *strcols)
if not self.index:
text = text.replace('\n ', '\n').strip()
self.buf.writelines(text)
if self.should_show_dimensions:
self.buf.write("\n\n[{nrows} rows x {ncols} columns]"
.format(nrows=len(frame), ncols=len(frame.columns)))
def _join_multiline(self, *strcols):
lwidth = self.line_width
adjoin_width = 1
strcols = list(strcols)
if self.index:
idx = strcols.pop(0)
lwidth -= np.array([self.adj.len(x)
for x in idx]).max() + adjoin_width
col_widths = [np.array([self.adj.len(x) for x in col]).max() if
len(col) > 0 else 0 for col in strcols]
col_bins = _binify(col_widths, lwidth)
nbins = len(col_bins)
if self.truncate_v:
nrows = self.max_rows_adj + 1
else:
nrows = len(self.frame)
str_lst = []
st = 0
for i, ed in enumerate(col_bins):
row = strcols[st:ed]
if self.index:
row.insert(0, idx)
if nbins > 1:
if ed <= len(strcols) and i < nbins - 1:
row.append([' \\'] + [' '] * (nrows - 1))
else:
row.append([' '] * nrows)
str_lst.append(self.adj.adjoin(adjoin_width, *row))
st = ed
return '\n\n'.join(str_lst)
def to_latex(self, column_format=None, longtable=False, encoding=None,
multicolumn=False, multicolumn_format=None, multirow=False):
"""
Render a DataFrame to a LaTeX tabular/longtable environment output.
"""
from pandas.io.formats.latex import LatexFormatter
latex_renderer = LatexFormatter(self, column_format=column_format,
longtable=longtable,
multicolumn=multicolumn,
multicolumn_format=multicolumn_format,
multirow=multirow)
if encoding is None:
encoding = 'ascii' if compat.PY2 else 'utf-8'
if hasattr(self.buf, 'write'):
latex_renderer.write_result(self.buf)
elif isinstance(self.buf, compat.string_types):
import codecs
with codecs.open(self.buf, 'w', encoding=encoding) as f:
latex_renderer.write_result(f)
else:
raise TypeError('buf is not a file name and it has no write '
'method')
def _format_col(self, i):
frame = self.tr_frame
formatter = self._get_formatter(i)
values_to_format = frame.iloc[:, i]._formatting_values()
return format_array(values_to_format, formatter,
float_format=self.float_format, na_rep=self.na_rep,
space=self.col_space, decimal=self.decimal)
def to_html(self, classes=None, notebook=False, border=None):
"""
Render a DataFrame to a html table.
Parameters
----------
classes : str or list-like
classes to include in the `class` attribute of the opening
``<table>`` tag, in addition to the default "dataframe".
notebook : {True, False}, optional, default False
Whether the generated HTML is for IPython Notebook.
border : int
A ``border=border`` attribute is included in the opening
``<table>`` tag. Default ``pd.options.html.border``.
.. versionadded:: 0.19.0
"""
from pandas.io.formats.html import HTMLFormatter
html_renderer = HTMLFormatter(self, classes=classes,
max_rows=self.max_rows,
max_cols=self.max_cols,
notebook=notebook,
border=border,
table_id=self.table_id)
if hasattr(self.buf, 'write'):
html_renderer.write_result(self.buf)
elif isinstance(self.buf, compat.string_types):
with open(self.buf, 'w') as f:
html_renderer.write_result(f)
else:
raise TypeError('buf is not a file name and it has no write '
                            'method')
def _get_formatted_column_labels(self, frame):
from pandas.core.index import _sparsify
columns = frame.columns
if isinstance(columns, MultiIndex):
fmt_columns = columns.format(sparsify=False, adjoin=False)
fmt_columns = lzip(*fmt_columns)
dtypes = self.frame.dtypes._values
# if we have a Float level, they don't use leading space at all
restrict_formatting = any(l.is_floating for l in columns.levels)
need_leadsp = dict(zip(fmt_columns, map(is_numeric_dtype, dtypes)))
def space_format(x, y):
if (y not in self.formatters and
need_leadsp[x] and not restrict_formatting):
return ' ' + y
return y
str_columns = list(zip(*[[space_format(x, y) for y in x]
for x in fmt_columns]))
if self.sparsify:
str_columns = _sparsify(str_columns)
str_columns = [list(x) for x in zip(*str_columns)]
else:
fmt_columns = columns.format()
dtypes = self.frame.dtypes
need_leadsp = dict(zip(fmt_columns, map(is_numeric_dtype, dtypes)))
str_columns = [[' ' + x if not self._get_formatter(i) and
need_leadsp[x] else x]
for i, (col, x) in enumerate(zip(columns,
fmt_columns))]
if self.show_index_names and self.has_index_names:
for x in str_columns:
x.append('')
# self.str_columns = str_columns
return str_columns
@property
def has_index_names(self):
return _has_names(self.frame.index)
@property
def has_column_names(self):
return _has_names(self.frame.columns)
def _get_formatted_index(self, frame):
# Note: this is only used by to_string() and to_latex(), not by
# to_html().
index = frame.index
columns = frame.columns
show_index_names = self.show_index_names and self.has_index_names
show_col_names = (self.show_index_names and self.has_column_names)
fmt = self._get_formatter('__index__')
if isinstance(index, MultiIndex):
fmt_index = index.format(sparsify=self.sparsify, adjoin=False,
names=show_index_names, formatter=fmt)
else:
fmt_index = [index.format(name=show_index_names, formatter=fmt)]
fmt_index = [tuple(_make_fixed_width(list(x), justify='left',
minimum=(self.col_space or 0),
adj=self.adj)) for x in fmt_index]
adjoined = self.adj.adjoin(1, *fmt_index).split('\n')
# empty space for columns
if show_col_names:
col_header = ['{x}'.format(x=x)
for x in self._get_column_name_list()]
else:
col_header = [''] * columns.nlevels
if self.header:
return col_header + adjoined
else:
return adjoined
def _get_column_name_list(self):
names = []
columns = self.frame.columns
if isinstance(columns, MultiIndex):
names.extend('' if name is None else name
for name in columns.names)
else:
names.append('' if columns.name is None else columns.name)
return names
# ----------------------------------------------------------------------
# Array formatters
def format_array(values, formatter, float_format=None, na_rep='NaN',
digits=None, space=None, justify='right', decimal='.'):
if is_categorical_dtype(values):
fmt_klass = CategoricalArrayFormatter
elif is_interval_dtype(values):
fmt_klass = IntervalArrayFormatter
elif is_float_dtype(values.dtype):
fmt_klass = FloatArrayFormatter
elif is_period_arraylike(values):
fmt_klass = PeriodArrayFormatter
elif is_integer_dtype(values.dtype):
fmt_klass = IntArrayFormatter
elif is_datetimetz(values):
fmt_klass = Datetime64TZFormatter
elif is_datetime64_dtype(values.dtype):
fmt_klass = Datetime64Formatter
elif is_timedelta64_dtype(values.dtype):
fmt_klass = Timedelta64Formatter
else:
fmt_klass = GenericArrayFormatter
if space is None:
space = get_option("display.column_space")
if float_format is None:
float_format = get_option("display.float_format")
if digits is None:
digits = get_option("display.precision")
fmt_obj = fmt_klass(values, digits=digits, na_rep=na_rep,
float_format=float_format, formatter=formatter,
space=space, justify=justify, decimal=decimal)
return fmt_obj.get_result()
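# Illustrative sketch, not part of the original module: format_array picks a
# formatter class from the dtype and returns equal-width, justified strings.
# With default display options, roughly:
#
#   >>> import numpy as np
#   >>> format_array(np.array([1.0, 12.3456, np.nan]), None, digits=3)
#   ['  1.000', ' 12.346', '    NaN']
#
# (exact padding depends on options such as display.precision and
# display.max_colwidth).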
class GenericArrayFormatter(object):
def __init__(self, values, digits=7, formatter=None, na_rep='NaN',
space=12, float_format=None, justify='right', decimal='.',
quoting=None, fixed_width=True):
self.values = values
self.digits = digits
self.na_rep = na_rep
self.space = space
self.formatter = formatter
self.float_format = float_format
self.justify = justify
self.decimal = decimal
self.quoting = quoting
self.fixed_width = fixed_width
def get_result(self):
fmt_values = self._format_strings()
return _make_fixed_width(fmt_values, self.justify)
def _format_strings(self):
if self.float_format is None:
float_format = get_option("display.float_format")
if float_format is None:
fmt_str = ('{{x: .{prec:d}g}}'
.format(prec=get_option("display.precision")))
float_format = lambda x: fmt_str.format(x=x)
else:
float_format = self.float_format
formatter = (
self.formatter if self.formatter is not None else
(lambda x: pprint_thing(x, escape_chars=('\t', '\r', '\n'))))
def _format(x):
if self.na_rep is not None and is_scalar(x) and isna(x):
if x is None:
return 'None'
elif x is pd.NaT:
return 'NaT'
return self.na_rep
elif isinstance(x, PandasObject):
return u'{x}'.format(x=x)
else:
# object dtype
return u'{x}'.format(x=formatter(x))
vals = self.values
if isinstance(vals, Index):
vals = vals._values
elif isinstance(vals, ABCSparseArray):
vals = vals.values
is_float_type = lib.map_infer(vals, is_float) & notna(vals)
leading_space = is_float_type.any()
fmt_values = []
for i, v in enumerate(vals):
if not is_float_type[i] and leading_space:
fmt_values.append(u' {v}'.format(v=_format(v)))
elif is_float_type[i]:
fmt_values.append(float_format(v))
else:
fmt_values.append(u' {v}'.format(v=_format(v)))
return fmt_values
class FloatArrayFormatter(GenericArrayFormatter):
"""
"""
def __init__(self, *args, **kwargs):
GenericArrayFormatter.__init__(self, *args, **kwargs)
# float_format is expected to be a string
# formatter should be used to pass a function
if self.float_format is not None and self.formatter is None:
if callable(self.float_format):
self.formatter = self.float_format
self.float_format = None
def _value_formatter(self, float_format=None, threshold=None):
"""Returns a function to be applied on each value to format it
"""
# the float_format parameter supersedes self.float_format
if float_format is None:
float_format = self.float_format
# we are going to compose different functions, to first convert to
# a string, then replace the decimal symbol, and finally chop according
# to the threshold
# when there is no float_format, we use str instead of '%g'
# because str(0.0) = '0.0' while '%g' % 0.0 = '0'
if float_format:
def base_formatter(v):
return float_format(value=v) if notna(v) else self.na_rep
else:
def base_formatter(v):
return str(v) if notna(v) else self.na_rep
if self.decimal != '.':
def decimal_formatter(v):
return base_formatter(v).replace('.', self.decimal, 1)
else:
decimal_formatter = base_formatter
if threshold is None:
return decimal_formatter
def formatter(value):
if notna(value):
if abs(value) > threshold:
return decimal_formatter(value)
else:
return decimal_formatter(0.0)
else:
return self.na_rep
return formatter
def get_result_as_array(self):
"""
Returns the float values converted into strings using
the parameters given at initialisation, as a numpy array
"""
if self.formatter is not None:
return np.array([self.formatter(x) for x in self.values])
if self.fixed_width:
threshold = get_option("display.chop_threshold")
else:
threshold = None
# if we have a fixed_width, we'll need to try different float_format
def format_values_with(float_format):
formatter = self._value_formatter(float_format, threshold)
# separate the wheat from the chaff
values = self.values
mask = isna(values)
if hasattr(values, 'to_dense'): # sparse numpy ndarray
values = values.to_dense()
values = np.array(values, dtype='object')
values[mask] = self.na_rep
imask = (~mask).ravel()
values.flat[imask] = np.array([formatter(val)
for val in values.ravel()[imask]])
if self.fixed_width:
return _trim_zeros(values, self.na_rep)
return values
# There is a special default string when we are fixed-width
# The default is otherwise to use str instead of a formatting string
if self.float_format is None:
if self.fixed_width:
float_format = partial('{value: .{digits:d}f}'.format,
digits=self.digits)
else:
float_format = self.float_format
else:
float_format = lambda value: self.float_format % value
formatted_values = format_values_with(float_format)
if not self.fixed_width:
return formatted_values
# we need do convert to engineering format if some values are too small
# and would appear as 0, or if some values are too big and take too
# much space
if len(formatted_values) > 0:
maxlen = max(len(x) for x in formatted_values)
too_long = maxlen > self.digits + 6
else:
too_long = False
with np.errstate(invalid='ignore'):
abs_vals = np.abs(self.values)
# this is pretty arbitrary for now
# large values: more that 8 characters including decimal symbol
# and first digit, hence > 1e6
has_large_values = (abs_vals > 1e6).any()
has_small_values = ((abs_vals < 10**(-self.digits)) &
(abs_vals > 0)).any()
if has_small_values or (too_long and has_large_values):
float_format = partial('{value: .{digits:d}e}'.format,
digits=self.digits)
formatted_values = format_values_with(float_format)
return formatted_values
def _format_strings(self):
# shortcut
if self.formatter is not None:
return [self.formatter(x) for x in self.values]
return list(self.get_result_as_array())
class IntArrayFormatter(GenericArrayFormatter):
def _format_strings(self):
formatter = self.formatter or (lambda x: '{x: d}'.format(x=x))
fmt_values = [formatter(x) for x in self.values]
return fmt_values
class Datetime64Formatter(GenericArrayFormatter):
def __init__(self, values, nat_rep='NaT', date_format=None, **kwargs):
super(Datetime64Formatter, self).__init__(values, **kwargs)
self.nat_rep = nat_rep
self.date_format = date_format
def _format_strings(self):
""" we by definition have DO NOT have a TZ """
values = self.values
if not isinstance(values, DatetimeIndex):
values = DatetimeIndex(values)
if self.formatter is not None and callable(self.formatter):
return [self.formatter(x) for x in values]
fmt_values = format_array_from_datetime(
values.asi8.ravel(),
format=_get_format_datetime64_from_values(values,
self.date_format),
na_rep=self.nat_rep).reshape(values.shape)
return fmt_values.tolist()
class IntervalArrayFormatter(GenericArrayFormatter):
def __init__(self, values, *args, **kwargs):
GenericArrayFormatter.__init__(self, values, *args, **kwargs)
def _format_strings(self):
formatter = self.formatter or str
fmt_values = np.array([formatter(x) for x in self.values])
return fmt_values
class PeriodArrayFormatter(IntArrayFormatter):
def _format_strings(self):
from pandas.core.indexes.period import IncompatibleFrequency
try:
values = PeriodIndex(self.values).to_native_types()
except IncompatibleFrequency:
# periods may contains different freq
values = Index(self.values, dtype='object').to_native_types()
formatter = self.formatter or (lambda x: '{x}'.format(x=x))
fmt_values = [formatter(x) for x in values]
return fmt_values
class CategoricalArrayFormatter(GenericArrayFormatter):
def __init__(self, values, *args, **kwargs):
GenericArrayFormatter.__init__(self, values, *args, **kwargs)
def _format_strings(self):
fmt_values = format_array(self.values.get_values(), self.formatter,
float_format=self.float_format,
na_rep=self.na_rep, digits=self.digits,
space=self.space, justify=self.justify)
return fmt_values
def format_percentiles(percentiles):
"""
Outputs rounded and formatted percentiles.
Parameters
----------
percentiles : list-like, containing floats from interval [0,1]
Returns
-------
formatted : list of strings
Notes
-----
Rounding precision is chosen so that: (1) if any two elements of
``percentiles`` differ, they remain different after rounding
(2) no entry is *rounded* to 0% or 100%.
Any non-integer is always rounded to at least 1 decimal place.
Examples
--------
Keeps all entries different after rounding:
>>> format_percentiles([0.01999, 0.02001, 0.5, 0.666666, 0.9999])
['1.999%', '2.001%', '50%', '66.667%', '99.99%']
No element is rounded to 0% or 100% (unless already equal to it).
Duplicates are allowed:
>>> format_percentiles([0, 0.5, 0.02001, 0.5, 0.666666, 0.9999])
['0%', '50%', '2.0%', '50%', '66.67%', '99.99%']
"""
percentiles = np.asarray(percentiles)
# It checks for np.NaN as well
with np.errstate(invalid='ignore'):
if not is_numeric_dtype(percentiles) or not np.all(percentiles >= 0) \
or not np.all(percentiles <= 1):
raise ValueError("percentiles should all be in the interval [0,1]")
percentiles = 100 * percentiles
int_idx = (percentiles.astype(int) == percentiles)
if np.all(int_idx):
out = percentiles.astype(int).astype(str)
return [i + '%' for i in out]
unique_pcts = np.unique(percentiles)
to_begin = unique_pcts[0] if unique_pcts[0] > 0 else None
to_end = 100 - unique_pcts[-1] if unique_pcts[-1] < 100 else None
# Least precision that keeps percentiles unique after rounding
prec = -np.floor(np.log10(np.min(
np.ediff1d(unique_pcts, to_begin=to_begin, to_end=to_end)
))).astype(int)
prec = max(1, prec)
out = np.empty_like(percentiles, dtype=object)
out[int_idx] = percentiles[int_idx].astype(int).astype(str)
out[~int_idx] = percentiles[~int_idx].round(prec).astype(str)
return [i + '%' for i in out]
def _is_dates_only(values):
# return a boolean if we are only dates (and don't have a timezone)
values = DatetimeIndex(values)
if values.tz is not None:
return False
values_int = values.asi8
consider_values = values_int != iNaT
one_day_nanos = (86400 * 1e9)
even_days = np.logical_and(consider_values,
values_int % int(one_day_nanos) != 0).sum() == 0
if even_days:
return True
return False
def _format_datetime64(x, tz=None, nat_rep='NaT'):
if x is None or (is_scalar(x) and isna(x)):
return nat_rep
if tz is not None or not isinstance(x, Timestamp):
x = Timestamp(x, tz=tz)
return str(x)
def _format_datetime64_dateonly(x, nat_rep='NaT', date_format=None):
if x is None or (is_scalar(x) and isna(x)):
return nat_rep
if not isinstance(x, Timestamp):
x = Timestamp(x)
if date_format:
return x.strftime(date_format)
else:
return x._date_repr
def _get_format_datetime64(is_dates_only, nat_rep='NaT', date_format=None):
if is_dates_only:
return lambda x, tz=None: _format_datetime64_dateonly(
x, nat_rep=nat_rep, date_format=date_format)
else:
return lambda x, tz=None: _format_datetime64(x, tz=tz, nat_rep=nat_rep)
def _get_format_datetime64_from_values(values, date_format):
""" given values and a date_format, return a string format """
is_dates_only = _is_dates_only(values)
if is_dates_only:
return date_format or "%Y-%m-%d"
return date_format
class Datetime64TZFormatter(Datetime64Formatter):
def _format_strings(self):
""" we by definition have a TZ """
values = self.values.astype(object)
is_dates_only = _is_dates_only(values)
formatter = (self.formatter or
_get_format_datetime64(is_dates_only,
date_format=self.date_format))
fmt_values = [formatter(x) for x in values]
return fmt_values
class Timedelta64Formatter(GenericArrayFormatter):
def __init__(self, values, nat_rep='NaT', box=False, **kwargs):
super(Timedelta64Formatter, self).__init__(values, **kwargs)
self.nat_rep = nat_rep
self.box = box
def _format_strings(self):
formatter = (self.formatter or
_get_format_timedelta64(self.values, nat_rep=self.nat_rep,
box=self.box))
fmt_values = np.array([formatter(x) for x in self.values])
return fmt_values
def _get_format_timedelta64(values, nat_rep='NaT', box=False):
"""
Return a formatter function for a range of timedeltas.
    These will all have the same format argument.
    If box, then show the result in quotes.
"""
values_int = values.astype(np.int64)
consider_values = values_int != iNaT
one_day_nanos = (86400 * 1e9)
even_days = np.logical_and(consider_values,
values_int % one_day_nanos != 0).sum() == 0
all_sub_day = np.logical_and(
consider_values, np.abs(values_int) >= one_day_nanos).sum() == 0
if even_days:
format = None
elif all_sub_day:
format = 'sub_day'
else:
format = 'long'
def _formatter(x):
if x is None or (is_scalar(x) and isna(x)):
return nat_rep
if not isinstance(x, Timedelta):
x = Timedelta(x)
result = x._repr_base(format=format)
if box:
result = "'{res}'".format(res=result)
return result
return _formatter
def _make_fixed_width(strings, justify='right', minimum=None, adj=None):
if len(strings) == 0 or justify == 'all':
return strings
if adj is None:
adj = _get_adjustment()
max_len = max(adj.len(x) for x in strings)
if minimum is not None:
max_len = max(minimum, max_len)
conf_max = get_option("display.max_colwidth")
if conf_max is not None and max_len > conf_max:
max_len = conf_max
def just(x):
if conf_max is not None:
if (conf_max > 3) & (adj.len(x) > max_len):
x = x[:max_len - 3] + '...'
return x
strings = [just(x) for x in strings]
result = adj.justify(strings, max_len, mode=justify)
return result
def _trim_zeros(str_floats, na_rep='NaN'):
"""
Trims zeros, leaving just one before the decimal points if need be.
"""
trimmed = str_floats
def _cond(values):
non_na = [x for x in values if x != na_rep]
return (len(non_na) > 0 and all(x.endswith('0') for x in non_na) and
not (any(('e' in x) or ('E' in x) for x in non_na)))
while _cond(trimmed):
trimmed = [x[:-1] if x != na_rep else x for x in trimmed]
# leave one 0 after the decimal points if need be.
return [x + "0" if x.endswith('.') and x != na_rep else x for x in trimmed]
def _has_names(index):
if isinstance(index, MultiIndex):
return com._any_not_none(*index.names)
else:
return index.name is not None
class EngFormatter(object):
"""
Formats float values according to engineering format.
Based on matplotlib.ticker.EngFormatter
"""
# The SI engineering prefixes
ENG_PREFIXES = {
-24: "y",
-21: "z",
-18: "a",
-15: "f",
-12: "p",
-9: "n",
-6: "u",
-3: "m",
0: "",
3: "k",
6: "M",
9: "G",
12: "T",
15: "P",
18: "E",
21: "Z",
24: "Y"
}
def __init__(self, accuracy=None, use_eng_prefix=False):
self.accuracy = accuracy
self.use_eng_prefix = use_eng_prefix
def __call__(self, num):
""" Formats a number in engineering notation, appending a letter
representing the power of 1000 of the original number. Some examples:
>>> format_eng(0) # for self.accuracy = 0
' 0'
>>> format_eng(1000000) # for self.accuracy = 1,
# self.use_eng_prefix = True
' 1.0M'
>>> format_eng("-1e-6") # for self.accuracy = 2
# self.use_eng_prefix = False
'-1.00E-06'
@param num: the value to represent
@type num: either a numeric value or a string that can be converted to
a numeric value (as per decimal.Decimal constructor)
@return: engineering formatted string
"""
import decimal
import math
dnum = decimal.Decimal(str(num))
if decimal.Decimal.is_nan(dnum):
return 'NaN'
if decimal.Decimal.is_infinite(dnum):
return 'inf'
sign = 1
if dnum < 0: # pragma: no cover
sign = -1
dnum = -dnum
if dnum != 0:
pow10 = decimal.Decimal(int(math.floor(dnum.log10() / 3) * 3))
else:
pow10 = decimal.Decimal(0)
pow10 = pow10.min(max(self.ENG_PREFIXES.keys()))
pow10 = pow10.max(min(self.ENG_PREFIXES.keys()))
int_pow10 = int(pow10)
if self.use_eng_prefix:
prefix = self.ENG_PREFIXES[int_pow10]
else:
if int_pow10 < 0:
prefix = 'E-{pow10:02d}'.format(pow10=-int_pow10)
else:
prefix = 'E+{pow10:02d}'.format(pow10=int_pow10)
mant = sign * dnum / (10**pow10)
if self.accuracy is None: # pragma: no cover
format_str = u("{mant: g}{prefix}")
else:
format_str = (u("{{mant: .{acc:d}f}}{{prefix}}")
.format(acc=self.accuracy))
formatted = format_str.format(mant=mant, prefix=prefix)
return formatted # .strip()
def set_eng_float_format(accuracy=3, use_eng_prefix=False):
"""
Alter default behavior on how float is formatted in DataFrame.
Format float in engineering format. By accuracy, we mean the number of
decimal digits after the floating point.
See also EngFormatter.
"""
set_option("display.float_format", EngFormatter(accuracy, use_eng_prefix))
set_option("display.column_space", max(12, accuracy + 9))
def _binify(cols, line_width):
adjoin_width = 1
bins = []
curr_width = 0
i_last_column = len(cols) - 1
for i, w in enumerate(cols):
w_adjoined = w + adjoin_width
curr_width += w_adjoined
if i_last_column == i:
wrap = curr_width + 1 > line_width and i > 0
else:
wrap = curr_width + 2 > line_width and i > 0
if wrap:
bins.append(i)
curr_width = w_adjoined
bins.append(len(cols))
return bins
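# Illustrative sketch, not part of the original module: _binify returns the
# exclusive end index of the columns placed on each wrapped line, given column
# widths plus one space of adjoin padding:
#
#   >>> _binify([5, 10, 8, 12], 20)
#   [2, 3, 4]
#
# i.e. columns 0-1 fit on the first line, column 2 on the second and column 3
# on the third; _join_multiline slices strcols at these bin edges.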
def get_level_lengths(levels, sentinel=''):
"""For each index in each level the function returns lengths of indexes.
Parameters
----------
levels : list of lists
List of values on for level.
sentinel : string, optional
Value which states that no new index starts on there.
Returns
----------
Returns list of maps. For each level returns map of indexes (key is index
in row and value is length of index).
"""
if len(levels) == 0:
return []
control = [True for x in levels[0]]
result = []
for level in levels:
last_index = 0
lengths = {}
for i, key in enumerate(level):
if control[i] and key == sentinel:
pass
else:
control[i] = False
lengths[last_index] = i - last_index
last_index = i
lengths[last_index] = len(level) - last_index
result.append(lengths)
return result
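# Illustrative sketch, not part of the original module: with sparsified
# MultiIndex labels, each returned dict maps the row where a label run starts
# to that run's length:
#
#   >>> get_level_lengths([['a', '', 'b', ''], ['x', 'y', 'x', 'y']])
#   [{0: 2, 2: 2}, {0: 1, 1: 1, 2: 1, 3: 1}]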
def buffer_put_lines(buf, lines):
"""
Appends lines to a buffer.
Parameters
----------
buf
The buffer to write to
lines
The lines to append.
"""
if any(isinstance(x, compat.text_type) for x in lines):
lines = [compat.text_type(x) for x in lines]
buf.write('\n'.join(lines))
|
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# vi:ts=4:et
# Wekan API Python CLI, originally from here, where is more details:
# https://github.com/wekan/wekan/wiki/New-card-with-Python3-and-REST-API
# TODO:
# addcustomfieldtoboard: There is error: Settings must be object. So adding does not work yet.
try:
# python 3
from urllib.parse import urlencode
except ImportError:
# python 2
from urllib import urlencode
import json
import requests
import sys
arguments = len(sys.argv) - 1
if arguments == 0:
print("=== Wekan API Python CLI: Shows IDs for addcard ===")
print("AUTHORID is USERID that writes card.")
print("If *nix: chmod +x api.py => ./api.py users")
print("Syntax:")
print(" python3 api.py users # All users")
print(" python3 api.py boards # All Public Boards")
print(" python3 api.py boards USERID # Boards of USERID")
print(" python3 api.py board BOARDID # Info of BOARDID")
print(" python3 api.py customfields BOARDID # Custom Fields of BOARDID")
print(" python3 api.py customfield BOARDID CUSTOMFIELDID # Info of CUSTOMFIELDID")
print(" python3 api.py addcustomfieldtoboard AUTHORID BOARDID NAME TYPE SETTINGS SHOWONCARD AUTOMATICALLYONCARD SHOWLABELONMINICARD SHOWSUMATTOPOFLIST # Add Custom Field to Board")
print(" python3 api.py swimlanes BOARDID # Swimlanes of BOARDID")
print(" python3 api.py lists BOARDID # Lists of BOARDID")
print(" python3 api.py list BOARDID LISTID # Info of LISTID")
print(" python3 api.py createlist BOARDID LISTTITLE # Create list")
print(" python3 api.py addcard AUTHORID BOARDID SWIMLANEID LISTID CARDTITLE CARDDESCRIPTION")
print(" python3 api.py editcard BOARDID LISTID CARDID NEWCARDTITLE NEWCARDDESCRIPTION")
print(" python3 api.py listattachments BOARDID # List attachments")
# TODO:
# print(" python3 api.py attachmentjson BOARDID ATTACHMENTID # One attachment as JSON base64")
# print(" python3 api.py attachmentbinary BOARDID ATTACHMENTID # One attachment as binary file")
# print(" python3 api.py attachmentdownload BOARDID ATTACHMENTID # One attachment as file")
# print(" python3 api.py attachmentsdownload BOARDID # All attachments as files")
    sys.exit()
# ------- SETTINGS START -------------
# Username is your Wekan username or email address.
# OIDC/OAuth2 etc uses email address as username.
username = 'testtest'
password = 'testtest'
wekanurl = 'http://localhost:4000/'
# ------- SETTINGS END -------------
"""
EXAMPLE:
python3 api.py
OR:
chmod +x api.py
./api.py
=== Wekan API Python CLI: Shows IDs for addcard ===
AUTHORID is USERID that writes card.
Syntax:
python3 api.py users # All users
python3 api.py boards USERID # Boards of USERID
python3 api.py board BOARDID # Info of BOARDID
python3 api.py customfields BOARDID # Custom Fields of BOARDID
python3 api.py customfield BOARDID CUSTOMFIELDID # Info of CUSTOMFIELDID
python3 api.py addcustomfieldtoboard AUTHORID BOARDID NAME TYPE SETTINGS SHOWONCARD AUTOMATICALLYONCARD SHOWLABELONMINICARD SHOWSUMATTOPOFLIST # Add Custom Field to Board
python3 api.py swimlanes BOARDID # Swimlanes of BOARDID
python3 api.py lists BOARDID # Lists of BOARDID
python3 api.py list BOARDID LISTID # Info of LISTID
python3 api.py createlist BOARDID LISTTITLE # Create list
python3 api.py addcard AUTHORID BOARDID SWIMLANEID LISTID CARDTITLE CARDDESCRIPTION
python3 api.py editcard BOARDID LISTID CARDID NEWCARDTITLE NEWCARDDESCRIPTION
python3 api.py listattachments BOARDID # List attachments
python3 api.py attachmentjson BOARDID ATTACHMENTID # One attachment as JSON base64
python3 api.py attachmentbinary BOARDID ATTACHMENTID # One attachment as binary file
=== ADD CUSTOM FIELD TO BOARD ===
Type: text, number, date, dropdown, checkbox, currency, stringtemplate.
python3 api.py addcustomfieldtoboard cmx3gmHLKwAXLqjxz LcDW4QdooAx8hsZh8 "SomeField" "date" "" true true true true
=== USERS ===
python3 api.py users
=> abcd1234
=== BOARDS ===
python3 api.py boards abcd1234
=== SWIMLANES ===
python3 api.py swimlanes dYZ
[{"_id":"Jiv","title":"Default"}
]
=== LISTS ===
python3 api.py lists dYZ
[]
There is no lists, so create a list:
=== CREATE LIST ===
python3 api.py createlist dYZ 'Test'
{"_id":"7Kp"}
# python3 api.py addcard AUTHORID BOARDID SWIMLANEID LISTID CARDTITLE CARDDESCRIPTION
python3 api.py addcard ppg dYZ Jiv 7Kp 'Test card' 'Test description'
=== LIST ATTACHMENTS WITH DOWNLOAD URLs ====
python3 api.py listattachments BOARDID
"""
# ------- API URL GENERATION START -----------
loginurl = 'users/login'
wekanloginurl = wekanurl + loginurl
apiboards = 'api/boards/'
apiattachments = 'api/attachments/'
apiusers = 'api/users'
e = 'export'
s = '/'
l = 'lists'
sw = 'swimlane'
sws = 'swimlanes'
cs = 'cards'
cf = 'custom-fields'
bs = 'boards'
atl = 'attachmentslist'
at = 'attachment'
ats = 'attachments'
users = wekanurl + apiusers
# ------- API URL GENERATION END -----------
# ------- LOGIN TOKEN START -----------
data = {"username": username, "password": password}
body = requests.post(wekanloginurl, data=data)
d = body.json()
apikey = d['token']
# ------- LOGIN TOKEN END -----------
if arguments == 10:
if sys.argv[1] == 'addcustomfieldtoboard':
# ------- ADD CUSTOM FIELD TO BOARD START -----------
authorid = sys.argv[2]
boardid = sys.argv[3]
name = sys.argv[4]
type1 = sys.argv[5]
settings = str(json.loads(sys.argv[6]))
# There is error: Settings must be object. So this does not work yet.
#settings = {'currencyCode': 'EUR'}
print(type(settings))
showoncard = sys.argv[7]
automaticallyoncard = sys.argv[8]
showlabelonminicard = sys.argv[9]
showsumattopoflist = sys.argv[10]
customfieldtoboard = wekanurl + apiboards + boardid + s + cf
# Add Custom Field to Board
headers = {'Accept': 'application/json', 'Authorization': 'Bearer {}'.format(apikey)}
post_data = {'authorId': '{}'.format(authorid), 'name': '{}'.format(name), 'type': '{}'.format(type1), 'settings': '{}'.format(settings), 'showoncard': '{}'.format(showoncard), 'automaticallyoncard': '{}'.format(automaticallyoncard), 'showlabelonminicard': '{}'.format(showlabelonminicard), 'showsumattopoflist': '{}'.format(showsumattopoflist)}
body = requests.post(customfieldtoboard, data=post_data, headers=headers)
print(body.text)
# ------- ADD CUSTOM FIELD TO BOARD END -----------
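        # Untested sketch for the "Settings must be object" TODO above: the
        # error message suggests Wekan expects `settings` to arrive as a JSON
        # object rather than a stringified dict, so sending the payload with
        # requests' json= parameter and a real dict might work, e.g.:
        #
        #   settings = json.loads(sys.argv[6] or '{}')
        #   body = requests.post(customfieldtoboard,
        #                        json=dict(post_data, settings=settings),
        #                        headers=headers)
        #
        # This is an assumption about the Wekan REST API and is not verified
        # here, which is why it is left commented out.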
if arguments == 7:
if sys.argv[1] == 'addcard':
# ------- ADD CARD START -----------
authorid = sys.argv[2]
boardid = sys.argv[3]
swimlaneid = sys.argv[4]
listid = sys.argv[5]
cardtitle = sys.argv[6]
carddescription = sys.argv[7]
cardtolist = wekanurl + apiboards + boardid + s + l + s + listid + s + cs
# Add card
headers = {'Accept': 'application/json', 'Authorization': 'Bearer {}'.format(apikey)}
post_data = {'authorId': '{}'.format(authorid), 'title': '{}'.format(cardtitle), 'description': '{}'.format(carddescription), 'swimlaneId': '{}'.format(swimlaneid)}
body = requests.post(cardtolist, data=post_data, headers=headers)
print(body.text)
# ------- ADD CARD END -----------
if arguments == 6:
if sys.argv[1] == 'editcard':
# ------- EDIT CARD START -----------
boardid = sys.argv[2]
listid = sys.argv[3]
cardid = sys.argv[4]
newcardtitle = sys.argv[5]
newcarddescription = sys.argv[6]
edcard = wekanurl + apiboards + boardid + s + l + s + listid + s + cs + s + cardid
print(edcard)
headers = {'Accept': 'application/json', 'Authorization': 'Bearer {}'.format(apikey)}
put_data = {'title': '{}'.format(newcardtitle), 'description': '{}'.format(newcarddescription)}
body = requests.put(edcard, data=put_data, headers=headers)
print("=== EDIT CARD ===\n")
body = requests.get(edcard, headers=headers)
data2 = body.text.replace('}',"}\n")
print(data2)
# ------- EDIT CARD END -----------
if arguments == 3:
if sys.argv[1] == 'createlist':
# ------- CREATE LIST START -----------
boardid = sys.argv[2]
listtitle = sys.argv[3]
        listurl = wekanurl + apiboards + boardid + s + l
        headers = {'Accept': 'application/json', 'Authorization': 'Bearer {}'.format(apikey)}
        post_data = {'title': '{}'.format(listtitle)}
        body = requests.post(listurl, data=post_data, headers=headers)
print("=== CREATE LIST ===\n")
print(body.text)
# ------- CREATE LIST END -----------
if sys.argv[1] == 'list':
# ------- LIST OF BOARD START -----------
boardid = sys.argv[2]
listid = sys.argv[3]
listone = wekanurl + apiboards + boardid + s + l + s + listid
headers = {'Accept': 'application/json', 'Authorization': 'Bearer {}'.format(apikey)}
print("=== INFO OF ONE LIST ===\n")
body = requests.get(listone, headers=headers)
data2 = body.text.replace('}',"}\n")
print(data2)
# ------- LISTS OF BOARD END -----------
if sys.argv[1] == 'customfield':
# ------- INFO OF CUSTOM FIELD START -----------
boardid = sys.argv[2]
customfieldid = sys.argv[3]
customfieldone = wekanurl + apiboards + boardid + s + cf + s + customfieldid
headers = {'Accept': 'application/json', 'Authorization': 'Bearer {}'.format(apikey)}
print("=== INFO OF ONE CUSTOM FIELD ===\n")
body = requests.get(customfieldone, headers=headers)
data2 = body.text.replace('}',"}\n")
print(data2)
# ------- INFO OF CUSTOM FIELD END -----------
if arguments == 2:
# ------- BOARDS LIST START -----------
userid = sys.argv[2]
boards = users + s + userid + s + bs
if sys.argv[1] == 'boards':
headers = {'Accept': 'application/json', 'Authorization': 'Bearer {}'.format(apikey)}
#post_data = {'userId': '{}'.format(userid)}
body = requests.get(boards, headers=headers)
print("=== BOARDS ===\n")
data2 = body.text.replace('}',"}\n")
print(data2)
# ------- BOARDS LIST END -----------
if sys.argv[1] == 'board':
# ------- BOARD INFO START -----------
boardid = sys.argv[2]
board = wekanurl + apiboards + boardid
headers = {'Accept': 'application/json', 'Authorization': 'Bearer {}'.format(apikey)}
body = requests.get(board, headers=headers)
print("=== BOARD ===\n")
data2 = body.text.replace('}',"}\n")
print(data2)
# ------- BOARD INFO END -----------
if sys.argv[1] == 'customfields':
# ------- CUSTOM FIELDS OF BOARD START -----------
boardid = sys.argv[2]
boardcustomfields = wekanurl + apiboards + boardid + s + cf
headers = {'Accept': 'application/json', 'Authorization': 'Bearer {}'.format(apikey)}
body = requests.get(boardcustomfields, headers=headers)
print("=== CUSTOM FIELDS OF BOARD ===\n")
data2 = body.text.replace('}',"}\n")
print(data2)
# ------- CUSTOM FIELDS OF BOARD END -----------
if sys.argv[1] == 'swimlanes':
boardid = sys.argv[2]
swimlanes = wekanurl + apiboards + boardid + s + sws
# ------- SWIMLANES OF BOARD START -----------
headers = {'Accept': 'application/json', 'Authorization': 'Bearer {}'.format(apikey)}
print("=== SWIMLANES ===\n")
body = requests.get(swimlanes, headers=headers)
data2 = body.text.replace('}',"}\n")
print(data2)
# ------- SWIMLANES OF BOARD END -----------
if sys.argv[1] == 'lists':
# ------- LISTS OF BOARD START -----------
boardid = sys.argv[2]
lists = wekanurl + apiboards + boardid + s + l
headers = {'Accept': 'application/json', 'Authorization': 'Bearer {}'.format(apikey)}
print("=== LISTS ===\n")
body = requests.get(lists, headers=headers)
data2 = body.text.replace('}',"}\n")
print(data2)
# ------- LISTS OF BOARD END -----------
if sys.argv[1] == 'listattachments':
# ------- LISTS OF ATTACHMENTS START -----------
boardid = sys.argv[2]
listattachments = wekanurl + apiboards + boardid + s + ats
headers = {'Accept': 'application/json', 'Authorization': 'Bearer {}'.format(apikey)}
print("=== LIST OF ATTACHMENTS ===\n")
body = requests.get(listattachments, headers=headers)
data2 = body.text.replace('}',"}\n")
print(data2)
# ------- LISTS OF ATTACHMENTS END -----------
if arguments == 1:
if sys.argv[1] == 'users':
# ------- LIST OF USERS START -----------
headers = {'Accept': 'application/json', 'Authorization': 'Bearer {}'.format(apikey)}
print(users)
print("=== USERS ===\n")
body = requests.get(users, headers=headers)
data2 = body.text.replace('}',"}\n")
print(data2)
# ------- LIST OF USERS END -----------
if sys.argv[1] == 'boards':
# ------- LIST OF PUBLIC BOARDS START -----------
headers = {'Accept': 'application/json', 'Authorization': 'Bearer {}'.format(apikey)}
print("=== PUBLIC BOARDS ===\n")
listpublicboards = wekanurl + apiboards
body = requests.get(listpublicboards, headers=headers)
data2 = body.text.replace('}',"}\n")
print(data2)
# ------- LIST OF PUBLIC BOARDS END -----------
|
|
from __future__ import division, print_function, absolute_import
import os
import sys
import unittest
from scipy._lib.six import xrange, u
import numpy as np
from numpy.testing import (verbose, run_module_suite, assert_,
assert_raises, assert_array_equal, assert_equal,
assert_almost_equal, assert_allclose)
try:
from hashbrowns._distance.distance import cdist
except ModuleNotFoundError:
top_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(top_path)
from hashbrowns._distance.distance import cdist
from hashbrowns.storage import storage
from multiprocessing import Pool
def _get_filepath(self, filename=None):
if filename is None:
return os.path.dirname(__file__)
return os.path.join(os.path.dirname(__file__), filename)
def _assert_unique(a):
assert_(len(a) == len(set(a)))
class TestDictStorage(unittest.TestCase):
    get_filepath = _get_filepath
def test_pack_hashes(self):
L = 4
n = 10
hashlist = np.random.randint(0, 2, (n,L)).astype(np.uint)
indices = np.arange(n)
bytes = np.array([np.random.bytes(1) for _ in range(n)])
indices_output = {i for i, x in zip(indices, hashlist[:,3]) if x == 1}
        bytes_output = {b for b, x in zip(bytes, hashlist[:,3]) if x == 1}
storage_config = {"dict":None}
dict_storage = storage(L, storage_config)
def task(args):
storage, ids, hashlist = args
storage.pack_hashes(ids, hashlist)
return storage
pool = Pool(2)
out1, out2 = pool.map(task, ((dict_storage, ids, hashlist)
for ids in (indices, bytes)))
assert_(out1 is not out2)
tmp1 = out1.storage[3]
tmp2 = out2.storage[3]
_assert_unique(tmp1)
_assert_unique(tmp2)
assert_equal(indices_output, set(tmp1))
assert_equal(bytes_output, set(tmp2))
def test_clear(self):
n = 10
L = 4
indices = np.arange(n)
hashlist = np.random.randint(0, 2, (n,L)).astype(np.uint)
        storage_config = {"dict": None}
        dict_storage = storage(L, storage_config)
dict_storage.pack_hashes(indices, hashlist)
for storage in dict_storage.storage:
assert_(not not storage)
dict_storage.clear(2)
assert_(not dict_storage.storage[2])
dict_storage.clear()
for storage in dict_storage.storage:
assert_(not storage)
def test_keys(self):
n = 10
L = 4
indices = np.arange(n)
hashlist = np.random.randint(0, 2, (n,L)).astype(np.uint)
        storage_config = {"dict": None}
        dict_storage = storage(L, storage_config)
dict_storage.pack_hashes(indices, hashlist)
view = dict_storage.keys()
view_list = list(view)
dict_storage.clear(0)
assert_(view != view_list)
assert_(not (view ^ view_list))
    def test_get_lists(self):
        # Not implemented in the original source; kept as a placeholder so the
        # class still parses.
        pass
def test_redis_storage(self):
try:
num_hashtables = 4
num_points = 1000
hashlist = np.load(self.get_filepath("test_data/sample_hashlist.npy"))
output1 = open(self.get_filepath("test_data/hash_output1.txt")).read()
output1 = [int(x) for x in output1.split()]
output2 = open(self.get_filepath("test_data/hash_output2.txt")).read()
output2 = output2.split()
ids1 = np.arange(num_points)
ids2 = np.load(self.get_filepath("test_data/string_ids.npy"))
storage_config = {"redis":{}}
redis_storage = storage(num_hashtables, storage_config)
pool = Pool(2)
res1 = pool.apply_async(redis_storage.store_hashes,
(ids1, hashlist))
res1.get()
self.assertEqual(output1, redis_storage.get_list(0, 18))
redis_storage.clear()
redis_storage.close()
res2 = pool.apply_async(redis_storage.store_hashes,
(ids2, hashlist))
res2.get()
self.assertEqual(output2, redis_storage.get_list(0, 18))
redis_storage.clear()
except Exception as e:
redis_storage.clear()
raise e
def test_shelve_storage(self):
try:
num_hashtables = 4
num_points = 1000
hashlist = np.load(self.get_filepath("test_data/sample_hashlist.npy"))
output1 = open(self.get_filepath("test_data/hash_output1.txt")).read()
output1 = [int(x) for x in output1.split()]
output2 = open(self.get_filepath("test_data/hash_output2.txt")).read()
output2 = output2.split()
ids1 = np.arange(num_points)
ids2 = np.load(self.get_filepath("test_data/string_ids.npy"))
filename1 = self.get_filepath("shelve00")
filename2 = self.get_filepath("shelve01")
storage_config = {"shelve":{}}
shelve_storage1 = storage(num_hashtables, storage_config, filename1)
shelve_storage2 = storage(num_hashtables, storage_config, filename2)
pool = Pool(2)
res1 = pool.apply_async(shelve_storage1.store_hashes,
(ids1, hashlist))
res2 = pool.apply_async(shelve_storage2.store_hashes,
(ids2, hashlist))
res1.get()
res2.get()
self.assertEqual(output1, shelve_storage1.get_list(0, 18))
self.assertEqual(output2, shelve_storage2.get_list(0, 18))
os.remove(filename1)
os.remove(filename2)
except Exception as e:
os.remove(filename1)
os.remove(filename2)
raise e
def test_dbm_storage(self):
try:
num_hashtables = 4
num_points = 1000
hashlist = np.load(self.get_filepath("test_data/sample_hashlist.npy"))
output1 = open(self.get_filepath("test_data/hash_output1.txt")).read()
output1 = [int(x) for x in output1.split()]
output2 = open(self.get_filepath("test_data/hash_output2.txt")).read()
output2 = output2.split()
ids1 = np.arange(num_points)
ids2 = np.load(self.get_filepath("test_data/string_ids.npy"))
filename1 = self.get_filepath("dbm00")
filename2 = self.get_filepath("dbm01")
storage_config = {"dbm":{}}
dbm_storage1 = storage(num_hashtables, storage_config, filename1)
dbm_storage2 = storage(num_hashtables, storage_config, filename2)
pool = Pool(2)
res1 = pool.apply_async(dbm_storage1.store_hashes,
(ids1, hashlist))
res2 = pool.apply_async(dbm_storage2.store_hashes,
(ids2, hashlist))
res1.get()
res2.get()
self.assertEqual(output1, dbm_storage1.get_list(0, 18))
self.assertEqual(output2, dbm_storage2.get_list(0, 18))
os.remove(filename1)
os.remove(filename2)
except Exception as e:
os.remove(filename1)
os.remove(filename2)
raise e
def test_sqlite_storage(self):
try:
num_hashtables = 4
num_points = 1000
hashlist = np.load(self.get_filepath("test_data/sample_hashlist.npy"))
output1 = open(self.get_filepath("test_data/hash_output1.txt")).read()
output1 = [int(x) for x in output1.split()]
output2 = open(self.get_filepath("test_data/hash_output2.txt")).read()
output2 = output2.split()
ids1 = np.arange(num_points)
ids2 = np.load(self.get_filepath("test_data/string_ids.npy"))
filename = self.get_filepath("sql.db")
storage_config = {"sqlite":{}}
sqlite_storage1 = storage(num_hashtables, storage_config, filename)
sqlite_storage2 = storage(num_hashtables, storage_config, filename)
pool = Pool(2)
res1 = pool.apply_async(sqlite_storage1.store_hashes,
(ids1, hashlist))
res2 = pool.apply_async(sqlite_storage2.store_hashes,
(ids2, hashlist))
res1.get()
res2.get()
self.assertEqual(output1, sqlite_storage1.get_list(0, 18))
self.assertEqual(output2, sqlite_storage2.get_list(0, 18))
os.remove(filename)
except Exception as e:
os.remove(filename)
raise e
if __name__ == '__main__':
unittest.main()
|
|
'''Library / toolkit for creating command line programs with minimal effort.'''
# Copyright (c) 2013-2016, 2018 Benjamin Althues <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import absolute_import
__docformat__ = 'restructuredtext'
__author__ = "Benjamin Althues"
__copyright__ = "Copyright (C) 2013-2016, 2018 Benjamin Althues"
__version_info__ = (0, 4, 0, 'final', 0)
__version__ = '0.4.0'
from collections import OrderedDict
import getopt
import sys
class CommandExit(Exception):
def __init__(self, val):
self.err = val
def __str__(self):
return repr(self.err)
class OptionError(AttributeError):
'''Options/Flags AttributeError exception'''
class dictobject(dict):
'''A dictionary with getters by attribute, used for flags '''
def __getattr__(self, name):
if name in self:
return self[name]
else:
raise OptionError("Option '{}' is not defined".format(name))
class CommandBase(object):
'''Base class for (sub)commands'''
usagestr = 'usage: command [options]'
'''String. Usage synopsis'''
description = ''
'''String. Small description of subcommand'''
optionList = {}
'''Dictionary of options (as a tuple of 2-tuples).
This will be transformed to an OrderedDict when initializing the object.
Example::
optionList = (
('help', ('h', False, 'show this help information')),
('dry-run', ('n', False,
'only print output without actually running')),
# To specify that an option requires an argument
# just add a string that describes it
('file', ('f', '<filename>', 'use specified file')),
            # Use an empty string to omit the short option
('debug', ('', False, 'show debug information')),
)
'''
usageTextExtra = ''
'''String. Optional extra usage information'''
commands = {}
'''Dictionary of commands and the callables they invoke.'''
def __init__(self, argv=sys.argv[1:]):
'''Initialize (sub)command object
:Parameters:
- `argv`: List of arguments. E.g. `sys.argv[1:]`
'''
# Instance vars
self.error = None
        '''Holds the GetoptError raised when invalid arguments are parsed.'''
self.flags = {}
'''Dict of parsed options and corresponding arguments, if any.'''
self.args = []
        '''List of parsed positional arguments'''
self.parentFlags = {}
'''Dict of registered `flags` of parent Command object.'''
self.usage = ''
'''String with usage information
The string is compiled using the values found for
`usagestr`, `description`, `optionList` and `usageTextExtra`.
'''
self.optionList = OrderedDict(self.optionList)
# Local vars
longopts = []
padding = 0
shortopts = ''
# Calculate padding needed for option arguments in usage info
for flag, val in self.optionList.items():
optlen = len(flag) + 2
optlen += 4 if val[0] else 0
optlen += len(val[1]) + 1 if val[0] and val[1] else 0
optlen += len(val[1]) + 1 if val[1] else 0
padding = optlen if optlen > padding else padding
# Create usage information and build dict of possible flags
opthelp = ''
for flag, val in self.optionList.items():
spec = flag + '=' if val[1] else flag
longopts.append(spec)
self.flags.update({flag: None})
if val[1]:
flagstring_long = ('{flag}={argument}'
.format(flag=flag, argument=val[1]))
if val[0]:
flagstring_short = ('{flag} {argument}'
.format(flag=val[0], argument=val[1]))
else:
flagstring_long = flag
flagstring_short = val[0]
if val[0]:
shortopts += val[0] + ':' if val[1] else val[0]
optline = ('-{short}, --{flag}'
.format(short=flagstring_short,
flag=flagstring_long))
else:
optline = '--{flag}'.format(flag=flagstring_long)
opthelp += ('{options:{padding}} {desc}\n'
.format(options=optline, padding=padding, desc=val[2]))
self.usage = self.usagestr
if self.description:
self.usage += '\n\n{desc}'.format(desc=self.description)
if self.optionList:
self.usage += '\n\nOptions:\n{opts}'.format(opts=opthelp)
if self.usageTextExtra:
self.usage += '\n{help}'.format(help=self.usageTextExtra)
# Parse arguments and options
try:
opts, self.args = getopt.getopt(argv, shortopts, longopts)
except getopt.GetoptError as err:
self.error = err
return # Stop when an invalid option is parsed
for opt in opts:
# Compare each option with optionList and set values for flags
for flag, val in self.optionList.items():
if opt[0][1] != '-':
# Short tags
if opt[0][1] == val[0]:
if val[1]:
self.flags[flag] = opt[1]
else:
self.flags[flag] = True
else:
# Long tags
if opt[0][2:] == flag:
if val[1]:
self.flags[flag] = opt[1]
else:
self.flags[flag] = True
# Convert to dictobject to allow getting flags by attribute name
self.flags = dictobject(self.flags)
def run(self):
if not self.args:
print(self.usage)
raise CommandExit(2)
elif self.args[0] in self.commands:
return self.commands[self.args[0]](argv=self.args[1:])
else:
print('error: command {cmd} does not exist'
.format(cmd=self.args[0]))
raise CommandExit(1)
def registerParentFlag(self, optionName, value):
'''Register a flag of a parent command
:Parameters:
- `optionName`: String. Name of option
            - `value`: Mixed. Value of the parsed flag
'''
self.parentFlags.update({optionName: value})
return self
def run_and_exit(command_class):
'''A shortcut for reading from sys.argv and exiting the interpreter'''
cmd = command_class(sys.argv[1:])
if cmd.error:
print('error: {0}'.format(cmd.error))
sys.exit(1)
else:
sys.exit(cmd.run())
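
# Hedged usage sketch (illustrative, not part of the original library): a
# minimal subcommand built on CommandBase, following the optionList
# conventions documented above. The class and option names are hypothetical.
if __name__ == '__main__':
    class HelloCommand(CommandBase):
        usagestr = 'usage: hello [options]'
        description = 'Print a greeting'
        optionList = (
            ('help', ('h', False, 'show this help information')),
            ('name', ('n', '<name>', 'name to greet')),
        )

        def run(self):
            # Print the compiled usage text when -h/--help is passed
            if self.flags.help:
                print(self.usage)
                return 0
            print('Hello, {0}!'.format(self.flags.name or 'world'))
            return 0

    # Parse sys.argv[1:], run the command and exit with its return value
    run_and_exit(HelloCommand)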
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of Erebus, a web dashboard for tor relays.
#
# :copyright: (c) 2015, The Tor Project, Inc.
# (c) 2015, Damian Johnson
# (c) 2015, Cristobal Leiva
#
# :license: See LICENSE for licensing information.
"""
Websocket-related functions, including the main websockets controller,
base websocket class and custom websocket classes (such as bandwidth, log).
"""
import json
import cyclone.websocket
import stem.util.log
import stem.util.enum
from erebus.server.handlers import graph, info, log
from erebus.util import msg
WebSocketType = stem.util.enum.Enum(
('BANDWIDTH', 'bandwidth'), ('STATUS', 'status'), ('LOG', 'log'),
('INFO', 'info'),
)
WEBSOCKETS = None
def ws_controller():
"""
Provides the WEBSOCKETS singleton.
:returns: :class:`~erebus.server.websockets.WebSocketController`
"""
return WEBSOCKETS
def init_websockets():
"""
Initializes the websockets controller instance and returns the instance.
:returns: :class:`~erebus.server.websockets.WebSocketController`
"""
global WEBSOCKETS
WEBSOCKETS = WebSocketController()
return WEBSOCKETS
class WebSocketController:
"""
Tracks the global state of websockets.
"""
def __init__(self):
"""
Initialize an empty list for each of the websocket types.
"""
self._websockets = dict([(ws_type, []) for ws_type in WebSocketType])
def add_websocket(self, ws_type, ws):
"""
Adds a websocket object to the list of active websockets of a
certain type.
:param str ws_type: string indicating type of websocket.
:param Class ws: :class:`~cyclone.websocket.WebSocketHandler` to
be added to the list of active websockets.
"""
if self._websockets[ws_type] is not None:
if ws not in self._websockets[ws_type]:
self._websockets[ws_type].append(ws)
else:
stem.util.log.notice(msg('ws.add_error', type=ws_type))
def remove_websocket(self, ws_type, ws):
"""
        Removes a websocket object from the list of active websockets of a
certain type.
:param str ws_type: string indicating type of websocket.
:param Class ws: :class:`~cyclone.websocket.WebSocketHandler` to
be removed from the list of active websockets.
"""
if self._websockets[ws_type] is not None:
if ws in self._websockets[ws_type]:
self._websockets[ws_type].remove(ws)
else:
stem.util.log.notice(msg('ws.remove_error', type=ws_type))
def get_websockets(self, ws_type):
"""
Provides a list of active websockets of a certain type.
:param str ws_type: string indicating type of websocket.
"""
ws = self._websockets.get(ws_type, None)
if ws is None:
stem.util.log.notice(msg('ws.get_error', type=ws_type))
return ws
def send_data(self, ws_type, data):
"""
Send JSON encoded data to a list of websockets of a certain type.
:param str ws_type: string indicating type of websocket.
:param dict data: data to be encoded in JSON.
"""
ws_listeners = self.get_websockets(ws_type)
if ws_listeners is not None:
for ws in ws_listeners:
try:
ws.sendMessage(json.dumps(data))
except cyclone.websocket.FrameDecodeError as exc:
stem.util.log.error(
msg('ws.send_error', type=ws_type, error=exc))
def receive_message(self, message, ws):
"""
        Parse requests received from the client. These requests could be
addressed to any of the websocket types, and they consist of
plain-text messages.
:param str message: string containing a plain-text message to be
parsed.
:param Class ws: :class:`~cyclone.websocket.WebSocketHandler` to
which the message was sent.
"""
message = json.loads(message)
# TODO: check for proper format of message
        # Currently we assume the message is valid
if ws.ws_type() == WebSocketType.BANDWIDTH:
# Bandwidth cache was requested
if message['request'] == 'BW-CACHE':
bw = graph.bw_handler()
if bw is not None:
self.send_data(ws.ws_type(), bw.get_cache())
elif ws.ws_type() == WebSocketType.INFO:
            # Relay info was requested. This info must be delivered when a
            # request is received from the client since, unlike BW or LOG
            # events, it is not driven by tor events.
if message['request'] == 'INFO':
self.send_data(ws.ws_type(), info.get_info())
elif ws.ws_type() == WebSocketType.LOG:
# Log cache was requested
if message['request'] == 'LOG-CACHE':
logger = log.log_handler()
if logger is not None:
self.send_data(ws.ws_type(), logger.get_cache())
# A log filter was sent
if message['request'] == 'LOG-FILTER':
logger = log.log_handler()
if logger is not None:
log.update_filter()
def bw_event(self, event):
"""
Handler for BW event, to be attached as a listener to tor controller.
Whenever an event is received, get BW info and send it through
BW websocket.
:param Class event: :class:`~stem.response.events.Event` delivered
by stem.
"""
bw_stats = graph.bw_handler().get_info(event)
self.send_data(WebSocketType.BANDWIDTH, bw_stats)
def listen_erebus_log(self, logged_events):
"""
Handler to initialize erebus log listening. This function will be
called when erebus is started and will receive a list of events
to listen for.
:param set logged_events: **set** of event types to listen.
"""
log.init_log_handler(logged_events, self._erebus_event)
def listen_tor_log(self):
"""
Handler to initialize tor log listening. This function will be
called when a tor control connection is made. The set of events
to listen for are already supposed to be configured by previously
calling listen_erebus_log.
"""
logger = log.log_handler()
logger.init_tor_log(self._tor_event)
def _erebus_event(self, record):
"""
Handler for listening to single erebus events.
:param record: log entry formatted by `~stem.util.log`
"""
logger = log.log_handler()
entry = logger._erebus_event(record)
if entry is not None:
self.send_data(WebSocketType.LOG, entry)
def _tor_event(self, record):
"""
Handler for listening to single tor events.
:param Class record: a valid :class:`~stem.response.` subclass.
"""
logger = log.log_handler()
entry = logger._tor_event(record)
if entry is not None:
self.send_data(WebSocketType.LOG, entry)
def reset_listener(self, controller, state, timestamp):
"""
Handler to be called whenever the connection to tor is lost, so
        as to notify the client that tor is down. This function is attached
as a status listener, so it will be called by stem.
:param Class controller: :class:`~stem.control.BaseController`.
:param Class state: :class:`~stem.control.State` enumeration for
states that a controller can have.
:param float timestamp: Unix timestamp.
"""
current_status = info.get_status(state)
self.send_data(WebSocketType.INFO, current_status)
def startup_info(self):
"""
Handler to be called when a tor control connection is made and
it's necessary to send relay info to the client.
"""
relay_info = info.get_info()
self.send_data(WebSocketType.INFO, relay_info)
class BaseWSHandler(cyclone.websocket.WebSocketHandler):
"""
Base class to be implemented by custom websockets.
"""
    def ws_type(self):
"""
Each subclass must define its type.
:raises: **NotImplementedError** if the subclass doesn't implement
this.
"""
raise NotImplementedError('Should be implemented by subclasses')
def connectionMade(self, *args, **kwargs):
"""
When a connection is made, keep track of it by adding it to
websocket controller.
"""
ws = ws_controller()
if ws is not None:
ws.add_websocket(self.ws_type(), self)
stem.util.log.debug(msg('ws.opened', type=self.ws_type()))
def connectionLost(self, reason):
"""
Remove websocket from websocket controller when the connection is lost.
:param str reason: reason why the connection was lost.
"""
ws = ws_controller()
if ws is not None:
ws.remove_websocket(self.ws_type(), self)
stem.util.log.debug(
msg('ws.opened', type=self.ws_type(), reason=reason))
def messageReceived(self, message):
"""
Gets called when a message is received from the client.
:param str message: plain text message that was received.
"""
ws = ws_controller()
if ws is not None:
# Pass message to websocket controller
ws.receive_message(message, self)
class BandwidthWSHandler(BaseWSHandler):
"""
Bandwidth websocket subclass.
"""
def ws_type(self):
return WebSocketType.BANDWIDTH
class StatusWSHandler(BaseWSHandler):
"""
Status websocket subclass (status refers to state of tor control
connection).
"""
def ws_type(self):
return WebSocketType.STATUS
class LogWSHandler(BaseWSHandler):
"""
Log websocket subclass.
"""
def ws_type(self):
return WebSocketType.LOG
class InfoWSHandler(BaseWSHandler):
"""
(Relay) info websocket subclass.
"""
def ws_type(self):
return WebSocketType.INFO
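
# Hedged usage sketch (illustrative, not taken from the erebus codebase):
# the controller singleton is created once at startup, and handlers push
# data to connected clients through it. The payload shape below is an
# assumption, not erebus' actual schema.
#
#     ws = init_websockets()
#     ws.send_data(WebSocketType.BANDWIDTH, {'read': 1024, 'written': 2048})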
|
|
from __future__ import division
import re
import datetime
from decimal import Decimal
from django.utils import six
STRFDATETIME = re.compile('([dgGhHis])')
STRFDATETIME_REPL = lambda x: '%%(%s)s' % x.group()
def nice_repr(timedelta, display="long", sep=", "):
"""
Turns a datetime.timedelta object into a nice string repr.
display can be "sql", "iso8601", "minimal", "short" or "long" [default].
>>> from datetime import timedelta as td
>>> nice_repr(td(days=1, hours=2, minutes=3, seconds=4))
'1 day, 2 hours, 3 minutes, 4 seconds'
>>> nice_repr(td(days=1, seconds=1), "minimal")
'1d, 1s'
>>> nice_repr(datetime.timedelta(days=1))
'1 day'
>>> nice_repr(datetime.timedelta(days=0))
'0 seconds'
>>> nice_repr(datetime.timedelta(seconds=1))
'1 second'
>>> nice_repr(datetime.timedelta(seconds=10))
'10 seconds'
>>> nice_repr(datetime.timedelta(seconds=30))
'30 seconds'
>>> nice_repr(datetime.timedelta(seconds=60))
'1 minute'
>>> nice_repr(datetime.timedelta(seconds=150))
'2 minutes, 30 seconds'
>>> nice_repr(datetime.timedelta(seconds=1800))
'30 minutes'
>>> nice_repr(datetime.timedelta(seconds=3600))
'1 hour'
>>> nice_repr(datetime.timedelta(seconds=3601))
'1 hour, 1 second'
>>> nice_repr(datetime.timedelta(seconds=19800))
'5 hours, 30 minutes'
>>> nice_repr(datetime.timedelta(seconds=91800))
'1 day, 1 hour, 30 minutes'
>>> nice_repr(datetime.timedelta(seconds=302400))
'3 days, 12 hours'
Tests for handling zero:
>>> nice_repr(td(seconds=0), 'minimal')
'0s'
>>> nice_repr(td(seconds=0), 'short')
'0 sec'
>>> nice_repr(td(seconds=0), 'long')
'0 seconds'
"""
assert isinstance(timedelta, datetime.timedelta), "First argument must be a timedelta."
result = []
weeks = int(timedelta.days / 7)
days = timedelta.days % 7
hours = int(timedelta.seconds / 3600)
minutes = int((timedelta.seconds % 3600) / 60)
seconds = timedelta.seconds % 60
if display == "sql":
days += weeks * 7
return "%i %02i:%02i:%02i" % (days, hours, minutes, seconds)
elif display == "iso8601":
return iso8601_repr(timedelta)
elif display == 'minimal':
words = ["w", "d", "h", "m", "s"]
elif display == 'short':
words = [" wks", " days", " hrs", " min", " sec"]
elif display == 'long':
words = [" weeks", " days", " hours", " minutes", " seconds"]
else:
# Use django template-style formatting.
# Valid values are:
# d,g,G,h,H,i,s
return STRFDATETIME.sub(STRFDATETIME_REPL, display) % {
'd': days,
'g': hours,
'G': hours if hours > 9 else '0%s' % hours,
'h': hours,
'H': hours if hours > 9 else '0%s' % hours,
'i': minutes if minutes > 9 else '0%s' % minutes,
's': seconds if seconds > 9 else '0%s' % seconds
}
values = [weeks, days, hours, minutes, seconds]
for i in range(len(values)):
if values[i]:
if values[i] == 1 and len(words[i]) > 1:
result.append("%i%s" % (values[i], words[i].rstrip('s')))
else:
result.append("%i%s" % (values[i], words[i]))
# values with less than one second, which are considered zeroes
if len(result) == 0:
# display as 0 of the smallest unit
result.append('0%s' % (words[-1]))
return sep.join(result)
def iso8601_repr(timedelta, format=None):
"""
Represent a timedelta as an ISO8601 duration.
http://en.wikipedia.org/wiki/ISO_8601#Durations
>>> from datetime import timedelta as td
>>> iso8601_repr(td(days=1, hours=2, minutes=3, seconds=4))
'P1DT2H3M4S'
>>> iso8601_repr(td(hours=1, minutes=10, seconds=20), 'alt')
'PT01:10:20'
"""
years = int(timedelta.days / 365)
weeks = int((timedelta.days % 365) / 7)
days = timedelta.days % 7
hours = int(timedelta.seconds / 3600)
minutes = int((timedelta.seconds % 3600) / 60)
seconds = timedelta.seconds % 60
if format == 'alt':
if years or weeks or days:
raise ValueError('Does not support alt format for durations > 1 day')
return 'PT{0:02d}:{1:02d}:{2:02d}'.format(hours, minutes, seconds)
formatting = (
('P', (
('Y', years),
('W', weeks),
('D', days),
)),
('T', (
('H', hours),
('M', minutes),
('S', seconds),
)),
)
result = []
for category, subcats in formatting:
result += category
for format, value in subcats:
if value:
result.append('%d%c' % (value, format))
if result[-1] == 'T':
result = result[:-1]
return "".join(result)
def parse(string):
"""
Parse a string into a timedelta object.
>>> parse("1 day")
datetime.timedelta(1)
>>> parse("2 days")
datetime.timedelta(2)
>>> parse("1 d")
datetime.timedelta(1)
>>> parse("1 hour")
datetime.timedelta(0, 3600)
>>> parse("1 hours")
datetime.timedelta(0, 3600)
>>> parse("1 hr")
datetime.timedelta(0, 3600)
>>> parse("1 hrs")
datetime.timedelta(0, 3600)
>>> parse("1h")
datetime.timedelta(0, 3600)
>>> parse("1wk")
datetime.timedelta(7)
>>> parse("1 week")
datetime.timedelta(7)
>>> parse("1 weeks")
datetime.timedelta(7)
>>> parse("2 wks")
datetime.timedelta(14)
>>> parse("1 sec")
datetime.timedelta(0, 1)
>>> parse("1 secs")
datetime.timedelta(0, 1)
>>> parse("1 s")
datetime.timedelta(0, 1)
>>> parse("1 second")
datetime.timedelta(0, 1)
>>> parse("1 seconds")
datetime.timedelta(0, 1)
>>> parse("1 minute")
datetime.timedelta(0, 60)
>>> parse("1 min")
datetime.timedelta(0, 60)
>>> parse("1 m")
datetime.timedelta(0, 60)
>>> parse("1 minutes")
datetime.timedelta(0, 60)
>>> parse("1 mins")
datetime.timedelta(0, 60)
>>> parse("2 ws")
Traceback (most recent call last):
...
TypeError: '2 ws' is not a valid time interval
>>> parse("2 ds")
Traceback (most recent call last):
...
TypeError: '2 ds' is not a valid time interval
>>> parse("2 hs")
Traceback (most recent call last):
...
TypeError: '2 hs' is not a valid time interval
>>> parse("2 ms")
Traceback (most recent call last):
...
TypeError: '2 ms' is not a valid time interval
>>> parse("2 ss")
Traceback (most recent call last):
...
TypeError: '2 ss' is not a valid time interval
>>> parse("")
Traceback (most recent call last):
...
TypeError: '' is not a valid time interval
>>> parse("1.5 days")
datetime.timedelta(1, 43200)
>>> parse("3 weeks")
datetime.timedelta(21)
>>> parse("4.2 hours")
datetime.timedelta(0, 15120)
>>> parse(".5 hours")
datetime.timedelta(0, 1800)
>>> parse(" hours")
Traceback (most recent call last):
...
TypeError: 'hours' is not a valid time interval
>>> parse("1 hour, 5 mins")
datetime.timedelta(0, 3900)
>>> parse("-2 days")
datetime.timedelta(-2)
>>> parse("-1 day 0:00:01")
datetime.timedelta(-1, 1)
>>> parse("-1 day, -1:01:01")
datetime.timedelta(-2, 82739)
>>> parse("-1 weeks, 2 days, -3 hours, 4 minutes, -5 seconds")
datetime.timedelta(-5, 11045)
>>> parse("0 seconds")
datetime.timedelta(0)
>>> parse("0 days")
datetime.timedelta(0)
>>> parse("0 weeks")
datetime.timedelta(0)
>>> zero = datetime.timedelta(0)
>>> parse(nice_repr(zero))
datetime.timedelta(0)
>>> parse(nice_repr(zero, 'minimal'))
datetime.timedelta(0)
>>> parse(nice_repr(zero, 'short'))
datetime.timedelta(0)
>>> parse(' 50 days 00:00:00 ')
datetime.timedelta(50)
"""
string = string.strip()
if string == "":
raise TypeError("'%s' is not a valid time interval" % string)
    # This is the format we sometimes get from Postgres, sqlite,
# and from serialization
d = re.match(r'^((?P<days>[-+]?\d+) days?,? )?(?P<sign>[-+]?)(?P<hours>\d+):'
r'(?P<minutes>\d+)(:(?P<seconds>\d+(\.\d+)?))?$',
six.text_type(string))
if d:
d = d.groupdict(0)
if d['sign'] == '-':
for k in 'hours', 'minutes', 'seconds':
d[k] = '-' + d[k]
d.pop('sign', None)
else:
# This is the more flexible format
d = re.match(
r'^((?P<weeks>-?((\d*\.\d+)|\d+))\W*w((ee)?(k(s)?)?)(,)?\W*)?'
r'((?P<days>-?((\d*\.\d+)|\d+))\W*d(ay(s)?)?(,)?\W*)?'
r'((?P<hours>-?((\d*\.\d+)|\d+))\W*h(ou)?(r(s)?)?(,)?\W*)?'
r'((?P<minutes>-?((\d*\.\d+)|\d+))\W*m(in(ute)?(s)?)?(,)?\W*)?'
r'((?P<seconds>-?((\d*\.\d+)|\d+))\W*s(ec(ond)?(s)?)?)?\W*$',
six.text_type(string))
if not d:
raise TypeError("'%s' is not a valid time interval" % string)
d = d.groupdict(0)
return datetime.timedelta(**dict(( (k, float(v)) for k,v in d.items())))
def divide(obj1, obj2, as_float=False):
"""
Allows for the division of timedeltas by other timedeltas, or by
floats/Decimals
>>> from datetime import timedelta as td
>>> divide(td(1), td(1))
1
>>> divide(td(2), td(1))
2
>>> divide(td(32), 16)
datetime.timedelta(2)
>>> divide(datetime.timedelta(1), datetime.timedelta(hours=6))
4
>>> divide(datetime.timedelta(2), datetime.timedelta(3))
0
>>> divide(datetime.timedelta(8), datetime.timedelta(3), as_float=True)
2.6666666666666665
>>> divide(datetime.timedelta(8), 2.0)
datetime.timedelta(4)
>>> divide(datetime.timedelta(8), 2, as_float=True)
Traceback (most recent call last):
...
AssertionError: as_float=True is inappropriate when dividing timedelta by a number.
"""
assert isinstance(obj1, datetime.timedelta), "First argument must be a timedelta."
assert isinstance(obj2, (datetime.timedelta, int, float, Decimal)), "Second argument must be a timedelta or number"
sec1 = obj1.days * 86400 + obj1.seconds
if isinstance(obj2, datetime.timedelta):
sec2 = obj2.days * 86400 + obj2.seconds
value = sec1 / sec2
if as_float:
return value
return int(value)
else:
if as_float:
assert None, "as_float=True is inappropriate when dividing timedelta by a number."
secs = sec1 / obj2
if isinstance(secs, Decimal):
secs = float(secs)
return datetime.timedelta(seconds=secs)
def modulo(obj1, obj2):
"""
Allows for remainder division of timedelta by timedelta or integer.
>>> from datetime import timedelta as td
>>> modulo(td(5), td(2))
datetime.timedelta(1)
>>> modulo(td(6), td(3))
datetime.timedelta(0)
>>> modulo(td(15), 4 * 3600 * 24)
datetime.timedelta(3)
>>> modulo(5, td(1))
Traceback (most recent call last):
...
AssertionError: First argument must be a timedelta.
>>> modulo(td(1), 2.8)
Traceback (most recent call last):
...
AssertionError: Second argument must be a timedelta or int.
"""
assert isinstance(obj1, datetime.timedelta), "First argument must be a timedelta."
assert isinstance(obj2, (datetime.timedelta, int)), "Second argument must be a timedelta or int."
sec1 = obj1.days * 86400 + obj1.seconds
if isinstance(obj2, datetime.timedelta):
sec2 = obj2.days * 86400 + obj2.seconds
return datetime.timedelta(seconds=sec1 % sec2)
else:
return datetime.timedelta(seconds=(sec1 % obj2))
def percentage(obj1, obj2):
"""
What percentage of obj2 is obj1? We want the answer as a float.
>>> percentage(datetime.timedelta(4), datetime.timedelta(2))
200.0
>>> percentage(datetime.timedelta(2), datetime.timedelta(4))
50.0
"""
assert isinstance(obj1, datetime.timedelta), "First argument must be a timedelta."
assert isinstance(obj2, datetime.timedelta), "Second argument must be a timedelta."
return divide(obj1 * 100, obj2, as_float=True)
def decimal_percentage(obj1, obj2):
"""
>>> decimal_percentage(datetime.timedelta(4), datetime.timedelta(2))
Decimal('200.0')
>>> decimal_percentage(datetime.timedelta(2), datetime.timedelta(4))
Decimal('50.0')
"""
return Decimal(str(percentage(obj1, obj2)))
def multiply(obj, val):
"""
Allows for the multiplication of timedeltas by float values.
>>> multiply(datetime.timedelta(seconds=20), 1.5)
datetime.timedelta(0, 30)
>>> multiply(datetime.timedelta(1), 2.5)
datetime.timedelta(2, 43200)
>>> multiply(datetime.timedelta(1), 3)
datetime.timedelta(3)
>>> multiply(datetime.timedelta(1), Decimal("5.5"))
datetime.timedelta(5, 43200)
>>> multiply(datetime.date.today(), 2.5)
Traceback (most recent call last):
...
AssertionError: First argument must be a timedelta.
>>> multiply(datetime.timedelta(1), "2")
Traceback (most recent call last):
...
AssertionError: Second argument must be a number.
"""
assert isinstance(obj, datetime.timedelta), "First argument must be a timedelta."
assert isinstance(val, (int, float, Decimal)), "Second argument must be a number."
sec = obj.days * 86400 + obj.seconds
sec *= val
if isinstance(sec, Decimal):
sec = float(sec)
return datetime.timedelta(seconds=sec)
def round_to_nearest(obj, timedelta):
"""
The obj is rounded to the nearest whole number of timedeltas.
obj can be a timedelta, datetime or time object.
>>> round_to_nearest(datetime.datetime(2012, 1, 1, 9, 43), datetime.timedelta(1))
datetime.datetime(2012, 1, 1, 0, 0)
>>> round_to_nearest(datetime.datetime(2012, 1, 1, 9, 43), datetime.timedelta(hours=1))
datetime.datetime(2012, 1, 1, 10, 0)
>>> round_to_nearest(datetime.datetime(2012, 1, 1, 9, 43), datetime.timedelta(minutes=15))
datetime.datetime(2012, 1, 1, 9, 45)
>>> round_to_nearest(datetime.datetime(2012, 1, 1, 9, 43), datetime.timedelta(minutes=1))
datetime.datetime(2012, 1, 1, 9, 43)
>>> td = datetime.timedelta(minutes=30)
>>> round_to_nearest(datetime.timedelta(minutes=0), td)
datetime.timedelta(0)
>>> round_to_nearest(datetime.timedelta(minutes=14), td)
datetime.timedelta(0)
>>> round_to_nearest(datetime.timedelta(minutes=15), td)
datetime.timedelta(0, 1800)
>>> round_to_nearest(datetime.timedelta(minutes=29), td)
datetime.timedelta(0, 1800)
>>> round_to_nearest(datetime.timedelta(minutes=30), td)
datetime.timedelta(0, 1800)
>>> round_to_nearest(datetime.timedelta(minutes=42), td)
datetime.timedelta(0, 1800)
>>> round_to_nearest(datetime.timedelta(hours=7, minutes=22), td)
datetime.timedelta(0, 27000)
>>> td = datetime.timedelta(minutes=15)
>>> round_to_nearest(datetime.timedelta(minutes=0), td)
datetime.timedelta(0)
>>> round_to_nearest(datetime.timedelta(minutes=14), td)
datetime.timedelta(0, 900)
>>> round_to_nearest(datetime.timedelta(minutes=15), td)
datetime.timedelta(0, 900)
>>> round_to_nearest(datetime.timedelta(minutes=29), td)
datetime.timedelta(0, 1800)
>>> round_to_nearest(datetime.timedelta(minutes=30), td)
datetime.timedelta(0, 1800)
>>> round_to_nearest(datetime.timedelta(minutes=42), td)
datetime.timedelta(0, 2700)
>>> round_to_nearest(datetime.timedelta(hours=7, minutes=22), td)
datetime.timedelta(0, 26100)
>>> td = datetime.timedelta(minutes=30)
>>> round_to_nearest(datetime.datetime(2010,1,1,9,22), td)
datetime.datetime(2010, 1, 1, 9, 30)
>>> round_to_nearest(datetime.datetime(2010,1,1,9,32), td)
datetime.datetime(2010, 1, 1, 9, 30)
>>> round_to_nearest(datetime.datetime(2010,1,1,9,42), td)
datetime.datetime(2010, 1, 1, 9, 30)
>>> round_to_nearest(datetime.time(0,20), td)
datetime.time(0, 30)
TODO: test with tzinfo (non-naive) datetimes/times.
"""
assert isinstance(obj, (datetime.datetime, datetime.timedelta, datetime.time)), "First argument must be datetime, time or timedelta."
assert isinstance(timedelta, datetime.timedelta), "Second argument must be a timedelta."
time_only = False
if isinstance(obj, datetime.timedelta):
counter = datetime.timedelta(0)
elif isinstance(obj, datetime.datetime):
counter = datetime.datetime.combine(obj.date(), datetime.time(0, tzinfo=obj.tzinfo))
elif isinstance(obj, datetime.time):
counter = datetime.datetime.combine(datetime.date.today(), datetime.time(0, tzinfo=obj.tzinfo))
obj = datetime.datetime.combine(datetime.date.today(), obj)
time_only = True
diff = abs(obj - counter)
while counter < obj:
old_diff = diff
counter += timedelta
diff = abs(obj - counter)
if counter == obj:
result = obj
elif diff <= old_diff:
result = counter
else:
result = counter - timedelta
if time_only:
return result.time()
else:
return result
def decimal_hours(timedelta, decimal_places=None):
"""
Return a decimal value of the number of hours that this timedelta
object refers to.
"""
hours = Decimal(timedelta.days*24) + Decimal(timedelta.seconds) / 3600
if decimal_places:
return hours.quantize(Decimal(str(10**-decimal_places)))
return hours
def week_containing(date):
if date.weekday():
date -= datetime.timedelta(date.weekday())
return date, date + datetime.timedelta(6)
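
# Illustrative examples for the two helpers above (a hedged sketch, written
# in the doctest style used elsewhere in this module):
#
#     >>> decimal_hours(datetime.timedelta(hours=1, minutes=30))
#     Decimal('1.5')
#     >>> week_containing(datetime.date(2012, 1, 4))
#     (datetime.date(2012, 1, 2), datetime.date(2012, 1, 8))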
try:
datetime.timedelta().total_seconds
def total_seconds(timedelta):
return timedelta.total_seconds()
except AttributeError:
def total_seconds(timedelta):
"""
Python < 2.7 does not have datetime.timedelta.total_seconds
"""
return timedelta.days * 86400 + timedelta.seconds
if __name__ == "__main__":
import doctest
doctest.testmod()
|
|
"""Implement the Google Smart Home traits."""
import logging
from homeassistant.components import (
alarm_control_panel,
binary_sensor,
camera,
cover,
fan,
group,
input_boolean,
input_select,
light,
lock,
media_player,
scene,
script,
sensor,
switch,
vacuum,
)
from homeassistant.components.climate import const as climate
from homeassistant.components.humidifier import const as humidifier
from homeassistant.const import (
ATTR_ASSUMED_STATE,
ATTR_CODE,
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
ATTR_TEMPERATURE,
SERVICE_ALARM_ARM_AWAY,
SERVICE_ALARM_ARM_CUSTOM_BYPASS,
SERVICE_ALARM_ARM_HOME,
SERVICE_ALARM_ARM_NIGHT,
SERVICE_ALARM_DISARM,
SERVICE_ALARM_TRIGGER,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_PENDING,
STATE_ALARM_TRIGGERED,
STATE_IDLE,
STATE_LOCKED,
STATE_OFF,
STATE_ON,
STATE_PAUSED,
STATE_PLAYING,
STATE_STANDBY,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from homeassistant.core import DOMAIN as HA_DOMAIN
from homeassistant.helpers.network import get_url
from homeassistant.util import color as color_util, dt, temperature as temp_util
from .const import (
CHALLENGE_ACK_NEEDED,
CHALLENGE_FAILED_PIN_NEEDED,
CHALLENGE_PIN_NEEDED,
ERR_ALREADY_ARMED,
ERR_ALREADY_DISARMED,
ERR_CHALLENGE_NOT_SETUP,
ERR_FUNCTION_NOT_SUPPORTED,
ERR_NOT_SUPPORTED,
ERR_VALUE_OUT_OF_RANGE,
)
from .error import ChallengeNeeded, SmartHomeError
_LOGGER = logging.getLogger(__name__)
PREFIX_TRAITS = "action.devices.traits."
TRAIT_CAMERA_STREAM = f"{PREFIX_TRAITS}CameraStream"
TRAIT_ONOFF = f"{PREFIX_TRAITS}OnOff"
TRAIT_DOCK = f"{PREFIX_TRAITS}Dock"
TRAIT_STARTSTOP = f"{PREFIX_TRAITS}StartStop"
TRAIT_BRIGHTNESS = f"{PREFIX_TRAITS}Brightness"
TRAIT_COLOR_SETTING = f"{PREFIX_TRAITS}ColorSetting"
TRAIT_SCENE = f"{PREFIX_TRAITS}Scene"
TRAIT_TEMPERATURE_SETTING = f"{PREFIX_TRAITS}TemperatureSetting"
TRAIT_LOCKUNLOCK = f"{PREFIX_TRAITS}LockUnlock"
TRAIT_FANSPEED = f"{PREFIX_TRAITS}FanSpeed"
TRAIT_MODES = f"{PREFIX_TRAITS}Modes"
TRAIT_INPUTSELECTOR = f"{PREFIX_TRAITS}InputSelector"
TRAIT_OPENCLOSE = f"{PREFIX_TRAITS}OpenClose"
TRAIT_VOLUME = f"{PREFIX_TRAITS}Volume"
TRAIT_ARMDISARM = f"{PREFIX_TRAITS}ArmDisarm"
TRAIT_HUMIDITY_SETTING = f"{PREFIX_TRAITS}HumiditySetting"
TRAIT_TRANSPORT_CONTROL = f"{PREFIX_TRAITS}TransportControl"
TRAIT_MEDIA_STATE = f"{PREFIX_TRAITS}MediaState"
PREFIX_COMMANDS = "action.devices.commands."
COMMAND_ONOFF = f"{PREFIX_COMMANDS}OnOff"
COMMAND_GET_CAMERA_STREAM = f"{PREFIX_COMMANDS}GetCameraStream"
COMMAND_DOCK = f"{PREFIX_COMMANDS}Dock"
COMMAND_STARTSTOP = f"{PREFIX_COMMANDS}StartStop"
COMMAND_PAUSEUNPAUSE = f"{PREFIX_COMMANDS}PauseUnpause"
COMMAND_BRIGHTNESS_ABSOLUTE = f"{PREFIX_COMMANDS}BrightnessAbsolute"
COMMAND_COLOR_ABSOLUTE = f"{PREFIX_COMMANDS}ColorAbsolute"
COMMAND_ACTIVATE_SCENE = f"{PREFIX_COMMANDS}ActivateScene"
COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT = (
f"{PREFIX_COMMANDS}ThermostatTemperatureSetpoint"
)
COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE = (
f"{PREFIX_COMMANDS}ThermostatTemperatureSetRange"
)
COMMAND_THERMOSTAT_SET_MODE = f"{PREFIX_COMMANDS}ThermostatSetMode"
COMMAND_LOCKUNLOCK = f"{PREFIX_COMMANDS}LockUnlock"
COMMAND_FANSPEED = f"{PREFIX_COMMANDS}SetFanSpeed"
COMMAND_MODES = f"{PREFIX_COMMANDS}SetModes"
COMMAND_INPUT = f"{PREFIX_COMMANDS}SetInput"
COMMAND_OPENCLOSE = f"{PREFIX_COMMANDS}OpenClose"
COMMAND_SET_VOLUME = f"{PREFIX_COMMANDS}setVolume"
COMMAND_VOLUME_RELATIVE = f"{PREFIX_COMMANDS}volumeRelative"
COMMAND_ARMDISARM = f"{PREFIX_COMMANDS}ArmDisarm"
COMMAND_MEDIA_NEXT = f"{PREFIX_COMMANDS}mediaNext"
COMMAND_MEDIA_PAUSE = f"{PREFIX_COMMANDS}mediaPause"
COMMAND_MEDIA_PREVIOUS = f"{PREFIX_COMMANDS}mediaPrevious"
COMMAND_MEDIA_RESUME = f"{PREFIX_COMMANDS}mediaResume"
COMMAND_MEDIA_SEEK_RELATIVE = f"{PREFIX_COMMANDS}mediaSeekRelative"
COMMAND_MEDIA_SEEK_TO_POSITION = f"{PREFIX_COMMANDS}mediaSeekToPosition"
COMMAND_MEDIA_SHUFFLE = f"{PREFIX_COMMANDS}mediaShuffle"
COMMAND_MEDIA_STOP = f"{PREFIX_COMMANDS}mediaStop"
COMMAND_SET_HUMIDITY = f"{PREFIX_COMMANDS}SetHumidity"
TRAITS = []
def register_trait(trait):
"""Decorate a function to register a trait."""
TRAITS.append(trait)
return trait
def _google_temp_unit(units):
"""Return Google temperature unit."""
if units == TEMP_FAHRENHEIT:
return "F"
return "C"
class _Trait:
"""Represents a Trait inside Google Assistant skill."""
commands = []
@staticmethod
def might_2fa(domain, features, device_class):
"""Return if the trait might ask for 2FA."""
return False
def __init__(self, hass, state, config):
"""Initialize a trait for a state."""
self.hass = hass
self.state = state
self.config = config
def sync_attributes(self):
"""Return attributes for a sync request."""
raise NotImplementedError
def query_attributes(self):
"""Return the attributes of this trait for this entity."""
raise NotImplementedError
def can_execute(self, command, params):
"""Test if command can be executed."""
return command in self.commands
async def execute(self, command, data, params, challenge):
"""Execute a trait command."""
raise NotImplementedError
@register_trait
class BrightnessTrait(_Trait):
"""Trait to control brightness of a device.
https://developers.google.com/actions/smarthome/traits/brightness
"""
name = TRAIT_BRIGHTNESS
commands = [COMMAND_BRIGHTNESS_ABSOLUTE]
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
if domain == light.DOMAIN:
return features & light.SUPPORT_BRIGHTNESS
return False
def sync_attributes(self):
"""Return brightness attributes for a sync request."""
return {}
def query_attributes(self):
"""Return brightness query attributes."""
domain = self.state.domain
response = {}
if domain == light.DOMAIN:
brightness = self.state.attributes.get(light.ATTR_BRIGHTNESS)
if brightness is not None:
response["brightness"] = int(100 * (brightness / 255))
else:
response["brightness"] = 0
return response
async def execute(self, command, data, params, challenge):
"""Execute a brightness command."""
domain = self.state.domain
if domain == light.DOMAIN:
await self.hass.services.async_call(
light.DOMAIN,
light.SERVICE_TURN_ON,
{
ATTR_ENTITY_ID: self.state.entity_id,
light.ATTR_BRIGHTNESS_PCT: params["brightness"],
},
blocking=True,
context=data.context,
)
@register_trait
class CameraStreamTrait(_Trait):
"""Trait to stream from cameras.
https://developers.google.com/actions/smarthome/traits/camerastream
"""
name = TRAIT_CAMERA_STREAM
commands = [COMMAND_GET_CAMERA_STREAM]
stream_info = None
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
if domain == camera.DOMAIN:
return features & camera.SUPPORT_STREAM
return False
def sync_attributes(self):
"""Return stream attributes for a sync request."""
return {
"cameraStreamSupportedProtocols": ["hls"],
"cameraStreamNeedAuthToken": False,
"cameraStreamNeedDrmEncryption": False,
}
def query_attributes(self):
"""Return camera stream attributes."""
return self.stream_info or {}
async def execute(self, command, data, params, challenge):
"""Execute a get camera stream command."""
url = await self.hass.components.camera.async_request_stream(
self.state.entity_id, "hls"
)
self.stream_info = {"cameraStreamAccessUrl": f"{get_url(self.hass)}{url}"}
@register_trait
class OnOffTrait(_Trait):
"""Trait to offer basic on and off functionality.
https://developers.google.com/actions/smarthome/traits/onoff
"""
name = TRAIT_ONOFF
commands = [COMMAND_ONOFF]
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
return domain in (
group.DOMAIN,
input_boolean.DOMAIN,
switch.DOMAIN,
fan.DOMAIN,
light.DOMAIN,
media_player.DOMAIN,
humidifier.DOMAIN,
)
def sync_attributes(self):
"""Return OnOff attributes for a sync request."""
return {}
def query_attributes(self):
"""Return OnOff query attributes."""
return {"on": self.state.state not in (STATE_OFF, STATE_UNKNOWN)}
async def execute(self, command, data, params, challenge):
"""Execute an OnOff command."""
domain = self.state.domain
if domain == group.DOMAIN:
service_domain = HA_DOMAIN
service = SERVICE_TURN_ON if params["on"] else SERVICE_TURN_OFF
else:
service_domain = domain
service = SERVICE_TURN_ON if params["on"] else SERVICE_TURN_OFF
await self.hass.services.async_call(
service_domain,
service,
{ATTR_ENTITY_ID: self.state.entity_id},
blocking=True,
context=data.context,
)
@register_trait
class ColorSettingTrait(_Trait):
"""Trait to offer color temperature functionality.
https://developers.google.com/actions/smarthome/traits/colortemperature
"""
name = TRAIT_COLOR_SETTING
commands = [COMMAND_COLOR_ABSOLUTE]
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
if domain != light.DOMAIN:
return False
return features & light.SUPPORT_COLOR_TEMP or features & light.SUPPORT_COLOR
def sync_attributes(self):
"""Return color temperature attributes for a sync request."""
attrs = self.state.attributes
features = attrs.get(ATTR_SUPPORTED_FEATURES, 0)
response = {}
if features & light.SUPPORT_COLOR:
response["colorModel"] = "hsv"
if features & light.SUPPORT_COLOR_TEMP:
# Max Kelvin is Min Mireds K = 1000000 / mireds
# Min Kelvin is Max Mireds K = 1000000 / mireds
response["colorTemperatureRange"] = {
"temperatureMaxK": color_util.color_temperature_mired_to_kelvin(
attrs.get(light.ATTR_MIN_MIREDS)
),
"temperatureMinK": color_util.color_temperature_mired_to_kelvin(
attrs.get(light.ATTR_MAX_MIREDS)
),
}
return response
def query_attributes(self):
"""Return color temperature query attributes."""
features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
color = {}
if features & light.SUPPORT_COLOR:
color_hs = self.state.attributes.get(light.ATTR_HS_COLOR)
brightness = self.state.attributes.get(light.ATTR_BRIGHTNESS, 1)
if color_hs is not None:
color["spectrumHsv"] = {
"hue": color_hs[0],
"saturation": color_hs[1] / 100,
"value": brightness / 255,
}
if features & light.SUPPORT_COLOR_TEMP:
temp = self.state.attributes.get(light.ATTR_COLOR_TEMP)
            # Some faulty integrations might put 0 in here, which would raise an exception.
if temp == 0:
_LOGGER.warning(
"Entity %s has incorrect color temperature %s",
self.state.entity_id,
temp,
)
elif temp is not None:
color["temperatureK"] = color_util.color_temperature_mired_to_kelvin(
temp
)
response = {}
if color:
response["color"] = color
return response
async def execute(self, command, data, params, challenge):
"""Execute a color temperature command."""
if "temperature" in params["color"]:
temp = color_util.color_temperature_kelvin_to_mired(
params["color"]["temperature"]
)
min_temp = self.state.attributes[light.ATTR_MIN_MIREDS]
max_temp = self.state.attributes[light.ATTR_MAX_MIREDS]
if temp < min_temp or temp > max_temp:
raise SmartHomeError(
ERR_VALUE_OUT_OF_RANGE,
f"Temperature should be between {min_temp} and {max_temp}",
)
await self.hass.services.async_call(
light.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: self.state.entity_id, light.ATTR_COLOR_TEMP: temp},
blocking=True,
context=data.context,
)
elif "spectrumRGB" in params["color"]:
# Convert integer to hex format and left pad with 0's till length 6
hex_value = f"{params['color']['spectrumRGB']:06x}"
color = color_util.color_RGB_to_hs(
*color_util.rgb_hex_to_rgb_list(hex_value)
)
await self.hass.services.async_call(
light.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: self.state.entity_id, light.ATTR_HS_COLOR: color},
blocking=True,
context=data.context,
)
elif "spectrumHSV" in params["color"]:
color = params["color"]["spectrumHSV"]
saturation = color["saturation"] * 100
brightness = color["value"] * 255
await self.hass.services.async_call(
light.DOMAIN,
SERVICE_TURN_ON,
{
ATTR_ENTITY_ID: self.state.entity_id,
light.ATTR_HS_COLOR: [color["hue"], saturation],
light.ATTR_BRIGHTNESS: brightness,
},
blocking=True,
context=data.context,
)
@register_trait
class SceneTrait(_Trait):
"""Trait to offer scene functionality.
https://developers.google.com/actions/smarthome/traits/scene
"""
name = TRAIT_SCENE
commands = [COMMAND_ACTIVATE_SCENE]
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
return domain in (scene.DOMAIN, script.DOMAIN)
def sync_attributes(self):
"""Return scene attributes for a sync request."""
# Neither supported domain can support sceneReversible
return {}
def query_attributes(self):
"""Return scene query attributes."""
return {}
async def execute(self, command, data, params, challenge):
"""Execute a scene command."""
# Don't block for scripts as they can be slow.
await self.hass.services.async_call(
self.state.domain,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: self.state.entity_id},
blocking=self.state.domain != script.DOMAIN,
context=data.context,
)
@register_trait
class DockTrait(_Trait):
"""Trait to offer dock functionality.
https://developers.google.com/actions/smarthome/traits/dock
"""
name = TRAIT_DOCK
commands = [COMMAND_DOCK]
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
return domain == vacuum.DOMAIN
def sync_attributes(self):
"""Return dock attributes for a sync request."""
return {}
def query_attributes(self):
"""Return dock query attributes."""
return {"isDocked": self.state.state == vacuum.STATE_DOCKED}
async def execute(self, command, data, params, challenge):
"""Execute a dock command."""
await self.hass.services.async_call(
self.state.domain,
vacuum.SERVICE_RETURN_TO_BASE,
{ATTR_ENTITY_ID: self.state.entity_id},
blocking=True,
context=data.context,
)
@register_trait
class StartStopTrait(_Trait):
"""Trait to offer StartStop functionality.
https://developers.google.com/actions/smarthome/traits/startstop
"""
name = TRAIT_STARTSTOP
commands = [COMMAND_STARTSTOP, COMMAND_PAUSEUNPAUSE]
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
return domain == vacuum.DOMAIN
def sync_attributes(self):
"""Return StartStop attributes for a sync request."""
return {
"pausable": self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
& vacuum.SUPPORT_PAUSE
!= 0
}
def query_attributes(self):
"""Return StartStop query attributes."""
return {
"isRunning": self.state.state == vacuum.STATE_CLEANING,
"isPaused": self.state.state == vacuum.STATE_PAUSED,
}
async def execute(self, command, data, params, challenge):
"""Execute a StartStop command."""
if command == COMMAND_STARTSTOP:
if params["start"]:
await self.hass.services.async_call(
self.state.domain,
vacuum.SERVICE_START,
{ATTR_ENTITY_ID: self.state.entity_id},
blocking=True,
context=data.context,
)
else:
await self.hass.services.async_call(
self.state.domain,
vacuum.SERVICE_STOP,
{ATTR_ENTITY_ID: self.state.entity_id},
blocking=True,
context=data.context,
)
elif command == COMMAND_PAUSEUNPAUSE:
if params["pause"]:
await self.hass.services.async_call(
self.state.domain,
vacuum.SERVICE_PAUSE,
{ATTR_ENTITY_ID: self.state.entity_id},
blocking=True,
context=data.context,
)
else:
await self.hass.services.async_call(
self.state.domain,
vacuum.SERVICE_START,
{ATTR_ENTITY_ID: self.state.entity_id},
blocking=True,
context=data.context,
)
@register_trait
class TemperatureSettingTrait(_Trait):
"""Trait to offer handling both temperature point and modes functionality.
https://developers.google.com/actions/smarthome/traits/temperaturesetting
"""
name = TRAIT_TEMPERATURE_SETTING
commands = [
COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT,
COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE,
COMMAND_THERMOSTAT_SET_MODE,
]
# We do not support "on" as we are unable to know how to restore
# the last mode.
hvac_to_google = {
climate.HVAC_MODE_HEAT: "heat",
climate.HVAC_MODE_COOL: "cool",
climate.HVAC_MODE_OFF: "off",
climate.HVAC_MODE_AUTO: "auto",
climate.HVAC_MODE_HEAT_COOL: "heatcool",
climate.HVAC_MODE_FAN_ONLY: "fan-only",
climate.HVAC_MODE_DRY: "dry",
}
google_to_hvac = {value: key for key, value in hvac_to_google.items()}
preset_to_google = {climate.PRESET_ECO: "eco"}
google_to_preset = {value: key for key, value in preset_to_google.items()}
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
if domain == climate.DOMAIN:
return True
return (
domain == sensor.DOMAIN and device_class == sensor.DEVICE_CLASS_TEMPERATURE
)
@property
def climate_google_modes(self):
"""Return supported Google modes."""
modes = []
attrs = self.state.attributes
for mode in attrs.get(climate.ATTR_HVAC_MODES, []):
google_mode = self.hvac_to_google.get(mode)
if google_mode and google_mode not in modes:
modes.append(google_mode)
for preset in attrs.get(climate.ATTR_PRESET_MODES, []):
google_mode = self.preset_to_google.get(preset)
if google_mode and google_mode not in modes:
modes.append(google_mode)
return modes
def sync_attributes(self):
"""Return temperature point and modes attributes for a sync request."""
response = {}
attrs = self.state.attributes
domain = self.state.domain
response["thermostatTemperatureUnit"] = _google_temp_unit(
self.hass.config.units.temperature_unit
)
if domain == sensor.DOMAIN:
device_class = attrs.get(ATTR_DEVICE_CLASS)
if device_class == sensor.DEVICE_CLASS_TEMPERATURE:
response["queryOnlyTemperatureSetting"] = True
elif domain == climate.DOMAIN:
modes = self.climate_google_modes
            # Some integrations don't support modes (e.g. opentherm), but Google
            # requires at least one mode before the temperature can be changed.
            # If there's only one mode, Google doesn't allow changing it, so the
            # default mode added here is only cosmetic.
if len(modes) == 0:
modes.append("heat")
if "off" in modes and any(
mode in modes for mode in ("heatcool", "heat", "cool")
):
modes.append("on")
response["availableThermostatModes"] = ",".join(modes)
return response
def query_attributes(self):
"""Return temperature point and modes query attributes."""
response = {}
attrs = self.state.attributes
domain = self.state.domain
unit = self.hass.config.units.temperature_unit
if domain == sensor.DOMAIN:
device_class = attrs.get(ATTR_DEVICE_CLASS)
if device_class == sensor.DEVICE_CLASS_TEMPERATURE:
current_temp = self.state.state
if current_temp not in (STATE_UNKNOWN, STATE_UNAVAILABLE):
response["thermostatTemperatureAmbient"] = round(
temp_util.convert(float(current_temp), unit, TEMP_CELSIUS), 1
)
elif domain == climate.DOMAIN:
operation = self.state.state
preset = attrs.get(climate.ATTR_PRESET_MODE)
supported = attrs.get(ATTR_SUPPORTED_FEATURES, 0)
if preset in self.preset_to_google:
response["thermostatMode"] = self.preset_to_google[preset]
else:
response["thermostatMode"] = self.hvac_to_google.get(operation)
current_temp = attrs.get(climate.ATTR_CURRENT_TEMPERATURE)
if current_temp is not None:
response["thermostatTemperatureAmbient"] = round(
temp_util.convert(current_temp, unit, TEMP_CELSIUS), 1
)
current_humidity = attrs.get(climate.ATTR_CURRENT_HUMIDITY)
if current_humidity is not None:
response["thermostatHumidityAmbient"] = current_humidity
if operation in (climate.HVAC_MODE_AUTO, climate.HVAC_MODE_HEAT_COOL):
if supported & climate.SUPPORT_TARGET_TEMPERATURE_RANGE:
response["thermostatTemperatureSetpointHigh"] = round(
temp_util.convert(
attrs[climate.ATTR_TARGET_TEMP_HIGH], unit, TEMP_CELSIUS
),
1,
)
response["thermostatTemperatureSetpointLow"] = round(
temp_util.convert(
attrs[climate.ATTR_TARGET_TEMP_LOW], unit, TEMP_CELSIUS
),
1,
)
else:
target_temp = attrs.get(ATTR_TEMPERATURE)
if target_temp is not None:
target_temp = round(
temp_util.convert(target_temp, unit, TEMP_CELSIUS), 1
)
response["thermostatTemperatureSetpointHigh"] = target_temp
response["thermostatTemperatureSetpointLow"] = target_temp
else:
target_temp = attrs.get(ATTR_TEMPERATURE)
if target_temp is not None:
response["thermostatTemperatureSetpoint"] = round(
temp_util.convert(target_temp, unit, TEMP_CELSIUS), 1
)
return response
async def execute(self, command, data, params, challenge):
"""Execute a temperature point or mode command."""
domain = self.state.domain
if domain == sensor.DOMAIN:
raise SmartHomeError(
ERR_NOT_SUPPORTED, "Execute is not supported by sensor"
)
        # All temperatures sent by Google are always in Celsius
unit = self.hass.config.units.temperature_unit
min_temp = self.state.attributes[climate.ATTR_MIN_TEMP]
max_temp = self.state.attributes[climate.ATTR_MAX_TEMP]
if command == COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT:
temp = temp_util.convert(
params["thermostatTemperatureSetpoint"], TEMP_CELSIUS, unit
)
if unit == TEMP_FAHRENHEIT:
temp = round(temp)
if temp < min_temp or temp > max_temp:
raise SmartHomeError(
ERR_VALUE_OUT_OF_RANGE,
f"Temperature should be between {min_temp} and {max_temp}",
)
await self.hass.services.async_call(
climate.DOMAIN,
climate.SERVICE_SET_TEMPERATURE,
{ATTR_ENTITY_ID: self.state.entity_id, ATTR_TEMPERATURE: temp},
blocking=True,
context=data.context,
)
elif command == COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE:
temp_high = temp_util.convert(
params["thermostatTemperatureSetpointHigh"], TEMP_CELSIUS, unit
)
if unit == TEMP_FAHRENHEIT:
temp_high = round(temp_high)
if temp_high < min_temp or temp_high > max_temp:
raise SmartHomeError(
ERR_VALUE_OUT_OF_RANGE,
(
f"Upper bound for temperature range should be between "
f"{min_temp} and {max_temp}"
),
)
temp_low = temp_util.convert(
params["thermostatTemperatureSetpointLow"], TEMP_CELSIUS, unit
)
if unit == TEMP_FAHRENHEIT:
temp_low = round(temp_low)
if temp_low < min_temp or temp_low > max_temp:
raise SmartHomeError(
ERR_VALUE_OUT_OF_RANGE,
(
f"Lower bound for temperature range should be between "
f"{min_temp} and {max_temp}"
),
)
supported = self.state.attributes.get(ATTR_SUPPORTED_FEATURES)
svc_data = {ATTR_ENTITY_ID: self.state.entity_id}
if supported & climate.SUPPORT_TARGET_TEMPERATURE_RANGE:
svc_data[climate.ATTR_TARGET_TEMP_HIGH] = temp_high
svc_data[climate.ATTR_TARGET_TEMP_LOW] = temp_low
else:
svc_data[ATTR_TEMPERATURE] = (temp_high + temp_low) / 2
await self.hass.services.async_call(
climate.DOMAIN,
climate.SERVICE_SET_TEMPERATURE,
svc_data,
blocking=True,
context=data.context,
)
elif command == COMMAND_THERMOSTAT_SET_MODE:
target_mode = params["thermostatMode"]
supported = self.state.attributes.get(ATTR_SUPPORTED_FEATURES)
if target_mode == "on":
await self.hass.services.async_call(
climate.DOMAIN,
SERVICE_TURN_ON,
{ATTR_ENTITY_ID: self.state.entity_id},
blocking=True,
context=data.context,
)
return
if target_mode == "off":
await self.hass.services.async_call(
climate.DOMAIN,
SERVICE_TURN_OFF,
{ATTR_ENTITY_ID: self.state.entity_id},
blocking=True,
context=data.context,
)
return
if target_mode in self.google_to_preset:
await self.hass.services.async_call(
climate.DOMAIN,
climate.SERVICE_SET_PRESET_MODE,
{
climate.ATTR_PRESET_MODE: self.google_to_preset[target_mode],
ATTR_ENTITY_ID: self.state.entity_id,
},
blocking=True,
context=data.context,
)
return
await self.hass.services.async_call(
climate.DOMAIN,
climate.SERVICE_SET_HVAC_MODE,
{
ATTR_ENTITY_ID: self.state.entity_id,
climate.ATTR_HVAC_MODE: self.google_to_hvac[target_mode],
},
blocking=True,
context=data.context,
)
@register_trait
class HumiditySettingTrait(_Trait):
"""Trait to offer humidity setting functionality.
https://developers.google.com/actions/smarthome/traits/humiditysetting
"""
name = TRAIT_HUMIDITY_SETTING
commands = [COMMAND_SET_HUMIDITY]
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
if domain == humidifier.DOMAIN:
return True
return domain == sensor.DOMAIN and device_class == sensor.DEVICE_CLASS_HUMIDITY
def sync_attributes(self):
"""Return humidity attributes for a sync request."""
response = {}
attrs = self.state.attributes
domain = self.state.domain
if domain == sensor.DOMAIN:
device_class = attrs.get(ATTR_DEVICE_CLASS)
if device_class == sensor.DEVICE_CLASS_HUMIDITY:
response["queryOnlyHumiditySetting"] = True
elif domain == humidifier.DOMAIN:
response["humiditySetpointRange"] = {
"minPercent": round(
float(self.state.attributes[humidifier.ATTR_MIN_HUMIDITY])
),
"maxPercent": round(
float(self.state.attributes[humidifier.ATTR_MAX_HUMIDITY])
),
}
return response
def query_attributes(self):
"""Return humidity query attributes."""
response = {}
attrs = self.state.attributes
domain = self.state.domain
if domain == sensor.DOMAIN:
device_class = attrs.get(ATTR_DEVICE_CLASS)
if device_class == sensor.DEVICE_CLASS_HUMIDITY:
current_humidity = self.state.state
if current_humidity not in (STATE_UNKNOWN, STATE_UNAVAILABLE):
response["humidityAmbientPercent"] = round(float(current_humidity))
elif domain == humidifier.DOMAIN:
target_humidity = attrs.get(humidifier.ATTR_HUMIDITY)
if target_humidity is not None:
response["humiditySetpointPercent"] = round(float(target_humidity))
return response
async def execute(self, command, data, params, challenge):
"""Execute a humidity command."""
domain = self.state.domain
if domain == sensor.DOMAIN:
raise SmartHomeError(
ERR_NOT_SUPPORTED, "Execute is not supported by sensor"
)
if command == COMMAND_SET_HUMIDITY:
await self.hass.services.async_call(
humidifier.DOMAIN,
humidifier.SERVICE_SET_HUMIDITY,
{
ATTR_ENTITY_ID: self.state.entity_id,
humidifier.ATTR_HUMIDITY: params["humidity"],
},
blocking=True,
context=data.context,
)
@register_trait
class LockUnlockTrait(_Trait):
"""Trait to lock or unlock a lock.
https://developers.google.com/actions/smarthome/traits/lockunlock
"""
name = TRAIT_LOCKUNLOCK
commands = [COMMAND_LOCKUNLOCK]
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
return domain == lock.DOMAIN
@staticmethod
def might_2fa(domain, features, device_class):
"""Return if the trait might ask for 2FA."""
return True
def sync_attributes(self):
"""Return LockUnlock attributes for a sync request."""
return {}
def query_attributes(self):
"""Return LockUnlock query attributes."""
return {"isLocked": self.state.state == STATE_LOCKED}
async def execute(self, command, data, params, challenge):
"""Execute an LockUnlock command."""
if params["lock"]:
service = lock.SERVICE_LOCK
else:
_verify_pin_challenge(data, self.state, challenge)
service = lock.SERVICE_UNLOCK
await self.hass.services.async_call(
lock.DOMAIN,
service,
{ATTR_ENTITY_ID: self.state.entity_id},
blocking=True,
context=data.context,
)
@register_trait
class ArmDisArmTrait(_Trait):
"""Trait to Arm or Disarm a Security System.
https://developers.google.com/actions/smarthome/traits/armdisarm
"""
name = TRAIT_ARMDISARM
commands = [COMMAND_ARMDISARM]
state_to_service = {
STATE_ALARM_ARMED_HOME: SERVICE_ALARM_ARM_HOME,
STATE_ALARM_ARMED_AWAY: SERVICE_ALARM_ARM_AWAY,
STATE_ALARM_ARMED_NIGHT: SERVICE_ALARM_ARM_NIGHT,
STATE_ALARM_ARMED_CUSTOM_BYPASS: SERVICE_ALARM_ARM_CUSTOM_BYPASS,
STATE_ALARM_TRIGGERED: SERVICE_ALARM_TRIGGER,
}
state_to_support = {
STATE_ALARM_ARMED_HOME: alarm_control_panel.const.SUPPORT_ALARM_ARM_HOME,
STATE_ALARM_ARMED_AWAY: alarm_control_panel.const.SUPPORT_ALARM_ARM_AWAY,
STATE_ALARM_ARMED_NIGHT: alarm_control_panel.const.SUPPORT_ALARM_ARM_NIGHT,
STATE_ALARM_ARMED_CUSTOM_BYPASS: alarm_control_panel.const.SUPPORT_ALARM_ARM_CUSTOM_BYPASS,
STATE_ALARM_TRIGGERED: alarm_control_panel.const.SUPPORT_ALARM_TRIGGER,
}
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
return domain == alarm_control_panel.DOMAIN
@staticmethod
def might_2fa(domain, features, device_class):
"""Return if the trait might ask for 2FA."""
return True
def _supported_states(self):
"""Return supported states."""
features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
return [
state
for state, required_feature in self.state_to_support.items()
if features & required_feature != 0
]
def sync_attributes(self):
"""Return ArmDisarm attributes for a sync request."""
response = {}
levels = []
for state in self._supported_states():
# level synonyms are generated from state names
# 'armed_away' becomes 'armed away' or 'away'
level_synonym = [state.replace("_", " ")]
if state != STATE_ALARM_TRIGGERED:
level_synonym.append(state.split("_")[1])
level = {
"level_name": state,
"level_values": [{"level_synonym": level_synonym, "lang": "en"}],
}
levels.append(level)
response["availableArmLevels"] = {"levels": levels, "ordered": False}
return response
def query_attributes(self):
"""Return ArmDisarm query attributes."""
if "next_state" in self.state.attributes:
armed_state = self.state.attributes["next_state"]
else:
armed_state = self.state.state
response = {"isArmed": armed_state in self.state_to_service}
if response["isArmed"]:
response.update({"currentArmLevel": armed_state})
return response
async def execute(self, command, data, params, challenge):
"""Execute an ArmDisarm command."""
if params["arm"] and not params.get("cancel"):
arm_level = params.get("armLevel")
# If no arm level given, we can only arm it if there is
# only one supported arm type. We never default to triggered.
if not arm_level:
states = self._supported_states()
if STATE_ALARM_TRIGGERED in states:
states.remove(STATE_ALARM_TRIGGERED)
if len(states) != 1:
raise SmartHomeError(ERR_NOT_SUPPORTED, "ArmLevel missing")
arm_level = states[0]
if self.state.state == arm_level:
raise SmartHomeError(ERR_ALREADY_ARMED, "System is already armed")
if self.state.attributes["code_arm_required"]:
_verify_pin_challenge(data, self.state, challenge)
service = self.state_to_service[arm_level]
# disarm the system without asking for code when
# 'cancel' arming action is received while current status is pending
elif (
params["arm"]
and params.get("cancel")
and self.state.state == STATE_ALARM_PENDING
):
service = SERVICE_ALARM_DISARM
else:
if self.state.state == STATE_ALARM_DISARMED:
raise SmartHomeError(ERR_ALREADY_DISARMED, "System is already disarmed")
_verify_pin_challenge(data, self.state, challenge)
service = SERVICE_ALARM_DISARM
await self.hass.services.async_call(
alarm_control_panel.DOMAIN,
service,
{
ATTR_ENTITY_ID: self.state.entity_id,
ATTR_CODE: data.config.secure_devices_pin,
},
blocking=True,
context=data.context,
)
@register_trait
class FanSpeedTrait(_Trait):
"""Trait to control speed of Fan.
https://developers.google.com/actions/smarthome/traits/fanspeed
"""
name = TRAIT_FANSPEED
commands = [COMMAND_FANSPEED]
speed_synonyms = {
fan.SPEED_OFF: ["stop", "off"],
fan.SPEED_LOW: ["slow", "low", "slowest", "lowest"],
fan.SPEED_MEDIUM: ["medium", "mid", "middle"],
fan.SPEED_HIGH: ["high", "max", "fast", "highest", "fastest", "maximum"],
}
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
if domain != fan.DOMAIN:
return False
return features & fan.SUPPORT_SET_SPEED
def sync_attributes(self):
"""Return speed point and modes attributes for a sync request."""
modes = self.state.attributes.get(fan.ATTR_SPEED_LIST, [])
speeds = []
for mode in modes:
if mode not in self.speed_synonyms:
continue
speed = {
"speed_name": mode,
"speed_values": [
{"speed_synonym": self.speed_synonyms.get(mode), "lang": "en"}
],
}
speeds.append(speed)
return {
"availableFanSpeeds": {"speeds": speeds, "ordered": True},
"reversible": bool(
self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
& fan.SUPPORT_DIRECTION
),
}
def query_attributes(self):
"""Return speed point and modes query attributes."""
attrs = self.state.attributes
response = {}
speed = attrs.get(fan.ATTR_SPEED)
if speed is not None:
response["on"] = speed != fan.SPEED_OFF
response["currentFanSpeedSetting"] = speed
return response
async def execute(self, command, data, params, challenge):
"""Execute an SetFanSpeed command."""
await self.hass.services.async_call(
fan.DOMAIN,
fan.SERVICE_SET_SPEED,
{ATTR_ENTITY_ID: self.state.entity_id, fan.ATTR_SPEED: params["fanSpeed"]},
blocking=True,
context=data.context,
)
@register_trait
class ModesTrait(_Trait):
"""Trait to set modes.
https://developers.google.com/actions/smarthome/traits/modes
"""
name = TRAIT_MODES
commands = [COMMAND_MODES]
SYNONYMS = {
"sound mode": ["sound mode", "effects"],
"option": ["option", "setting", "mode", "value"],
}
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
if domain == input_select.DOMAIN:
return True
if domain == humidifier.DOMAIN and features & humidifier.SUPPORT_MODES:
return True
if domain != media_player.DOMAIN:
return False
return features & media_player.SUPPORT_SELECT_SOUND_MODE
def sync_attributes(self):
"""Return mode attributes for a sync request."""
def _generate(name, settings):
mode = {
"name": name,
"name_values": [
{"name_synonym": self.SYNONYMS.get(name, [name]), "lang": "en"}
],
"settings": [],
"ordered": False,
}
for setting in settings:
mode["settings"].append(
{
"setting_name": setting,
"setting_values": [
{
"setting_synonym": self.SYNONYMS.get(
setting, [setting]
),
"lang": "en",
}
],
}
)
return mode
attrs = self.state.attributes
modes = []
if self.state.domain == media_player.DOMAIN:
if media_player.ATTR_SOUND_MODE_LIST in attrs:
modes.append(
_generate("sound mode", attrs[media_player.ATTR_SOUND_MODE_LIST])
)
elif self.state.domain == input_select.DOMAIN:
modes.append(_generate("option", attrs[input_select.ATTR_OPTIONS]))
elif self.state.domain == humidifier.DOMAIN:
if humidifier.ATTR_AVAILABLE_MODES in attrs:
modes.append(_generate("mode", attrs[humidifier.ATTR_AVAILABLE_MODES]))
payload = {"availableModes": modes}
return payload
def query_attributes(self):
"""Return current modes."""
attrs = self.state.attributes
response = {}
mode_settings = {}
if self.state.domain == media_player.DOMAIN:
if media_player.ATTR_SOUND_MODE_LIST in attrs:
mode_settings["sound mode"] = attrs.get(media_player.ATTR_SOUND_MODE)
elif self.state.domain == input_select.DOMAIN:
mode_settings["option"] = self.state.state
elif self.state.domain == humidifier.DOMAIN:
if humidifier.ATTR_MODE in attrs:
mode_settings["mode"] = attrs.get(humidifier.ATTR_MODE)
if mode_settings:
response["on"] = self.state.state not in (STATE_OFF, STATE_UNKNOWN)
response["currentModeSettings"] = mode_settings
return response
async def execute(self, command, data, params, challenge):
"""Execute a SetModes command."""
settings = params.get("updateModeSettings")
if self.state.domain == input_select.DOMAIN:
option = params["updateModeSettings"]["option"]
await self.hass.services.async_call(
input_select.DOMAIN,
input_select.SERVICE_SELECT_OPTION,
{
ATTR_ENTITY_ID: self.state.entity_id,
input_select.ATTR_OPTION: option,
},
blocking=True,
context=data.context,
)
return
if self.state.domain == humidifier.DOMAIN:
requested_mode = settings["mode"]
await self.hass.services.async_call(
humidifier.DOMAIN,
humidifier.SERVICE_SET_MODE,
{
humidifier.ATTR_MODE: requested_mode,
ATTR_ENTITY_ID: self.state.entity_id,
},
blocking=True,
context=data.context,
)
return
if self.state.domain != media_player.DOMAIN:
_LOGGER.info(
"Received an Options command for unrecognised domain %s",
self.state.domain,
)
return
sound_mode = settings.get("sound mode")
if sound_mode:
await self.hass.services.async_call(
media_player.DOMAIN,
media_player.SERVICE_SELECT_SOUND_MODE,
{
ATTR_ENTITY_ID: self.state.entity_id,
media_player.ATTR_SOUND_MODE: sound_mode,
},
blocking=True,
context=data.context,
)
@register_trait
class InputSelectorTrait(_Trait):
"""Trait to set modes.
https://developers.google.com/assistant/smarthome/traits/inputselector
"""
name = TRAIT_INPUTSELECTOR
commands = [COMMAND_INPUT]
SYNONYMS = {}
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
if domain == media_player.DOMAIN and (
features & media_player.SUPPORT_SELECT_SOURCE
):
return True
return False
def sync_attributes(self):
"""Return mode attributes for a sync request."""
attrs = self.state.attributes
inputs = [
{"key": source, "names": [{"name_synonym": [source], "lang": "en"}]}
for source in attrs.get(media_player.ATTR_INPUT_SOURCE_LIST, [])
]
payload = {"availableInputs": inputs, "orderedInputs": True}
return payload
def query_attributes(self):
"""Return current modes."""
attrs = self.state.attributes
return {"currentInput": attrs.get(media_player.ATTR_INPUT_SOURCE, "")}
async def execute(self, command, data, params, challenge):
"""Execute an SetInputSource command."""
requested_source = params.get("newInput")
await self.hass.services.async_call(
media_player.DOMAIN,
media_player.SERVICE_SELECT_SOURCE,
{
ATTR_ENTITY_ID: self.state.entity_id,
media_player.ATTR_INPUT_SOURCE: requested_source,
},
blocking=True,
context=data.context,
)
@register_trait
class OpenCloseTrait(_Trait):
"""Trait to open and close a cover.
https://developers.google.com/actions/smarthome/traits/openclose
"""
# Cover device classes that require 2FA
COVER_2FA = (
cover.DEVICE_CLASS_DOOR,
cover.DEVICE_CLASS_GARAGE,
cover.DEVICE_CLASS_GATE,
)
name = TRAIT_OPENCLOSE
commands = [COMMAND_OPENCLOSE]
override_position = None
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
if domain == cover.DOMAIN:
return True
return domain == binary_sensor.DOMAIN and device_class in (
binary_sensor.DEVICE_CLASS_DOOR,
binary_sensor.DEVICE_CLASS_GARAGE_DOOR,
binary_sensor.DEVICE_CLASS_LOCK,
binary_sensor.DEVICE_CLASS_OPENING,
binary_sensor.DEVICE_CLASS_WINDOW,
)
@staticmethod
def might_2fa(domain, features, device_class):
"""Return if the trait might ask for 2FA."""
return domain == cover.DOMAIN and device_class in OpenCloseTrait.COVER_2FA
def sync_attributes(self):
"""Return opening direction."""
response = {}
if self.state.domain == binary_sensor.DOMAIN:
response["queryOnlyOpenClose"] = True
return response
def query_attributes(self):
"""Return state query attributes."""
domain = self.state.domain
response = {}
if self.override_position is not None:
response["openPercent"] = self.override_position
elif domain == cover.DOMAIN:
# When it's an assumed state, we will return that querying state
# is not supported.
if self.state.attributes.get(ATTR_ASSUMED_STATE):
raise SmartHomeError(
ERR_NOT_SUPPORTED, "Querying state is not supported"
)
if self.state.state == STATE_UNKNOWN:
raise SmartHomeError(
ERR_NOT_SUPPORTED, "Querying state is not supported"
)
position = self.override_position or self.state.attributes.get(
cover.ATTR_CURRENT_POSITION
)
if position is not None:
response["openPercent"] = position
elif self.state.state != cover.STATE_CLOSED:
response["openPercent"] = 100
else:
response["openPercent"] = 0
elif domain == binary_sensor.DOMAIN:
if self.state.state == STATE_ON:
response["openPercent"] = 100
else:
response["openPercent"] = 0
return response
async def execute(self, command, data, params, challenge):
"""Execute an Open, close, Set position command."""
domain = self.state.domain
if domain == cover.DOMAIN:
svc_params = {ATTR_ENTITY_ID: self.state.entity_id}
if params["openPercent"] == 0:
service = cover.SERVICE_CLOSE_COVER
should_verify = False
elif params["openPercent"] == 100:
service = cover.SERVICE_OPEN_COVER
should_verify = True
elif (
self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
& cover.SUPPORT_SET_POSITION
):
service = cover.SERVICE_SET_COVER_POSITION
should_verify = True
svc_params[cover.ATTR_POSITION] = params["openPercent"]
else:
raise SmartHomeError(
ERR_FUNCTION_NOT_SUPPORTED, "Setting a position is not supported"
)
if (
should_verify
and self.state.attributes.get(ATTR_DEVICE_CLASS)
in OpenCloseTrait.COVER_2FA
):
_verify_pin_challenge(data, self.state, challenge)
await self.hass.services.async_call(
cover.DOMAIN, service, svc_params, blocking=True, context=data.context
)
if (
self.state.attributes.get(ATTR_ASSUMED_STATE)
or self.state.state == STATE_UNKNOWN
):
self.override_position = params["openPercent"]
@register_trait
class VolumeTrait(_Trait):
"""Trait to control brightness of a device.
https://developers.google.com/actions/smarthome/traits/volume
"""
name = TRAIT_VOLUME
commands = [COMMAND_SET_VOLUME, COMMAND_VOLUME_RELATIVE]
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
if domain == media_player.DOMAIN:
return features & media_player.SUPPORT_VOLUME_SET
return False
def sync_attributes(self):
"""Return brightness attributes for a sync request."""
return {}
def query_attributes(self):
"""Return brightness query attributes."""
response = {}
level = self.state.attributes.get(media_player.ATTR_MEDIA_VOLUME_LEVEL)
muted = self.state.attributes.get(media_player.ATTR_MEDIA_VOLUME_MUTED)
if level is not None:
# Convert 0.0-1.0 to 0-100
response["currentVolume"] = int(level * 100)
response["isMuted"] = bool(muted)
return response
async def _execute_set_volume(self, data, params):
level = params["volumeLevel"]
await self.hass.services.async_call(
media_player.DOMAIN,
media_player.SERVICE_VOLUME_SET,
{
ATTR_ENTITY_ID: self.state.entity_id,
media_player.ATTR_MEDIA_VOLUME_LEVEL: level / 100,
},
blocking=True,
context=data.context,
)
async def _execute_volume_relative(self, data, params):
# This could also support up/down commands using relativeSteps
relative = params["volumeRelativeLevel"]
current = self.state.attributes.get(media_player.ATTR_MEDIA_VOLUME_LEVEL)
await self.hass.services.async_call(
media_player.DOMAIN,
media_player.SERVICE_VOLUME_SET,
{
ATTR_ENTITY_ID: self.state.entity_id,
media_player.ATTR_MEDIA_VOLUME_LEVEL: current + relative / 100,
},
blocking=True,
context=data.context,
)
async def execute(self, command, data, params, challenge):
"""Execute a brightness command."""
if command == COMMAND_SET_VOLUME:
await self._execute_set_volume(data, params)
elif command == COMMAND_VOLUME_RELATIVE:
await self._execute_volume_relative(data, params)
else:
raise SmartHomeError(ERR_NOT_SUPPORTED, "Command not supported")
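# Challenge helpers: each is a no-op when the entity does not require 2FA;
# otherwise they raise ChallengeNeeded so Google Assistant prompts the user
# (for a PIN or an acknowledgement) and resends the command with challenge data.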
def _verify_pin_challenge(data, state, challenge):
"""Verify a pin challenge."""
if not data.config.should_2fa(state):
return
if not data.config.secure_devices_pin:
raise SmartHomeError(ERR_CHALLENGE_NOT_SETUP, "Challenge is not set up")
if not challenge:
raise ChallengeNeeded(CHALLENGE_PIN_NEEDED)
pin = challenge.get("pin")
if pin != data.config.secure_devices_pin:
raise ChallengeNeeded(CHALLENGE_FAILED_PIN_NEEDED)
def _verify_ack_challenge(data, state, challenge):
"""Verify an ack challenge."""
if not data.config.should_2fa(state):
return
if not challenge or not challenge.get("ack"):
raise ChallengeNeeded(CHALLENGE_ACK_NEEDED)
MEDIA_COMMAND_SUPPORT_MAPPING = {
COMMAND_MEDIA_NEXT: media_player.SUPPORT_NEXT_TRACK,
COMMAND_MEDIA_PAUSE: media_player.SUPPORT_PAUSE,
COMMAND_MEDIA_PREVIOUS: media_player.SUPPORT_PREVIOUS_TRACK,
COMMAND_MEDIA_RESUME: media_player.SUPPORT_PLAY,
COMMAND_MEDIA_SEEK_RELATIVE: media_player.SUPPORT_SEEK,
COMMAND_MEDIA_SEEK_TO_POSITION: media_player.SUPPORT_SEEK,
COMMAND_MEDIA_SHUFFLE: media_player.SUPPORT_SHUFFLE_SET,
COMMAND_MEDIA_STOP: media_player.SUPPORT_STOP,
}
MEDIA_COMMAND_ATTRIBUTES = {
COMMAND_MEDIA_NEXT: "NEXT",
COMMAND_MEDIA_PAUSE: "PAUSE",
COMMAND_MEDIA_PREVIOUS: "PREVIOUS",
COMMAND_MEDIA_RESUME: "RESUME",
COMMAND_MEDIA_SEEK_RELATIVE: "SEEK_RELATIVE",
COMMAND_MEDIA_SEEK_TO_POSITION: "SEEK_TO_POSITION",
COMMAND_MEDIA_SHUFFLE: "SHUFFLE",
COMMAND_MEDIA_STOP: "STOP",
}
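# MEDIA_COMMAND_SUPPORT_MAPPING gates which Google media commands are offered,
# based on the media_player's supported features; MEDIA_COMMAND_ATTRIBUTES maps
# each command to the string reported in transportControlSupportedCommands.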
@register_trait
class TransportControlTrait(_Trait):
"""Trait to control media playback.
https://developers.google.com/actions/smarthome/traits/transportcontrol
"""
name = TRAIT_TRANSPORT_CONTROL
commands = [
COMMAND_MEDIA_NEXT,
COMMAND_MEDIA_PAUSE,
COMMAND_MEDIA_PREVIOUS,
COMMAND_MEDIA_RESUME,
COMMAND_MEDIA_SEEK_RELATIVE,
COMMAND_MEDIA_SEEK_TO_POSITION,
COMMAND_MEDIA_SHUFFLE,
COMMAND_MEDIA_STOP,
]
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
if domain == media_player.DOMAIN:
for feature in MEDIA_COMMAND_SUPPORT_MAPPING.values():
if features & feature:
return True
return False
def sync_attributes(self):
"""Return opening direction."""
response = {}
if self.state.domain == media_player.DOMAIN:
features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)
support = []
for command, feature in MEDIA_COMMAND_SUPPORT_MAPPING.items():
if features & feature:
support.append(MEDIA_COMMAND_ATTRIBUTES[command])
response["transportControlSupportedCommands"] = support
return response
def query_attributes(self):
"""Return the attributes of this trait for this entity."""
return {}
async def execute(self, command, data, params, challenge):
"""Execute a media command."""
service_attrs = {ATTR_ENTITY_ID: self.state.entity_id}
if command == COMMAND_MEDIA_SEEK_RELATIVE:
service = media_player.SERVICE_MEDIA_SEEK
rel_position = params["relativePositionMs"] / 1000
seconds_since = 0 # Default to 0 seconds
if self.state.state == STATE_PLAYING:
now = dt.utcnow()
upd_at = self.state.attributes.get(
media_player.ATTR_MEDIA_POSITION_UPDATED_AT, now
)
seconds_since = (now - upd_at).total_seconds()
position = self.state.attributes.get(media_player.ATTR_MEDIA_POSITION, 0)
max_position = self.state.attributes.get(
media_player.ATTR_MEDIA_DURATION, 0
)
service_attrs[media_player.ATTR_MEDIA_SEEK_POSITION] = min(
max(position + seconds_since + rel_position, 0), max_position
)
elif command == COMMAND_MEDIA_SEEK_TO_POSITION:
service = media_player.SERVICE_MEDIA_SEEK
max_position = self.state.attributes.get(
media_player.ATTR_MEDIA_DURATION, 0
)
service_attrs[media_player.ATTR_MEDIA_SEEK_POSITION] = min(
max(params["absPositionMs"] / 1000, 0), max_position
)
elif command == COMMAND_MEDIA_NEXT:
service = media_player.SERVICE_MEDIA_NEXT_TRACK
elif command == COMMAND_MEDIA_PAUSE:
service = media_player.SERVICE_MEDIA_PAUSE
elif command == COMMAND_MEDIA_PREVIOUS:
service = media_player.SERVICE_MEDIA_PREVIOUS_TRACK
elif command == COMMAND_MEDIA_RESUME:
service = media_player.SERVICE_MEDIA_PLAY
elif command == COMMAND_MEDIA_SHUFFLE:
service = media_player.SERVICE_SHUFFLE_SET
# Google Assistant only supports enabling shuffle
service_attrs[media_player.ATTR_MEDIA_SHUFFLE] = True
elif command == COMMAND_MEDIA_STOP:
service = media_player.SERVICE_MEDIA_STOP
else:
raise SmartHomeError(ERR_NOT_SUPPORTED, "Command not supported")
await self.hass.services.async_call(
media_player.DOMAIN,
service,
service_attrs,
blocking=True,
context=data.context,
)
@register_trait
class MediaStateTrait(_Trait):
"""Trait to get media playback state.
https://developers.google.com/actions/smarthome/traits/mediastate
"""
name = TRAIT_MEDIA_STATE
commands = []
activity_lookup = {
STATE_OFF: "INACTIVE",
STATE_IDLE: "STANDBY",
STATE_PLAYING: "ACTIVE",
STATE_ON: "STANDBY",
STATE_PAUSED: "STANDBY",
STATE_STANDBY: "STANDBY",
STATE_UNAVAILABLE: "INACTIVE",
STATE_UNKNOWN: "INACTIVE",
}
playback_lookup = {
STATE_OFF: "STOPPED",
STATE_IDLE: "STOPPED",
STATE_PLAYING: "PLAYING",
STATE_ON: "STOPPED",
STATE_PAUSED: "PAUSED",
STATE_STANDBY: "STOPPED",
STATE_UNAVAILABLE: "STOPPED",
STATE_UNKNOWN: "STOPPED",
}
@staticmethod
def supported(domain, features, device_class):
"""Test if state is supported."""
return domain == media_player.DOMAIN
def sync_attributes(self):
"""Return attributes for a sync request."""
return {"supportActivityState": True, "supportPlaybackState": True}
def query_attributes(self):
"""Return the attributes of this trait for this entity."""
return {
"activityState": self.activity_lookup.get(self.state.state, "INACTIVE"),
"playbackState": self.playback_lookup.get(self.state.state, "STOPPED"),
}
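# Illustrative example (not part of the component): for a media_player entity
# in the 'playing' state, MediaStateTrait.query_attributes() would return
# {"activityState": "ACTIVE", "playbackState": "PLAYING"}.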
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mxnet as mx
import numpy as np
from mxnet.test_utils import *
from common import assertRaises, with_seed
import shutil
import tempfile
import unittest
from nose.tools import raises
def _get_data(url, dirname):
import os, tarfile
download(url, dirname=dirname, overwrite=False)
fname = os.path.join(dirname, url.split('/')[-1])
tar = tarfile.open(fname)
source_images = [os.path.join(dirname, x.name) for x in tar.getmembers() if x.isfile()]
if len(source_images) < 1 or not os.path.isfile(source_images[0]):
        # extract only if the images are not already present on disk
tar.extractall(path=dirname)
tar.close()
return source_images
def _generate_objects():
num = np.random.randint(1, 10)
xy = np.random.rand(num, 2)
wh = np.random.rand(num, 2) / 2
left = (xy[:, 0] - wh[:, 0])[:, np.newaxis]
right = (xy[:, 0] + wh[:, 0])[:, np.newaxis]
top = (xy[:, 1] - wh[:, 1])[:, np.newaxis]
bot = (xy[:, 1] + wh[:, 1])[:, np.newaxis]
boxes = np.maximum(0., np.minimum(1., np.hstack((left, top, right, bot))))
cid = np.random.randint(0, 20, size=num)
label = np.hstack((cid[:, np.newaxis], boxes)).ravel().tolist()
return [2, 5] + label
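# Note on the label layout above: _generate_objects() returns a flat detection
# label [2, 5, cid, xmin, ymin, xmax, ymax, ...]; the leading [2, 5] is assumed
# to be the header expected by ImageDetIter (header width and per-object width),
# followed by one 5-tuple per randomly generated box.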
class TestImage(unittest.TestCase):
IMAGES_URL = "http://data.mxnet.io/data/test_images.tar.gz"
IMAGES = []
IMAGES_DIR = None
@classmethod
def setupClass(cls):
cls.IMAGES_DIR = tempfile.mkdtemp()
cls.IMAGES = _get_data(cls.IMAGES_URL, cls.IMAGES_DIR)
print("Loaded {} images".format(len(cls.IMAGES)))
@classmethod
def teardownClass(cls):
if cls.IMAGES_DIR:
print("cleanup {}".format(cls.IMAGES_DIR))
shutil.rmtree(cls.IMAGES_DIR)
@raises(mx.base.MXNetError)
def test_imread_not_found(self):
x = mx.img.image.imread("/139810923jadjsajlskd.___adskj/blah.jpg")
def test_imread_vs_imdecode(self):
for img in TestImage.IMAGES:
with open(img, 'rb') as fp:
str_image = fp.read()
image = mx.image.imdecode(str_image, to_rgb=0)
image_read = mx.img.image.imread(img)
                assert same(image.asnumpy(), image_read.asnumpy())
def test_imdecode(self):
try:
import cv2
except ImportError:
return
for img in TestImage.IMAGES:
with open(img, 'rb') as fp:
str_image = fp.read()
image = mx.image.imdecode(str_image, to_rgb=0)
cv_image = cv2.imread(img)
assert_almost_equal(image.asnumpy(), cv_image)
def test_scale_down(self):
assert mx.image.scale_down((640, 480), (720, 120)) == (640, 106)
assert mx.image.scale_down((360, 1000), (480, 500)) == (360, 375)
assert mx.image.scale_down((300, 400), (0, 0)) == (0, 0)
def test_resize_short(self):
try:
import cv2
except ImportError:
return
for img in TestImage.IMAGES:
cv_img = cv2.imread(img)
mx_img = mx.nd.array(cv_img[:, :, (2, 1, 0)])
h, w, _ = cv_img.shape
for _ in range(3):
new_size = np.random.randint(1, 1000)
if h > w:
new_h, new_w = new_size * h // w, new_size
else:
new_h, new_w = new_size, new_size * w // h
for interp in range(0, 2):
# area-based/lanczos don't match with cv2?
cv_resized = cv2.resize(cv_img, (new_w, new_h), interpolation=interp)
mx_resized = mx.image.resize_short(mx_img, new_size, interp)
assert_almost_equal(mx_resized.asnumpy()[:, :, (2, 1, 0)], cv_resized, atol=3)
def test_color_normalize(self):
for _ in range(10):
mean = np.random.rand(3) * 255
std = np.random.rand(3) + 1
width = np.random.randint(100, 500)
height = np.random.randint(100, 500)
src = np.random.rand(height, width, 3) * 255.
mx_result = mx.image.color_normalize(mx.nd.array(src),
mx.nd.array(mean), mx.nd.array(std))
assert_almost_equal(mx_result.asnumpy(), (src - mean) / std, atol=1e-3)
def test_imageiter(self):
def check_imageiter(dtype='float32'):
im_list = [[np.random.randint(0, 5), x] for x in TestImage.IMAGES]
fname = './data/test_imageiter.lst'
file_list = ['\t'.join([str(k), str(np.random.randint(0, 5)), x])
for k, x in enumerate(TestImage.IMAGES)]
with open(fname, 'w') as f:
for line in file_list:
f.write(line + '\n')
test_list = ['imglist', 'path_imglist']
for test in test_list:
imglist = im_list if test == 'imglist' else None
path_imglist = fname if test == 'path_imglist' else None
test_iter = mx.image.ImageIter(2, (3, 224, 224), label_width=1, imglist=imglist,
path_imglist=path_imglist, path_root='', dtype=dtype)
# test batch data shape
for _ in range(3):
for batch in test_iter:
assert batch.data[0].shape == (2, 3, 224, 224)
test_iter.reset()
# test last batch handle(discard)
test_iter = mx.image.ImageIter(3, (3, 224, 224), label_width=1, imglist=imglist,
path_imglist=path_imglist, path_root='', dtype=dtype, last_batch_handle='discard')
i = 0
for batch in test_iter:
i += 1
assert i == 5
# test last_batch_handle(pad)
test_iter = mx.image.ImageIter(3, (3, 224, 224), label_width=1, imglist=imglist,
path_imglist=path_imglist, path_root='', dtype=dtype, last_batch_handle='pad')
i = 0
for batch in test_iter:
if i == 0:
first_three_data = batch.data[0][:2]
if i == 5:
last_three_data = batch.data[0][1:]
i += 1
assert i == 6
assert np.array_equal(first_three_data.asnumpy(), last_three_data.asnumpy())
# test last_batch_handle(roll_over)
test_iter = mx.image.ImageIter(3, (3, 224, 224), label_width=1, imglist=imglist,
path_imglist=path_imglist, path_root='', dtype=dtype, last_batch_handle='roll_over')
i = 0
for batch in test_iter:
if i == 0:
first_image = batch.data[0][0]
i += 1
assert i == 5
test_iter.reset()
first_batch_roll_over = test_iter.next()
assert np.array_equal(
first_batch_roll_over.data[0][1].asnumpy(), first_image.asnumpy())
assert first_batch_roll_over.pad == 2
                # test that the iterator works properly after calling reset several times when last_batch_handle is roll_over
for _ in test_iter:
pass
test_iter.reset()
first_batch_roll_over_twice = test_iter.next()
assert np.array_equal(
first_batch_roll_over_twice.data[0][2].asnumpy(), first_image.asnumpy())
assert first_batch_roll_over_twice.pad == 1
# we've called next once
i = 1
for _ in test_iter:
i += 1
# test the third epoch with size 6
assert i == 6
# test shuffle option for sanity test
test_iter = mx.image.ImageIter(3, (3, 224, 224), label_width=1, imglist=imglist, shuffle=True,
path_imglist=path_imglist, path_root='', dtype=dtype, last_batch_handle='pad')
for _ in test_iter:
pass
for dtype in ['int32', 'float32', 'int64', 'float64']:
check_imageiter(dtype)
# test with default dtype
check_imageiter()
@with_seed()
def test_augmenters(self):
# ColorNormalizeAug
mean = np.random.rand(3) * 255
std = np.random.rand(3) + 1
width = np.random.randint(100, 500)
height = np.random.randint(100, 500)
src = np.random.rand(height, width, 3) * 255.
# We test numpy and mxnet NDArray inputs
color_norm_aug = mx.image.ColorNormalizeAug(mean=mx.nd.array(mean), std=std)
out_image = color_norm_aug(mx.nd.array(src))
assert_almost_equal(out_image.asnumpy(), (src - mean) / std, atol=1e-3)
# only test if all augmenters will work
# TODO(Joshua Zhang): verify the augmenter outputs
im_list = [[0, x] for x in TestImage.IMAGES]
test_iter = mx.image.ImageIter(2, (3, 224, 224), label_width=1, imglist=im_list,
resize=640, rand_crop=True, rand_resize=True, rand_mirror=True, mean=True,
std=np.array([1.1, 1.03, 1.05]), brightness=0.1, contrast=0.1, saturation=0.1,
hue=0.1, pca_noise=0.1, rand_gray=0.2, inter_method=10, path_root='', shuffle=True)
for batch in test_iter:
pass
def test_image_detiter(self):
im_list = [_generate_objects() + [x] for x in TestImage.IMAGES]
det_iter = mx.image.ImageDetIter(2, (3, 300, 300), imglist=im_list, path_root='')
for _ in range(3):
for batch in det_iter:
pass
det_iter.reset()
val_iter = mx.image.ImageDetIter(2, (3, 300, 300), imglist=im_list, path_root='')
det_iter = val_iter.sync_label_shape(det_iter)
# test file list
fname = './data/test_imagedetiter.lst'
im_list = [[k] + _generate_objects() + [x] for k, x in enumerate(TestImage.IMAGES)]
with open(fname, 'w') as f:
for line in im_list:
line = '\t'.join([str(k) for k in line])
f.write(line + '\n')
det_iter = mx.image.ImageDetIter(2, (3, 400, 400), path_imglist=fname,
path_root='')
for batch in det_iter:
pass
def test_det_augmenters(self):
# only test if all augmenters will work
# TODO(Joshua Zhang): verify the augmenter outputs
im_list = [_generate_objects() + [x] for x in TestImage.IMAGES]
det_iter = mx.image.ImageDetIter(2, (3, 300, 300), imglist=im_list, path_root='',
resize=640, rand_crop=1, rand_pad=1, rand_gray=0.1, rand_mirror=True, mean=True,
std=np.array([1.1, 1.03, 1.05]), brightness=0.1, contrast=0.1, saturation=0.1,
pca_noise=0.1, hue=0.1, inter_method=10, min_object_covered=0.5,
aspect_ratio_range=(0.2, 5), area_range=(0.1, 4.0), min_eject_coverage=0.5,
max_attempts=50)
for batch in det_iter:
pass
if __name__ == '__main__':
import nose
nose.runmodule()
|
|
import copy
import json
import numbers
import geojson
import geo
import spatialref
def geo_feature(geo):
"""extract a feature from various types of input"""
feature = geo
    try: feature = feature.__geo_interface__
except AttributeError: pass
return feature
def geo_features(geo):
"""extracts a list of features, for multiple types of input"""
features = geo
try: features = geo.__geo_interface__
except AttributeError: pass
try: features = features['features']
except TypeError: pass
return features
def geo_feature_collection(geo, srs=None):
"""return a feature collection for multiple types of input.
Add coordinate ref system, if srs arg given."""
# handle geo_interface
try: geo = geo.__geo_interface__
except AttributeError: pass
# input is already a feature collection?
    geo_type = None
    try: geo_type = geo['type']
    except (TypeError, KeyError): pass
    isfc = (geo_type == 'FeatureCollection')
if isfc:
fc = geo
else:
features = geo_features(geo)
fc = geojson.FeatureCollection(features)
# add coordinate reference system, if srs supplied
if srs: fc.crs = spatialref.geojson_crs(srs)
return fc
# Streams features to the output file one at a time. If the features are, for
# example, seg and node lists, this avoids the memory overhead of building a
# full geojson representation of the input.
def write_geojson(features, outFileName, srs=None):
features = geo_features(features)
with open(outFileName,'w') as f:
# FeatureCollection header
f.write('{\n"type": "FeatureCollection",\n')
# spatial ref spec
if srs:
f.write('"crs": ')
json.dump(spatialref.geojson_crs(srs),f,indent=2)
f.write(',\n')
# features header
f.write('"features": [\n')
# features
for feature in features:
geojson.dump(feature,f,indent=2)
if feature != features[-1]: f.write(',')
f.write('\n\n')
# close features
f.write(']\n')
# close FeatureCollection
f.write('}\n')
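# Minimal usage sketch for write_geojson() (names and values are illustrative):
#   feats = [{"type": "Feature",
#             "geometry": {"type": "Point", "coordinates": [10, 11]},
#             "properties": {"OBJECTID": 1}}]
#   write_geojson(feats, 'points.geojson')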
def filter_features(features, feature_func=None, geom_type=None, col_specs=None, clip_rect=None):
features = geo_features(features)
new_features = []
for feature in features:
if clip_rect and not coordinates_intersect_rect_q(feature['geometry']['coordinates'],
clip_rect): continue
if geom_type and feature['geometry']['type'] != geom_type: continue
new_feature = copy.deepcopy(feature)
if feature_func:
# feature_func may edit feature
if not feature_func(new_feature,new_feature['properties']): continue
new_features.append(new_feature)
if col_specs:
props = new_feature['properties']
out = {}
for (col_name,col_type,prop_name) in col_specs:
if prop_name in props:
out[col_name] = props[prop_name]
new_feature['properties'] = out
return new_features
def bbox(features):
features = geo_features(features)
bbox = geo.BBox()
for feature in features:
coords = feature['geometry']['coordinates']
_add_coords_to_bbox(bbox, coords)
return bbox.rect()
def _add_coords_to_bbox(bbox,coords):
if isinstance(coords[0], numbers.Number):
#single point
bbox.add_point(coords)
elif isinstance(coords[0][0], numbers.Number):
#list of points
bbox.add_points(coords)
else:
# nested point lists
for l in coords:
_add_coords_to_bbox(bbox, l)
# allows for nested coordinate lists
def coordinates_intersect_rect_q(coords, rect):
if isinstance(coords[0], numbers.Number):
#single point
return geo.points_intersect_rect_q((coords,), rect)
elif isinstance(coords[0][0], numbers.Number):
#list of points
return geo.points_intersect_rect_q(coords, rect)
else:
# nested point lists
for l in coords:
if coordinates_intersect_rect_q(l, rect): return True
return False
def _test_coordinates_intersect_rect_q():
coords1 = [[0,0], [10,10], [20,20]]
coords2 = [[100,100], [20,20]]
coords_l2 = [coords1, coords2]
coords3 = [12,50]
assert coordinates_intersect_rect_q(coords1,[15,15,30,30])
assert not coordinates_intersect_rect_q(coords1,[25,15,30,30])
assert coordinates_intersect_rect_q(coords_l2,[90,90,101,101])
assert coordinates_intersect_rect_q(coords3, [11,49,13,51])
def test():
_test_coordinates_intersect_rect_q()
features = [
{
"geometry": {
"type": "LineString",
"coordinates": [ [10,11],[20,21] ]
},
"type": "Feature",
"properties": {
"OBJECTID": 1,
"FULLNAME": "THE UPLANDS PATH",
"CATEGORY": "PEDESTRIAN"
}
},
{
"geometry": {
"type": "LineString",
"coordinates": [ [100,110],[200,210] ]
},
"type": "Feature",
"properties": {
"OBJECTID": 2,
"FULLNAME": "TRAIL",
"CATEGORY": "PEDESTRIAN"
}
},
{
"geometry": {
"type": "Point",
"coordinates": [ 100,110 ]
},
"type": "Feature",
"properties": {
"OBJECTID": 10,
"FULLNAME": "pointy dude",
"CATEGORY": "LIGHT RAIL STOP"
}
}]
class GeoThingy(object):
@property
def __geo_interface__(self): return self.geo
def __init__(self,features):
self.geo = geojson.FeatureCollection(features)
class GeoFeatureThingy(object):
@property
def __geo_interface__(self): return self.geo
def __init__(self,feature):
self.geo = feature
thingys = [GeoFeatureThingy(feature) for feature in features]
#print 'DEB writing files to test write_geojson()'
#write_geojson(features,'foo.geojson')
#write_geojson(thingys,'thingy.geojson')
# geo_features()
out = geo_features(features)
assert len(out)==3 and out[0]['type'] == 'Feature'
out = geo_features(geojson.FeatureCollection(features))
assert len(out)==3 and out[0]['type'] == 'Feature'
out = geo_features(GeoThingy(features))
assert len(out)==3 and out[0]['type'] == 'Feature'
#geo_feature_collection()
berkeley_url = "http://www.spatialreference.org/ref/epsg/wgs-84-utm-zone-10n/"
srs=spatialref.SRS(url=berkeley_url)
fc = geo_feature_collection(GeoThingy(features),srs=srs)
assert fc['crs']['properties'] and fc['crs']['type']
assert len(fc['features']) == 3
fc = geo_feature_collection(fc)
assert fc['crs']['properties'] and fc['crs']['type']
assert len(fc['features']) == 3
fc = geo_feature_collection(features,srs=srs)
assert fc['crs']['properties'] and fc['crs']['type']
assert len(fc['features']) == 3
# filter_features() - trivial case
assert len(filter_features([])) == 0
# check for deep copy
out = filter_features(features)
assert len(out) == 3
assert out[1]['properties']['FULLNAME'] == 'TRAIL'
out[1]['properties']['FULLNAME'] = 'Matt Davis'
assert features[1]['properties']['FULLNAME'] == 'TRAIL'
assert out[1]['properties']['FULLNAME'] == 'Matt Davis'
# geom_type
out = filter_features(features, geom_type='Point')
assert len(out)==1 and out[0]['geometry']['type']=='Point'
# feature_func
def ffunc(feature,props):
if not props['OBJECTID'] > 1: return False
props['FORMAL_NAME'] = 'Ms. ' + props['FULLNAME']
return True
out = filter_features(features, feature_func=ffunc)
assert len(out)==2
assert out[0]['properties']['FORMAL_NAME'] == 'Ms. TRAIL'
# col_specs
specs = [('num','INT','OBJECTID'),('name','INT','FORMAL_NAME')]
out = filter_features(features, ffunc, geom_type='LineString', col_specs=specs)
assert len(out) == 1
assert out[0]['properties']['name'] == 'Ms. TRAIL'
# clip_rect
out = filter_features(features, clip_rect= (0,0,15,15))
assert len(out)==1
assert out[0]['properties']['OBJECTID'] == 1
# bbox
assert bbox(features) == (10, 11, 200, 210)
print 'geofeatures PASS'
if __name__=="__main__":
test()
|
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
import sys
from django.core.urlresolvers import clear_url_caches, reverse
from django.utils import six
from cms.api import create_page, create_title
from cms.apphook_pool import apphook_pool
from cms.appresolver import applications_page_check, clear_app_resolvers, get_app_patterns
from cms.models import Title
from cms.test_utils.testcases import CMSTestCase, SettingsOverrideTestCase
from cms.test_utils.util.context_managers import SettingsOverride
from cms.tests.menu_utils import DumbPageLanguageUrl
from cms.utils.compat.dj import get_user_model
from cms.utils.conf import get_cms_setting
from cms.utils.i18n import force_language
APP_NAME = 'SampleApp'
NS_APP_NAME = 'NamespacedApp'
APP_MODULE = "cms.test_utils.project.sampleapp.cms_app"
class ApphooksTestCase(CMSTestCase):
def setUp(self):
clear_app_resolvers()
clear_url_caches()
if APP_MODULE in sys.modules:
del sys.modules[APP_MODULE]
self.reload_urls()
apphook_pool.clear()
def tearDown(self):
clear_app_resolvers()
clear_url_caches()
if APP_MODULE in sys.modules:
del sys.modules[APP_MODULE]
self.reload_urls()
apphook_pool.clear()
def reload_urls(self):
from django.conf import settings
url_modules = [
'cms.urls',
# TODO: Add here intermediary modules which may
# include() the 'cms.urls' if it isn't included
# directly in the root urlconf.
# '...',
'cms.test_utils.project.second_cms_urls_for_apphook_tests',
'cms.test_utils.project.urls_for_apphook_tests',
settings.ROOT_URLCONF,
]
clear_app_resolvers()
clear_url_caches()
for module in url_modules:
if module in sys.modules:
del sys.modules[module]
def create_base_structure(self, apphook, title_langs, namespace=None):
apphook_pool.clear()
superuser = get_user_model().objects.create_superuser('admin', '[email protected]', 'admin')
self.superuser = superuser
page = create_page("home", "nav_playground.html", "en",
created_by=superuser, published=True)
create_title('de', page.get_title(), page)
page.publish('de')
child_page = create_page("child_page", "nav_playground.html", "en",
created_by=superuser, published=True, parent=page)
create_title('de', child_page.get_title(), child_page)
child_page.publish('de')
child_child_page = create_page("child_child_page", "nav_playground.html",
"en", created_by=superuser, published=True, parent=child_page, apphook=apphook,
apphook_namespace=namespace)
create_title("de", child_child_page.get_title(), child_child_page)
child_child_page.publish('de')
        # publisher_public is set to draft on publish; reload works around an issue with the one-to-one reverse relation
child_child_page = self.reload(child_child_page)
if isinstance(title_langs, six.string_types):
titles = child_child_page.publisher_public.get_title_obj(title_langs)
else:
titles = [child_child_page.publisher_public.get_title_obj(l) for l in title_langs]
self.reload_urls()
return titles
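    # create_base_structure() builds home -> child_page -> child_child_page (the
    # apphooked page) in 'en' and 'de', publishes them, and returns the public
    # Title object(s) of the apphooked page for the requested language(s).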
def test_explicit_apphooks(self):
"""
Test explicit apphook loading with the CMS_APPHOOKS setting.
"""
apphooks = (
'%s.%s' % (APP_MODULE, APP_NAME),
)
with SettingsOverride(CMS_APPHOOKS=apphooks):
apphook_pool.clear()
hooks = apphook_pool.get_apphooks()
app_names = [hook[0] for hook in hooks]
self.assertEqual(len(hooks), 1)
self.assertEqual(app_names, [APP_NAME])
apphook_pool.clear()
def test_implicit_apphooks(self):
"""
Test implicit apphook loading with INSTALLED_APPS cms_app.py
"""
apps = ['cms.test_utils.project.sampleapp']
with SettingsOverride(INSTALLED_APPS=apps, ROOT_URLCONF='cms.test_utils.project.urls_for_apphook_tests'):
apphook_pool.clear()
hooks = apphook_pool.get_apphooks()
app_names = [hook[0] for hook in hooks]
self.assertEqual(len(hooks), 3)
self.assertIn(NS_APP_NAME, app_names)
self.assertIn(APP_NAME, app_names)
apphook_pool.clear()
def test_apphook_on_root(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.urls_for_apphook_tests'):
apphook_pool.clear()
superuser = get_user_model().objects.create_superuser('admin', '[email protected]', 'admin')
page = create_page("apphooked-page", "nav_playground.html", "en",
created_by=superuser, published=True, apphook="SampleApp")
blank_page = create_page("not-apphooked-page", "nav_playground.html", "en",
created_by=superuser, published=True, apphook="", slug='blankapp')
english_title = page.title_set.all()[0]
self.assertEqual(english_title.language, 'en')
create_title("de", "aphooked-page-de", page)
self.assertTrue(page.publish('en'))
self.assertTrue(page.publish('de'))
self.assertTrue(blank_page.publish('en'))
with force_language("en"):
response = self.client.get(self.get_pages_root())
self.assertTemplateUsed(response, 'sampleapp/home.html')
self.assertContains(response, '<--noplaceholder-->')
response = self.client.get('/en/blankapp/')
self.assertTemplateUsed(response, 'nav_playground.html')
apphook_pool.clear()
def test_apphook_on_root_reverse(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.urls_for_apphook_tests'):
apphook_pool.clear()
superuser = get_user_model().objects.create_superuser('admin', '[email protected]', 'admin')
page = create_page("apphooked-page", "nav_playground.html", "en",
created_by=superuser, published=True, apphook="SampleApp")
create_title("de", "aphooked-page-de", page)
self.assertTrue(page.publish('de'))
self.assertTrue(page.publish('en'))
self.reload_urls()
self.assertFalse(reverse('sample-settings').startswith('//'))
apphook_pool.clear()
def test_get_page_for_apphook(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests'):
en_title, de_title = self.create_base_structure(APP_NAME, ['en', 'de'])
with force_language("en"):
path = reverse('sample-settings')
request = self.get_request(path)
request.LANGUAGE_CODE = 'en'
attached_to_page = applications_page_check(request, path=path[1:]) # strip leading slash
self.assertEqual(attached_to_page.pk, en_title.page.pk)
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/home.html')
self.assertContains(response, en_title.title)
with force_language("de"):
path = reverse('sample-settings')
request = self.get_request(path)
request.LANGUAGE_CODE = 'de'
attached_to_page = applications_page_check(request, path=path[1:]) # strip leading slash and language prefix
self.assertEqual(attached_to_page.pk, de_title.page.pk)
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/home.html')
self.assertContains(response, de_title.title)
apphook_pool.clear()
def test_apphook_permissions(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests'):
en_title, de_title = self.create_base_structure(APP_NAME, ['en', 'de'])
with force_language("en"):
path = reverse('sample-settings')
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
page = en_title.page.publisher_public
page.login_required = True
page.save()
page.publish('en')
response = self.client.get(path)
self.assertEqual(response.status_code, 302)
apphook_pool.clear()
def test_get_page_for_apphook_on_preview_or_edit(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.urls_3'):
if get_user_model().USERNAME_FIELD == 'email':
superuser = get_user_model().objects.create_superuser('admin', '[email protected]', '[email protected]')
else:
superuser = get_user_model().objects.create_superuser('admin', '[email protected]', 'admin')
page = create_page("home", "nav_playground.html", "en",
created_by=superuser, published=True, apphook=APP_NAME)
create_title('de', page.get_title(), page)
page.publish('en')
page.publish('de')
page.save()
public_page = page.get_public_object()
with self.login_user_context(superuser):
with force_language("en"):
path = reverse('sample-settings')
request = self.get_request(path + '?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON'))
request.LANGUAGE_CODE = 'en'
attached_to_page = applications_page_check(request, path=path[1:]) # strip leading slash
response = self.client.get(path+"?edit")
self.assertContains(response, '?redirect=')
with force_language("de"):
path = reverse('sample-settings')
request = self.get_request(path + '?%s' % get_cms_setting('CMS_TOOLBAR_URL__EDIT_ON'))
request.LANGUAGE_CODE = 'de'
attached_to_page = applications_page_check(request, path=path[1:]) # strip leading slash
self.assertEqual(attached_to_page.pk, public_page.pk)
def test_get_root_page_for_apphook_with_instance_namespace(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests'):
en_title = self.create_base_structure(NS_APP_NAME, 'en', 'instance_ns')
self.reload_urls()
with force_language("en"):
reverse("example_app:example")
reverse("example1:example")
reverse("example2:example")
path = reverse('namespaced_app_ns:sample-root')
path_instance = reverse('instance_ns:sample-root')
self.assertEqual(path, path_instance)
request = self.get_request(path)
request.LANGUAGE_CODE = 'en'
attached_to_page = applications_page_check(request, path=path[1:]) # strip leading slash
self.assertEqual(attached_to_page.pk, en_title.page.pk)
apphook_pool.clear()
def test_get_child_page_for_apphook_with_instance_namespace(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests'):
en_title = self.create_base_structure(NS_APP_NAME, 'en', 'instance_ns')
with force_language("en"):
path = reverse('namespaced_app_ns:sample-settings')
path_instance1 = reverse('instance_ns:sample-settings')
path_instance2 = reverse('namespaced_app_ns:sample-settings', current_app='instance_ns')
self.assertEqual(path, path_instance1)
self.assertEqual(path, path_instance2)
request = self.get_request(path)
request.LANGUAGE_CODE = 'en'
attached_to_page = applications_page_check(request, path=path[1:]) # strip leading slash
self.assertEqual(attached_to_page.pk, en_title.page_id)
apphook_pool.clear()
def test_get_sub_page_for_apphook_with_implicit_current_app(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests'):
en_title = self.create_base_structure(NS_APP_NAME, 'en', 'namespaced_app_ns')
with force_language("en"):
path = reverse('namespaced_app_ns:current-app')
request = self.get_request(path)
request.LANGUAGE_CODE = 'en'
attached_to_page = applications_page_check(request, path=path[1:]) # strip leading slash
self.assertEqual(attached_to_page.pk, en_title.page.pk)
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/app.html')
self.assertContains(response, 'namespaced_app_ns')
self.assertContains(response, path)
apphook_pool.clear()
def test_get_i18n_apphook_with_explicit_current_app(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests'):
titles = self.create_base_structure(NS_APP_NAME, ['en', 'de'], 'instance_1')
public_de_title = titles[1]
de_title = Title.objects.get(page=public_de_title.page.publisher_draft, language="de")
de_title.slug = "de"
de_title.save()
de_title.page.publish('de')
page2 = create_page("page2", "nav_playground.html",
"en", created_by=self.superuser, published=True, parent=de_title.page.parent,
apphook=NS_APP_NAME,
apphook_namespace="instance_2")
create_title("de", "de_title", page2, slug="slug")
page2.publish('de')
clear_app_resolvers()
clear_url_caches()
if APP_MODULE in sys.modules:
del sys.modules[APP_MODULE]
self.reload_urls()
with force_language("de"):
reverse('namespaced_app_ns:current-app', current_app="instance_1")
reverse('namespaced_app_ns:current-app', current_app="instance_2")
reverse('namespaced_app_ns:current-app')
with force_language("en"):
reverse('namespaced_app_ns:current-app', current_app="instance_1")
reverse('namespaced_app_ns:current-app', current_app="instance_2")
reverse('namespaced_app_ns:current-app')
def test_apphook_include_extra_parameters(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests'):
self.create_base_structure(NS_APP_NAME, ['en', 'de'], 'instance_1')
with force_language("en"):
path = reverse('namespaced_app_ns:extra_second')
request = self.get_request(path)
request.LANGUAGE_CODE = 'en'
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/extra.html')
self.assertContains(response, 'someopts')
def test_get_sub_page_for_apphook_with_explicit_current_app(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests'):
en_title = self.create_base_structure(NS_APP_NAME, 'en', 'instance_ns')
with force_language("en"):
path = reverse('namespaced_app_ns:current-app')
request = self.get_request(path)
request.LANGUAGE_CODE = 'en'
attached_to_page = applications_page_check(request, path=path[1:]) # strip leading slash
self.assertEqual(attached_to_page.pk, en_title.page.pk)
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/app.html')
self.assertContains(response, 'instance_ns')
self.assertContains(response, path)
apphook_pool.clear()
def test_include_urlconf(self):
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.second_urls_for_apphook_tests'):
self.create_base_structure(APP_NAME, 'en')
path = reverse('extra_second')
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/extra.html')
self.assertContains(response, "test included urlconf")
path = reverse('extra_first')
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/extra.html')
self.assertContains(response, "test urlconf")
with force_language("de"):
path = reverse('extra_first')
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/extra.html')
self.assertContains(response, "test urlconf")
with force_language("de"):
path = reverse('extra_second')
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/extra.html')
self.assertContains(response, "test included urlconf")
apphook_pool.clear()
def test_apphook_breaking_under_home_with_new_path_caching(self):
with SettingsOverride(CMS_PERMISSION=False, ROOT_URLCONF='cms.test_utils.project.urls_2'):
home = create_page("home", "nav_playground.html", "en", published=True)
child = create_page("child", "nav_playground.html", "en", published=True, parent=home)
# not-home is what breaks stuff, because it contains the slug of the home page
not_home = create_page("not-home", "nav_playground.html", "en", published=True, parent=child)
create_page("subchild", "nav_playground.html", "en", published=True, parent=not_home, apphook='SampleApp')
with force_language("en"):
self.reload_urls()
urlpatterns = get_app_patterns()
resolver = urlpatterns[0]
url = resolver.reverse('sample-root')
self.assertEqual(url, 'child/not-home/subchild/')
def test_apphook_urlpattern_order(self):
        # this one includes the actual cms.urls, so it can verify that they are
        # loaded in the correct order (the cms page pattern must come last);
        # the other test cases replicate the inclusion code and thus don't test this
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.urls'):
self.create_base_structure(APP_NAME, 'en')
path = reverse('extra_second')
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/extra.html')
self.assertContains(response, "test included urlconf")
def test_apphooks_receive_url_params(self):
# make sure that urlparams actually reach the apphook views
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.urls'):
self.create_base_structure(APP_NAME, 'en')
path = reverse('sample-params', kwargs=dict(my_params='is-my-param-really-in-the-context-QUESTIONMARK'))
response = self.client.get(path)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'sampleapp/home.html')
self.assertContains(response, 'my_params: is-my-param-really-in-the-context-QUESTIONMARK')
def test_multiple_apphooks(self):
# test for #1538
with SettingsOverride(ROOT_URLCONF='cms.test_utils.project.third_urls_for_apphook_tests'):
apphook_pool.clear()
superuser = get_user_model().objects.create_superuser('admin', '[email protected]', 'admin')
create_page("home", "nav_playground.html", "en", created_by=superuser, published=True, )
create_page("apphook1-page", "nav_playground.html", "en",
created_by=superuser, published=True, apphook="SampleApp")
create_page("apphook2-page", "nav_playground.html", "en",
created_by=superuser, published=True, apphook="SampleApp2")
reverse('sample-root')
reverse('sample2-root')
apphook_pool.clear()
class ApphooksPageLanguageUrlTestCase(SettingsOverrideTestCase):
settings_overrides = {'ROOT_URLCONF': 'cms.test_utils.project.second_urls_for_apphook_tests'}
def setUp(self):
clear_app_resolvers()
clear_url_caches()
if APP_MODULE in sys.modules:
del sys.modules[APP_MODULE]
self.reload_urls()
def tearDown(self):
clear_app_resolvers()
clear_url_caches()
if APP_MODULE in sys.modules:
del sys.modules[APP_MODULE]
apphook_pool.clear()
def reload_urls(self):
from django.conf import settings
url_modules = [
'cms.urls',
'cms.test_utils.project.second_cms_urls_for_apphook_tests',
settings.ROOT_URLCONF,
]
clear_app_resolvers()
clear_url_caches()
for module in url_modules:
if module in sys.modules:
del sys.modules[module]
def test_page_language_url_for_apphook(self):
apphook_pool.clear()
superuser = get_user_model().objects.create_superuser('admin', '[email protected]', 'admin')
page = create_page("home", "nav_playground.html", "en",
created_by=superuser)
create_title('de', page.get_title(), page)
page.publish('en')
page.publish('de')
child_page = create_page("child_page", "nav_playground.html", "en",
created_by=superuser, parent=page)
create_title('de', child_page.get_title(), child_page)
child_page.publish('en')
child_page.publish('de')
child_child_page = create_page("child_child_page", "nav_playground.html",
"en", created_by=superuser, parent=child_page, apphook='SampleApp')
create_title("de", '%s_de' % child_child_page.get_title(), child_child_page)
child_child_page.publish('en')
child_child_page.publish('de')
# publisher_public is set to draft on publish, issue with one to one reverse
child_child_page = self.reload(child_child_page)
with force_language("en"):
path = reverse('extra_first')
request = self.get_request(path)
request.LANGUAGE_CODE = 'en'
request.current_page = child_child_page
fake_context = {'request': request}
tag = DumbPageLanguageUrl()
output = tag.get_context(fake_context, 'en')
url = output['content']
self.assertEqual(url, '/en/child_page/child_child_page/extra_1/')
output = tag.get_context(fake_context, 'de')
url = output['content']
            # note the extra "_de" suffix
self.assertEqual(url, '/de/child_page/child_child_page_de/extra_1/')
output = tag.get_context(fake_context, 'fr')
url = output['content']
self.assertEqual(url, '/fr/child_page/child_child_page/extra_1/')
apphook_pool.clear()
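# A minimal standalone sketch (an illustration, not part of the test suite above)
# of the pattern these tests exercise: once a page is published with
# apphook='SampleApp', the URL names exposed by that apphook become reversible,
# and cms.utils.i18n.force_language controls which language prefix reverse()
# produces. The 'sample-root' name and the SampleApp apphook are taken from the
# fixtures above; in current Django reverse lives in django.urls, while older
# projects import it from django.core.urlresolvers.
def _sketch_reverse_apphook_url():
    from django.urls import reverse
    from cms.utils.i18n import force_language
    with force_language("en"):
        english_url = reverse('sample-root')   # e.g. '/en/<page-path>/'
    with force_language("de"):
        german_url = reverse('sample-root')    # same view, German URL prefix
    return english_url, german_url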
|
|
import operator
import warnings
from haystack import connections, connection_router
from haystack.backends import SQ
from haystack.constants import REPR_OUTPUT_SIZE, ITERATOR_LOAD_PER_QUERY, DEFAULT_OPERATOR
from haystack.exceptions import NotHandled
from haystack.inputs import Raw, Clean, AutoQuery
from haystack.utils import log as logging
class SearchQuerySet(object):
"""
Provides a way to specify search parameters and lazily load results.
Supports chaining (a la QuerySet) to narrow the search.
"""
def __init__(self, using=None, query=None):
# ``_using`` should only ever be a value other than ``None`` if it's
# been forced with the ``.using`` method.
self._using = using
self.query = None
self._determine_backend()
# If ``query`` is present, it should override even what the routers
# think.
if query is not None:
self.query = query
self._result_cache = []
self._result_count = None
self._cache_full = False
self._load_all = False
self._ignored_result_count = 0
self.log = logging.getLogger('haystack')
    def _determine_backend(self):
        from haystack import connections
        # A backend has been manually selected. Use it instead.
        if self._using is not None:
            self.query = connections[self._using].get_query()
            return
        # No backend, so rely on the routers to figure out what's right.
        hints = {}
if self.query:
hints['models'] = self.query.models
backend_alias = connection_router.for_read(**hints)
if isinstance(backend_alias, (list, tuple)) and len(backend_alias):
# We can only effectively read from one engine.
backend_alias = backend_alias[0]
# The ``SearchQuery`` might swap itself out for a different variant
# here.
if self.query:
self.query = self.query.using(backend_alias)
else:
self.query = connections[backend_alias].get_query()
def __getstate__(self):
"""
For pickling.
"""
len(self)
obj_dict = self.__dict__.copy()
obj_dict['_iter'] = None
obj_dict['log'] = None
return obj_dict
def __setstate__(self, data_dict):
"""
For unpickling.
"""
self.__dict__ = data_dict
self.log = logging.getLogger('haystack')
def __repr__(self):
data = list(self[:REPR_OUTPUT_SIZE])
if len(self) > REPR_OUTPUT_SIZE:
data[-1] = "...(remaining elements truncated)..."
return repr(data)
def __len__(self):
if not self._result_count:
self._result_count = self.query.get_count()
# Some backends give weird, false-y values here. Convert to zero.
if not self._result_count:
self._result_count = 0
# This needs to return the actual number of hits, not what's in the cache.
return self._result_count - self._ignored_result_count
def __iter__(self):
if self._cache_is_full():
# We've got a fully populated cache. Let Python do the hard work.
return iter(self._result_cache)
return self._manual_iter()
def __and__(self, other):
if isinstance(other, EmptySearchQuerySet):
return other._clone()
combined = self._clone()
combined.query.combine(other.query, SQ.AND)
return combined
def __or__(self, other):
combined = self._clone()
if isinstance(other, EmptySearchQuerySet):
return combined
combined.query.combine(other.query, SQ.OR)
return combined
def _cache_is_full(self):
if not self.query.has_run():
return False
if len(self) <= 0:
return True
try:
self._result_cache.index(None)
return False
except ValueError:
# No ``None``s found in the results. Check the length of the cache.
return len(self._result_cache) > 0
def _manual_iter(self):
# If we're here, our cache isn't fully populated.
# For efficiency, fill the cache as we go if we run out of results.
# Also, this can't be part of the __iter__ method due to Python's rules
# about generator functions.
current_position = 0
current_cache_max = 0
while True:
if len(self._result_cache) > 0:
try:
current_cache_max = self._result_cache.index(None)
except ValueError:
current_cache_max = len(self._result_cache)
while current_position < current_cache_max:
yield self._result_cache[current_position]
current_position += 1
if self._cache_is_full():
raise StopIteration
# We've run out of results and haven't hit our limit.
# Fill more of the cache.
if not self._fill_cache(current_position, current_position + ITERATOR_LOAD_PER_QUERY):
raise StopIteration
def _fill_cache(self, start, end, **kwargs):
# Tell the query where to start from and how many we'd like.
self.query._reset()
self.query.set_limits(start, end)
results = self.query.get_results(**kwargs)
        if results is None or len(results) == 0:
return False
# Setup the full cache now that we know how many results there are.
# We need the ``None``s as placeholders to know what parts of the
# cache we have/haven't filled.
# Using ``None`` like this takes up very little memory. In testing,
# an array of 100,000 ``None``s consumed less than .5 Mb, which ought
# to be an acceptable loss for consistent and more efficient caching.
if len(self._result_cache) == 0:
self._result_cache = [None for i in xrange(self.query.get_count())]
if start is None:
start = 0
if end is None:
end = self.query.get_count()
to_cache = self.post_process_results(results)
# Assign by slice.
self._result_cache[start:start + len(to_cache)] = to_cache
return True
def post_process_results(self, results):
to_cache = []
# Check if we wish to load all objects.
if self._load_all:
original_results = []
models_pks = {}
loaded_objects = {}
# Remember the search position for each result so we don't have to resort later.
for result in results:
original_results.append(result)
models_pks.setdefault(result.model, []).append(result.pk)
# Load the objects for each model in turn.
for model in models_pks:
try:
ui = connections[self.query._using].get_unified_index()
index = ui.get_index(model)
objects = index.read_queryset(using=self.query._using)
loaded_objects[model] = objects.in_bulk(models_pks[model])
except NotHandled:
                        self.log.warning("Model '%s' not handled by the routers.", model)
# Revert to old behaviour
loaded_objects[model] = model._default_manager.in_bulk(models_pks[model])
for result in results:
if self._load_all:
# We have to deal with integer keys being cast from strings
model_objects = loaded_objects.get(result.model, {})
if not result.pk in model_objects:
try:
result.pk = int(result.pk)
except ValueError:
pass
try:
result._object = model_objects[result.pk]
except KeyError:
# The object was either deleted since we indexed or should
# be ignored; fail silently.
self._ignored_result_count += 1
continue
to_cache.append(result)
return to_cache
def __getitem__(self, k):
"""
Retrieves an item or slice from the set of results.
"""
if not isinstance(k, (slice, int, long)):
raise TypeError
assert ((not isinstance(k, slice) and (k >= 0))
or (isinstance(k, slice) and (k.start is None or k.start >= 0)
and (k.stop is None or k.stop >= 0))), \
"Negative indexing is not supported."
        # Remember if it's a slice or not. We're going to treat everything as
        # a slice to simplify the logic and will `.pop()` at the end as needed.
if isinstance(k, slice):
is_slice = True
start = k.start
if k.stop is not None:
bound = int(k.stop)
else:
bound = None
else:
is_slice = False
start = k
bound = k + 1
        # We need to check whether we need to populate more of the cache.
if len(self._result_cache) <= 0 or (None in self._result_cache[start:bound] and not self._cache_is_full()):
try:
self._fill_cache(start, bound)
except StopIteration:
# There's nothing left, even though the bound is higher.
pass
# Cache should be full enough for our needs.
if is_slice:
return self._result_cache[start:bound]
else:
return self._result_cache[start]
# Methods that return a SearchQuerySet.
def all(self):
"""Returns all results for the query."""
return self._clone()
def none(self):
"""Returns an empty result list for the query."""
return self._clone(klass=EmptySearchQuerySet)
def filter(self, *args, **kwargs):
"""Narrows the search based on certain attributes and the default operator."""
if DEFAULT_OPERATOR == 'OR':
return self.filter_or(*args, **kwargs)
else:
return self.filter_and(*args, **kwargs)
def exclude(self, *args, **kwargs):
"""Narrows the search by ensuring certain attributes are not included."""
clone = self._clone()
clone.query.add_filter(~SQ(*args, **kwargs))
return clone
def filter_and(self, *args, **kwargs):
"""Narrows the search by looking for (and including) certain attributes."""
clone = self._clone()
clone.query.add_filter(SQ(*args, **kwargs))
return clone
def filter_or(self, *args, **kwargs):
"""Narrows the search by ensuring certain attributes are not included."""
clone = self._clone()
clone.query.add_filter(SQ(*args, **kwargs), use_or=True)
return clone
def order_by(self, *args):
"""Alters the order in which the results should appear."""
clone = self._clone()
for field in args:
clone.query.add_order_by(field)
return clone
def order_by_distance(self, **kwargs):
"""Alters the order in which the results should appear."""
clone = self._clone()
clone.query.add_order_by_distance(**kwargs)
return clone
def highlight(self):
"""Adds highlighting to the results."""
clone = self._clone()
clone.query.add_highlight()
return clone
def models(self, *models):
"""Accepts an arbitrary number of Model classes to include in the search."""
clone = self._clone()
for model in models:
if not model in connections[self.query._using].get_unified_index().get_indexed_models():
warnings.warn('The model %r is not registered for search.' % model)
clone.query.add_model(model)
return clone
def result_class(self, klass):
"""
Allows specifying a different class to use for results.
Overrides any previous usages. If ``None`` is provided, Haystack will
revert back to the default ``SearchResult`` object.
"""
clone = self._clone()
clone.query.set_result_class(klass)
return clone
def boost(self, term, boost):
"""Boosts a certain aspect of the query."""
clone = self._clone()
clone.query.add_boost(term, boost)
return clone
def facet(self, field):
"""Adds faceting to a query for the provided field."""
clone = self._clone()
clone.query.add_field_facet(field)
return clone
def within(self, field, point_1, point_2):
"""Spatial: Adds a bounding box search to the query."""
clone = self._clone()
clone.query.add_within(field, point_1, point_2)
return clone
def dwithin(self, field, point, distance):
"""Spatial: Adds a distance-based search to the query."""
clone = self._clone()
clone.query.add_dwithin(field, point, distance)
return clone
def distance(self, field, point):
"""
Spatial: Denotes results must have distance measurements from the
provided point.
"""
clone = self._clone()
clone.query.add_distance(field, point)
return clone
def date_facet(self, field, start_date, end_date, gap_by, gap_amount=1):
"""Adds faceting to a query for the provided field by date."""
clone = self._clone()
clone.query.add_date_facet(field, start_date, end_date, gap_by, gap_amount=gap_amount)
return clone
def query_facet(self, field, query):
"""Adds faceting to a query for the provided field with a custom query."""
clone = self._clone()
clone.query.add_query_facet(field, query)
return clone
def narrow(self, query):
"""Pushes existing facet choices into the search."""
clone = self._clone()
clone.query.add_narrow_query(query)
return clone
def raw_search(self, query_string, **kwargs):
"""Passes a raw query directly to the backend."""
return self.filter(content=Raw(query_string, **kwargs))
def load_all(self):
"""Efficiently populates the objects in the search results."""
clone = self._clone()
clone._load_all = True
return clone
def auto_query(self, query_string, fieldname='content'):
"""
Performs a best guess constructing the search query.
This method is somewhat naive but works well enough for the simple,
common cases.
"""
kwargs = {
fieldname: AutoQuery(query_string)
}
return self.filter(**kwargs)
def autocomplete(self, **kwargs):
"""
A shortcut method to perform an autocomplete search.
Must be run against fields that are either ``NgramField`` or
``EdgeNgramField``.
"""
clone = self._clone()
query_bits = []
for field_name, query in kwargs.items():
for word in query.split(' '):
bit = clone.query.clean(word.strip())
kwargs = {
field_name: bit,
}
query_bits.append(SQ(**kwargs))
return clone.filter(reduce(operator.__and__, query_bits))
def using(self, connection_name):
"""
Allows switching which connection the ``SearchQuerySet`` uses to
search in.
"""
clone = self._clone()
clone.query = self.query.using(connection_name)
clone._using = connection_name
return clone
# Methods that do not return a SearchQuerySet.
def count(self):
"""Returns the total number of matching results."""
return len(self)
def best_match(self):
"""Returns the best/top search result that matches the query."""
return self[0]
def latest(self, date_field):
"""Returns the most recent search result that matches the query."""
clone = self._clone()
clone.query.clear_order_by()
clone.query.add_order_by("-%s" % date_field)
return clone.best_match()
def more_like_this(self, model_instance):
"""Finds similar results to the object passed in."""
clone = self._clone()
clone.query.more_like_this(model_instance)
return clone
def facet_counts(self):
"""
Returns the facet counts found by the query.
This will cause the query to execute and should generally be used when
presenting the data.
"""
if self.query.has_run():
return self.query.get_facet_counts()
else:
clone = self._clone()
return clone.query.get_facet_counts()
def spelling_suggestion(self, preferred_query=None):
"""
Returns the spelling suggestion found by the query.
To work, you must set ``INCLUDE_SPELLING`` within your connection's
settings dictionary to ``True``. Otherwise, ``None`` will be returned.
This will cause the query to execute and should generally be used when
presenting the data.
"""
if self.query.has_run():
return self.query.get_spelling_suggestion(preferred_query)
else:
clone = self._clone()
return clone.query.get_spelling_suggestion(preferred_query)
def values(self, *fields):
"""
Returns a list of dictionaries, each containing the key/value pairs for
the result, exactly like Django's ``ValuesQuerySet``.
"""
qs = self._clone(klass=ValuesSearchQuerySet)
qs._fields.extend(fields)
return qs
def values_list(self, *fields, **kwargs):
"""
        Returns a list of field values as tuples, exactly like Django's
        ``QuerySet.values_list``.
Optionally accepts a ``flat=True`` kwarg, which in the case of a
single field being provided, will return a flat list of that field
rather than a list of tuples.
"""
flat = kwargs.pop("flat", False)
if flat and len(fields) > 1:
raise TypeError("'flat' is not valid when values_list is called with more than one field.")
qs = self._clone(klass=ValuesListSearchQuerySet)
qs._fields.extend(fields)
qs._flat = flat
return qs
# Utility methods.
def _clone(self, klass=None):
if klass is None:
klass = self.__class__
query = self.query._clone()
clone = klass(query=query)
clone._load_all = self._load_all
return clone
class EmptySearchQuerySet(SearchQuerySet):
"""
A stubbed SearchQuerySet that behaves as normal but always returns no
results.
"""
def __len__(self):
return 0
def _cache_is_full(self):
# Pretend the cache is always full with no results.
return True
def _clone(self, klass=None):
clone = super(EmptySearchQuerySet, self)._clone(klass=klass)
clone._result_cache = []
return clone
def _fill_cache(self, start, end):
return False
def facet_counts(self):
return {}
class ValuesListSearchQuerySet(SearchQuerySet):
"""
A ``SearchQuerySet`` which returns a list of field values as tuples, exactly
like Django's ``ValuesListQuerySet``.
"""
def __init__(self, *args, **kwargs):
super(ValuesListSearchQuerySet, self).__init__(*args, **kwargs)
self._flat = False
self._fields = []
# Removing this dependency would require refactoring much of the backend
# code (_process_results, etc.) and these aren't large enough to make it
# an immediate priority:
self._internal_fields = ['id', 'django_ct', 'django_id', 'score']
def _clone(self, klass=None):
clone = super(ValuesListSearchQuerySet, self)._clone(klass=klass)
clone._fields = self._fields
clone._flat = self._flat
return clone
def _fill_cache(self, start, end):
query_fields = set(self._internal_fields)
query_fields.update(self._fields)
kwargs = {
'fields': query_fields
}
return super(ValuesListSearchQuerySet, self)._fill_cache(start, end, **kwargs)
def post_process_results(self, results):
to_cache = []
if self._flat:
accum = to_cache.extend
else:
accum = to_cache.append
for result in results:
accum([getattr(result, i, None) for i in self._fields])
return to_cache
class ValuesSearchQuerySet(ValuesListSearchQuerySet):
"""
A ``SearchQuerySet`` which returns a list of dictionaries, each containing
the key/value pairs for the result, exactly like Django's
``ValuesQuerySet``.
"""
def _fill_cache(self, start, end):
query_fields = set(self._internal_fields)
query_fields.update(self._fields)
kwargs = {
'fields': query_fields
}
return super(ValuesListSearchQuerySet, self)._fill_cache(start, end, **kwargs)
def post_process_results(self, results):
to_cache = []
for result in results:
to_cache.append(dict((i, getattr(result, i, None)) for i in self._fields))
return to_cache
class RelatedSearchQuerySet(SearchQuerySet):
"""
A variant of the SearchQuerySet that can handle `load_all_queryset`s.
    This is predominantly different in the `_fill_cache` method, as it is
    far less efficient but needs to fill the cache in order to maintain
    consistency.
"""
_load_all_querysets = {}
_result_cache = []
def _cache_is_full(self):
return len(self._result_cache) >= len(self)
def _manual_iter(self):
# If we're here, our cache isn't fully populated.
# For efficiency, fill the cache as we go if we run out of results.
# Also, this can't be part of the __iter__ method due to Python's rules
# about generator functions.
current_position = 0
current_cache_max = 0
while True:
current_cache_max = len(self._result_cache)
while current_position < current_cache_max:
yield self._result_cache[current_position]
current_position += 1
if self._cache_is_full():
raise StopIteration
# We've run out of results and haven't hit our limit.
# Fill more of the cache.
start = current_position + self._ignored_result_count
if not self._fill_cache(start, start + ITERATOR_LOAD_PER_QUERY):
raise StopIteration
def _fill_cache(self, start, end):
# Tell the query where to start from and how many we'd like.
self.query._reset()
self.query.set_limits(start, end)
results = self.query.get_results()
if len(results) == 0:
return False
if start is None:
start = 0
if end is None:
end = self.query.get_count()
# Check if we wish to load all objects.
if self._load_all:
original_results = []
models_pks = {}
loaded_objects = {}
# Remember the search position for each result so we don't have to resort later.
for result in results:
original_results.append(result)
models_pks.setdefault(result.model, []).append(result.pk)
# Load the objects for each model in turn.
for model in models_pks:
if model in self._load_all_querysets:
# Use the overriding queryset.
loaded_objects[model] = self._load_all_querysets[model].in_bulk(models_pks[model])
else:
# Check the SearchIndex for the model for an override.
try:
index = connections[self.query._using].get_unified_index().get_index(model)
qs = index.load_all_queryset()
loaded_objects[model] = qs.in_bulk(models_pks[model])
except NotHandled:
# The model returned doesn't seem to be handled by the
# routers. We should silently fail and populate
# nothing for those objects.
                        loaded_objects[model] = {}
if len(results) + len(self._result_cache) < len(self) and len(results) < ITERATOR_LOAD_PER_QUERY:
self._ignored_result_count += ITERATOR_LOAD_PER_QUERY - len(results)
for result in results:
if self._load_all:
# We have to deal with integer keys being cast from strings; if this
# fails we've got a character pk.
try:
result.pk = int(result.pk)
except ValueError:
pass
try:
result._object = loaded_objects[result.model][result.pk]
except (KeyError, IndexError):
# The object was either deleted since we indexed or should
# be ignored; fail silently.
self._ignored_result_count += 1
continue
self._result_cache.append(result)
return True
def __getitem__(self, k):
"""
Retrieves an item or slice from the set of results.
"""
if not isinstance(k, (slice, int, long)):
raise TypeError
assert ((not isinstance(k, slice) and (k >= 0))
or (isinstance(k, slice) and (k.start is None or k.start >= 0)
and (k.stop is None or k.stop >= 0))), \
"Negative indexing is not supported."
        # Remember if it's a slice or not. We're going to treat everything as
        # a slice to simplify the logic and will `.pop()` at the end as needed.
if isinstance(k, slice):
is_slice = True
start = k.start
if k.stop is not None:
bound = int(k.stop)
else:
bound = None
else:
is_slice = False
start = k
bound = k + 1
        # We need to check whether we need to populate more of the cache.
if len(self._result_cache) <= 0 or not self._cache_is_full():
try:
while len(self._result_cache) < bound and not self._cache_is_full():
current_max = len(self._result_cache) + self._ignored_result_count
self._fill_cache(current_max, current_max + ITERATOR_LOAD_PER_QUERY)
except StopIteration:
# There's nothing left, even though the bound is higher.
pass
# Cache should be full enough for our needs.
if is_slice:
return self._result_cache[start:bound]
else:
return self._result_cache[start]
def load_all_queryset(self, model, queryset):
"""
Allows for specifying a custom ``QuerySet`` that changes how ``load_all``
will fetch records for the provided model.
This is useful for post-processing the results from the query, enabling
things like adding ``select_related`` or filtering certain data.
"""
clone = self._clone()
clone._load_all_querysets[model] = queryset
return clone
def _clone(self, klass=None):
if klass is None:
klass = self.__class__
query = self.query._clone()
clone = klass(query=query)
clone._load_all = self._load_all
clone._load_all_querysets = self._load_all_querysets
return clone
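# A brief usage sketch (an assumption for illustration; ``Note`` is a hypothetical
# indexed model, not something defined in this module). Every chained call below
# returns a new SearchQuerySet clone, and the backend is only queried when the
# results are consumed (len(), slicing, iteration), via the lazy cache filling
# implemented in _fill_cache().
def _example_chaining(Note):
    sqs = (SearchQuerySet()
           .models(Note)                                 # restrict to one indexed model
           .filter(content=AutoQuery("lazy loading"))    # best-guess query parsing
           .exclude(author="bot")                        # NOT author:bot
           .order_by("-pub_date")                        # backend-side ordering
           .load_all())                                  # also fetch the DB objects
    return sqs[:10]                                      # slicing triggers _fill_cache()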
|
|
# Copyright 2014-2020 Scalyr Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
from __future__ import absolute_import
import time
import os
import platform
import sys
if False:
from typing import Dict
from typing import Tuple
try:
import pathlib
except ImportError:
import pathlib2 as pathlib # type: ignore
import pytest
from scalyr_agent import scalyr_logging
from tests.unit.copying_manager_tests.common import (
CopyingManagerCommonTest,
TestableCopyingManager,
TestableCopyingManagerFlowController,
TestableLogFile,
TestEnvironBuilder,
TestingConfiguration,
)
from scalyr_agent import util as scalyr_util
import six
from six.moves import range
import mock
log = scalyr_logging.getLogger(__name__)
log.setLevel(scalyr_logging.DEBUG_LEVEL_0)
# mock library does not have PropertyMock in python 2.6, so we just keep it None.
if sys.version_info >= (2, 7):
PropertyMock = mock.PropertyMock
else:
PropertyMock = None
def pytest_generate_tests(metafunc):
"""
Run all tests for each configuration.
"""
if "worker_type" in metafunc.fixturenames:
test_params = [["thread", 1, 1], ["thread", 2, 2]]
        # if the OS is not Windows / OS X and the python version is 2.7 or newer, then also do the multiprocess workers testing.
if platform.system() not in ["Windows", "Darwin"] and sys.version_info >= (
2,
7,
):
test_params.extend([["process", 1, 1], ["process", 2, 2]])
metafunc.parametrize(
"worker_type, workers_count, worker_sessions_count", test_params
)
class CopyingManagerTest(CopyingManagerCommonTest):
@pytest.fixture(autouse=True)
def setup(self, worker_type, workers_count, worker_sessions_count):
super(CopyingManagerTest, self).setup()
self.use_multiprocessing_workers = worker_type == "process"
self.workers_count = workers_count
self.worker_sessions_count = worker_sessions_count
def teardown(self):
if self._instance is not None:
self._instance.stop_manager()
self._instance.cleanup()
super(CopyingManagerTest, self).teardown()
def _init_test_environment(
self,
use_pipelining=False,
config_data=None,
disable_flow_control=False,
):
pipeline_threshold = 1.1
if use_pipelining:
pipeline_threshold = 0.0
if config_data is None:
config_data = {}
if "workers" not in config_data:
workers = []
for i in range(self.workers_count - 1):
worker_config = {
"id": "key_id_%s" % i,
"api_key": "key_%s" % i,
}
workers.append(worker_config)
config_data["workers"] = workers
config_data["default_sessions_per_worker"] = self.worker_sessions_count
config_data["use_multiprocess_workers"] = self.use_multiprocessing_workers
config_data["disable_max_send_rate_enforcement_overrides"] = True
config_data["pipeline_threshold"] = pipeline_threshold
config_data["implicit_agent_log_collection"] = False
self._env_builder = TestEnvironBuilder()
self._env_builder.init_agent_dirs()
self._env_builder.init_config(config_data)
scalyr_logging.set_log_destination(
use_disk=True,
logs_directory=six.text_type(self._env_builder.config.agent_log_path),
agent_log_file_path="agent.log",
agent_debug_log_file_suffix="_debug",
)
scalyr_logging.__log_manager__.set_log_level(scalyr_logging.DEBUG_LEVEL_5)
self._env_builder.config.disable_flow_control = disable_flow_control
self._env_builder.config.skip_agent_log_change = False
def _create_manager_instance(self, auto_start=True):
self._instance = TestableCopyingManager(self._env_builder.config, [])
if auto_start:
self._instance.start_manager()
self._instance.run_and_stop_at(
TestableCopyingManagerFlowController.SLEEPING
)
return self._instance
def _init_manager(
self,
log_files_number=1,
auto_start=True,
use_pipelining=False,
config_data=None,
disable_flow_control=False,
): # type: (int, bool, bool, Dict, bool) -> Tuple[Tuple[TestableLogFile, ...], TestableCopyingManager]
if self._env_builder is None:
self._init_test_environment(
use_pipelining=use_pipelining,
config_data=config_data,
disable_flow_control=disable_flow_control,
)
if log_files_number is not None:
files = self._env_builder.recreate_files( # type: ignore
log_files_number, self._env_builder.non_glob_logs_dir # type: ignore
)
else:
files = tuple()
manager = self._create_manager_instance(auto_start=auto_start)
return files, manager # type: ignore
class TestBasic(CopyingManagerTest):
def test_multiple_workers(self):
_, manager = self._init_manager(2)
assert (
len(manager.worker_sessions)
== self.worker_sessions_count * self.workers_count
)
worker_pids = set(worker.get_pid() for worker in manager.worker_sessions)
if self.use_multiprocessing_workers:
assert len(worker_pids) == self.worker_sessions_count * self.workers_count
assert os.getpid() not in worker_pids
else:
            # in case of non multiprocess workers, all workers have the same process id as the main process.
assert worker_pids == set([os.getpid()])
def test_generate_status(self):
(test_file, test_file2), manager = self._init_manager(2)
test_file.append_lines("line1")
test_file2.append_lines("line2")
assert set(self._wait_for_rpc_and_respond()) == set(["line1", "line2"])
status = manager.generate_status()
assert status.health_check_result == "Good"
return
def test_health_check_status(self):
(test_file, test_file2), manager = self._init_manager(2)
manager._CopyingManager__last_scan_attempt_time = time.time()
status = manager.generate_status()
assert status.health_check_result == "Good"
def test_health_check_status_failed(self):
(test_file, test_file2), manager = self._init_manager(2)
manager._CopyingManager__last_scan_attempt_time = time.time() - (1000 * 65)
status = manager.generate_status()
assert (
status.health_check_result
== "Failed, max time since last scan attempt (60.0 seconds) exceeded"
)
def test_health_check_status_worker_failed(self):
(test_file, test_file2), manager = self._init_manager(2)
# get all workers and simulate their last attempt timeout.
for worker in manager.worker_sessions:
worker.change_last_attempt_time(time.time() - (1000 * 65))
status = manager.generate_status()
if self.worker_sessions_count > 1 or self.workers_count > 1:
assert status.worker_sessions_health_check == "Some workers have failed."
assert status.health_check_result == "Good"
else:
assert (
status.worker_sessions_health_check
== "Worker session 'default-0' failed, max time since last copy attempt (60.0 seconds) exceeded"
)
assert status.health_check_result == "Good"
def test_failed_health_check_status_and_failed_worker(self):
(test_file, test_file2), manager = self._init_manager(2)
manager._CopyingManager__last_scan_attempt_time = time.time() - (1000 * 65)
# get all workers and simulate their last attempt timeout.
for worker in manager.worker_sessions:
worker.change_last_attempt_time(time.time() - (1000 * 65))
status = manager.generate_status()
if self.worker_sessions_count > 1 or self.workers_count > 1:
assert status.worker_sessions_health_check == "Some workers have failed."
assert (
status.health_check_result
== "Failed, max time since last scan attempt (60.0 seconds) exceeded"
)
else:
assert (
status.worker_sessions_health_check
== "Worker session 'default-0' failed, max time since last copy attempt (60.0 seconds) exceeded"
)
assert (
status.health_check_result
== "Failed, max time since last scan attempt (60.0 seconds) exceeded"
)
def test_checkpoints(self):
(test_file, test_file2), manager = self._init_manager(2)
        # also add checkpoint files that are unrelated to the copying manager, to be sure that the copying manager
        # does not touch them. This emulates the case where some agent monitors also store their own state in
        # checkpoint files and we must not consolidate them with the worker checkpoints.
monitor_checkpoint_file_names = [
"windows-event-checkpoints.json",
"docker-checkpoints.json",
"journald-checkpoints.json",
]
monitors_checkpoint_paths = {}
for name in monitor_checkpoint_file_names:
monitor_checkpoint_path = pathlib.Path(
self._env_builder.config.agent_data_path, name
)
check_text = "{0}. Do not delete me, please.".format(name)
# write some text to the monitor checkpoint files, just to verify that it is not changed later.
monitors_checkpoint_paths[monitor_checkpoint_path] = check_text
monitor_checkpoint_path.write_text(check_text)
test_file.append_lines("line1")
test_file2.append_lines("line2")
assert set(self._wait_for_rpc_and_respond()) == set(["line1", "line2"])
        # stop the manager and write some lines.
        # When the manager is started again, it should pick up the recent checkpoints and read those lines.
manager.stop_manager()
test_file.append_lines("Line3")
test_file.append_lines("Line4")
self._instance = manager = TestableCopyingManager(self._env_builder.config, [])
manager.start_manager()
        # make sure that the first lines are the ones written before the manager started
assert set(self._wait_for_rpc_and_respond()) == set(["Line3", "Line4"])
test_file.append_lines("Line5")
test_file.append_lines("Line6")
assert set(self._wait_for_rpc_and_respond()) == set(["Line5", "Line6"])
manager.stop_manager()
test_file.append_lines("Line7")
test_file.append_lines("Line8")
# make sure that all worker session checkpoint files are consolidated and removed.
for worker_session in manager.worker_sessions:
assert not worker_session.get_checkpoints_path().exists()
assert not worker_session.get_active_checkpoints_path().exists()
assert manager.consolidated_checkpoints_path.exists()
manager.consolidated_checkpoints_path.unlink()
self._instance = manager = TestableCopyingManager(self._env_builder.config, [])
manager.start_manager()
assert self._wait_for_rpc_and_respond() == []
test_file.append_lines("Line9")
test_file.append_lines("Line10")
assert set(self._wait_for_rpc_and_respond()) == set(["Line9", "Line10"])
        # verify that the monitor checkpoint files remain untouched.
for monitor_checkpoint_path, check_text in monitors_checkpoint_paths.items():
assert monitor_checkpoint_path.exists()
assert monitor_checkpoint_path.read_text() == check_text
def test_checkpoints_consolidated_checkpoints(self):
if self.worker_sessions_count == 1 and self.workers_count == 1:
pytest.skip("This test is only for multi-worker copying manager.")
(test_file, test_file2), manager = self._init_manager(2)
# write something and stop in order to create checkpoint files.
test_file.append_lines("line1")
test_file2.append_lines("line2")
assert set(self._wait_for_rpc_and_respond()) == set(["line1", "line2"])
manager.stop_manager()
# recreate the manager, in order to simulate a new start.
self._instance = manager = TestableCopyingManager(self._env_builder.config, [])
        # start the manager; it has to create a consolidated checkpoint file when it starts.
manager.start_manager()
manager.stop()
# add some new lines
test_file.append_lines("line3")
test_file2.append_lines("line4")
checkpoint_files = scalyr_util.match_glob(
six.text_type(manager.consolidated_checkpoints_path)
)
# verify that only one file remains and it is a consolidated file.
assert checkpoint_files == [str(manager.consolidated_checkpoints_path)]
# recreate the manager, in order to simulate a new start.
self._instance = manager = TestableCopyingManager(self._env_builder.config, [])
        # start the manager; it has to create a consolidated checkpoint file when it starts.
manager.start_manager()
assert set(self._wait_for_rpc_and_respond()) == set(["line3", "line4"])
@pytest.mark.skipif(
sys.version_info < (2, 7),
reason="This test case can not be run on python < 2.7",
)
@mock.patch.object(
TestingConfiguration, "log_deletion_delay", new_callable=PropertyMock
)
@mock.patch.object(
TestingConfiguration,
"max_new_log_detection_time",
new_callable=PropertyMock,
)
def test_log_processors_lifecycle(
self, log_deletion_delay, max_new_log_detection_time
):
# mock config values so we do not need to wait for the next file scan.
log_deletion_delay.return_value = -1
        # do the same so we do not wait for the copying manager to decide that a file is deleted.
max_new_log_detection_time.return_value = -1
test_files, manager = self._init_manager(10)
for i, test_file in enumerate(test_files):
self._append_lines(["file_{}_line1".format(i)], log_file=test_file)
assert manager.worker_sessions_log_processors_count == len(test_files)
assert manager.matchers_log_processor_count == len(test_files)
for log_file in test_files:
log_file.remove()
# 1) log processors perform file processing and close deleted files.
manager.wait_for_full_iteration()
# 2) Copying manager removes closed processors from its collection.
manager.wait_for_full_iteration()
# 3) Log matchers remove their log processors.
manager.wait_for_full_iteration()
        # check that there are no log processors remaining inside workers and log matchers.
assert manager.worker_sessions_log_processors_count == 0
assert manager.matchers_log_processor_count == 0
        # create the log files again and see if the log processors are created again too.
for log_file in test_files:
log_file.create()
manager.wait_for_full_iteration()
assert manager.worker_sessions_log_processors_count == len(test_files)
assert manager.matchers_log_processor_count == len(test_files)
@pytest.mark.skipif(
sys.version_info < (2, 7),
reason="This test case can not be run on python < 2.7",
)
@mock.patch.object(
TestingConfiguration, "log_deletion_delay", new_callable=PropertyMock
)
@mock.patch.object(
TestingConfiguration,
"max_new_log_detection_time",
new_callable=PropertyMock,
)
def test_log_processors_lifecycle_with_glob(
self, log_deletion_delay, max_new_log_detection_time
):
# mock config values so we do not need to wait for the next file scan.
log_deletion_delay.return_value = -1
        # do the same so we do not wait for the copying manager to decide that a file is deleted.
max_new_log_detection_time.return_value = -1
_, manager = self._init_manager(0)
# create some matching files.
files = self._env_builder.recreate_files(
10, self._env_builder.non_glob_logs_dir
)
assert manager.worker_sessions_log_processors_count == 0
assert manager.matchers_log_processor_count == 0
        # wait for the copying manager to add log processors.
manager.wait_for_full_iteration()
        # both worker sessions and log matchers should contain the new log processors.
assert manager.worker_sessions_log_processors_count == len(files)
assert manager.matchers_log_processor_count == len(files)
self._env_builder.remove_files(self._env_builder.non_glob_logs_dir)
# 1) log processors perform file processing and close deleted files.
manager.wait_for_full_iteration()
# 2) Copying manager removes closed processors from its collection.
manager.wait_for_full_iteration()
# 3) Log matchers remove their log processors.
manager.wait_for_full_iteration()
        # check that there are no log processors remaining inside workers and log matchers.
assert manager.worker_sessions_log_processors_count == 0
assert manager.matchers_log_processor_count == 0
        # create the log files again and see if the log processors are created again too.
files = self._env_builder.recreate_files(
10, self._env_builder.non_glob_logs_dir
)
manager.wait_for_full_iteration()
assert manager.worker_sessions_log_processors_count == len(files)
assert manager.matchers_log_processor_count == len(files)
@pytest.mark.skipif(
sys.version_info < (2, 7),
reason="This test case can not be run on python < 2.7",
)
@mock.patch.object(
TestingConfiguration, "log_deletion_delay", new_callable=PropertyMock
)
@mock.patch.object(
TestingConfiguration,
"max_new_log_detection_time",
new_callable=PropertyMock,
)
def test_log_processors_lifecycle_with_dynamic_matchers(
self, log_deletion_delay, max_new_log_detection_time
):
# mock config values so we do not need to wait for the next file scan.
log_deletion_delay.return_value = -1
        # do the same so we do not wait for the copying manager to decide that a file is deleted.
max_new_log_detection_time.return_value = -1
_, manager = self._init_manager(0)
        # create a directory which is unknown to the manager's configuration
logs_dir = self._env_builder.test_logs_dir / "dynamicaly-added-logs"
logs_dir.mkdir()
files = self._env_builder.recreate_files(10, logs_dir)
for file in files:
log_config = self._env_builder.config.parse_log_config(
{"path": file.str_path}
)
manager.add_log_config("scheduled-deletion", log_config)
assert manager.worker_sessions_log_processors_count == 0
assert manager.matchers_log_processor_count == 0
        # wait for the copying manager to add log processors.
manager.wait_for_full_iteration()
assert manager.worker_sessions_log_processors_count == len(files)
assert manager.matchers_log_processor_count == len(files)
self._env_builder.remove_files(logs_dir)
# 1) log processors perform file processing and close deleted files.
manager.wait_for_full_iteration()
# 2) Copying manager removes closed processors from its collection.
manager.wait_for_full_iteration()
# 3) Log matchers remove their log processors.
manager.wait_for_full_iteration()
        # check that there are no log processors remaining inside workers and log matchers.
assert manager.worker_sessions_log_processors_count == 0
assert manager.matchers_log_processor_count == 0
        # create the log files again and see if the log processors are created again too.
files = self._env_builder.recreate_files(10, logs_dir)
manager.wait_for_full_iteration()
assert manager.worker_sessions_log_processors_count == len(files)
assert manager.matchers_log_processor_count == len(files)
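# Orientation sketch (illustration only; pytest will not collect it because the
# class and method names do not start with "test"). It condenses the flow that
# every test above follows: build the environment and manager via _init_manager(),
# append lines to the testable log files, and wait for the manager to ship them
# in the next RPC. Only helpers already defined in this module or in
# CopyingManagerCommonTest are used.
class _SketchBasicFlow(CopyingManagerTest):
    def _sketch(self):
        (test_file, test_file2), manager = self._init_manager(2)
        test_file.append_lines("line1")
        test_file2.append_lines("line2")
        # the manager picks the lines up and sends them with the next request
        assert set(self._wait_for_rpc_and_respond()) == set(["line1", "line2"])
        # teardown() stops and cleans up the manager instance afterwards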
|
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for `tf.data.Dataset.map()`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
from collections import namedtuple
import threading
import time
import warnings
from absl.testing import parameterized
import numpy as np
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.data.experimental.ops import threading_options
from tensorflow.python.data.kernel_tests import checkpoint_test_base
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager import context
from tensorflow.python.framework import combinations
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import lookup_ops
from tensorflow.python.ops import map_fn
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import script_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.ops import tensor_array_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.ops.ragged import ragged_concat_ops
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import test
from tensorflow.python.training import checkpoint_management
from tensorflow.python.training.tracking import util as trackable_utils
def _test_combinations_with_mode_v1(mode):
def new_map_fn(dataset, *args, **kwargs):
return dataset.map(*args, **kwargs)
def legacy_map_fn(dataset, *args, **kwargs):
return dataset.map_with_legacy_function(*args, **kwargs)
new_map_combinations = combinations.combine(
tf_api_version=1,
mode=mode,
apply_map=combinations.NamedObject("map_fn", new_map_fn))
legacy_map_combinations = combinations.combine(
tf_api_version=1,
mode=mode,
apply_map=combinations.NamedObject("legacy_map_fn", legacy_map_fn))
return new_map_combinations + legacy_map_combinations
def _test_combinations_with_mode_v2(mode):
def new_map_fn(dataset, *args, **kwargs):
return dataset.map(*args, **kwargs)
return combinations.combine(
tf_api_version=2,
mode=mode,
apply_map=combinations.NamedObject("map_fn", new_map_fn))
def _test_combinations_with_mode(mode):
return _test_combinations_with_mode_v1(
mode) + _test_combinations_with_mode_v2(mode)
def _test_combinations():
return _test_combinations_with_mode("eager") + _test_combinations_with_mode(
"graph")
def _short_circuit_test_cases():
cases = [
("Identity", None, lambda x: x),
("Replicate", None, lambda x: (x, x)),
("Swap", (None, None), lambda x, y: (y, x)),
("Project", (None, None), lambda x, y: x)
]
def reduce_fn(x, y):
name, structure, fn = y
return x + combinations.combine(
structure=structure, fn=combinations.NamedObject(name, fn))
return functools.reduce(reduce_fn, cases, [])
def _make_coordinated_sloppy_dataset(apply_map, num_elements,
num_parallel_calls):
"""Produces a dataset iterator and events to control the order of elements.
Args:
apply_map: method that applies the `map` transformation
num_elements: the number of input elements
num_parallel_calls: the degree of map parallelism
Returns:
A dataset iterator (represented as `get_next` op) and events that can be
used to control the order of output elements.
"""
# Set up threading events used to sequence when items are produced that
# are subsequently interleaved. These events allow us to deterministically
# simulate slowdowns and force sloppiness.
coordination_events = {i: threading.Event() for i in range(num_elements)}
def map_py_fn(x):
coordination_events[x].wait()
coordination_events[x].clear()
return x * x
def fn(x):
return script_ops.py_func(map_py_fn, [x], x.dtype)
options = dataset_ops.Options()
options.experimental_deterministic = False
dataset = dataset_ops.Dataset.range(num_elements)
dataset = apply_map(dataset, fn, num_parallel_calls).with_options(options)
return dataset, coordination_events
class Foo(object):
"""Dummy class used for invalid return value tests."""
def __init__(self):
pass
class MapTest(test_base.DatasetTestBase, parameterized.TestCase):
def _map_dataset_factory(self, components, apply_map, count):
def _map_fn(x, y, z):
return math_ops.square(x), math_ops.square(y), math_ops.square(z)
dataset = dataset_ops.Dataset.from_tensor_slices(components)
dataset = apply_map(dataset, _map_fn).repeat(count)
self.assertEqual(
[c.shape[1:] for c in components],
[shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
return dataset
@combinations.generate(_test_combinations())
def testMapDataset(self, apply_map):
"""Test an dataset that maps a TF function across its input elements."""
# The pipeline is TensorSliceDataset -> MapDataset(square_3) ->
# RepeatDataset(count).
components = (np.arange(7),
np.array([[1, 2, 3]]) * np.arange(7)[:, np.newaxis],
np.array(37.0) * np.arange(7))
# Test single-threaded access to the iterator.
get_next = self.getNext(
self._map_dataset_factory(components, apply_map, count=14))
for _ in range(14):
for i in range(7):
result = self.evaluate(get_next())
for component, result_component in zip(components, result):
self.assertAllEqual(component[i]**2, result_component)
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# TODO(b/117581999): add eager coverage
@combinations.generate(_test_combinations_with_mode("graph"))
def testMapDatasetMultiThreaded(self, apply_map):
# Test multi-threaded access to the same iterator.
components = (np.arange(7),
np.array([[1, 2, 3]]) * np.arange(7)[:, np.newaxis],
np.array(37.0) * np.arange(7))
get_next = self.getNext(
self._map_dataset_factory(components, apply_map, count=18))
results = []
with self.cached_session() as sess:
def iterator_thread():
while True:
try:
results.append(sess.run(get_next()))
except errors.OutOfRangeError:
return
threads = [self.checkedThread(target=iterator_thread) for _ in range(8)]
for t in threads:
t.start()
for t in threads:
t.join()
# `results` will contain the same elements components**2
# repeated 18 times, but in a non-deterministic order. Sort the
# results, and assert that each element of components**2 is
# produced 18 times.
results.sort(key=lambda x: x[0])
for i in range(7):
for j in range(18):
for component, result_component in zip(components,
results[i * 18 + j]):
self.assertAllEqual(component[i]**2, result_component)
def _parallel_map_dataset_factory(self, components, apply_map, count,
num_parallel_calls, buffer_size):
def _map_fn(x, y, z):
return math_ops.square(x), math_ops.square(y), math_ops.square(z)
dataset = dataset_ops.Dataset.from_tensor_slices(components)
dataset = apply_map(dataset, _map_fn, num_parallel_calls=num_parallel_calls)
dataset = dataset.prefetch(buffer_size).repeat(count)
self.assertEqual(
[c.shape[1:] for c in components],
[shape for shape in dataset_ops.get_legacy_output_shapes(dataset)])
return dataset
@combinations.generate(
combinations.times(
_test_combinations(),
combinations.combine(num_parallel_calls=1, buffer_size=1) +
combinations.combine(num_parallel_calls=1, buffer_size=2) +
combinations.combine(num_parallel_calls=2, buffer_size=2) +
combinations.combine(num_parallel_calls=2, buffer_size=4) +
combinations.combine(num_parallel_calls=8, buffer_size=8) +
combinations.combine(num_parallel_calls=8, buffer_size=16)))
def testParallelMapDataset(self, apply_map, num_parallel_calls, buffer_size):
"""Test an dataset that maps a TF function across its input elements."""
# The pipeline is TensorSliceDataset -> ParallelMapDataset(square_3) ->
# RepeatDataset(count).
components = (np.arange(7),
np.array([[1, 2, 3]]) * np.arange(7)[:, np.newaxis],
np.array(37.0) * np.arange(7))
# Test single-threaded access to the iterator.
get_next = self.getNext(
self._parallel_map_dataset_factory(components, apply_map, 14,
num_parallel_calls, buffer_size))
for _ in range(14):
for i in range(7):
result = self.evaluate(get_next())
for component, result_component in zip(components, result):
self.assertAllEqual(component[i]**2, result_component)
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# TODO(b/117581999): add eager coverage
@combinations.generate(
combinations.times(
_test_combinations_with_mode("graph"),
combinations.combine(num_parallel_calls=1, buffer_size=1) +
combinations.combine(num_parallel_calls=1, buffer_size=2) +
combinations.combine(num_parallel_calls=2, buffer_size=2) +
combinations.combine(num_parallel_calls=2, buffer_size=4) +
combinations.combine(num_parallel_calls=8, buffer_size=8) +
combinations.combine(num_parallel_calls=8, buffer_size=16)))
def testParallelMapDatasetMultiThreaded(self, apply_map, num_parallel_calls,
buffer_size):
# Test multi-threaded access to the same iterator.
components = (np.arange(7),
np.array([[1, 2, 3]]) * np.arange(7)[:, np.newaxis],
np.array(37.0) * np.arange(7))
get_next = self.getNext(
self._parallel_map_dataset_factory(components, apply_map, 18,
num_parallel_calls, buffer_size))
results = []
with self.cached_session() as sess:
def iterator_thread():
while True:
try:
results.append(sess.run(get_next()))
except errors.OutOfRangeError:
return
threads = [self.checkedThread(target=iterator_thread) for _ in range(64)]
for t in threads:
t.start()
for t in threads:
t.join()
# `results` will contain the same elements components**2
# repeated 18 times, but in a non-deterministic order. Sort the
# results, and assert that each element of components**2 is
# produced 18 times.
results.sort(key=lambda x: x[0])
for i in range(7):
for j in range(18):
for component, result_component in zip(components,
results[i * 18 + j]):
self.assertAllEqual(component[i]**2, result_component)
@combinations.generate(_test_combinations())
def testImplicitDisposeParallelMapDataset(self, apply_map):
# Tests whether a parallel map dataset will be cleaned up correctly when
# the pipeline does not run it until exhaustion.
# The pipeline is TensorSliceDataset -> MapDataset(square_3) ->
# RepeatDataset(1000).
components = (np.arange(1000),
np.array([[1, 2, 3]]) * np.arange(1000)[:, np.newaxis],
np.array(37.0) * np.arange(1000))
dataset = self._parallel_map_dataset_factory(components, apply_map, 1000,
100, 100)
# NOTE(mrry): Also test that the prefetching thread is cancelled correctly.
dataset = dataset.prefetch(100)
get_next = self.getNext(dataset)
for _ in range(3):
self.evaluate(get_next())
@combinations.generate(_test_combinations())
def testParallelMapUnspecifiedOutputSize(self, apply_map):
components = np.array([1., 2., 3., np.nan, 5.]).astype(np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices(components)
dataset = apply_map(
dataset,
lambda x: array_ops.check_numerics(x, "message"),
num_parallel_calls=2)
get_next = self.getNext(dataset)
for _ in range(3):
self.evaluate(get_next())
@combinations.generate(_test_combinations())
def testParallelMapError(self, apply_map):
components = np.array([1., 2., 3., np.nan, 5.]).astype(np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices(components)
dataset = apply_map(
dataset,
lambda x: array_ops.check_numerics(x, "message"),
num_parallel_calls=2)
get_next = self.getNext(dataset)
for _ in range(3):
self.evaluate(get_next())
# The 4th element is NaN, so `array_ops.check_numerics()` should fail.
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(get_next())
self.evaluate(get_next())
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
@combinations.generate(_test_combinations())
def testPrefetchError(self, apply_map):
components = np.array([1., 2., 3., np.nan, 5.]).astype(np.float32)
dataset = dataset_ops.Dataset.from_tensor_slices(components)
dataset = apply_map(
dataset, lambda x: array_ops.check_numerics(x, "message")).prefetch(2)
get_next = self.getNext(dataset)
for _ in range(3):
self.evaluate(get_next())
# The 4th element is NaN, so `array_ops.check_numerics()` should fail.
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(get_next())
self.evaluate(get_next())
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
@combinations.generate(_test_combinations())
def testCaptureIterator(self, apply_map):
def _build_ds(iterator):
def _map_fn(x):
get_next = iterator.get_next()
return x * get_next
return apply_map(dataset_ops.Dataset.range(10), _map_fn)
def _build_graph():
if context.executing_eagerly():
captured_iterator = iter(dataset_ops.Dataset.range(10))
else:
captured_iterator = dataset_ops.make_initializable_iterator(
dataset_ops.Dataset.range(10))
ds = _build_ds(captured_iterator)
return captured_iterator, ds
captured_iter, ds = _build_graph()
if not context.executing_eagerly():
self.evaluate(captured_iter.initializer)
get_next = self.getNext(ds, requires_initialization=True)
for i in range(10):
self.assertEqual(i * i, self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
@combinations.generate(_test_combinations())
def testCaptureHashTable(self, apply_map):
# NOTE(mrry): We must use the V2 variants of `HashTable`
# etc. because these produce a `tf.resource`-typed output that is
# compatible with the in-graph function implementation.
default_val = -1
keys = constant_op.constant(["brain", "salad", "surgery"])
values = constant_op.constant([0, 1, 2], dtypes.int64)
table = lookup_ops.HashTable(
lookup_ops.KeyValueTensorInitializer(keys, values), default_val)
input_sentences = dataset_ops.Dataset.from_tensor_slices(
["brain brain tank salad surgery", "surgery brain"])
dataset = apply_map(input_sentences,
lambda x: string_ops.string_split([x]).values)
dataset = apply_map(dataset, table.lookup)
get_next = self.getNext(dataset, requires_initialization=True)
self.evaluate(table.initializer)
self.evaluate(get_next())
self.evaluate(get_next())
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# TODO(b/123904513)
@combinations.generate(_test_combinations_with_mode_v1("graph"))
def testCaptureQueue(self, apply_map):
elements = np.random.randint(100, size=[200])
queue = data_flow_ops.FIFOQueue(200, dtypes.int64, shapes=[])
enqueue_op = queue.enqueue_many(elements)
close_op = queue.close()
dataset = dataset_ops.Dataset.from_tensors(0).repeat(-1)
dataset = apply_map(dataset, lambda _: queue.dequeue())
get_next = self.getNext(dataset, requires_initialization=True)
self.evaluate(enqueue_op)
self.evaluate(close_op)
for element in elements:
self.assertEqual(element, self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
# TODO(b/117581999): Possible deadlock in eager mode, debug.
@combinations.generate(_test_combinations_with_mode_v1("graph"))
def testCaptureSameResourceMultipleTimes(self, apply_map):
elements = np.random.randint(100, size=[200])
queue = data_flow_ops.FIFOQueue(
200, dtypes.int64, shapes=[], shared_name="shared_queue")
queue_2 = data_flow_ops.FIFOQueue(
200, dtypes.int64, shapes=[], shared_name="shared_queue")
enqueue_op = queue.enqueue_many(elements)
close_op = queue.close()
dataset = dataset_ops.Dataset.from_tensors(0).repeat(-1)
dataset = apply_map(dataset, lambda _: (queue.dequeue(), queue_2.dequeue()))
self.evaluate(enqueue_op)
self.evaluate(close_op)
get_next = self.getNext(dataset, requires_initialization=True)
for i in range(100):
self.assertCountEqual([elements[i * 2], elements[i * 2 + 1]],
self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
@combinations.generate(_test_combinations())
def testSeededStatefulOperatorIsProperlyStateful(self, apply_map):
dataset = dataset_ops.Dataset.from_tensors(0).repeat(10)
fn = lambda _: random_ops.random_uniform((), seed=11)
dataset = apply_map(dataset, fn).batch(2)
get_next = self.getNext(dataset, requires_initialization=True)
random_values = []
with self.assertRaises(errors.OutOfRangeError):
while True:
random_values.extend(self.evaluate(get_next()))
self.assertLen(random_values, 10)
self.assertGreater(np.abs(np.diff(random_values)).max(), 1e-6)
get_next = self.getNext(dataset, requires_initialization=True)
random_values_2 = []
with self.assertRaises(errors.OutOfRangeError):
while True:
random_values_2.extend(self.evaluate(get_next()))
# Randomness is repeatable given same seed
self.assertAllClose(random_values, random_values_2)
@combinations.generate(_test_combinations())
def testStatefulMapKeepsStateAcrossIterators(self, apply_map):
dataset = dataset_ops.Dataset.from_tensors(0).repeat(10)
fn = lambda _: random_ops.random_uniform((), seed=11)
dataset = apply_map(dataset, fn).repeat(1000).batch(10)
get_next = self.getNext(dataset)
random_values = self.evaluate(get_next())
# Assert that one of the next 99 batches yielded by the iterator is
# different from the first.
i = 0
while i < 99:
if np.any(random_values != self.evaluate(get_next())):
break
i += 1
self.assertLess(i, 99)
@combinations.generate(_test_combinations())
def testStatefulOperationInShortCircuit(self, apply_map):
counter_var = variable_scope.get_variable(
"counter", (), dtypes.int32, use_resource=True)
def increment_fn(x):
counter_var.assign_add(1)
return x
dataset = dataset_ops.Dataset.range(10)
dataset = apply_map(dataset, increment_fn)
get_next = self.getNext(dataset, requires_initialization=True)
self.evaluate(counter_var.initializer)
for i in range(10):
self.assertEqual(i, self.evaluate(counter_var))
self.assertEqual(i, self.evaluate(get_next()))
self.assertEqual(10, self.evaluate(counter_var))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
self.assertEqual(10, self.evaluate(counter_var))
@combinations.generate(_test_combinations())
def testMapDict(self, apply_map):
dataset = dataset_ops.Dataset.range(10)
dataset = apply_map(dataset, lambda x: {"foo": x * 2, "bar": x**2})
dataset = apply_map(dataset, lambda d: d["foo"] + d["bar"])
self.assertDatasetProduces(
dataset, expected_output=[i * 2 + i**2 for i in range(10)])
@combinations.generate(_test_combinations())
def testMapNamedtuple(self, apply_map):
# construct dataset of tuples
labels = dataset_ops.Dataset.range(10)
images = apply_map(labels, lambda l: -l)
dataset_tuple = dataset_ops.Dataset.zip((labels, images))
# convert dataset of tuples to dataset of namedtuples
example = namedtuple("Example", ["label", "image"])
dataset_namedtuple = apply_map(dataset_tuple, example)
def preprocess_tuple(label, image):
image = 2 * image
return label, image
def preprocess_namedtuple(example):
return example._replace(image=2 * example.image)
# preprocess both datasets
dataset_tuple = apply_map(dataset_tuple, preprocess_tuple)
dataset_namedtuple = apply_map(dataset_namedtuple, preprocess_namedtuple)
next_tuple = self.getNext(dataset_tuple)
next_namedtuple = self.getNext(dataset_namedtuple)
# make sure both datasets contain the same data
for i in range(10):
tuple_, namedtuple_ = self.evaluate([next_tuple(), next_namedtuple()])
self.assertEqual(tuple_, namedtuple_)
self.assertEqual(tuple_, (i, -2 * i))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(next_namedtuple())
@combinations.generate(_test_combinations())
def testUseStepContainerInMap(self, apply_map):
row = np.arange(6)
dataset = dataset_ops.Dataset.from_tensors(row)
dataset = apply_map(dataset,
lambda elems: map_fn.map_fn(lambda x: x * x, elems))
self.assertDatasetProduces(dataset, expected_output=[row**2])
@combinations.generate(_test_combinations())
def testCaseAndCondInMap(self, apply_map):
def control_map_fn(x, y):
def multiply():
return x * 2
def divide():
return x // 2
def defaults_two():
return control_flow_ops.cond(
math_ops.equal(math_ops.mod(x, 2), 0),
multiply,
divide,
name="cond_mult")
pred_fn_pairs = [
(math_ops.logical_or(math_ops.equal(y, 2),
math_ops.equal(y, 3)), defaults_two),
]
return control_flow_ops.case(
pred_fn_pairs, default=multiply, exclusive=True)
def build_dataset(row, num):
dataset = dataset_ops.Dataset.from_tensor_slices(row)
return apply_map(dataset, lambda x: control_map_fn(x, num))
row = np.arange(6)
for num in [2, 3, 4]:
get_next = self.getNext(build_dataset(row, num))
for i in range(6):
self.assertEqual(
(i // 2 if i % 2 else i * 2) if (num == 2 or num == 3) else i * 2,
self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
@combinations.generate(_test_combinations())
def testCaseInWhileInMap(self, apply_map):
def control_map_fn(x, y):
def multiply():
return x * 2
def divide():
return x // 2
pred_fn_pairs = [
(math_ops.logical_or(math_ops.equal(y, 2),
math_ops.equal(y, 3)), divide),
]
return control_flow_ops.case(
pred_fn_pairs, default=multiply, exclusive=True)
def build_dataset(row, num):
dataset = dataset_ops.Dataset.from_tensors(row)
return apply_map(
dataset,
lambda elems: map_fn.map_fn(lambda x: control_map_fn(x, num), elems))
row = np.arange(6)
for num in [2, 3, 4]:
get_next = self.getNext(build_dataset(row, num))
self.assertAllEqual(
[x // 2 if (num == 2 or num == 3) else x * 2 for x in row],
self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
@combinations.generate(_test_combinations())
def testCaseAndCondInWhileInMap(self, apply_map):
def control_map_fn(x, y):
def multiply():
return x * 2
def divide():
return x // 2
def defaults_two():
return control_flow_ops.cond(
math_ops.equal(math_ops.mod(x, 2), 0),
multiply,
divide,
name="cond_mult")
pred_fn_pairs = [
(math_ops.logical_or(math_ops.equal(y, 2),
math_ops.equal(y, 3)), defaults_two),
]
return control_flow_ops.case(
pred_fn_pairs, default=multiply, exclusive=True)
row = np.arange(6)
num = 2
dataset = dataset_ops.Dataset.from_tensors(row)
dataset = apply_map(
dataset,
lambda elems: map_fn.map_fn(lambda x: control_map_fn(x, num), elems))
get_next = self.getNext(dataset)
self.assertAllEqual([(x // 2 if x % 2 else x * 2) if
(num == 2 or num == 3) else x * 2 for x in row],
self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
@combinations.generate(_test_combinations())
def testNestedListMapDataset(self, apply_map):
dataset = dataset_ops.Dataset.from_tensors([0, 1, 2]).repeat(10)
dataset = apply_map(dataset, lambda a: ([a[1], a[0] + a[2]], a[1]))
expected_output = [(np.array([1, 2]), 1)] * 10
self.assertDatasetProduces(dataset, expected_output=expected_output)
@combinations.generate(
combinations.times(_test_combinations(),
combinations.combine(buffer_size=[1, 2, 3, 4])))
def testPrefetch(self, apply_map, buffer_size):
# We will use this event to test that `_map_py_func()` has been invoked a
# certain number of times (6 times, to be exact) after consuming fewer
# elements from the iterator.
ev = threading.Event()
set_event_during_invocation = 5
def _map_py_func(x):
if x == set_event_during_invocation:
ev.set()
return x * x
def _map_fn(x):
return script_ops.py_func(_map_py_func, [x], x.dtype)
# We can indirectly observe that varying the buffer size has the intended
# effect by observing when `ev` is set (on the 6th invocation of
# `_map_py_func()`).
# NOTE(mrry): We do not test with `buffer_size ==
# set_event_during_invocation`, because we must consume at least one element
# to start the prefetching.
dataset = dataset_ops.Dataset.range(100)
dataset = apply_map(dataset, _map_fn).prefetch(buffer_size)
get_next = self.getNext(dataset)
event_will_be_set_after_consuming = (
set_event_during_invocation - buffer_size + 1)
ev.clear()
for i in range(event_will_be_set_after_consuming):
self.assertFalse(ev.is_set())
self.assertEqual(i * i, self.evaluate(get_next()))
ev.wait()
for i in range(event_will_be_set_after_consuming, 100):
self.assertEqual(i * i, self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
@combinations.generate(_test_combinations())
def testReturnList(self, apply_map):
dataset = dataset_ops.Dataset.range(10)
dataset = apply_map(dataset, lambda x: [x, constant_op.constant(37.0)])
self.assertDatasetProduces(
dataset, expected_output=[(i, 37.0) for i in range(10)])
@combinations.generate(_test_combinations())
def testMultiOutputPyFunc(self, apply_map):
# The `tf.py_func()` op returns a list of tensors for its outputs.
def _map_fn(x_tensor):
def _map_py_func(x):
return x, np.array(37.0, dtype=np.float64)
return script_ops.py_func(
_map_py_func, [x_tensor], [dtypes.int64, dtypes.float64])
dataset = dataset_ops.Dataset.range(10)
dataset = apply_map(dataset, _map_fn)
self.assertDatasetProduces(
dataset, expected_output=[(i, 37.0) for i in range(10)])
@combinations.generate(_test_combinations())
def testSparse(self, apply_map):
def _sparse(i):
return sparse_tensor.SparseTensorValue(
indices=np.array([[0, 0]]),
values=(i * np.array([1])),
dense_shape=np.array([1, 1]))
dataset = dataset_ops.Dataset.range(10)
dataset = apply_map(dataset, _sparse)
self.assertDatasetProduces(
dataset, expected_output=[_sparse(i) for i in range(10)])
@combinations.generate(_test_combinations())
def testSparseChain(self, apply_map):
def _sparse(i):
return sparse_tensor.SparseTensorValue(
indices=np.array([[0, 0]]),
values=(i * np.array([1])),
dense_shape=np.array([1, 1]))
def _check(i):
self.assertTrue(sparse_tensor.is_sparse(i))
return sparse_ops.sparse_concat(0, [i, i])
dataset = dataset_ops.Dataset.range(10)
dataset = apply_map(dataset, _sparse)
dataset = apply_map(dataset, _check)
self.assertDatasetProduces(
dataset,
expected_output=[self.evaluate(_check(_sparse(i))) for i in range(10)])
@combinations.generate(_test_combinations_with_mode("eager"))
def testSparseMapShapeInference(self, apply_map):
row_lengths = np.random.randint(0, 4, size=128)
values = np.ones(np.sum(row_lengths))
sparse = ragged_tensor.RaggedTensor.from_row_lengths(
values, row_lengths).to_sparse()
dataset = dataset_ops.Dataset.from_tensor_slices(sparse)
dataset = dataset.batch(32, drop_remainder=True)
dataset = apply_map(dataset, lambda x: x)
self.assertEqual((32, 3), dataset.element_spec.shape)
@combinations.generate(_test_combinations_with_mode("eager"))
def testSparseMapShapeInferencePartial(self, apply_map):
row_lengths = np.random.randint(0, 4, size=128)
values = np.ones(np.sum(row_lengths))
sparse = ragged_tensor.RaggedTensor.from_row_lengths(
values, row_lengths).to_sparse()
dataset = dataset_ops.Dataset.from_tensor_slices(sparse)
dataset = dataset.batch(32, drop_remainder=False)
dataset = apply_map(dataset, lambda x: x)
self.assertEqual([None, 3], dataset.element_spec.shape.as_list())
@combinations.generate(_test_combinations())
def testTensorArray(self, apply_map):
def _tensor_array(i):
i = math_ops.cast(i, dtypes.int32)
return (
tensor_array_ops.TensorArray(dtypes.int32, element_shape=(), size=i)
.unstack(math_ops.range(i, dtype=dtypes.int32)))
dataset = dataset_ops.Dataset.range(10)
dataset = apply_map(dataset, _tensor_array)
self.assertDatasetProduces(
dataset, expected_output=[list(range(i)) for i in range(10)])
@combinations.generate(_test_combinations())
def testTensorArrayChain(self, apply_map):
def _tensor_array(i):
i = math_ops.cast(i, dtypes.int32)
return (
tensor_array_ops.TensorArray(dtypes.int32, element_shape=(), size=i)
.unstack(math_ops.range(i, dtype=dtypes.int32)))
def _check(x):
self.assertIsInstance(x, tensor_array_ops.TensorArray)
return x.identity()
dataset = dataset_ops.Dataset.range(10)
dataset = apply_map(dataset, _tensor_array)
dataset = apply_map(dataset, _check)
self.assertDatasetProduces(
dataset,
expected_output=[list(range(i)) for i in range(10)])
@combinations.generate(_test_combinations())
def testRagged(self, apply_map):
def _ragged(i):
return ragged_tensor.RaggedTensor.from_tensor(i * [[1]])
dataset = dataset_ops.Dataset.range(5)
dataset = apply_map(dataset, _ragged)
self.assertDatasetProduces(
dataset,
expected_output=[ragged_factory_ops.constant([[i]]) for i in range(5)])
@combinations.generate(_test_combinations())
def testRaggedChain(self, apply_map):
def _ragged(i):
return ragged_tensor.RaggedTensor.from_tensor(i * [[1]])
def _concat(i):
self.assertTrue(ragged_tensor.is_ragged(i))
return ragged_concat_ops.concat([i, i], 0)
dataset = dataset_ops.Dataset.range(10)
dataset = apply_map(dataset, _ragged)
dataset = apply_map(dataset, _concat)
self.assertDatasetProduces(
dataset,
expected_output=[
self.evaluate(_concat(ragged_factory_ops.constant([[i]])))
for i in range(10)
])
# TODO(b/123904513)
@combinations.generate(_test_combinations_with_mode_v1("graph"))
def testParallelMapOutOfRangeError(self, apply_map):
def raising_py_func(i):
if i == 100:
raise StopIteration()
else:
return i
dataset = dataset_ops.Dataset.range(105)
dataset = apply_map(
dataset,
lambda x: script_ops.py_func(raising_py_func, [x], dtypes.int64),
num_parallel_calls=2)
get_next = self.getNext(dataset)
for i in range(100):
self.assertEqual(i, self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
@combinations.generate(_test_combinations())
def testConstantOutput(self, apply_map):
dataset = dataset_ops.Dataset.range(10)
dataset = apply_map(dataset, lambda x: [x, "hello", 10])
self.assertDatasetProduces(dataset, [(i, b"hello", 10) for i in range(10)])
@combinations.generate(_test_combinations())
def testWarnOnLookupTable(self, apply_map):
def collecting_function(x):
_ = lookup_ops.HashTable(
lookup_ops.KeyValueTensorInitializer(["a"], [1.]), 0.0, name="t1")
return x
warnings.simplefilter("always")
with warnings.catch_warnings(record=True) as w:
dataset = dataset_ops.Dataset.range(10)
_ = apply_map(dataset, collecting_function)
# NOTE(mrry): Python 3 prints other warnings in addition to the one we are
# testing, so we search for the expected warning.
self.assertGreaterEqual(len(w), 1)
found_warning = False
for warning in w:
if ("Creating resources inside a function passed to Dataset.map() is "
"not supported." in str(warning)):
found_warning = True
break
self.assertTrue(found_warning)
@combinations.generate(test_base.default_test_combinations())
def testWarnOnSeedFromOuterGraph(self):
with ops.Graph().as_default() as g:
g.seed = 10
warnings.simplefilter("always")
def _check_warning(caught_warnings, expected_result):
found_warning = False
for warning in caught_warnings:
if ("Explicitly set the seed in the function if this is not the "
"intended behavior" in str(warning)):
found_warning = True
break
self.assertEqual(found_warning, expected_result)
# map_fun doesn't use seed, so no warning is generated.
with warnings.catch_warnings(record=True) as w:
_ = dataset_ops.Dataset.range(10).map(math_ops.square)
_check_warning(w, False)
def random_func(x):
x = math_ops.add(x, 1)
random_ops.random_shuffle([x, math_ops.square(x)])
return x
with warnings.catch_warnings(record=True) as w:
_ = dataset_ops.Dataset.range(10).map(random_func)
_check_warning(w, True)
def random_func_seeded(x):
ops.get_default_graph().seed = None
random_ops.random_shuffle(x)
return x
with warnings.catch_warnings(record=True) as w:
_ = dataset_ops.Dataset.range(10).batch(2).map(random_func_seeded)
_check_warning(w, False)
with warnings.catch_warnings(record=True) as w:
_ = dataset_ops.Dataset.range(10).batch(2).map(
lambda x: random_ops.random_shuffle(x, seed=37))
_check_warning(w, False)
@combinations.generate(_test_combinations())
def testNestedDatasetMap(self, apply_map):
dataset = dataset_ops.Dataset.from_tensors([1.0, 2.0, 3.0])
dataset = apply_map(dataset, dataset_ops.Dataset.from_tensor_slices)
dataset = apply_map(dataset, lambda ds: ds.batch(3)).flat_map(lambda x: x)
self.assertDatasetProduces(dataset, expected_output=[[1.0, 2.0, 3.0]])
@combinations.generate(_test_combinations())
def testReturnValueError(self, apply_map):
dataset = dataset_ops.Dataset.from_tensors([1.0, 2.0, 3.0])
with self.assertRaisesRegex(
TypeError, r"Unsupported return value from function passed to "
r"Dataset.map\(\)"):
_ = apply_map(dataset, lambda x: Foo)
@combinations.generate(test_base.default_test_combinations())
def testBrokenFunctionErrorOnInitialization(self):
dataset = dataset_ops.Dataset.from_tensor_slices([1.0, 2.0, 3.0])
def broken_function(_):
"""A function deliberately designed to fail on instantiation."""
value = []
tensor_value = attr_value_pb2.AttrValue()
tensor_value.tensor.CopyFrom(
tensor_util.make_tensor_proto(
value, dtype=dtypes.float32, shape=[0], verify_shape=False))
dtype_value = attr_value_pb2.AttrValue(type=dtypes.int32.as_datatype_enum)
# Create a "Const" op with a `tf.float32` value and a `tf.int32` type.
const_tensor = ops.get_default_graph().create_op(
"Const", [], [dtypes.int32],
attrs={
"value": tensor_value,
"dtype": dtype_value
},
name="BrokenConst").outputs[0]
return const_tensor
dataset = dataset.map(broken_function)
self.assertDatasetProduces(
dataset, expected_error=(errors.InvalidArgumentError, "BrokenConst"))
@combinations.generate(
combinations.times(
_test_combinations_with_mode("graph"),
combinations.combine(num_parallel_calls=[None, 12])))
def testNoInterOpParallelism(self, apply_map, num_parallel_calls):
dataset = dataset_ops.Dataset.from_tensors(0)
def _get_tid():
return np.int64(threading.current_thread().ident)
def _map_fn(_):
tids = []
for _ in range(10):
tids.append(script_ops.py_func(_get_tid, [], dtypes.int64))
return tids
dataset = apply_map(dataset, _map_fn)
dataset._variant_tensor.op._set_attr("use_inter_op_parallelism",
attr_value_pb2.AttrValue(b=False))
get_next = self.getNext(dataset)
tids = self.evaluate(get_next())
self.assertTrue(all(tids[0] == tid for tid in tids))
@combinations.generate(
combinations.times(_test_combinations(), _short_circuit_test_cases(),
combinations.combine(num_parallel_calls=[None, 12])))
def testShortCircuit(self, apply_map, structure, fn, num_parallel_calls):
dataset = self.structuredDataset(structure).repeat()
dataset = apply_map(dataset, fn, num_parallel_calls=num_parallel_calls)
get_next = self.getNext(dataset)
if isinstance(structure, tuple):
expected = fn(*self.evaluate(self.structuredElement(structure)))
else:
expected = fn(self.evaluate(self.structuredElement(structure)))
self.assertEqual(expected, self.evaluate(get_next()))
@combinations.generate(
combinations.times(_test_combinations(),
combinations.combine(num_parallel_calls=[None, 12])))
def testShortCircuitCapturedInput(self, apply_map, num_parallel_calls):
captured_t = variables.Variable(42)
dataset = self.structuredDataset(None).repeat()
dataset = apply_map(
dataset, lambda x: captured_t, num_parallel_calls=num_parallel_calls)
self.evaluate(variables.global_variables_initializer())
get_next = self.getNext(dataset, requires_initialization=True)
self.assertEqual(42, self.evaluate(get_next()))
@combinations.generate(
combinations.times(
_test_combinations(),
combinations.combine(num_elements=1, num_parallel_calls=1) +
combinations.combine(num_elements=10, num_parallel_calls=1) +
combinations.combine(num_elements=10, num_parallel_calls=10) +
combinations.combine(num_elements=100, num_parallel_calls=1) +
combinations.combine(num_elements=100, num_parallel_calls=10) +
combinations.combine(num_elements=100, num_parallel_calls=100)))
def testSloppyInterleaveInOrder(self, apply_map, num_elements,
num_parallel_calls):
dataset, coordination_events = _make_coordinated_sloppy_dataset(
apply_map, num_elements, num_parallel_calls)
options = dataset_ops.Options()
options.experimental_threading = threading_options.ThreadingOptions()
options.experimental_threading.private_threadpool_size = (
num_parallel_calls + 1)
dataset = dataset.with_options(options)
get_next = self.getNext(dataset, requires_initialization=True)
for i in range(num_elements):
coordination_events[i].set()
self.assertEqual(i * i, self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
@combinations.generate(
combinations.times(
_test_combinations(),
combinations.combine(num_elements=10, num_parallel_calls=10) +
combinations.combine(num_elements=100, num_parallel_calls=10) +
combinations.combine(num_elements=100, num_parallel_calls=100)))
def testSloppyInterleaveOutOfOrder(self, apply_map, num_elements,
num_parallel_calls):
dataset, coordination_events = _make_coordinated_sloppy_dataset(
apply_map, num_elements, num_parallel_calls)
options = dataset_ops.Options()
options.experimental_threading = threading_options.ThreadingOptions()
options.experimental_threading.private_threadpool_size = (
num_parallel_calls + 1)
dataset = dataset.with_options(options)
get_next = self.getNext(dataset, requires_initialization=True)
elements = [x for x in range(num_elements)]
for i in [1, 4, 7]:
elements[i], elements[i + 1] = elements[i + 1], elements[i]
for element in elements:
coordination_events[element].set()
self.assertEqual(element * element, self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
@combinations.generate(
combinations.combine(
tf_api_version=2,
mode=["eager", "graph"],
num_parallel_calls=[None, 12]))
def testPreserveCardinality(self, num_parallel_calls):
def py_fn(_):
raise StopIteration()
dataset = dataset_ops.Dataset.from_tensors(0).map(
lambda x: script_ops.py_func(py_fn, [x], dtypes.int64),
num_parallel_calls=num_parallel_calls)
get_next = self.getNext(dataset)
with self.assertRaises(errors.InvalidArgumentError):
self.evaluate(get_next())
@combinations.generate(_test_combinations_with_mode("graph"))
def testCollectionCopy(self, apply_map):
w = variable_scope.get_variable("w", [])
self.assertIn(w, ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES))
def func(x):
self.assertIn(w, ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES))
return x
dataset = dataset_ops.Dataset.from_tensors(constant_op.constant(1.0))
_ = apply_map(dataset, func)
@combinations.generate(
combinations.times(
_test_combinations_with_mode_v1("graph"),
combinations.combine(num_parallel_calls=[None, 12])))
def testMapCancellation(self, apply_map, num_parallel_calls):
# Checks that a cancellation is threaded through to the map transformation.
queue = data_flow_ops.FIFOQueue(10, dtypes.int32, ())
def fn(_):
return queue.dequeue()
dataset = dataset_ops.Dataset.range(1)
dataset = apply_map(dataset, fn, num_parallel_calls=num_parallel_calls)
get_next = self.getNext(dataset, requires_initialization=True)
with self.cached_session() as sess:
thread = self.checkedThread(self.assert_op_cancelled, args=(get_next(),))
thread.start()
time.sleep(0.2)
sess.close()
thread.join()
# TODO(b/126553094): map doesn't work with a variable defined inside the function
# in eager mode; Graph tensors possibly leak out of the function-building context
# (the function graph) in eager mode because variables are created in init_scope.
@combinations.generate(test_base.graph_only_combinations())
def testCreateVariableInsideFunctionWithGetter(self):
def func(_):
with variable_scope.variable_scope(
"variable", reuse=variable_scope.AUTO_REUSE):
counter_var = variable_scope.get_variable(
"counter", (), dtypes.int32, use_resource=True)
return counter_var.assign_add(1)
dataset = dataset_ops.Dataset.from_tensors(0).repeat(10)
if hasattr(dataset, "map_with_legacy_function"):
# NOTE: In the legacy function, resource is captured by value.
with self.assertRaisesWithPredicateMatch(
AttributeError, "'Tensor' object has no attribute 'assign_add'"):
dataset.map_with_legacy_function(func)
dataset = dataset.map(func)
self.evaluate(variables.global_variables_initializer())
get_next = self.getNext(dataset, requires_initialization=True)
for i in range(10):
self.assertEqual(i + 1, self.evaluate(get_next()))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
@combinations.generate(_test_combinations())
def testCaptureVariable(self, apply_map):
counter_var = variable_scope.get_variable(
"counter", (), dtypes.int32, use_resource=True)
dataset = dataset_ops.Dataset.from_tensors(0).repeat(10)
dataset = apply_map(dataset, lambda _: counter_var.assign_add(1))
get_next = self.getNext(dataset, requires_initialization=True)
self.evaluate(counter_var.initializer)
for i in range(10):
self.assertEqual(i, self.evaluate(counter_var))
self.assertEqual(i + 1, self.evaluate(get_next()))
self.assertEqual(10, self.evaluate(counter_var))
with self.assertRaises(errors.OutOfRangeError):
self.evaluate(get_next())
self.assertEqual(10, self.evaluate(counter_var))
@combinations.generate(_test_combinations_with_mode_v1("graph"))
def testCaptureUninitializedVariableError(self, apply_map):
counter_var = variable_scope.get_variable(
"counter", (), dtypes.int32, use_resource=True)
dataset = dataset_ops.Dataset.from_tensors(0).repeat(10)
dataset = apply_map(dataset, lambda _: counter_var.assign_add(1))
get_next = self.getNext(dataset, requires_initialization=True)
with self.assertRaises(errors.NotFoundError):
self.evaluate(get_next())
# TODO(b/121264236): add eager mode coverage when we have multi-device setup.
@combinations.generate(_test_combinations_with_mode_v1("graph"))
def testCaptureConstantsWithConflictingDevices(self, apply_map):
config = config_pb2.ConfigProto(device_count={"CPU": 3})
with self.cached_session(config=config):
with ops.device("/device:CPU:0"):
a = constant_op.constant(3.0)
with ops.device("/device:CPU:1"):
b = constant_op.constant(5.0)
def func(_):
return math_ops.add(a, b)
dataset = dataset_ops.Dataset.from_tensors(0).repeat(10)
dataset = apply_map(dataset, func)
expected_output = [8.0] * 10
self.assertDatasetProduces(dataset, expected_output=expected_output)
# TODO(b/121264236): add eager mode coverage when we have multi-device setup.
@combinations.generate(_test_combinations_with_mode_v1("graph"))
def testReferenceVariablesWithMultipleDevices(self, apply_map):
config = config_pb2.ConfigProto(device_count={"CPU": 3})
with self.cached_session(config=config):
def func(_):
with ops.device("/device:CPU:0"):
a = variables.VariableV1(3.0)
with ops.device("/device:CPU:1"):
b = variables.VariableV1(5.0)
return math_ops.add(a, b)
# NOTE: Use the legacy function implementation as eager function will
# convert RefVariables to ResourceVariables.
dataset = dataset_ops.Dataset.from_tensors(0).repeat(10)
dataset = apply_map(dataset, func)
self.evaluate(variables.global_variables_initializer())
expected_output = [8.0] * 10
self.assertDatasetProduces(
dataset,
expected_output=expected_output,
requires_initialization=True)
# TODO(b/121264236): add eager mode coverage when we have multi-device setup.
@combinations.generate(_test_combinations_with_mode_v1("graph"))
def testResourceVariablesWithMultipleDevices(self, apply_map):
config = config_pb2.ConfigProto(device_count={"CPU": 3})
def func(_):
with variable_scope.variable_scope(
"variable", reuse=variable_scope.AUTO_REUSE):
with ops.device("/device:CPU:0"):
a_var = variable_scope.get_variable(
"a", (), dtypes.int32, use_resource=True)
a_var = math_ops.add(a_var, 1)
with ops.device("/device:CPU:1"):
b_var = variable_scope.get_variable(
"b", (), dtypes.int32, use_resource=True)
return math_ops.add(a_var, b_var)
g = ops.Graph()
with self.session(config=config, graph=g):
dataset = dataset_ops.Dataset.from_tensors(0).repeat(10)
dataset = apply_map(dataset, func)
self.evaluate(variables.global_variables_initializer())
expected_output = [1] * 10
self.assertDatasetProduces(
dataset,
expected_output=expected_output,
requires_initialization=True)
@combinations.generate(
combinations.times(
_test_combinations(),
combinations.combine(
local_determinism=[None, True, False],
global_determinism=[True, False])))
def testDeterminismConfiguration(self, apply_map, local_determinism,
global_determinism):
expect_determinism = local_determinism or (local_determinism is None and
global_determinism)
elements = list(range(1000))
def dataset_fn(delay_ms):
def sleep(x):
time.sleep(delay_ms / 1000)
return x
def map_function(x):
if math_ops.equal(x, 0):
return script_ops.py_func(sleep, [x], x.dtype)
else:
return x
dataset = dataset_ops.Dataset.from_tensor_slices(elements)
dataset = apply_map(
dataset,
map_function,
num_parallel_calls=2,
deterministic=local_determinism)
opts = dataset_ops.Options()
opts.experimental_deterministic = global_determinism
dataset = dataset.with_options(opts)
return dataset
self.checkDeterminism(
dataset_fn, expect_determinism, expected_elements=elements)
@combinations.generate(_test_combinations())
def testNoneComponent(self, apply_map):
dataset = dataset_ops.Dataset.from_tensors((42, None))
def map_function(x, y):
if y is None:
return x / 2
return x
dataset = apply_map(dataset, map_function)
self.assertDatasetProduces(dataset, expected_output=[21])
@combinations.generate(test_base.eager_only_combinations())
def testCheckpointLargeBuffer(self):
# Tensor of size 100M
dataset = dataset_ops.Dataset.from_tensors(
array_ops.ones((25, 1000, 1000), dtype=dtypes.float32))
# Repeat 30 times to exceed the 2GB proto limit
dataset = dataset.repeat(30)
dataset = dataset.map(lambda x: x * 2, num_parallel_calls=25)
iterator = iter(dataset)
# Call next() to trigger parallel map calls.
next(iterator)
ckpt = trackable_utils.Checkpoint(iterator=iterator)
manager = checkpoint_management.CheckpointManager(
ckpt, self.get_temp_dir(), max_to_keep=1)
manager.save()
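# Checkpoint/restore coverage for map: verifies that map pipelines can be saved
# and restored, and that stateful or variable-capturing map functions fail to
# save with the expected errors.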
class MapDatasetCheckpointTest(checkpoint_test_base.CheckpointTestBase,
parameterized.TestCase):
def setUp(self):
self._tensor_slice_len = 7
self._num_epochs = 7
self._num_outputs = self._tensor_slice_len * self._num_epochs
def _build_ds(self, multiplier=37.0):
components = (np.arange(self._tensor_slice_len), np.array([[1, 2, 3]]) *
np.arange(self._tensor_slice_len)[:, np.newaxis],
np.array(multiplier) * np.arange(self._tensor_slice_len))
def _map_fn(x, y, z):
return math_ops.square(x), math_ops.square(y), math_ops.square(z)
return (
dataset_ops.Dataset.from_tensor_slices(components).map(_map_fn).repeat(
self._num_epochs))
@combinations.generate(test_base.default_test_combinations())
def testSaveRestoreCore(self):
self.run_core_tests(self._build_ds, self._num_outputs)
@combinations.generate(test_base.default_test_combinations())
def testSaveStatefulFunction(self):
def _build_ds():
def _map_fn(x):
return random_ops.random_uniform(
(), 0, 10, dtype=dtypes.int32) * math_ops.cast(x, dtypes.int32)
return dataset_ops.Dataset.range(100).map(_map_fn)
self.verify_error_on_save(_build_ds, 15, errors.FailedPreconditionError)
@combinations.generate(test_base.default_test_combinations())
def testCaptureVariableInMapFn(self):
def _build_ds():
counter_var = variable_scope.get_variable(
"counter", (), dtypes.int32, use_resource=True)
return (dataset_ops.Dataset.from_tensors(0).repeat(10).map(
lambda _: counter_var.assign_add(1)))
self.verify_error_on_save(_build_ds, 15, errors.FailedPreconditionError)
@combinations.generate(test_base.default_test_combinations())
def testCaptureConstantInMapFn(self):
num_outputs = 10
def _build_ds():
constant_var = constant_op.constant(5)
return (dataset_ops.Dataset.from_tensors(0).repeat(10).map(
lambda x: x + constant_var))
self.run_core_tests(_build_ds, num_outputs)
@combinations.generate(test_base.default_test_combinations())
def testCaptureDefunInMapFn(self):
num_outputs = 10
def _build_ds():
@function.Defun(dtypes.int64)
def defun_fn(x):
return constant_op.constant(1000) + math_ops.cast(x, dtypes.int32)
return dataset_ops.Dataset.range(num_outputs).map(defun_fn)
self.run_core_tests(_build_ds, num_outputs)
@combinations.generate(test_base.default_test_combinations())
def testBuildDefunInMapFn(self):
num_outputs = 10
def _build_ds():
@function.Defun(dtypes.int64)
def defun_fn(x):
@function.Defun(dtypes.int32)
def defun_fn_deep(x):
return constant_op.constant(1000) + math_ops.cast(x, dtypes.int32)
return constant_op.constant(11000) + defun_fn_deep(
math_ops.cast(x, dtypes.int32))
return dataset_ops.Dataset.range(num_outputs).map(defun_fn)
self.run_core_tests(_build_ds, num_outputs)
@combinations.generate(test_base.default_test_combinations())
def testSparseCore(self):
def _sparse(i):
return sparse_tensor.SparseTensorValue(
indices=np.array([[0, 0]]),
values=(i * np.array([1])),
dense_shape=np.array([1, 1]))
def _build_ds(num_outputs):
return dataset_ops.Dataset.range(num_outputs).map(_sparse)
num_outputs = 10
self.run_core_tests(lambda: _build_ds(num_outputs), num_outputs)
if __name__ == "__main__":
test.main()
from __future__ import division
import sys, os, imp, urllib, json, time, traceback, re, getopt, tempfile, AdvancedHTMLParser, urllib2, urlparse, zipfile, shutil, requests, logging, psutil, subprocess, sqlite3, cgi
from threading import Timer
import thread
from scripts import kodi_utils
from collections import OrderedDict
import jinja2, signal
import globals
import app_proxy
VERSION='0.8.3'
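# Signal handler invoked on shutdown: asks every registered service to abort,
# then tears down the queues of all remaining processes and services before
# force-terminating anything multiprocessing still reports as active.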
def program_end(signal, frame):
logger.debug('Shutting down program')
logger.debug('Sending abort signal for all services')
for p in globals.SERVICES:
kodi_utils.trigger(kodi_utils.TRIGGER_ABORT, globals.SERVICES[p].id)
time.sleep(2)
logger.debug('Closing remaining processes')
for p in list(globals.PROCESSES):  # iterate over a copy; entries are deleted below
if globals.PROCESSES[p].is_alive():
try:
globals.PROCESSES[p].responses.close()
except:
pass
try:
globals.PROCESSES[p].messages.close()
except:
pass
try:
del globals.PROCESSES[p].responses
except:
pass
try:
del globals.PROCESSES[p].messages
except:
pass
try:
del globals.PROCESSES[p].stop
except:
pass
try:
del globals.PROCESSES[p].triggers
except:
pass
try:
globals.PROCESSES[p]._popen.terminate()
except:
pass
del globals.PROCESSES[p]
logger.debug('Closing remaining services')
for p in list(globals.SERVICES):  # iterate over a copy; entries are deleted below
if globals.SERVICES[p].is_alive():
try:
globals.SERVICES[p].responses.close()
except:
pass
try:
globals.SERVICES[p].messages.close()
except:
pass
try:
del globals.SERVICES[p].responses
except:
pass
try:
del globals.SERVICES[p].messages
except:
pass
try:
del globals.SERVICES[p].stop
except:
pass
try:
del globals.SERVICES[p].triggers
except:
pass
try:
globals.SERVICES[p]._popen.terminate()
except:
pass
del globals.SERVICES[p]
logger.debug('Forcefully closing remaining threads')
for p in multiprocessing.active_children():
if p.is_alive():
try:
p.terminate()
except:
pass
os._exit(1)
try:
from flask import Flask, render_template, send_from_directory, request, send_file, redirect
except:
print 'TVML Server requires flask module.\nPlease install it via "pip install flask"'
sys.exit(1)
try:
import setproctitle
except:
pass
import sqlite3
from packaging import version
# try:
# import faulthandler
# faulthandler.enable()
# except:
# print 'TVML Server requires faulthandler module.\nPlease install it via "pip install faulthandler"'
# sys.exit(1)
import multiprocessing
import urlparse
# import gevent.monkey
# gevent.monkey.patch_all()
try:
from gevent.pywsgi import WSGIServer
import gevent
except:
print 'TVML Server requires gevent module.\nPlease install it via "pip install gevent"'
sys.exit(1)
reload(sys)
sys.setdefaultencoding('utf8')
if getattr(sys, 'frozen', False):
# we are running in a bundle
bundle_dir = sys._MEIPASS
else:
bundle_dir = ''
DATA_DIR = os.path.join(os.path.expanduser("~"), '.TVMLSERVER')
if not os.path.exists(DATA_DIR):
os.makedirs(DATA_DIR)
if not os.path.isdir(DATA_DIR):
print '{} not a directory or cannot be created'.format(DATA_DIR)
sys.exit(2)
if not os.path.exists(os.path.join(DATA_DIR, 'addons')):
os.makedirs(os.path.join(DATA_DIR, 'addons'))
if not os.path.isdir(os.path.join(DATA_DIR, 'addons')):
print '{} not a directory or cannot be created'.format(os.path.join(DATA_DIR, 'addons'))
sys.exit(2)
if not os.path.exists(os.path.join(DATA_DIR, 'userdata')):
os.makedirs(os.path.join(DATA_DIR, 'userdata'))
if not os.path.isdir(os.path.join(DATA_DIR, 'userdata')):
print '{} not a directory or cannot be created'.format(os.path.join(DATA_DIR, 'userdata'))
sys.exit(2)
if not os.path.exists(os.path.join(DATA_DIR, 'addons', 'packages')):
os.makedirs(os.path.join(DATA_DIR, 'addons', 'packages'))
if not os.path.isdir(os.path.join(DATA_DIR, 'addons', 'packages')):
print '{} not a directory or cannot be created'.format(os.path.join(DATA_DIR, 'addons', 'packages'))
sys.exit(2)
if not os.path.exists(os.path.join(DATA_DIR, 'logs')):
os.makedirs(os.path.join(DATA_DIR, 'logs'))
if not os.path.isdir(os.path.join(DATA_DIR, 'logs')):
print '{} not a directory or cannot be created'.format(os.path.join(DATA_DIR, 'logs'))
sys.exit(2)
LOGFILE = os.path.join(DATA_DIR, 'logs', 'tvmlserver.log')
if not os.path.exists(os.path.join(DATA_DIR, 'db')):
os.makedirs(os.path.join(DATA_DIR, 'db'))
if not os.path.isdir(os.path.join(DATA_DIR, 'db')):
print '{} not a directory or cannot be created'.format(os.path.join(DATA_DIR, 'db'))
sys.exit(2)
DB_FILE = os.path.join(DATA_DIR, 'db', 'TVMLServer.db')
TRIGGER_DB = os.path.join(DATA_DIR, 'db', 'triggers.db')
open_db = kodi_utils.open_db
sys.path.append(os.path.join(bundle_dir, 'scripts'))
sys.path.append(os.path.join(bundle_dir, 'scripts', 'kodi'))
sys.path.append(os.path.join(DATA_DIR, 'addons'))
app = Flask(__name__, template_folder=os.path.join(bundle_dir, 'templates'))
app.jinja_env.filters['base64encode'] = kodi_utils.b64encode
app.config['JSON_AS_ASCII'] = False
from werkzeug.routing import PathConverter
class EverythingConverter(PathConverter):
regex = '.*?'
app.url_map.converters['everything'] = EverythingConverter
from scripts.Plugin import Plugin, Item
from scripts.KodiPlugin import *
from scripts.bridge import bridge
from scripts import messages
from scripts import imageCache
logging.basicConfig(
level=logging.DEBUG,
format='%(asctime)s:%(levelname)s:%(name)s:%(message)s',
filename=LOGFILE,
filemode='w'
)
root = logging.getLogger()
ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
root.addHandler(ch)
logger = logging.getLogger('TVMLServer')
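# multiprocessing.Process subclass used for plugin work: run() executes the
# target, pushes a terminal {'type': 'end', 'ans': ...} onto the message queue,
# calls onStop() and sets the stop event so the web handlers know it finished.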
class MyProcess(multiprocessing.Process):
def run(self):
logger.debug('Process {} starting'.format(self.id))
ans = self._target(*self._args, **self._kwargs)
logger.debug('Process {} adding end message'.format(self.id))
self.message({'type': 'end', 'ans': ans})
self.onStop()
self.stop.set()
def response(self, id, response):
logger.debug('Adding response on process {}'.format(self.id))
self.responses.put({'id': id, 'response': response})
def message(self, msg):
self.messages.put(msg)
def onStop(self):
pass
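# Factory that builds a daemonized MyProcess wired with message, response and
# trigger queues, a stop event, and a string id derived from the object's id().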
def Process(group=None, target=None, name=None, args=(), kwargs={}):
p = MyProcess(group, target, name, args, kwargs)
p.daemon = True
p.messages = multiprocessing.Queue()
p.responses = multiprocessing.Queue()
p.triggers = multiprocessing.Queue()
p.stop = multiprocessing.Event() # can be used to indicate stop
p.id = str(id(p))
return p
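# Minimal usage sketch for the factory above (the plugin id here is a made-up
# example; real callers pass ids from the INSTALLED table, see catalog()):
#   p = Process(target=get_items, args=('plugin.video.example', '', {}))
#   globals.PROCESSES[p.id] = p
#   p.start()
#   msg = p.messages.get()  # first message produced by the worker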
def update_addons():
for row in get_all_installed_addons():
try:
current_version = row['version']
found = find_addon(row['id'])
if not found:
continue
available_version = found[0]['version']
if version.parse(current_version) < version.parse(available_version):
logger.info(
'Found update for addon {}. Current version: {}, Available version: {}'.format(row['id'], current_version,
available_version))
remove_addon(row['id'])
install_addon(found[0])
except:
logger.exception('Failed to update addon {}'.format(row['id']))
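# Installs missing dependencies declared in each installed addon's 'requires'
# list, looping until a full pass completes without installing anything new.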
def fix_addons():
repeat = True
while repeat:
repeat = False
for row in get_all_installed_addons():
try:
for r in json.loads(row['requires']):
if not get_installed_addon(r):
found = find_addon(r)
if found:
install_addon(found[0])
repeat = True
else:
logger.error('Addon {} is required by addon {} and cannot be found'.format(r, row['id']))
except:
logger.exception('Failed to fix addon {}'.format(row['id']))
@app.route('/response/<pid>/<id>', methods=['POST', 'GET'])
@app.route('/response/<pid>/<id>/<res>')
def route(pid, id, res=None):
if request.method == 'POST':
res = request.form.keys()[0]
if pid in globals.PROCESSES:
p = globals.PROCESSES[pid]
logger.debug('received response on process {}'.format(pid))
if p is not None:
p.responses.put({'id': id, 'response': res})
return json.dumps({'messagetype': 'nothing', 'end': True})
doc = render_template('alert.xml', title='Communication error',
description="Failed to load page.\nThis could mean the server had a problem, or the request dialog timed-out\nPlease try again")
return json.dumps({'doc':doc, 'end':True})
else:
return json.dumps({'messagetype': 'nothing', 'end':True})
@app.route('/playstop/<s>/<text>', methods=['GET'])
def playstop(s, text):
try:
text = json.loads(kodi_utils.b64decode(text))
kodi_utils.trigger(kodi_utils.TRIGGER_PLAY_STOP, text)
if text['time'] == '0' or text['total'] == '0':
pass #I do not want to change history for mis-played item
else:
kodi_utils.set_play_history(s, text['time'], text['total'])
kodi_utils.update_item_stop('/playstop/{}'.format(s), text['time'])
except:
logger.exception('Failed to set play history')
return json.dumps({'messagetype': 'nothing', 'end': True})
@app.route('/playstart/<s>/<text>', methods=['GET'])
def playstart(s, text):
try:
text = kodi_utils.b64decode(text)
kodi_utils.trigger(kodi_utils.TRIGGER_PLAY_START, text)
except:
logger.exception('Failed to signal play start')
return json.dumps({'messagetype': 'nothing', 'end': True})
@app.route('/progressstop/<id>', methods=['GET'])
def progressstop(id):
try:
kodi_utils.trigger(kodi_utils.TRIGGER_PROGRESS_CLOSE, id)
except:
logger.exception('Failed to signal play start')
return json.dumps({'messagetype': 'nothing', 'end': True})
@app.route('/icon.png')
def icon():
return send_from_directory(bundle_dir, 'icon.png')
@app.route('/cache/<id>')
def cache(id):
file = imageCache.get(id)
if file:
return send_file(file)
else:
return 'Not found', 404
@app.route('/addons/<path:filename>')
def kodiplugin_icon(filename):
return send_from_directory(os.path.join(DATA_DIR, 'addons'), filename)
@app.route('/js/<path:filename>')
def js(filename):
return send_from_directory(os.path.join(bundle_dir, 'js'), filename)
@app.route('/templates/<path:filename>')
def template(filename):
return send_from_directory(os.path.join(bundle_dir, 'templates'), filename)
last_localfile_path=None
@app.route('/localfile/<file>')
def localfile(file):
try:
filename = kodi_utils.b64decode(file)
global last_localfile_path
last_localfile_path = os.path.dirname(filename)
except:
if file and last_localfile_path:
filename=last_localfile_path+'/'+file
return send_file(filename)
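# Main browsing endpoint: spawns (or looks up) a plugin process, translates the
# next message from its queue into a JSON reply, and lets the client call back
# with the process id for further messages until an 'end' message arrives or
# the process dies (with a 5 second grace period for late messages).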
@app.route('/menu/<pluginid>', methods=['POST', 'GET'])
@app.route('/menu/<pluginid>/<process>', methods=['POST', 'GET'])
@app.route('/catalog/<pluginid>', methods=['POST', 'GET'])
@app.route('/catalog/<pluginid>/<process>', methods=['POST', 'GET'])
# @app.route('/catalog/<pluginid>/<url>')
# @app.route('/catalog/<pluginid>/<url>/<process>')
def catalog(pluginid, process=None, url=''):
if not url and request.method == 'POST':
try:
url = kodi_utils.b64decode(request.form.keys()[0])
except:
logger.exception('Failed to parse post data')
url = ''
try:
decoded_id = kodi_utils.b64decode(pluginid)
if request.full_path.startswith('/catalog'):
logger.debug('catalog {}, {}, {}'.format(decoded_id, url, process))
else:
logger.debug('menu {}, {}'.format(decoded_id, process))
plugin = get_installed_addon(decoded_id)
if not plugin:
doc = render_template('alert.xml', title='Missing plugin',
description="Failed to run plugin {}.\nYou may need to install it manually".format(decoded_id))
return json.dumps({'doc': doc, 'end': True})
if process:
if not process in globals.PROCESSES:
return json.dumps({'messagetype': 'nothing', 'end': True}) #For some reason the process has already ended and was deleted
p = globals.PROCESSES[process]
else:
if request.full_path.startswith('/catalog'):
p = Process(target=get_items, args=(plugin['id'], url, globals.CONTEXT))
else:
p = Process(target=get_menu, args=(plugin['id'], url))
logger.debug('saving process id {}'.format(p.id))
globals.PROCESSES[p.id] = p
def stop():
time.sleep(5) # close bridge after 5s
del globals.PROCESSES[p.id]
# b.thread.onStop = stop
p.start()
logger.debug('entering while alive')
try:
while p.is_alive():
try:
msg = p.messages.get(False)
except:
gevent.sleep(0.1)
continue
try:
method = getattr(messages, msg['type'])
if msg['type'] == 'end' and not p.messages.empty():
logger.debug('Got terminal message but process has more messages')
p.messages.put(msg)
continue
if msg['type'] == 'load' and not p.messages.empty():
msg_2 = p.messages.get()
if msg_2['type'] != 'end':
logger.debug('Got load message but process has more messages')
p.messages.put(msg_2)
p.messages.put(msg)
continue
if msg['type'] == 'load':
return catalog(msg['url'].split('/')[-1], None, kodi_utils.b64decode(msg['data']))
if msg['type'] == 'end':
for t in globals.PROCESSES:
if not globals.PROCESSES[t].messages.empty() and t != p.id:
msg2 = globals.PROCESSES[t].messages.get()
if msg2['type'] == 'end':
continue
p.messages.put(msg2)
p.messages.put(msg)
logger.debug('Got end message but replaced with {} from process {}'.format(msg2, globals.PROCESSES[t].id))
raise globals.ContinueException()
globals.PROCESSES[t].responses.close()
globals.PROCESSES[t].messages.close()
globals.PROCESSES[t].triggers.close()
del globals.PROCESSES[t].responses
del globals.PROCESSES[t].messages
del globals.PROCESSES[t].stop
del globals.PROCESSES[t].triggers
globals.PROCESSES[t]._popen.terminate()
globals.PROCESSES.clear()
# p.join()
# p.terminate()
logger.debug('PROCESS {} TERMINATED'.format(p.id))
return_url = None
if process:
# return on same url for more
return_url = request.url
else:
# add response bridge
return_url = '{}/{}'.format(request.url, p.id)
ans = method(plugin, msg, return_url, kodi_utils.b64encode(url))
#time.sleep(1)
ans['end'] = msg['type'] == 'end'
if not ans['end'] and not 'return_url' in ans:
print 'blah'
return json.dumps(ans)
except globals.ContinueException:
pass
except:
logger.exception('Error in while alive')
except:
logger.exception('Error in while alive')
logger.debug('exiting while alive and entering 5s wait')
# Check for a possible last message that appeared after the process died. This can happen if a message was sent during the sleep in the loop above and the loop exited immediately afterwards.
start = 0
while start < 5: # wait at most 5 seconds
try:
msg = p.messages.get(False)
except:
gevent.sleep(0.1)
start += 0.1
continue
try:
method = getattr(messages, msg['type'])
if msg['type'] == 'load' and not p.messages.empty():
msg_2 = p.messages.get()
if msg_2['type'] != 'end':
logger.debug('Got load message but process has more messages')
p.messages.put(msg_2)
p.messages.put(msg)
continue
if msg['type'] == 'end' and not p.messages.empty():
logger.warning('Got end message but queue not empty. Getting another')
p.messages.put(msg)
continue
if msg['type'] == 'load':
return catalog(msg['url'].split('/')[-1], None, kodi_utils.b64decode(msg['data']))
if msg['type'] == 'end':
for t in globals.PROCESSES:
if not globals.PROCESSES[t].messages.empty() and t != p.id:
msg2 = globals.PROCESSES[t].messages.get()
if msg2['type'] == 'end':
continue
p.messages.put(msg2)
p.messages.put(msg)
logger.debug('Got end message but replaced with {} from process {}'.format(msg2, globals.PROCESSES[t].id))
raise globals.ContinueException()
globals.PROCESSES[t].responses.close()
globals.PROCESSES[t].messages.close()
globals.PROCESSES[t].triggers.close()
del globals.PROCESSES[t].responses
del globals.PROCESSES[t].messages
del globals.PROCESSES[t].stop
del globals.PROCESSES[t].triggers
globals.PROCESSES[t]._popen.terminate()
globals.PROCESSES.clear()
# p.join()
# p.terminate()
logger.debug('PROCESS {} TERMINATED'.format(p.id))
ans = method(plugin, msg, request.url, kodi_utils.b64encode(url)) if process else method(plugin, msg,
'{}/{}'.format(request.url, p.id), kodi_utils.b64encode(url))
ans['end'] = msg['type'] == 'end'
if not ans['end'] and not 'return_url' in ans:
print 'blah'
return json.dumps(ans)
except globals.ContinueException:
pass
except:
logger.exception('Error while waiting for process messages after death')
logger.debug('finished 5 sec wait')
# if we got here, this means thread has probably crashed.
if p.id in globals.PROCESSES:
del globals.PROCESSES[p.id]
logger.error('PROCESS {} CRASHED'.format(p.id))
#doc = render_template('alert.xml', title='Communication error',
# description="Failed to load page.\nThis could mean the server had a problem, or the request dialog timed-out\nPlease try again")
#return json.dumps({'doc':doc, 'end':True})
return json.dumps({'messagetype': 'nothing', 'end': True})
except:
logger.exception('Error in catalog')
doc = render_template('alert.xml', title='Communication error',
description="Failed to load page.\nThis could mean the server had a problem, or the request dialog timed-out\nPlease try again")
return json.dumps({'doc': doc, 'end': True})
@app.route('/main')
def main():
try:
favs = kodi_utils.get_config(kodi_utils.FAVORITE_CONFIG, [])
language = kodi_utils.get_config(kodi_utils.LANGUAGE_CONFIG, 'English')
filtered_plugins = [dict(p) for p in get_all_installed_addons() if [val for val in json.loads(p['type']) if val in ['Video', 'Audio']]] #Show only plugins with video/audio capability since all others are not supported
for p in filtered_plugins:
p['name'] = kodi_utils.tag_conversion(p['name'])
fav_plugins = [p for p in filtered_plugins if p['id'] in favs]
# recents = {}
# try:
# with open_db() as DB:
# for row in DB.execute('select * from HISTORY'):
# try:
# s = kodi_utils.b64decode(row['s'])
# m = re.search('plugin://([^/]*)/(.*)', s)
# if m:
# id = m.group(1)
# addon = get_installed_addon(id)
# if addon:
# if not addon['name'] in recents:
# recents[addon['name']] = []
# recents[addon['name']].append()
# except:
# pass
# except:
# logger.exception('Failed to retrieve play history')
# return {'time': 0, 'total': 0}
doc = render_template('main.xml', recents=kodi_utils.get_items(), menu=filtered_plugins, favs=fav_plugins, url=request.full_path, version=VERSION, languages=["Afrikaans", "Albanian", "Amharic", "Arabic", "Armenian", "Azerbaijani", "Basque", "Belarusian", "Bosnian", "Bulgarian", "Burmese", "Catalan", "Chinese", "Croatian", "Czech", "Danish", "Dutch", "English", "Esperanto", "Estonian", "Faroese", "Finnish", "French", "Galician", "German", "Greek", "Hebrew", "Hindi", "Hungarian", "Icelandic", "Indonesian", "Italian", "Japanese", "Korean", "Latvian", "Lithuanian", "Macedonian", "Malay", "Malayalam", "Maltese", "Maori", "Mongolian", "Norwegian", "Ossetic", "Persian", "Persian", "Polish", "Portuguese", "Romanian", "Russian", "Serbian", "Silesian", "Sinhala", "Slovak", "Slovenian", "Spanish", "Spanish", "Swedish", "Tajik", "Tamil", "Telugu", "Thai", "Turkish", "Ukrainian", "Uzbek", "Vietnamese", "Welsh"], current_language=language)
return json.dumps({'doc':doc, 'end': True})
except:
logger.exception('Failed to load main screen')
doc = render_template('alert.xml', title='Application error',
description="Failed to main page.\nThis means the server has a problem")
return json.dumps({'doc': doc, 'end': True})
@app.route('/clearPlay')
def clear_play():
try:
with open_db() as DB:
DB.execute('delete from HISTORY')
except:
logger.exception('Failed to clear history')
return json.dumps({'messagetype': 'nothing', 'end': True})
@app.route('/clearSettings')
def clear_settings():
try:
with open_db() as DB:
DB.execute('delete from {}'.format(kodi_utils.SETTINGS_TABLE))
except:
logger.exception('Failed to clear settings')
return json.dumps({'messagetype': 'nothing', 'end': True})
@app.route('/clearAll')
def clear_all():
try:
with open_db() as DB:
DB.execute('delete from {}'.format(kodi_utils.SETTINGS_TABLE))
DB.execute('delete from {}'.format(kodi_utils.HISTORY_TABLE))
DB.execute('delete from {}'.format(kodi_utils.CONFIG_TABLE))
except:
logger.exception('Failed to clear all')
return json.dumps({'messagetype': 'nothing', 'end': True})
@app.route('/setLanguage', methods=['POST'])
def set_language():
try:
language = kodi_utils.b64decode(request.form.keys()[0])
kodi_utils.set_config(kodi_utils.LANGUAGE_CONFIG, language)
except:
logger.exception('Failed to set language')
return json.dumps({'messagetype':'nothing', 'end': True})
@app.route('/removeAddon', methods=['POST'])
def removeAddon():
try:
if request.method == 'POST':
id = kodi_utils.b64decode(request.form.keys()[0])
found = get_installed_addon(id)
if found:
remove_addon(id)
#return json.dumps({'url': '/main', 'replace': True, 'initial': True}), 212 #Reload main screen
doc = render_template('alert.xml', title='Success', description='Successfully removed addon {}'.format(found['name']))
return json.dumps({'doc': doc, 'end': True})
except:
traceback.print_exc(file=sys.stdout)
doc = render_template('alert.xml', title='Failed',
description='Failed to remove addon {}'.format(found['name']))
return json.dumps({'doc': doc, 'end': True})
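# Uninstalls an addon: drops any repository entries it provided, aborts and
# tears down its service process if it registered one, removes its directory
# under DATA_DIR/addons and deletes its row from the INSTALLED table.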
def remove_addon(id):
logger.debug('deleting plugin {}'.format(id))
addon = get_installed_addon(id)
if addon:
if 'Repository' in json.loads(addon['type']):
index_to_del = None
for (i,j) in enumerate(globals.REPOSITORIES):
if j['name'] == addon['name']:
index_to_del = i
if index_to_del is not None:  # index 0 is a valid position
del globals.REPOSITORIES[index_to_del]
global REFRESH_EVENT
REFRESH_EVENT.clear()
#multiprocessing.Process(target=get_available_addons, args=(globals.REPOSITORIES, REFRESH_EVENT)).start()
thread.start_new_thread(get_available_addons, (globals.REPOSITORIES, REFRESH_EVENT))
if 'Service' in json.loads(addon['type']):
#First abort monitor for this addon if exists
kodi_utils.trigger(kodi_utils.TRIGGER_ABORT, id)
time.sleep(5)
#Now terminate service process
if id in globals.SERVICES:
globals.SERVICES[id].responses.close()
globals.SERVICES[id].messages.close()
del globals.SERVICES[id].responses
del globals.SERVICES[id].messages
del globals.SERVICES[id].stop
del globals.SERVICES[id].triggers
globals.SERVICES[id]._popen.terminate()
del globals.SERVICES[id]
path = os.path.join(DATA_DIR, 'addons', id)
try:
shutil.rmtree(path)
except:
pass
with open_db() as DB:
DB.execute('delete from INSTALLED where id=?', (id,))
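# Worker entry point executed inside a MyProcess: loads the KodiPlugin, gives
# it a bridge bound to the shared context and runs it for the requested url.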
def get_items(plugin_id, url, context, run_as_service=False):
logger = logging.getLogger(plugin_id)
logger.debug('Getting items for {}:{}'.format(plugin_id, url))
if 'setproctitle' in sys.modules:
setproctitle.setproctitle('python TVMLServer ({}:{})'.format(plugin_id, url))
kodi_utils.windows_pyinstaller_multiprocess_hack()
try:
plugin = KodiPlugin(plugin_id)
if not plugin:
raise Exception('could not load plugin')
b = bridge()
#multiprocessing.current_process().bridge = b
b.context = context
items = plugin.run(b, url, run_as_service)
del plugin
del b
del logger
except:
logger.exception('Encountered error in plugin: {}'.format(plugin_id))
items = None
# logger.debug('get_items finished with {}'.format(items))
return items
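# Like get_items, but runs the plugin's settings() handler to build its menu.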
def get_menu(plugin_id, url):
print('Getting menu for: {}'.format(url))
url = url.split('?')[1] if '?' in url else url
try:
plugin = KodiPlugin(plugin_id)
if not plugin:
raise Exception('could not load plugin')
b = bridge()
items = plugin.settings(b, url)
except:
logger.exception('Encountered error in plugin: {}'.format(plugin_id))
items = None
return items
def is_ascii(s):
return all(ord(c) < 128 for c in s)
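# Rebuilds the ADDONS table by downloading and parsing the addon.xml index of
# every configured repository, then fixes missing dependencies and applies
# pending updates; the optional event is set once the refresh is complete.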
def get_available_addons(REPOSITORIES, e=None):
logger.debug('Refreshing repositories. Please wait...')
with open_db() as DB:
DB.execute('delete from ADDONS')
for r in REPOSITORIES:
temp = []
for dir in r['dirs']:
try:
req = requests.get(dir['xml'])
link = req.text
parsed = parse_addon_xml(link, r, dir)
parsed = [(addon['id'], r['name'], json.dumps(dir), json.dumps(addon['type']), addon['name'].decode('utf-8'), json.dumps(addon['data']), addon['version'], addon['script'], json.dumps(addon['requires']), addon['icon']) for addon in parsed]
temp += parsed
except:
logger.exception('Cannot read repository {}'.format(r['name']))
with open_db() as DB:
try:
DB.executemany('insert into ADDONS values(?,?,?,?,?,?,?,?,?,?)', temp)
except:
logger.exception('failed to insert addons into DB')
logger.debug('Finished refreshing repositories')
fix_addons()
update_addons()
if e:
e.set()
@app.route('/installAddon', methods=['POST'])
def installAddon():
if request.method == 'POST':
try:
id = kodi_utils.b64decode(request.form.keys()[0])
already_installed = get_installed_addon(id)
if already_installed:
doc = render_template('alert.xml', title='Already installed',
description="This addon is already installed")
return json.dumps({'doc': doc, 'end': True})
found = find_addon(id)
if not found:
doc = render_template('alert.xml', title='Unknown addon', description="This addon cannot be found")
return json.dumps({'doc': doc, 'end': True})
install_addon(found[0])
plugin = KodiPlugin(id)
for r in plugin.requires:
already_installed = get_installed_addon(r)
if already_installed:
continue
found = find_addon(r)
if not found:
doc = render_template('alert.xml', title='Unknown addon', description="This addon has a requirement that cannot be found {}".format(r))
return json.dumps({'doc': doc, 'end': True})
install_addon(found[0])
#return json.dumps({'url': '/main', 'replace': True, 'initial': True}), 212 # Reload main screen
doc = render_template('alert.xml', title='Installation complete',
description="Successfully installed addon {}".format(
plugin.name))
return json.dumps({'doc': doc, 'end': True})
except:
logger.exception('Failed to download/install {}'.format(id))
try:
remove_addon(id)
except:
pass
doc = render_template('alert.xml', title='Install error',
description="Failed to install addon.\nThis could be due to a network error or bad repository parsing")
return json.dumps({'doc': doc, 'end': True})
doc = render_template('alert.xml', title='URL error', description='This URL is invalid')
return json.dumps({'doc': doc, 'end': True})
def find_addon(id):
"""Gets all rows of available addons with same id sorted by version from highest to lowest"""
found = []
with open_db() as DB:
for row in DB.execute('select * from ADDONS where id=?', (id,)):
found.append(row)
def cmp(a, b):
a_version = a['version']
b_version = b['version']
if version.parse(a_version) < version.parse(b_version):
return 1
if version.parse(a_version) == version.parse(b_version):
return 0
return -1
found = sorted(found, cmp=cmp)
return found
def get_installed_addon(id):
"""Gets the row of the addon if its installed"""
with open_db() as DB:
row = DB.execute('select * from INSTALLED where id=?', (id,)).fetchone()
return row
@app.route('/setFavorite', methods=['POST'])
def set_installed_addon_favorite():
"""Updates the favorite column of the installed addon in the DB"""
try:
id = kodi_utils.b64decode(request.form.keys()[0])
ans = kodi_utils.get_config(kodi_utils.FAVORITE_CONFIG, [])
if id in ans:
ans.remove(id)
else:
ans.append(id)
kodi_utils.set_config(kodi_utils.FAVORITE_CONFIG, ans)
except:
logger.exception('Failed to update addon favorite')
return json.dumps({'messagetype': 'nothing', 'end': True})
def get_all_installed_addons():
"""Returns a list of rows from DB of all installed addons"""
with open_db() as DB:
found = []
for row in DB.execute('select * from INSTALLED'):
found.append(row)
return found
def install_addon(addon):
logger.debug('Installing addon {}'.format(addon['id']))
download_url = '{0}/{1}/{1}-{2}.zip'.format(json.loads(addon['dir'])['download'], addon['id'], addon['version'])
logger.debug('downloading plugin {}'.format(download_url))
r = requests.get(download_url, stream=True)
if r.status_code >= 400:
#if not addon['id'].startswith('plugin'):
# download_url = '{0}/plugin.{1}/plugin.{1}-{2}.zip'.format(json.loads(addon['dir'])['download'], addon['id'], addon['version'])
# logger.debug('downloading plugin {}'.format(download_url))
# r = requests.get(download_url, stream=True)
# if r.status_code >= 400:
# raise Exception('Failed to download')
#else:
raise Exception('Failed to download')
temp = os.path.join(tempfile.gettempdir(), '{}.zip'.format(addon['id']))
with open(temp, 'wb') as f:
r.raw.decode_content = True
shutil.copyfileobj(r.raw, f)
if not zipfile.is_zipfile(temp):
raise Exception('failed to download')
path = os.path.join(DATA_DIR, 'addons')
with zipfile.ZipFile(temp, 'r') as zip:
zip.extractall(path)
time.sleep(5)
plugin = KodiPlugin(addon['id'])
logger.debug('Successfully installed plugin {} of type {}'.format(plugin.id, plugin.type))
if 'Repository' in plugin.type: # Need additional stuff
try:
with open(os.path.join(DATA_DIR, 'addons', plugin.id, 'addon.xml'), 'r') as f:
repo = {}
parser = AdvancedHTMLParser.Parser.AdvancedHTMLParser()
parser.feed(f.read())
repo['name'] = parser.getElementsByTagName('addon')[0].attributes['name']
repo['dirs'] = []
infos = parser.getElementsByTagName('info')
datadirs = parser.getElementsByTagName('datadir')
if len(infos) != len(datadirs):
raise Exception('Failed to parse addon.xml')
for i in range(len(infos)):
repo['dirs'].append({'xml': infos[i].text, 'download': datadirs[i].text})
# check if already exists
if [d for d in repo['dirs'] if d not in [i for j in [r['dirs'] for r in globals.REPOSITORIES] for i in j]]:
globals.REPOSITORIES.append(repo)
except:
logger.exception('Failed to parse installed repository {}'.format(plugin))
if 'Service' in plugin.type: # Need to run service
logger.debug('Starting service {}'.format(plugin.id))
try:
p = Process(target=get_items, args=(plugin.id, '', globals.CONTEXT, True))
globals.SERVICES[plugin.id] = p
p.daemon = True
p.start()
except:
logger.exception('Failed to run {} service'.format(plugin.id))
with open_db() as DB:
DB.execute('insert into INSTALLED VALUES(?,?,?,?,?,?,?,?,0)', (plugin.id, json.dumps(plugin.type), plugin.name.decode('utf-8'), json.dumps(plugin.data), plugin.version, plugin.script, json.dumps(plugin.requires), plugin.icon))
@app.route('/getAddonData', methods=['POST'])
def getAddonData():
try:
id = kodi_utils.b64decode(request.form.keys()[0])
found = get_installed_addon(id)
if found:
addon = dict(found)
addon['installed'] = True
else:
found = find_addon(id)
if found:
addon = dict(found[0])
if not addon:
doc = render_template('alert.xml', title='Unknown addon', description="This addon cannot be found")
return json.dumps({'doc': doc, 'end': True})
addon['type'] = json.loads(addon['type'])
#addon['dir'] = json.loads(addon['dir'])
addon['data'] = json.loads(addon['data'])
addon['requires'] = json.loads(addon['requires'])
favorites = kodi_utils.get_config(kodi_utils.FAVORITE_CONFIG, [])
doc = render_template('addonDetails.xml', addon=addon, favorite=id in favorites)
        return json.dumps({'doc': doc, 'end': True})
except:
logger.exception('Failed to get data on {}'.format(id))
doc = render_template('alert.xml', title='Error',
description="Failed to get data on addon.\nThis could be due to a network error or bad repository parsing")
return json.dumps({'doc': doc, 'end': True})
@app.route('/refreshRepositories')
def refresh_repositories():
global REFRESH_EVENT
if REFRESH_EVENT.is_set(): #i.e. refresh not in progress
REFRESH_EVENT.clear()
#multiprocessing.Process(target=get_available_addons, args=(globals.REPOSITORIES, REFRESH_EVENT)).start()
thread.start_new_thread(get_available_addons, (globals.REPOSITORIES, REFRESH_EVENT))
gevent.sleep(1)
    return json.dumps({'messagetype': 'load', 'url': '/refreshProgress'})
@app.route('/refreshProgress')
def refresh_progress():
if not REFRESH_EVENT.is_set():
gevent.sleep(1)
        doc = render_template('progressdialog.xml', title='Please wait', text='Still refreshing repositories.\nWaiting for it to complete', value='0', url='/refreshProgress')
return json.dumps({'doc': doc, 'messagetype': 'progress'})
return json.dumps({'messagetype': 'nothing'})
@app.route('/viewLog')
def viewLog():
with open(LOGFILE, 'r') as f:
log = f.readlines()
log.reverse()
doc = render_template('logTemplate.xml', title='TVMLServer log', text=''.join(log))
return json.dumps({'doc': doc, 'end': True})
@app.route('/clearLog')
def clear_log():
open(LOGFILE, 'w').close()
return json.dumps({'messagetype': 'nothing', 'end': True})
@app.route('/checkForUpdate')
def check_for_update():
try:
req = requests.get('https://api.github.com/repos/ggyeh/TVML-Kodi-Addons/releases/latest')
req = req.json()
latest = req['tag_name']
current = VERSION
if latest != current:
doc = render_template('alert.xml', title='Update found', description='New version detected {}\nCurrent version is {}\n\nSorry, no auto-update yet.\nPlease visit https://github.com/ggyeh/TVML-Kodi-Addons/releases/latest to download'.format(latest, current))
else:
            doc = render_template('alert.xml', title='Up to date',
                                  description='You are running the latest version {}'.format(current))
return json.dumps({'doc': doc, 'end': True})
except:
        doc = render_template('alert.xml', title='Error',
                              description='Failed to check for new version')
return json.dumps({'doc': doc, 'end': True})
@app.route('/restart')
def restart():
print 'restarting app'
global http_server
http_server.stop()
# try:
# p = psutil.Process(os.getpid())
# for handler in p.open_files() + p.connections():
# os.close(handler.fd)
# except Exception, e:
# print e
exe = sys.executable
subprocess.Popen([exe] + sys.argv)
# os.execl(python, python, *sys.argv)
sys.exit(0)
@app.route('/repositories')
def respositories():
doc = render_template('repositories.xml', title='Repositories', repositories=[{'name':r['name'].replace("'", "\\'"), 'title':kodi_utils.tag_conversion(r['name'])} for r in globals.REPOSITORIES])
return json.dumps({'doc': doc, 'end': True})
@app.route('/addonsForRepository', methods=['POST'])
def addonsForRepository():
try:
if not REFRESH_EVENT.is_set():
gevent.sleep(1)
doc = render_template('progressdialog.xml', title='Please wait', text='Still refreshing repositories.\nWaiting for it to complete', value='0', url='/addonsForRepository', data=request.form.keys()[0])
return json.dumps({'doc': doc, 'messagetype': 'progress'})
name = kodi_utils.b64decode(request.form.keys()[0])
with open_db() as DB:
repo_addons = [row for row in DB.execute('select * from ADDONS where repo=?', (name,))]
addons = {}
for a in repo_addons:
b = dict(a)
b['type'] = json.loads(b['type'])
b['dir'] = json.loads(b['dir'])
b['data'] = json.loads(b['data'])
b['requires'] = json.loads(b['requires'])
for type in b['type']:
if not type in addons:
addons[type] = []
addons[type].append(b)
for type in addons:
addons[type] = sorted(addons[type], key=lambda a: a['name'])
doc = render_template('addonsList.xml', addons=addons)
return json.dumps({'doc': doc, 'end': True})
except Exception as e:
logger.exception('Failed to show addons by repository {}'.format(name))
doc = render_template('alert.xml', title='Error', description='{}'.format(e))
return json.dumps({'doc': doc, 'end': True})
@app.route('/addRepository', methods=['POST'])
def addRepository():
try:
path = kodi_utils.b64decode(request.form.keys()[0])
if not os.path.exists(path):
doc = render_template('alert.xml', title='Error', description='{} does not exist'.format(path))
return json.dumps({'doc': doc, 'end': True})
if not os.path.isfile(path):
doc = render_template('alert.xml', title='Error', description='{} is not a valid file'.format(path))
return json.dumps({'doc': doc, 'end': True})
if not zipfile.is_zipfile(path):
doc = render_template('alert.xml', title='Error', description='{} is not a valid zipfile'.format(path))
return json.dumps({'doc': doc, 'end': True})
with zipfile.ZipFile(path, 'r') as zip:
xml = [f for f in zip.namelist() if f.endswith('addon.xml')][0]
dir = os.path.join(DATA_DIR, 'addons')
zip.extractall(dir)
xml = os.path.join(DATA_DIR, 'addons', xml)
with open(xml, 'r') as f:
repo = {}
parser = AdvancedHTMLParser.Parser.AdvancedHTMLParser()
parser.feed(f.read())
repo['name'] = parser.getElementsByTagName('addon')[0].attributes['name']
repo['dirs'] = []
infos = parser.getElementsByTagName('info')
datadirs = parser.getElementsByTagName('datadir')
if len(infos) != len(datadirs):
raise Exception('Failed to parse addon.xml')
for i in range(len(infos)):
repo['dirs'].append({'xml': infos[i].text, 'download': datadirs[i].text})
if repo['name'] in [r['name'] for r in globals.REPOSITORIES]:
doc = render_template('alert.xml', title='Already exists', description='Repository with this name already exists')
return json.dumps({'doc':doc, 'end':True})
globals.REPOSITORIES.append(repo)
global REFRESH_EVENT
REFRESH_EVENT.clear()
#multiprocessing.Process(target=get_available_addons, args=(globals.REPOSITORIES, REFRESH_EVENT)).start()
thread.start_new_thread(get_available_addons, (globals.REPOSITORIES, REFRESH_EVENT))
return json.dumps({'messagetype':'load', 'url': '/main', 'replace': True, 'initial': True, 'end':True})
except Exception as e:
logger.exception('Failed to add repository {}'.format(path))
doc = render_template('alert.xml', title='Error', description='{}'.format(e))
return json.dumps({'doc':doc, 'end':True})
@app.route('/browseAddons', methods=['POST', 'GET'])
def browse_addons():
"""This method will return all available addons by type"""
search = None
if request.method == 'POST':
search='.*{}.*'.format(kodi_utils.b64decode(request.form.keys()[0]))
if not REFRESH_EVENT.is_set():
gevent.sleep(1)
        doc = render_template('progressdialog.xml', title='Please wait', text='Refreshing repositories. This may take some time', value='0', url='/browseAddons')
return json.dumps({'doc':doc, 'messagetype':'progress'})
with open_db() as DB:
rows = [row for row in DB.execute('select * from ADDONS')]
all = {}
for row in rows:
if search:
if not re.match(search, row['name']) and not re.match(search, row['id']):
continue
row = dict(row)
row['types'] = json.loads(row['type'])
installed = 1 if get_installed_addon(row['id']) else 0
row['installed'] = installed
row['dir'] = json.loads(row['dir'])
row['requires'] = json.loads(row['requires'])
for type in row['types']:
if not type in all:
all[type] = []
all[type].append(row)
doc = render_template('addonsList.xml', addons=all)
return json.dumps({'doc':doc, 'end':True})
@app.route('/allAddons')
def all_addons():
"""This method will return all available addons in a search template"""
if not REFRESH_EVENT.is_set():
gevent.sleep(1)
        doc = render_template('progressdialog.xml', title='Please wait', text='Refreshing repositories. This may take some time', value='0', url='/allAddons')
return json.dumps({'doc':doc, 'messagetype':'progress'})
with open_db() as DB:
rows = [row for row in DB.execute('select * from ADDONS')]
all = {}
for row in rows:
row = dict(row)
row['types'] = json.loads(row['type'])
installed = 1 if get_installed_addon(row['id']) else 0
row['installed'] = installed
row['dir'] = json.loads(row['dir'])
row['requires'] = json.loads(row['requires'])
if row['id'] in all: #if already exists with same id
if version.parse(all[row['id']]['version']) < version.parse(row['version']): #if found higher version
                all[row['id']] = row  # overwrite with the newer version
else:
all[row['id']] = row
doc = render_template('addons.xml', all=all)
return json.dumps({'doc': doc, 'end': True})
last_dir = os.path.expanduser("~")
@app.route('/browse', methods=['GET', 'POST'])
def browse():
dir = None
filter = None
if request.method == 'POST':
post_data = json.loads(kodi_utils.b64decode(request.form.keys()[0]))
dir = kodi_utils.b64decode(post_data['dir']) if post_data['dir'] else ''
filter = post_data['filter']
print 'browsing to {}'.format(dir)
global last_dir
if not dir:
dir = last_dir
try:
if os.path.isdir(dir):
files = [{'url': kodi_utils.b64encode(os.path.join(dir, f)), 'title': f} for f in os.listdir('{}'.format(dir))]
if filter:
files = [f for f in files if os.path.isdir(kodi_utils.b64decode(f['url'])) or re.match(filter, f['title'])]
up = kodi_utils.b64encode(os.path.dirname(dir))
doc = render_template('browse.xml', title=dir, files=files, up=up)
last_dir = dir
return json.dumps({'messagetype':'modal', 'doc': doc, 'end': True})
else:
return json.dumps({'ans':dir, 'messagetype':'special', 'end':True})
except:
logger.exception('Failed to browse {}'.format(dir))
doc = render_template('alert.xml', title='Error', description='Failed to browse {}'.format(dir))
return json.dumps({'doc': doc, 'end': True})
def help(argv):
print 'Usage: {} [-p <port>] [-d <dir>]'.format(argv[0])
print
print '-p <port>, --port=<port> Run the server on <port>. Default is 5000'
print '-t <dir>, --temp=<dir> Specify alternate temp directory. Default is {}'.format(tempfile.gettempdir())
sys.exit()
def mmain(argv):
signal.signal(signal.SIGINT, program_end)
globals.port = 5000 # default
try:
opts, args = getopt.getopt(argv[1:], "hp:t:", ["port=", "temp="])
except getopt.GetoptError:
help(argv)
for opt, arg in opts:
if opt == '-h':
help(argv)
elif opt in ("-p", "--port"):
try:
globals.port = int(arg)
except:
print '<port> option must be an integer'
sys.exit(2)
elif opt in ("-t", "--temp"):
if os.path.isdir(arg):
tempfile.tempdir = arg
else:
print '{} is not a valid directory'.format(arg)
sys.exit(2)
with open_db() as DB:
DB.execute('create table if not exists {}(id text primary_key, string text)'.format(kodi_utils.SETTINGS_TABLE))
DB.execute('create table if not exists {}(id text primary_key, string text)'.format(kodi_utils.CONFIG_TABLE))
DB.execute('create table if not exists {}(s text primary_key, time integer, total integer)'.format(kodi_utils.HISTORY_TABLE))
if 's' not in kodi_utils.column_names(kodi_utils.ITEMS_TABLE):
DB.execute('drop table if exists {}'.format(kodi_utils.ITEMS_TABLE))
DB.execute('create table if not exists {}(s text primary_key, addon text)'.format(kodi_utils.ITEMS_TABLE))
DB.execute('drop table if exists ADDONS')
DB.execute('create table ADDONS(id text, repo text, dir text, type text, name text, data text, version text, script text, requires text, icon text)')
DB.execute('drop table if exists INSTALLED')
DB.execute('create table INSTALLED(id text primary_key, type text, name text, data text, version text, script text, requires text, icon text, favorite integer default 0)')
for plugin in os.listdir(os.path.join(DATA_DIR, 'addons')):
try:
dir = os.path.join(DATA_DIR, 'addons', plugin)
if not os.path.isdir(dir):
continue
logger.debug('Loading kodi plugin {}'.format(plugin))
p = KodiPlugin(plugin)
#if [val for val in p.type if val in ['Video', 'Audio', 'Repository']]:
if 'Repository' in p.type: #Need additional stuff
try:
with open(os.path.join(DATA_DIR, 'addons', plugin, 'addon.xml'), 'r') as f:
repo = {}
parser = AdvancedHTMLParser.Parser.AdvancedHTMLParser()
parser.feed(f.read())
repo['name'] = parser.getElementsByTagName('addon')[0].attributes['name']
repo['dirs'] = []
infos = parser.getElementsByTagName('info')
datadirs = parser.getElementsByTagName('datadir')
if len(infos) != len(datadirs):
raise Exception('Failed to parse addon.xml')
for i in range(len(infos)):
repo['dirs'].append({'xml': infos[i].text, 'download': datadirs[i].text})
#check if already exists
if not [d for d in repo['dirs'] if d not in [i for j in [r['dirs'] for r in globals.REPOSITORIES] for i in j]]:
                            # we have no dirs that don't already exist
continue
globals.REPOSITORIES.append(repo)
except:
logger.exception('Failed to parse installed repository {}'.format(plugin))
if 'Service' in p.type: # Need to run service
try:
pr = Process(target=get_items, args=(p.id, '', globals.CONTEXT, True))
pr.daemon = True
globals.SERVICES[p.id] = pr
pr.start()
except:
logger.exception('Failed to run {} service'.format(p.id))
DB.execute('insert into INSTALLED VALUES(?,?,?,?,?,?,?,?,0)', (p.id, json.dumps(p.type), p.name.decode('utf-8'), json.dumps(p.data), p.version, p.script, json.dumps(p.requires), p.icon))
logger.debug('Successfully loaded plugin: {}'.format(p))
except Exception as e:
logger.error('Failed to load kodi plugin {}. Error: {}'.format(plugin, e))
global http_server
http_server = WSGIServer(('', globals.port), app)
global REFRESH_EVENT
REFRESH_EVENT = multiprocessing.Event()
#multiprocessing.Process(target=get_available_addons, args=(globals.REPOSITORIES, REFRESH_EVENT)).start()
thread.start_new_thread(get_available_addons, (globals.REPOSITORIES, REFRESH_EVENT))
proxy = app_proxy.ProxyService(app_proxy.HTTP("0.0.0.0", globals.PROXY_PORT))
proxy.start()
print
print 'Server now running on port {}'.format(globals.port)
print 'Connect your TVML client to: http://{}:{}'.format(globals.ADDR, globals.port)
# http_server.log = open('http.log', 'w')
http_server.serve_forever()
# app.run(debug=True, host='0.0.0.0')
if __name__ == '__main__':
multiprocessing.freeze_support()
kodi_utils.windows_pyinstaller_multiprocess_hack()
globals.manager = multiprocessing.Manager()
globals.CONTEXT = globals.manager.dict()
# import pystray
# from PIL import Image, ImageDraw
# width=30
# height=30
# color1='red'
# color2='blue'
#
# image = Image.new('RGB', (width, height), color1)
# dc = ImageDraw.Draw(image)
# dc.rectangle((width // 2, 0, width, height // 2), fill=color2)
# dc.rectangle((0, height // 2, width // 2, height), fill=color2)
#
#
# def f(*args):
# print 'stopping http server'
# http_server.stop()
# server_process.join()
# print 'http server stopped'
# server_process = multiprocessing.Process(target=http_server.serve_forever, args=()).start()
#
# menu = pystray.MenuItem('Restart server', f)
#
#
#
# icon = pystray.Icon('test name', image, menu=[menu])
#
# def setup(icon):
# icon.visible = True
# mmain(sys.argv)
#
# icon.run(setup)
mmain(sys.argv)
|
|
import json
import os
from decimal import Decimal
from django.core.urlresolvers import reverse
from django.test import TestCase
from casexml.apps.stock.models import StockTransaction, StockReport
from corehq.apps.accounting import generator
from corehq.apps.accounting.models import BillingAccount, DefaultProductPlan, SoftwarePlanEdition, Subscription
from corehq.apps.commtrack.models import StockState
from corehq.apps.commtrack.tests.util import bootstrap_domain as initial_bootstrap
from corehq.apps.domain.utils import DOMAIN_MODULE_KEY
from corehq.apps.locations.models import SQLLocation
from corehq.apps.products.models import SQLProduct
from corehq.apps.users.models import WebUser, UserRole
from django.test.client import Client
from custom.ewsghana import StockLevelsReport
from custom.ewsghana.api import EWSApi, Product, Location
from custom.ewsghana.tests.mock_endpoint import MockEndpoint
from custom.ewsghana.utils import make_url
from dimagi.utils.couch.database import get_db
TEST_DOMAIN = 'ewsghana-test-input-stock'
class TestInputStockView(TestCase):
@classmethod
def setUpClass(cls):
cls.domain = initial_bootstrap(TEST_DOMAIN)
db = get_db()
if db.doc_exist(DOMAIN_MODULE_KEY):
module_config = db.open_doc(DOMAIN_MODULE_KEY)
module_map = module_config.get('module_map')
if module_map:
module_map[TEST_DOMAIN] = 'custom.ewsghana'
else:
module_config['module_map'][TEST_DOMAIN] = 'custom.ewsghana'
else:
module_config = db.save_doc(
{
'_id': DOMAIN_MODULE_KEY,
'module_map': {
'ewsghana-test-input-stock': 'custom.ewsghana'
}
}
)
db.save_doc(module_config)
generator.instantiate_accounting_for_tests()
account = BillingAccount.get_or_create_account_by_domain(
cls.domain.name,
created_by="automated-test",
)[0]
plan = DefaultProductPlan.get_default_plan_by_domain(
cls.domain, edition=SoftwarePlanEdition.ENTERPRISE
)
subscription = Subscription.new_domain_subscription(
account,
cls.domain.name,
plan
)
subscription.is_active = True
subscription.save()
cls.endpoint = MockEndpoint('http://test-api.com/', 'dummy', 'dummy')
cls.api_object = EWSApi(TEST_DOMAIN, cls.endpoint)
cls.api_object.prepare_commtrack_config()
cls.api_object.prepare_custom_fields()
cls.datapath = os.path.join(os.path.dirname(__file__), 'data')
with open(os.path.join(cls.datapath, 'sample_products.json')) as f:
for p in json.loads(f.read()):
cls.api_object.product_sync(Product(p))
with open(os.path.join(cls.datapath, 'sample_locations.json')) as f:
for loc in json.loads(f.read()):
cls.api_object.location_sync(Location(loc))
cls.test_facility3 = SQLLocation.objects.get(domain=TEST_DOMAIN, site_code='tsactive')
cls.testregion2 = SQLLocation.objects.get(domain=TEST_DOMAIN, site_code='testregion2')
cls.rsp = SQLLocation.objects.get(domain=TEST_DOMAIN, site_code='rsp')
cls.test_district = SQLLocation.objects.get(domain=TEST_DOMAIN, site_code='testdistrict')
cls.username1 = 'ews_user1'
cls.password1 = 'dummy'
cls.web_user1 = WebUser.create(TEST_DOMAIN, cls.username1, cls.password1)
cls.web_user1.eula.signed = True
cls.web_user1.save()
cls.username2 = 'ews_user2'
cls.password2 = 'dummy'
cls.web_user2 = WebUser.create(TEST_DOMAIN, cls.username2, cls.password2)
cls.web_user2.get_domain_membership(TEST_DOMAIN).location_id = cls.test_facility3.location_id
cls.web_user2.eula.signed = True
cls.web_user2.save()
cls.username3 = 'ews_user3'
cls.password3 = 'dummy'
cls.web_user3 = WebUser.create(TEST_DOMAIN, cls.username3, cls.password3)
cls.web_user3.get_domain_membership(TEST_DOMAIN).location_id = cls.testregion2.location_id
cls.web_user3.eula.signed = True
cls.web_user3.save()
cls.username4 = 'ews_user4'
cls.password4 = 'dummy'
cls.web_user4 = WebUser.create(TEST_DOMAIN, cls.username4, cls.password4)
cls.web_user4.get_domain_membership(TEST_DOMAIN).location_id = cls.rsp.location_id
cls.web_user4.eula.signed = True
cls.web_user4.save()
cls.username5 = 'ews_user5'
cls.password5 = 'dummy'
cls.web_user5 = WebUser.create(TEST_DOMAIN, cls.username5, cls.password5)
domain_membership = cls.web_user5.get_domain_membership(TEST_DOMAIN)
domain_membership.location_id = cls.test_district.location_id
domain_membership.role_id = UserRole.get_read_only_role_by_domain(cls.domain.name).get_id
cls.web_user5.eula.signed = True
cls.web_user5.save()
cls.ad = SQLProduct.objects.get(domain=TEST_DOMAIN, code='ad')
cls.al = SQLProduct.objects.get(domain=TEST_DOMAIN, code='al')
cls.client = Client()
def setUp(self):
StockTransaction.objects.all().delete()
StockReport.objects.all().delete()
StockState.objects.all().delete()
def test_access_for_non_existing_location(self):
self.client.login(username=self.username5, password=self.password5)
view_url = reverse('input_stock', kwargs={'domain': TEST_DOMAIN, 'site_code': 'invalidcode'})
response = self.client.get(view_url, follow=True)
self.assertEqual(response.status_code, 404)
def test_web_user_without_location(self):
self.client.login(username=self.username1, password=self.password1)
view_url = reverse('input_stock', kwargs={'domain': TEST_DOMAIN, 'site_code': 'rsp'})
response = self.client.get(view_url, follow=True)
self.assertEqual(response.status_code, 403)
def test_web_user_with_wrong_location_access(self):
"""
User assigned to reporting location can send data only for this particular facility.
"""
self.client.login(username=self.username4, password=self.password4)
view_url = reverse('input_stock', kwargs={'domain': TEST_DOMAIN, 'site_code': 'tsactive'})
response = self.client.get(view_url, follow=True)
self.assertEqual(response.status_code, 403)
def test_web_user_with_not_parent_location_access(self):
"""
User assigned to non-reporting location can send data only for facilities below his location level
"""
self.client.login(username=self.username3, password=self.password3)
view_url = reverse('input_stock', kwargs={'domain': TEST_DOMAIN, 'site_code': 'tsactive'})
response = self.client.get(view_url, follow=True)
self.assertEqual(response.status_code, 403)
def test_web_user_with_valid_location_access(self):
self.client.login(username=self.username2, password=self.password2)
view_url = reverse('input_stock', kwargs={'domain': TEST_DOMAIN, 'site_code': 'tsactive'})
response = self.client.get(view_url, follow=True)
self.assertEqual(response.status_code, 200)
formset = response.context['formset']
tsactive = SQLLocation.objects.get(domain=TEST_DOMAIN, site_code='tsactive')
self.assertIsNotNone(formset)
self.assertEqual(tsactive.products.count(), 2)
self.assertEqual(len(list(formset)), tsactive.products.count())
def test_web_user_with_valid_parent_location_access(self):
self.client.login(username=self.username5, password=self.password5)
view_url = reverse('input_stock', kwargs={'domain': TEST_DOMAIN, 'site_code': 'tsactive'})
response = self.client.get(view_url, follow=True)
self.assertEqual(response.status_code, 200)
formset = response.context['formset']
tsactive = SQLLocation.objects.get(domain=TEST_DOMAIN, site_code='tsactive')
self.assertIsNotNone(formset)
self.assertEqual(tsactive.products.count(), 2)
self.assertEqual(len(list(formset)), tsactive.products.count())
def test_web_user_report_submission(self):
self.client.login(username=self.username5, password=self.password5)
view_url = reverse('input_stock', kwargs={'domain': TEST_DOMAIN, 'site_code': 'tsactive'})
data = {
'form-TOTAL_FORMS': 2,
'form-INITIAL_FORMS': 2,
'form-MAX_NUM_FORMS': 1000
}
tsactive = SQLLocation.objects.get(domain=TEST_DOMAIN, site_code='tsactive')
data['form-0-product_id'] = self.ad.product_id
data['form-0-product'] = 'ad'
data['form-0-stock_on_hand'] = 20
data['form-0-receipts'] = 30
data['form-1-product_id'] = self.al.product_id
data['form-1-product'] = 'al'
data['form-1-stock_on_hand'] = 14
data['form-1-receipts'] = 17
response = self.client.post(view_url, data=data)
url = make_url(
StockLevelsReport,
self.domain,
'?location_id=%s&filter_by_program=all&startdate='
'&enddate=&report_type=&filter_by_product=all',
(tsactive.location_id, )
)
self.assertRedirects(response, url)
stock_states = StockState.objects.filter(case_id=tsactive.supply_point_id)
stock_transactions = StockTransaction.objects.filter(case_id=tsactive.supply_point_id)
self.assertEqual(stock_states.count(), 2)
self.assertEqual(stock_transactions.count(), 6)
self.assertEqual(stock_transactions.filter(type='consumption').count(), 2)
self.assertEqual(stock_transactions.filter(type='stockonhand').count(), 2)
self.assertEqual(stock_transactions.filter(type='receipts').count(), 2)
ad_consumption = stock_transactions.filter(type='consumption', product_id=self.ad.product_id)[0].quantity
al_consumption = stock_transactions.filter(type='consumption', product_id=self.al.product_id)[0].quantity
self.assertEqual(ad_consumption, Decimal(-10))
self.assertEqual(al_consumption, Decimal(-3))
al_stock_state = StockState.objects.get(case_id=tsactive.supply_point_id, product_id=self.al.product_id)
ad_stock_state = StockState.objects.get(case_id=tsactive.supply_point_id, product_id=self.ad.product_id)
self.assertEqual(int(ad_stock_state.stock_on_hand), 20)
self.assertEqual(int(al_stock_state.stock_on_hand), 14)
reports = StockReport.objects.filter(domain=TEST_DOMAIN)
self.assertEqual(reports.count(), 2)
def test_incomplete_report_submission(self):
self.client.login(username=self.username5, password=self.password5)
view_url = reverse('input_stock', kwargs={'domain': TEST_DOMAIN, 'site_code': 'tsactive'})
data = {
'form-TOTAL_FORMS': 2,
'form-INITIAL_FORMS': 2,
'form-MAX_NUM_FORMS': 1000
}
tsactive = SQLLocation.objects.get(domain=TEST_DOMAIN, site_code='tsactive')
data['form-0-product_id'] = self.ad.product_id
data['form-0-product'] = 'ad'
data['form-0-stock_on_hand'] = ''
data['form-0-receipts'] = ''
data['form-1-product_id'] = self.al.product_id
data['form-1-product'] = 'al'
data['form-1-stock_on_hand'] = 14
data['form-1-receipts'] = 17
response = self.client.post(view_url, data=data)
url = make_url(
StockLevelsReport,
self.domain,
'?location_id=%s&filter_by_program=all&startdate='
'&enddate=&report_type=&filter_by_product=all',
(tsactive.location_id, )
)
self.assertRedirects(response, url)
stock_states = StockState.objects.filter(case_id=tsactive.supply_point_id)
stock_transactions = StockTransaction.objects.filter(case_id=tsactive.supply_point_id)
self.assertEqual(stock_states.count(), 1)
self.assertEqual(stock_transactions.count(), 3)
ad_transactions = stock_transactions.filter(product_id=self.ad.product_id)
self.assertEqual(ad_transactions.count(), 0)
with self.assertRaises(StockState.DoesNotExist):
StockState.objects.get(case_id=tsactive.supply_point_id,
product_id=self.ad.product_id)
@classmethod
def tearDownClass(cls):
cls.web_user1.delete()
cls.domain.delete()
|
|
# imu.py
# The supporting library for the sensors on Arduino board.
# Author: Yanglei Zhao
#############################
import struct
import time
import math
import sqlite3
import sys
try:
import serial
except:
    sys.stderr.write('Warning: pySerial not installed, will not connect to the Arduino.\n')
import numpy as np
from matplotlib import pyplot as plt
import visual
BUF_LEN = 26
RAW_LEN = 9
HEAD_LEN = 8
COM = 'COM6'
class Parameters(object):
# Calibrated rest level.
SENSOR_ZERO_LEVEL = [514, 509, 516, 386, 384, 382, 0, 0, 0]
    # Calibrated sensitivity. (Because the z-axis gyro is on a different chip, the
    # data for the z-axis gyro is slightly different from that of the x/y-axis gyros.)
SENSOR_SENSITIVITY = [104, 105, 102, 15.9142, 15.8326, 14.2891, 1300, 1300, 1300]
# This is the covariance matrix for the noise in the gyroscope,
# represented by quaternions. Should be used as Q matrix in the EKalman.
GYRO_ERR_COV_S = np.matrix(
[[ 0.09617226, 0.00035709, 0.00120697, 0.00094805],
[ 0.00035709, 0.00563692, 0.00351737, 0.00295389],
[ 0.00120697, 0.00351737, 0.01479248, 0.00977058],
[ 0.00094805, 0.00295389, 0.00977058, 0.0132765 ]])
GYRO_ERR_COV_S_S = GYRO_ERR_COV_S / 50
# Dynamic
GYRO_ERR_COV_D = np.matrix(
[[ 0.50547552, 0.00170386, 0.00103366, 0.00061697],
[ 0.00170386, 0.17675834, -0.03223344, -0.00790452],
[ 0.00103366, -0.03223344, 0.17435359, -0.01433586],
[ 0.00061697, -0.00790452, -0.01433586, 0.14327618]])
# The covariance matrix for the noise in the accelerometer. Should be used
# as R matrix in the EKalman.
ACC_ERR_COV_S = np.matrix(
[[ 1.25592175e-05, 5.02785656e-07, -1.48793605e-06],
[ 5.02785656e-07, 1.49101810e-05, -8.28079731e-06],
[-1.48793605e-06, -8.28079731e-06, 2.36853045e-05]])
# Dynamic
ACC_ERR_COV_D = np.matrix(
[[ 0.04902105, 0.00640971, 0.00189323],
[ 0.00640971, 0.03728613, 0.00115823],
[ 0.00189323, 0.00115823, 0.06454173]])
ACC_ERR_COV_D_L = ACC_ERR_COV_D * 50
    # The covariance matrix for the noise in the magnetometer. Should be used
# as R matrix in the EKalman.
MAG_ERR_COV_S = np.matrix(
[[ 2.43683824e-03, -1.30620637e-03, 5.74294645e-05],
[-1.30620637e-03, 8.00758180e-04, -1.24840836e-04],
[ 5.74294645e-05, -1.24840836e-04, 1.08079276e-04]])
# Dynamic
MAG_ERR_COV_D = np.matrix(
[[ 0.00211615, -0.00071693, 0.00028416],
[-0.00071693, 0.0038208 , -0.00086872],
[ 0.00028416, -0.00086872, 0.00254654]])
MAG_ERR_COV_D_L = MAG_ERR_COV_D * 50
@classmethod
def acc_h(cls, x):
q = x.A1
return np.matrix([[2*(q[1]*q[3]-q[2]*q[0])],
[2*(q[2]*q[3]+q[1]*q[0])],
[1-2*(q[1]*q[1]+q[2]*q[2])]])
@classmethod
def acc_H(cls, x):
q = x.A1
return np.matrix([[-2*q[2], 2*q[3], -2*q[0], 2*q[1]],
[2*q[1], 2*q[0], 2*q[3], 2*q[2]],
[0, -4*q[1], -4*q[2], 0]])
@classmethod
def mag_h(cls, x):
q = x.A1
return np.matrix([[1-2*(q[2]*q[2]+q[3]*q[3])],
[2*(q[1]*q[2]-q[0]*q[3])],
[2*(q[1]*q[3]+q[0]*q[2])]])
@classmethod
def mag_H(cls, x):
q = x.A1
return np.matrix([[0, 0, -4*q[2], -4*q[3]],
[-2*q[3], 2*q[2], 2*q[1], -2*q[0]],
[2*q[2], 2*q[3], 2*q[0], 2*q[1]]])
#################### Data processing ####################
class SerialCom(object):
def __init__(self, mode=0):
'''
        mode=0: the auto mode, the Arduino will sequentially send data to the PC.
        mode=1: the step mode, the Arduino waits for a byte of serial data
        from the PC, then sends data to the PC.
'''
if mode != 0 and mode != 1:
raise Exception('parameter "mode" should be 0 or 1.')
self.mode = mode
self.ser = serial.Serial(COM, 57600, timeout=10)
r = self.ser.read()
if self.mode == 0:
self._write('\x00')
else:
self._write('\x01')
def _write(self, byte):
self.ser.write(byte)
def read(self):
'''
-> (id, t, raw)
id: the id of the data.
        t: in microseconds, time since the board started up.
raw: the raw imu data.
'''
if self.mode == 1:
self._write('\xf0')
s = self.ser.read(BUF_LEN)
if len(s) < BUF_LEN:
# Timeout
return (None, None, None)
data = struct.unpack('<LLhhhhhhhhh', s)
id = data[0]
t = data[1]
raw = data[2:]
return (id, t, raw)
def __del__(self):
if 'ser' in dir(self):
self.ser.close()
class Normalizer(object):
def __init__(self):
self.zero_level = Parameters.SENSOR_ZERO_LEVEL
self.sensitivity = Parameters.SENSOR_SENSITIVITY
def normalize(self, raw):
'''
-> data: the normalized IMU data.
raw: the raw IMU data.
'''
data = []
for i in range(RAW_LEN):
data.append((raw[i] - self.zero_level[i]) * 1.0 / self.sensitivity[i])
return self.align_axis(data)
def balance(self, raw):
'''
        Balance the raw data by subtracting self.zero_level from it.
'''
data = []
for i in range(RAW_LEN):
data.append(raw[i] - self.zero_level[i])
return data
def scale(self, raw):
'''
        Scale the raw data by dividing it by the sensitivity.
'''
data = []
for i in range(RAW_LEN):
data.append(raw[i] * 1.0 / self.sensitivity[i])
return data
def align_axis(self, data):
'''
        The device's axes are not aligned. This function aligns the data.
'''
# For this device
        # x = -ax, y = -ay, z = az
# x = gx, y = gy, z = gz
# x = -mx, y = my, z = -mz
# We need to change the accelerometer data to align the axis.
data[0] = -data[0]
data[1] = -data[1]
# As well as the magnetometer
data[6] = -data[6]
data[8] = -data[8]
return data
def get_x_mag(self, mag, gravity):
'''
Get the horizontal projection of the magnetic field vector by removing
        the component along gravity.
'''
mag = np.array(mag)
gravity = np.array(gravity)
return mag - gravity / np.dot(gravity, gravity) * np.dot(gravity, mag)
class Database(object):
def __init__(self, file_name=None):
self.SQL_CREATE_TABLE = '''CREATE TABLE data
(id INTEGER PRIMARY KEY AUTOINCREMENT,
raw_id INTEGER, time INTEGER,
raw_ax INTEGER, raw_ay INTEGER, raw_az INTEGER,
raw_gx INTEGER, raw_gy INTEGER, raw_gz INTEGER,
raw_mx INTEGER, raw_my INTEGER, raw_mz INTEGER,
ax REAL, ay REAL, az REAL,
gx REAL, gy REAL, gz REAL,
mx REAL, my REAL, mz REAL);'''
self.SQL_INSERT_DATA = '''INSERT INTO data
(raw_id, time,
raw_ax, raw_ay, raw_az, raw_gx, raw_gy, raw_gz,
raw_mx, raw_my, raw_mz, ax, ay, az, gx, gy, gz,
mx, my, mz) VALUES
(?, ?, ?, ?, ?, ?,
?, ?, ?, ?, ?, ?,
?, ?, ?, ?, ?, ?, ?, ?);'''
self.SQL_SELECT_DATA = '''SELECT * FROM data;'''
        if file_name is None:
fname = time.strftime('%H%M%S-%b%d%Y') + '.gib'
else:
fname = file_name
self._filename = fname
self.conn = sqlite3.connect(fname)
try:
self.conn.execute(self.SQL_CREATE_TABLE)
except:
pass
self.conn.row_factory = sqlite3.Row
self.cur = self.conn.cursor()
@property
def filename(self):
return self._filename
def write_data(self, id, t, raw, data):
para = [id, t]
para.extend(raw)
para.extend(data)
self.cur.execute(self.SQL_INSERT_DATA, para)
def read_all_data(self):
'''
Get all data from the database.
'''
self.cur.execute(self.SQL_SELECT_DATA)
return self.cur.fetchall()
def read(self):
'''
Get next row from the database.
'''
return self.cur.fetchone()
def __del__(self):
if 'conn' in dir(self):
self.conn.commit()
self.conn.close()
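# A minimal capture-session sketch: read frames from the board in step mode,
# normalize them, and log both raw and normalized values into a .gib file.
# Assumes an Arduino is attached on the COM port configured above; the sample
# count is an arbitrary illustration value.
def _example_capture_session(n_samples=1000):
    com = SerialCom(mode=1)
    normalizer = Normalizer()
    db = Database()  # auto-named '<HHMMSS-MonDDYYYY>.gib'
    for _ in xrange(n_samples):
        id, t, raw = com.read()
        if id is None:  # serial timeout
            break
        data = normalizer.normalize(raw)
        db.write_data(id, t, raw, data)
    return db.filename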
#################### The model ####################
class Quaternion(object):
SUB_GRID = np.ix_([1,2,3], [1,2,3])
def __init__(self, array=[1, 0, 0, 0]):
'''
array: a length 4 array, default to a zero-rotation
'''
if type(array) in (np.matrix, np.ndarray):
if array.shape == (1, 4):
self.q = np.matrix(array).T
elif array.shape == (4, 1) or array.shape == (4, ):
self.q = np.matrix(array)
else:
raise Exception('Shape of the matrix or ndarray is not valid')
elif type(array) in (list, tuple):
if len(array) == 4:
self.q = np.matrix(array).T
else:
raise Exception('Length of the array is not valid')
else:
raise Exception('Not valid parameter "array", type %s' % type(array))
self.unitize()
@classmethod
def from_gyro(cls, gyro, dt):
theta = norm3(gyro)
if theta == 0:
v = (1, 0, 0)
else:
v = (gyro[0] / theta, gyro[1] / theta, gyro[2] / theta)
theta = theta * dt
return cls.from_rotation_vector(v, theta)
@classmethod
def from_rotation_vector(cls, v, theta):
if theta == 0:
return cls([1, 0, 0, 0])
q0 = np.cos(theta / 2)
q1 = v[0] * np.sin(theta / 2)
q2 = v[1] * np.sin(theta / 2)
q3 = v[2] * np.sin(theta / 2)
return cls([q0, q1, q2, q3])
@classmethod
def random(cls):
"""
This algorithm comes from http://planning.cs.uiuc.edu/node198.html
"""
u1, u2, u3 = np.random.random(3)
q0 = math.sqrt(1 - u1) * math.sin(2 * math.pi * u2)
q1 = math.sqrt(1 - u1) * math.cos(2 * math.pi * u2)
q2 = math.sqrt(u1) * math.sin(2 * math.pi * u3)
q3 = math.sqrt(u1) * math.cos(2 * math.pi * u3)
return cls((q0, q1, q2, q3))
@property
def rotation_vector(self):
abs_q = math.sqrt(self.q[1] * self.q[1] + self.q[2] * self.q[2]\
+ self.q[3] * self.q[3])
if abs_q == 0:
return ([1, 0, 0], 0)
v = (self.q[1:] / abs_q).A1.tolist()
theta = 2 * math.acos(self.q[0])
return (v, theta)
@property
def matrix_repr(self):
qa = self.q.A1 # array
qm = np.matrix([[qa[0], -qa[1], -qa[2], -qa[3]],
[qa[1], qa[0], -qa[3], qa[2]],
[qa[2], qa[3], qa[0], -qa[1]],
[qa[3], -qa[2], qa[1], qa[0]]])
return qm
@property
def neg_matrix_repr(self):
qa = self.q.A1
qm = np.matrix([[qa[0], -qa[1], -qa[2], -qa[3]],
[qa[1], qa[0], qa[3], -qa[2]],
[qa[2], -qa[3], qa[0], qa[1]],
[qa[3], qa[2], -qa[1], qa[0]]])
return qm
@property
def M(self):
'''
To matrix form.
'''
return self.q
@property
def A(self):
'''
To flattened array form.
'''
return self.q.A1
@property
def RM(self):
'''
-> The corresponding rotation matrix of the quaternion.
'''
return (self.neg_matrix_repr.T * self.matrix_repr)[self.SUB_GRID]
def __mul__(self, q2):
'''
Quaternion multiplication.
'''
return Quaternion(self.matrix_repr * q2.M)
def __neg__(self):
'''
Negative of the Quaternion.
'''
a = self.A
a[1] = -a[1]
a[2] = -a[2]
a[3] = -a[3]
return Quaternion(a)
def __repr__(self):
return '<Quaternion q0=%s, q1=%s, q2=%s, q3=%s>' % tuple(self.q.A1)
def unitize(self):
self.q = self.q / norm4(self.q)
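# A small sketch of how Quaternion is meant to be composed and applied: build
# rotations from axis/angle pairs, chain them with *, and rotate a column
# vector with RM. The axes and angles are arbitrary illustration values.
def _example_quaternion_usage():
    qx = Quaternion.from_rotation_vector((1, 0, 0), math.pi / 2)
    qz = Quaternion.from_rotation_vector((0, 0, 1), math.pi / 2)
    q = qx * qz  # apply qz first, then qx (see EKalman.time_update below)
    v = np.matrix([[0.0], [1.0], [0.0]])
    return q.RM * v  # the rotated vector as a 3x1 matrix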
class Gesture(object):
INF = 32767.0
STATIC_THRESHOLD = 0.40
@classmethod
def from_db(cls, db, sample_interval=0.05):
gesture = cls()
prev_sample_t = 0
data = db.read_all_data()
for row in data:
t = row['time'] * 0.000001
if t - prev_sample_t < sample_interval:
continue
prev_sample_t = t
gesture.append((row['gx'], row['gy'], row['gz']))
gesture.trim_head()
gesture.trim_tail()
return gesture
def __init__(self, array=[]):
self.s = [np.array(row) for row in array]
def __len__(self):
return len(self.s)
def append(self, data):
self.s.append(np.array(data))
for j in xrange(len(self.s[-1])):
if self.s[-1][j] >= 2:
self.s[-1][j] = 1.6
elif self.s[-1][j] <= -2:
self.s[-1][j] = -1.6
elif self.s[-1][j] >= 1:
self.s[-1][j] = (self.s[-1][j] - 1) / 2 + 1
elif self.s[-1][j] <= -1:
self.s[-1][j] = (self.s[-1][j] + 1) / 2 - 1
@property
def copy(self):
return Gesture(self.s)
def pop_head(self):
if len(self.s) > 0:
del self.s[0]
return self
def trim_head(self):
while len(self.s) > 0 and norm3(self.s[0]) <= self.STATIC_THRESHOLD:
del self.s[0]
return self
def trim_tail(self):
while len(self.s) > 0 and norm3(self.s[-1]) <= self.STATIC_THRESHOLD:
self.s.pop()
return self
def dtw_distance(self, seq, w):
len_a = len(self.s)
len_b = len(seq.s)
dtw = np.empty((len_a+1, len_b+1))
dtw.fill(self.INF)
dtw[0][0] = 0
for i in xrange(len_a):
for j in xrange(max(0, i-w), min(len_b, i+w)):
dtw[i+1][j+1] = norm3(self.s[i]-seq.s[j]) +\
min(dtw[i][j+1], dtw[i][j], dtw[i+1][j])
return dtw[len_a][len_b] * 1.0 / (len_a + len_b)
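# A minimal gesture-matching sketch: load two recorded sessions from .gib
# files and compare their trimmed gyro sequences with DTW. The file names and
# the window size w=10 are illustrative assumptions.
def _example_gesture_match(template_file, candidate_file, w=10):
    template = Gesture.from_db(Database(template_file))
    candidate = Gesture.from_db(Database(candidate_file))
    # A smaller distance means the candidate is closer to the template.
    return candidate.dtw_distance(template, w)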
class EKalman(object):
'''
The Kalman Filter class for the IMU.
'''
def __init__(self, array=[1, 0, 0, 0], Q=Parameters.GYRO_ERR_COV_D):
self.x = Quaternion(array)
self.p = Q
def unitize(self, data):
v = np.array(data)
return v / norm3(v)
def naive_time_update(self, gyro, dt):
q = Quaternion.from_gyro(gyro, dt)
self.x *= q
def time_update(self, gyro, dt, Q):
'''
The time update phase of the EKF.
'''
q = Quaternion.from_gyro(gyro, dt)
# The rotation represented by quaternion
# q1 * q2 means apply q2 first about XYZ,
        # then apply q1 about XYZ, which is equivalent to
# apply q1 first about XYZ, then q2 about xyz.
self.xminus = self.x * q
# q1 * q2 = q1.matrix_repr * q2 = q2.neg_matrix_repr * q1
A = q.neg_matrix_repr
self.pminus = A * self.p * A.T + Q
def measurement_update(self, data, R, h_func, H_func):
'''
The measurement update phase of the EKF.
'''
I = np.identity(4)
z = np.matrix(self.unitize(data)).T
H = H_func(self.xminus.M)
K = self.pminus * H.T * (H * self.pminus * H.T + R).I
self.x = Quaternion(self.xminus.M + K * (z - h_func(self.xminus.M)))
self.p = (I - K * H) * self.pminus
@property
def quat(self):
return self.x
#################### Visualizing ####################
class Plotter(object):
def __init__(self):
self.COLORS = ['r', 'g', 'b', 'c', 'm', 'y', 'k', 'w']
def show_plot(self, data, t_idx, data_idx, title=None, text=None):
'''
Show the plot of the data.
'''
fig = plt.figure()
if title is not None:
fig.suptitle(title)
sub = fig.add_subplot(111)
t_array = np.array([row[t_idx] for row in data]) / 1000000.0
for i, data_i in enumerate(data_idx):
d_array = np.array([row[data_i] for row in data])
            sub.plot(t_array, d_array, c=self.COLORS[i], linestyle='-')
plt.show()
class Visualizer(object):
def __init__(self):
        # Rotation matrix for converting to the display (VPython) axes.
self.rt_mx = np.matrix([[1, 0, 0],
[0, 0, 1],
[0, -1, 0]])
self.x = visual.arrow(color=(1,0,0))
self.y = visual.arrow(color=(0,1,0))
self.z = visual.arrow(color=(0,0,1))
def _cnvrt_axis(self, rm):
return self.rt_mx * rm
def show(self, quat):
rm = self._cnvrt_axis(quat.RM).T.A
self.x.pos = rm[0] * 0.05
self.x.axis = rm[0]
self.x.up = rm[1]
self.y.pos = rm[1] * 0.05
self.y.axis = rm[1]
self.y.up = rm[2]
self.z.pos = rm[2] * 0.05
self.z.axis = rm[2]
self.z.up = rm[0]
#################### Utils ####################
def norm3(a):
return math.sqrt(a[0]*a[0] + a[1]*a[1] + a[2]*a[2])
def norm4(a):
return math.sqrt(a[0]*a[0] + a[1]*a[1] + a[2]*a[2] + a[3]*a[3])
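# A minimal offline orientation-tracking sketch: replay a logged .gib file,
# run the gyro time update plus the accelerometer/magnetometer measurement
# updates, and visualize the estimate. Using the dynamic covariance matrices
# throughout is an assumption; see the Parameters comments above.
def _example_track_orientation(db_file):
    db = Database(db_file)
    normalizer = Normalizer()
    kalman = EKalman()
    vis = Visualizer()
    prev_t = None
    for row in db.read_all_data():
        t = row['time'] * 0.000001  # microseconds -> seconds
        dt = 0.0 if prev_t is None else t - prev_t
        prev_t = t
        gyro = (row['gx'], row['gy'], row['gz'])
        acc = (row['ax'], row['ay'], row['az'])
        mag = normalizer.get_x_mag((row['mx'], row['my'], row['mz']), acc)
        kalman.time_update(gyro, dt, Parameters.GYRO_ERR_COV_D)
        kalman.measurement_update(acc, Parameters.ACC_ERR_COV_D,
                                  Parameters.acc_h, Parameters.acc_H)
        kalman.measurement_update(mag, Parameters.MAG_ERR_COV_D,
                                  Parameters.mag_h, Parameters.mag_H)
        vis.show(kalman.quat)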
|
|
"""
A MathML printer.
"""
from __future__ import print_function, division
from sympy import sympify, S, Mul
from sympy.core.compatibility import range, string_types, default_sort_key
from sympy.core.function import _coeff_isneg
from sympy.printing.conventions import split_super_sub, requires_partial
from sympy.printing.precedence import \
precedence_traditional, PRECEDENCE, PRECEDENCE_TRADITIONAL
from sympy.printing.pretty.pretty_symbology import greek_unicode
from sympy.printing.printer import Printer
import mpmath.libmp as mlib
from mpmath.libmp import prec_to_dps
class MathMLPrinterBase(Printer):
"""Contains common code required for MathMLContentPrinter and
MathMLPresentationPrinter.
"""
_default_settings = {
"order": None,
"encoding": "utf-8",
"fold_frac_powers": False,
"fold_func_brackets": False,
"fold_short_frac": None,
"inv_trig_style": "abbreviated",
"ln_notation": False,
"long_frac_ratio": None,
"mat_delim": "[",
"mat_symbol_style": "plain",
"mul_symbol": None,
"root_notation": True,
"symbol_names": {},
"mul_symbol_mathml_numbers": '·',
}
def __init__(self, settings=None):
Printer.__init__(self, settings)
from xml.dom.minidom import Document, Text
self.dom = Document()
# Workaround to allow strings to remain unescaped
# Based on
# https://stackoverflow.com/questions/38015864/python-xml-dom-minidom-\
# please-dont-escape-my-strings/38041194
class RawText(Text):
def writexml(self, writer, indent='', addindent='', newl=''):
if self.data:
writer.write(u'{}{}{}'.format(indent, self.data, newl))
def createRawTextNode(data):
r = RawText()
r.data = data
r.ownerDocument = self.dom
return r
self.dom.createTextNode = createRawTextNode
def doprint(self, expr):
"""
Prints the expression as MathML.
"""
mathML = Printer._print(self, expr)
unistr = mathML.toxml()
xmlbstr = unistr.encode('ascii', 'xmlcharrefreplace')
res = xmlbstr.decode()
return res
def apply_patch(self):
# Applying the patch of xml.dom.minidom bug
# Date: 2011-11-18
# Description: http://ronrothman.com/public/leftbraned/xml-dom-minidom\
# -toprettyxml-and-silly-whitespace/#best-solution
# Issue: http://bugs.python.org/issue4147
# Patch: http://hg.python.org/cpython/rev/7262f8f276ff/
from xml.dom.minidom import Element, Text, Node, _write_data
def writexml(self, writer, indent="", addindent="", newl=""):
# indent = current indentation
# addindent = indentation to add to higher levels
# newl = newline string
writer.write(indent + "<" + self.tagName)
attrs = self._get_attributes()
a_names = list(attrs.keys())
a_names.sort()
for a_name in a_names:
writer.write(" %s=\"" % a_name)
_write_data(writer, attrs[a_name].value)
writer.write("\"")
if self.childNodes:
writer.write(">")
if (len(self.childNodes) == 1 and
self.childNodes[0].nodeType == Node.TEXT_NODE):
self.childNodes[0].writexml(writer, '', '', '')
else:
writer.write(newl)
for node in self.childNodes:
node.writexml(
writer, indent + addindent, addindent, newl)
writer.write(indent)
writer.write("</%s>%s" % (self.tagName, newl))
else:
writer.write("/>%s" % (newl))
self._Element_writexml_old = Element.writexml
Element.writexml = writexml
def writexml(self, writer, indent="", addindent="", newl=""):
_write_data(writer, "%s%s%s" % (indent, self.data, newl))
self._Text_writexml_old = Text.writexml
Text.writexml = writexml
def restore_patch(self):
from xml.dom.minidom import Element, Text
Element.writexml = self._Element_writexml_old
Text.writexml = self._Text_writexml_old
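# A small usage sketch (illustrative only): printing an expression with the
# content printer defined below. The symbol comes from sympy.abc.
def _example_content_mathml():
    from sympy.abc import x
    printer = MathMLContentPrinter()
    # e.g. doprint(x**2) typically yields
    # '<apply><power/><ci>x</ci><cn>2</cn></apply>'
    return printer.doprint(x**2)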
class MathMLContentPrinter(MathMLPrinterBase):
"""Prints an expression to the Content MathML markup language.
References: https://www.w3.org/TR/MathML2/chapter4.html
"""
printmethod = "_mathml_content"
def mathml_tag(self, e):
"""Returns the MathML tag for an expression."""
translate = {
'Add': 'plus',
'Mul': 'times',
'Derivative': 'diff',
'Number': 'cn',
'int': 'cn',
'Pow': 'power',
'Max': 'max',
'Min': 'min',
'Abs': 'abs',
'And': 'and',
'Or': 'or',
'Xor': 'xor',
'Not': 'not',
'Implies': 'implies',
'Symbol': 'ci',
'MatrixSymbol': 'ci',
'RandomSymbol': 'ci',
'Integral': 'int',
'Sum': 'sum',
'sin': 'sin',
'cos': 'cos',
'tan': 'tan',
'cot': 'cot',
'csc': 'csc',
'sec': 'sec',
'sinh': 'sinh',
'cosh': 'cosh',
'tanh': 'tanh',
'coth': 'coth',
'csch': 'csch',
'sech': 'sech',
'asin': 'arcsin',
'asinh': 'arcsinh',
'acos': 'arccos',
'acosh': 'arccosh',
'atan': 'arctan',
'atanh': 'arctanh',
'atan2': 'arctan',
'acot': 'arccot',
'acoth': 'arccoth',
'asec': 'arcsec',
'asech': 'arcsech',
'acsc': 'arccsc',
'acsch': 'arccsch',
'log': 'ln',
'Equality': 'eq',
'Unequality': 'neq',
'GreaterThan': 'geq',
'LessThan': 'leq',
'StrictGreaterThan': 'gt',
'StrictLessThan': 'lt',
}
for cls in e.__class__.__mro__:
n = cls.__name__
if n in translate:
return translate[n]
# Not found in the MRO set
n = e.__class__.__name__
return n.lower()
def _print_Mul(self, expr):
if _coeff_isneg(expr):
x = self.dom.createElement('apply')
x.appendChild(self.dom.createElement('minus'))
x.appendChild(self._print_Mul(-expr))
return x
from sympy.simplify import fraction
numer, denom = fraction(expr)
if denom is not S.One:
x = self.dom.createElement('apply')
x.appendChild(self.dom.createElement('divide'))
x.appendChild(self._print(numer))
x.appendChild(self._print(denom))
return x
coeff, terms = expr.as_coeff_mul()
if coeff is S.One and len(terms) == 1:
# XXX since the negative coefficient has been handled, I don't
# think a coeff of 1 can remain
return self._print(terms[0])
if self.order != 'old':
terms = Mul._from_args(terms).as_ordered_factors()
x = self.dom.createElement('apply')
x.appendChild(self.dom.createElement('times'))
if coeff != 1:
x.appendChild(self._print(coeff))
for term in terms:
x.appendChild(self._print(term))
return x
def _print_Add(self, expr, order=None):
args = self._as_ordered_terms(expr, order=order)
lastProcessed = self._print(args[0])
plusNodes = []
for arg in args[1:]:
if _coeff_isneg(arg):
# use minus
x = self.dom.createElement('apply')
x.appendChild(self.dom.createElement('minus'))
x.appendChild(lastProcessed)
x.appendChild(self._print(-arg))
# invert expression since this is now minused
lastProcessed = x
if arg == args[-1]:
plusNodes.append(lastProcessed)
else:
plusNodes.append(lastProcessed)
lastProcessed = self._print(arg)
if arg == args[-1]:
plusNodes.append(self._print(arg))
if len(plusNodes) == 1:
return lastProcessed
x = self.dom.createElement('apply')
x.appendChild(self.dom.createElement('plus'))
while plusNodes:
x.appendChild(plusNodes.pop(0))
return x
def _print_Piecewise(self, expr):
if expr.args[-1].cond != True:
# We need the last conditional to be a True, otherwise the resulting
# function may not return a result.
raise ValueError("All Piecewise expressions must contain an "
"(expr, True) statement to be used as a default "
"condition. Without one, the generated "
"expression may not evaluate to anything under "
"some condition.")
root = self.dom.createElement('piecewise')
for i, (e, c) in enumerate(expr.args):
if i == len(expr.args) - 1 and c == True:
piece = self.dom.createElement('otherwise')
piece.appendChild(self._print(e))
else:
piece = self.dom.createElement('piece')
piece.appendChild(self._print(e))
piece.appendChild(self._print(c))
root.appendChild(piece)
return root
def _print_MatrixBase(self, m):
x = self.dom.createElement('matrix')
for i in range(m.rows):
x_r = self.dom.createElement('matrixrow')
for j in range(m.cols):
x_r.appendChild(self._print(m[i, j]))
x.appendChild(x_r)
return x
def _print_Rational(self, e):
if e.q == 1:
# don't divide
x = self.dom.createElement('cn')
x.appendChild(self.dom.createTextNode(str(e.p)))
return x
x = self.dom.createElement('apply')
x.appendChild(self.dom.createElement('divide'))
# numerator
xnum = self.dom.createElement('cn')
xnum.appendChild(self.dom.createTextNode(str(e.p)))
# denominator
xdenom = self.dom.createElement('cn')
xdenom.appendChild(self.dom.createTextNode(str(e.q)))
x.appendChild(xnum)
x.appendChild(xdenom)
return x
def _print_Limit(self, e):
x = self.dom.createElement('apply')
x.appendChild(self.dom.createElement(self.mathml_tag(e)))
x_1 = self.dom.createElement('bvar')
x_2 = self.dom.createElement('lowlimit')
x_1.appendChild(self._print(e.args[1]))
x_2.appendChild(self._print(e.args[2]))
x.appendChild(x_1)
x.appendChild(x_2)
x.appendChild(self._print(e.args[0]))
return x
def _print_ImaginaryUnit(self, e):
return self.dom.createElement('imaginaryi')
def _print_EulerGamma(self, e):
return self.dom.createElement('eulergamma')
def _print_GoldenRatio(self, e):
"""We use unicode #x3c6 for Greek letter phi as defined here
http://www.w3.org/2003/entities/2007doc/isogrk1.html"""
x = self.dom.createElement('cn')
x.appendChild(self.dom.createTextNode(u"\N{GREEK SMALL LETTER PHI}"))
return x
def _print_Exp1(self, e):
return self.dom.createElement('exponentiale')
def _print_Pi(self, e):
return self.dom.createElement('pi')
def _print_Infinity(self, e):
return self.dom.createElement('infinity')
def _print_NaN(self, e):
return self.dom.createElement('notanumber')
def _print_EmptySet(self, e):
return self.dom.createElement('emptyset')
def _print_BooleanTrue(self, e):
return self.dom.createElement('true')
def _print_BooleanFalse(self, e):
return self.dom.createElement('false')
def _print_NegativeInfinity(self, e):
x = self.dom.createElement('apply')
x.appendChild(self.dom.createElement('minus'))
x.appendChild(self.dom.createElement('infinity'))
return x
def _print_Integral(self, e):
def lime_recur(limits):
x = self.dom.createElement('apply')
x.appendChild(self.dom.createElement(self.mathml_tag(e)))
bvar_elem = self.dom.createElement('bvar')
bvar_elem.appendChild(self._print(limits[0][0]))
x.appendChild(bvar_elem)
if len(limits[0]) == 3:
low_elem = self.dom.createElement('lowlimit')
low_elem.appendChild(self._print(limits[0][1]))
x.appendChild(low_elem)
up_elem = self.dom.createElement('uplimit')
up_elem.appendChild(self._print(limits[0][2]))
x.appendChild(up_elem)
if len(limits[0]) == 2:
up_elem = self.dom.createElement('uplimit')
up_elem.appendChild(self._print(limits[0][1]))
x.appendChild(up_elem)
if len(limits) == 1:
x.appendChild(self._print(e.function))
else:
x.appendChild(lime_recur(limits[1:]))
return x
limits = list(e.limits)
limits.reverse()
return lime_recur(limits)
def _print_Sum(self, e):
# Printer can be shared because Sum and Integral have the
# same internal representation.
return self._print_Integral(e)
def _print_Symbol(self, sym):
ci = self.dom.createElement(self.mathml_tag(sym))
def join(items):
if len(items) > 1:
mrow = self.dom.createElement('mml:mrow')
for i, item in enumerate(items):
if i > 0:
mo = self.dom.createElement('mml:mo')
mo.appendChild(self.dom.createTextNode(" "))
mrow.appendChild(mo)
mi = self.dom.createElement('mml:mi')
mi.appendChild(self.dom.createTextNode(item))
mrow.appendChild(mi)
return mrow
else:
mi = self.dom.createElement('mml:mi')
mi.appendChild(self.dom.createTextNode(items[0]))
return mi
# translate name, supers and subs to unicode characters
def translate(s):
if s in greek_unicode:
return greek_unicode.get(s)
else:
return s
name, supers, subs = split_super_sub(sym.name)
name = translate(name)
supers = [translate(sup) for sup in supers]
subs = [translate(sub) for sub in subs]
mname = self.dom.createElement('mml:mi')
mname.appendChild(self.dom.createTextNode(name))
if not supers:
if not subs:
ci.appendChild(self.dom.createTextNode(name))
else:
msub = self.dom.createElement('mml:msub')
msub.appendChild(mname)
msub.appendChild(join(subs))
ci.appendChild(msub)
else:
if not subs:
msup = self.dom.createElement('mml:msup')
msup.appendChild(mname)
msup.appendChild(join(supers))
ci.appendChild(msup)
else:
msubsup = self.dom.createElement('mml:msubsup')
msubsup.appendChild(mname)
msubsup.appendChild(join(subs))
msubsup.appendChild(join(supers))
ci.appendChild(msubsup)
return ci
_print_MatrixSymbol = _print_Symbol
_print_RandomSymbol = _print_Symbol
def _print_Pow(self, e):
# Here we use root instead of power if the exponent is the reciprocal
# of an integer
if (self._settings['root_notation'] and e.exp.is_Rational
and e.exp.p == 1):
x = self.dom.createElement('apply')
x.appendChild(self.dom.createElement('root'))
if e.exp.q != 2:
xmldeg = self.dom.createElement('degree')
xmlci = self.dom.createElement('ci')
xmlci.appendChild(self.dom.createTextNode(str(e.exp.q)))
xmldeg.appendChild(xmlci)
x.appendChild(xmldeg)
x.appendChild(self._print(e.base))
return x
x = self.dom.createElement('apply')
x_1 = self.dom.createElement(self.mathml_tag(e))
x.appendChild(x_1)
x.appendChild(self._print(e.base))
x.appendChild(self._print(e.exp))
return x
def _print_Number(self, e):
x = self.dom.createElement(self.mathml_tag(e))
x.appendChild(self.dom.createTextNode(str(e)))
return x
def _print_Derivative(self, e):
x = self.dom.createElement('apply')
diff_symbol = self.mathml_tag(e)
if requires_partial(e.expr):
diff_symbol = 'partialdiff'
x.appendChild(self.dom.createElement(diff_symbol))
x_1 = self.dom.createElement('bvar')
for sym, times in reversed(e.variable_count):
x_1.appendChild(self._print(sym))
if times > 1:
degree = self.dom.createElement('degree')
degree.appendChild(self._print(sympify(times)))
x_1.appendChild(degree)
x.appendChild(x_1)
x.appendChild(self._print(e.expr))
return x
def _print_Function(self, e):
x = self.dom.createElement("apply")
x.appendChild(self.dom.createElement(self.mathml_tag(e)))
for arg in e.args:
x.appendChild(self._print(arg))
return x
def _print_Basic(self, e):
x = self.dom.createElement(self.mathml_tag(e))
for arg in e.args:
x.appendChild(self._print(arg))
return x
def _print_AssocOp(self, e):
x = self.dom.createElement('apply')
x_1 = self.dom.createElement(self.mathml_tag(e))
x.appendChild(x_1)
for arg in e.args:
x.appendChild(self._print(arg))
return x
def _print_Relational(self, e):
x = self.dom.createElement('apply')
x.appendChild(self.dom.createElement(self.mathml_tag(e)))
x.appendChild(self._print(e.lhs))
x.appendChild(self._print(e.rhs))
return x
def _print_list(self, seq):
"""MathML reference for the <list> element:
http://www.w3.org/TR/MathML2/chapter4.html#contm.list"""
dom_element = self.dom.createElement('list')
for item in seq:
dom_element.appendChild(self._print(item))
return dom_element
def _print_int(self, p):
dom_element = self.dom.createElement(self.mathml_tag(p))
dom_element.appendChild(self.dom.createTextNode(str(p)))
return dom_element
_print_Implies = _print_AssocOp
_print_Not = _print_AssocOp
_print_Xor = _print_AssocOp
class MathMLPresentationPrinter(MathMLPrinterBase):
"""Prints an expression to the Presentation MathML markup language.
References: https://www.w3.org/TR/MathML2/chapter3.html
"""
printmethod = "_mathml_presentation"
def mathml_tag(self, e):
"""Returns the MathML tag for an expression."""
translate = {
'Number': 'mn',
'Limit': '→',
'Derivative': 'ⅆ',
'int': 'mn',
'Symbol': 'mi',
'Integral': '∫',
'Sum': '∑',
'sin': 'sin',
'cos': 'cos',
'tan': 'tan',
'cot': 'cot',
'asin': 'arcsin',
'asinh': 'arcsinh',
'acos': 'arccos',
'acosh': 'arccosh',
'atan': 'arctan',
'atanh': 'arctanh',
'acot': 'arccot',
'atan2': 'arctan',
'Equality': '=',
'Unequality': '≠',
'GreaterThan': '≥',
'LessThan': '≤',
'StrictGreaterThan': '>',
'StrictLessThan': '<',
'lerchphi': 'Φ',
'zeta': 'ζ',
'dirichlet_eta': 'η',
'elliptic_k': 'Κ',
'lowergamma': 'γ',
'uppergamma': 'Γ',
'gamma': 'Γ',
'totient': 'ϕ',
'reduced_totient': 'λ',
'primenu': 'ν',
'primeomega': 'Ω',
'fresnels': 'S',
'fresnelc': 'C',
'LambertW': 'W',
'Heaviside': 'Θ',
'BooleanTrue': 'True',
'BooleanFalse': 'False',
'NoneType': 'None',
'mathieus': 'S',
'mathieuc': 'C',
'mathieusprime': 'S′',
'mathieucprime': 'C′',
}
def mul_symbol_selection():
if (self._settings["mul_symbol"] is None or
self._settings["mul_symbol"] == 'None'):
return '⁢'
elif self._settings["mul_symbol"] == 'times':
return '×'
elif self._settings["mul_symbol"] == 'dot':
return '·'
elif self._settings["mul_symbol"] == 'ldot':
return '․'
elif not isinstance(self._settings["mul_symbol"], string_types):
raise TypeError
else:
return self._settings["mul_symbol"]
for cls in e.__class__.__mro__:
n = cls.__name__
if n in translate:
return translate[n]
# Not found in the MRO set
if e.__class__.__name__ == "Mul":
return mul_symbol_selection()
n = e.__class__.__name__
return n.lower()
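    # Illustrative behaviour of mathml_tag (not exhaustive): an Equality such as
    # Eq(x, y) maps to '=', a Mul maps to the multiplication symbol chosen by
    # mul_symbol_selection(), and any class not listed in the table above falls
    # back to its lowercased class name.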
def parenthesize(self, item, level, strict=False):
prec_val = precedence_traditional(item)
if (prec_val < level) or ((not strict) and prec_val <= level):
brac = self.dom.createElement('mfenced')
brac.appendChild(self._print(item))
return brac
else:
return self._print(item)
def _print_Mul(self, expr):
def multiply(expr, mrow):
from sympy.simplify import fraction
numer, denom = fraction(expr)
if denom is not S.One:
frac = self.dom.createElement('mfrac')
if self._settings["fold_short_frac"] and len(str(expr)) < 7:
frac.setAttribute('bevelled', 'true')
xnum = self._print(numer)
xden = self._print(denom)
frac.appendChild(xnum)
frac.appendChild(xden)
mrow.appendChild(frac)
return mrow
coeff, terms = expr.as_coeff_mul()
if coeff is S.One and len(terms) == 1:
mrow.appendChild(self._print(terms[0]))
return mrow
if self.order != 'old':
terms = Mul._from_args(terms).as_ordered_factors()
if coeff != 1:
x = self._print(coeff)
y = self.dom.createElement('mo')
y.appendChild(self.dom.createTextNode(self.mathml_tag(expr)))
mrow.appendChild(x)
mrow.appendChild(y)
for term in terms:
mrow.appendChild(self.parenthesize(term, PRECEDENCE['Mul']))
if not term == terms[-1]:
y = self.dom.createElement('mo')
y.appendChild(self.dom.createTextNode(self.mathml_tag(expr)))
mrow.appendChild(y)
return mrow
mrow = self.dom.createElement('mrow')
if _coeff_isneg(expr):
x = self.dom.createElement('mo')
x.appendChild(self.dom.createTextNode('-'))
mrow.appendChild(x)
mrow = multiply(-expr, mrow)
else:
mrow = multiply(expr, mrow)
return mrow
def _print_Add(self, expr, order=None):
mrow = self.dom.createElement('mrow')
args = self._as_ordered_terms(expr, order=order)
mrow.appendChild(self._print(args[0]))
for arg in args[1:]:
if _coeff_isneg(arg):
# use minus
x = self.dom.createElement('mo')
x.appendChild(self.dom.createTextNode('-'))
y = self._print(-arg)
# invert expression since this is now minused
else:
x = self.dom.createElement('mo')
x.appendChild(self.dom.createTextNode('+'))
y = self._print(arg)
mrow.appendChild(x)
mrow.appendChild(y)
return mrow
def _print_MatrixBase(self, m):
table = self.dom.createElement('mtable')
for i in range(m.rows):
x = self.dom.createElement('mtr')
for j in range(m.cols):
y = self.dom.createElement('mtd')
y.appendChild(self._print(m[i, j]))
x.appendChild(y)
table.appendChild(x)
if self._settings["mat_delim"] == '':
return table
brac = self.dom.createElement('mfenced')
if self._settings["mat_delim"] == "[":
brac.setAttribute('close', ']')
brac.setAttribute('open', '[')
brac.appendChild(table)
return brac
def _get_printed_Rational(self, e, folded=None):
if e.p < 0:
p = -e.p
else:
p = e.p
x = self.dom.createElement('mfrac')
if folded or self._settings["fold_short_frac"]:
x.setAttribute('bevelled', 'true')
x.appendChild(self._print(p))
x.appendChild(self._print(e.q))
if e.p < 0:
mrow = self.dom.createElement('mrow')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('-'))
mrow.appendChild(mo)
mrow.appendChild(x)
return mrow
else:
return x
def _print_Rational(self, e):
if e.q == 1:
# don't divide
return self._print(e.p)
return self._get_printed_Rational(e, self._settings["fold_short_frac"])
def _print_Limit(self, e):
mrow = self.dom.createElement('mrow')
munder = self.dom.createElement('munder')
mi = self.dom.createElement('mi')
mi.appendChild(self.dom.createTextNode('lim'))
x = self.dom.createElement('mrow')
x_1 = self._print(e.args[1])
arrow = self.dom.createElement('mo')
arrow.appendChild(self.dom.createTextNode(self.mathml_tag(e)))
x_2 = self._print(e.args[2])
x.appendChild(x_1)
x.appendChild(arrow)
x.appendChild(x_2)
munder.appendChild(mi)
munder.appendChild(x)
mrow.appendChild(munder)
mrow.appendChild(self._print(e.args[0]))
return mrow
def _print_ImaginaryUnit(self, e):
x = self.dom.createElement('mi')
x.appendChild(self.dom.createTextNode('ⅈ'))
return x
def _print_GoldenRatio(self, e):
x = self.dom.createElement('mi')
x.appendChild(self.dom.createTextNode('Φ'))
return x
def _print_Exp1(self, e):
x = self.dom.createElement('mi')
x.appendChild(self.dom.createTextNode('ⅇ'))
return x
def _print_Pi(self, e):
x = self.dom.createElement('mi')
x.appendChild(self.dom.createTextNode('π'))
return x
def _print_Infinity(self, e):
x = self.dom.createElement('mi')
x.appendChild(self.dom.createTextNode('∞'))
return x
def _print_NegativeInfinity(self, e):
mrow = self.dom.createElement('mrow')
y = self.dom.createElement('mo')
y.appendChild(self.dom.createTextNode('-'))
x = self._print_Infinity(e)
mrow.appendChild(y)
mrow.appendChild(x)
return mrow
def _print_HBar(self, e):
x = self.dom.createElement('mi')
x.appendChild(self.dom.createTextNode('ℏ'))
return x
def _print_EulerGamma(self, e):
x = self.dom.createElement('mi')
x.appendChild(self.dom.createTextNode('γ'))
return x
def _print_TribonacciConstant(self, e):
x = self.dom.createElement('mi')
x.appendChild(self.dom.createTextNode('TribonacciConstant'))
return x
def _print_Dagger(self, e):
msup = self.dom.createElement('msup')
msup.appendChild(self._print(e.args[0]))
msup.appendChild(self.dom.createTextNode('†'))
return msup
def _print_Contains(self, e):
mrow = self.dom.createElement('mrow')
mrow.appendChild(self._print(e.args[0]))
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('∈'))
mrow.appendChild(mo)
mrow.appendChild(self._print(e.args[1]))
return mrow
def _print_HilbertSpace(self, e):
x = self.dom.createElement('mi')
x.appendChild(self.dom.createTextNode('ℋ'))
return x
def _print_ComplexSpace(self, e):
msup = self.dom.createElement('msup')
msup.appendChild(self.dom.createTextNode('𝒞'))
msup.appendChild(self._print(e.args[0]))
return msup
def _print_FockSpace(self, e):
x = self.dom.createElement('mi')
x.appendChild(self.dom.createTextNode('ℱ'))
return x
def _print_Integral(self, expr):
intsymbols = {1: "∫", 2: "∬", 3: "∭"}
mrow = self.dom.createElement('mrow')
if len(expr.limits) <= 3 and all(len(lim) == 1 for lim in expr.limits):
            # Only up to three integral signs exist
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode(intsymbols[len(expr.limits)]))
mrow.appendChild(mo)
else:
            # Either more than three integrals, or integration limits are provided
for lim in reversed(expr.limits):
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode(intsymbols[1]))
if len(lim) == 1:
mrow.appendChild(mo)
if len(lim) == 2:
msup = self.dom.createElement('msup')
msup.appendChild(mo)
msup.appendChild(self._print(lim[1]))
mrow.appendChild(msup)
if len(lim) == 3:
msubsup = self.dom.createElement('msubsup')
msubsup.appendChild(mo)
msubsup.appendChild(self._print(lim[1]))
msubsup.appendChild(self._print(lim[2]))
mrow.appendChild(msubsup)
# print function
mrow.appendChild(self.parenthesize(expr.function, PRECEDENCE["Mul"],
strict=True))
# print integration variables
for lim in reversed(expr.limits):
d = self.dom.createElement('mo')
d.appendChild(self.dom.createTextNode('ⅆ'))
mrow.appendChild(d)
mrow.appendChild(self._print(lim[0]))
return mrow
def _print_Sum(self, e):
limits = list(e.limits)
subsup = self.dom.createElement('munderover')
low_elem = self._print(limits[0][1])
up_elem = self._print(limits[0][2])
summand = self.dom.createElement('mo')
summand.appendChild(self.dom.createTextNode(self.mathml_tag(e)))
low = self.dom.createElement('mrow')
var = self._print(limits[0][0])
equal = self.dom.createElement('mo')
equal.appendChild(self.dom.createTextNode('='))
low.appendChild(var)
low.appendChild(equal)
low.appendChild(low_elem)
subsup.appendChild(summand)
subsup.appendChild(low)
subsup.appendChild(up_elem)
mrow = self.dom.createElement('mrow')
mrow.appendChild(subsup)
if len(str(e.function)) == 1:
mrow.appendChild(self._print(e.function))
else:
fence = self.dom.createElement('mfenced')
fence.appendChild(self._print(e.function))
mrow.appendChild(fence)
return mrow
def _print_Symbol(self, sym, style='plain'):
def join(items):
if len(items) > 1:
mrow = self.dom.createElement('mrow')
for i, item in enumerate(items):
if i > 0:
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode(" "))
mrow.appendChild(mo)
mi = self.dom.createElement('mi')
mi.appendChild(self.dom.createTextNode(item))
mrow.appendChild(mi)
return mrow
else:
mi = self.dom.createElement('mi')
mi.appendChild(self.dom.createTextNode(items[0]))
return mi
# translate name, supers and subs to unicode characters
def translate(s):
if s in greek_unicode:
return greek_unicode.get(s)
else:
return s
name, supers, subs = split_super_sub(sym.name)
name = translate(name)
supers = [translate(sup) for sup in supers]
subs = [translate(sub) for sub in subs]
mname = self.dom.createElement('mi')
mname.appendChild(self.dom.createTextNode(name))
if len(supers) == 0:
if len(subs) == 0:
x = mname
else:
x = self.dom.createElement('msub')
x.appendChild(mname)
x.appendChild(join(subs))
else:
if len(subs) == 0:
x = self.dom.createElement('msup')
x.appendChild(mname)
x.appendChild(join(supers))
else:
x = self.dom.createElement('msubsup')
x.appendChild(mname)
x.appendChild(join(subs))
x.appendChild(join(supers))
# Set bold font?
if style == 'bold':
x.setAttribute('mathvariant', 'bold')
return x
def _print_MatrixSymbol(self, sym):
return self._print_Symbol(sym,
style=self._settings['mat_symbol_style'])
_print_RandomSymbol = _print_Symbol
def _print_conjugate(self, expr):
enc = self.dom.createElement('menclose')
enc.setAttribute('notation', 'top')
enc.appendChild(self._print(expr.args[0]))
return enc
def _print_operator_after(self, op, expr):
row = self.dom.createElement('mrow')
row.appendChild(self.parenthesize(expr, PRECEDENCE["Func"]))
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode(op))
row.appendChild(mo)
return row
def _print_factorial(self, expr):
return self._print_operator_after('!', expr.args[0])
def _print_factorial2(self, expr):
return self._print_operator_after('!!', expr.args[0])
def _print_binomial(self, expr):
brac = self.dom.createElement('mfenced')
frac = self.dom.createElement('mfrac')
frac.setAttribute('linethickness', '0')
frac.appendChild(self._print(expr.args[0]))
frac.appendChild(self._print(expr.args[1]))
brac.appendChild(frac)
return brac
def _print_Pow(self, e):
# Here we use root instead of power if the exponent is the
# reciprocal of an integer
if (e.exp.is_Rational and abs(e.exp.p) == 1 and e.exp.q != 1 and
self._settings['root_notation']):
if e.exp.q == 2:
x = self.dom.createElement('msqrt')
x.appendChild(self._print(e.base))
if e.exp.q != 2:
x = self.dom.createElement('mroot')
x.appendChild(self._print(e.base))
x.appendChild(self._print(e.exp.q))
if e.exp.p == -1:
frac = self.dom.createElement('mfrac')
frac.appendChild(self._print(1))
frac.appendChild(x)
return frac
else:
return x
if e.exp.is_Rational and e.exp.q != 1:
if e.exp.is_negative:
top = self.dom.createElement('mfrac')
top.appendChild(self._print(1))
x = self.dom.createElement('msup')
x.appendChild(self.parenthesize(e.base, PRECEDENCE['Pow']))
x.appendChild(self._get_printed_Rational(-e.exp,
self._settings['fold_frac_powers']))
top.appendChild(x)
return top
else:
x = self.dom.createElement('msup')
x.appendChild(self.parenthesize(e.base, PRECEDENCE['Pow']))
x.appendChild(self._get_printed_Rational(e.exp,
self._settings['fold_frac_powers']))
return x
if e.exp.is_negative:
top = self.dom.createElement('mfrac')
top.appendChild(self._print(1))
if e.exp == -1:
top.appendChild(self._print(e.base))
else:
x = self.dom.createElement('msup')
x.appendChild(self.parenthesize(e.base, PRECEDENCE['Pow']))
x.appendChild(self._print(-e.exp))
top.appendChild(x)
return top
x = self.dom.createElement('msup')
x.appendChild(self.parenthesize(e.base, PRECEDENCE['Pow']))
x.appendChild(self._print(e.exp))
return x
def _print_Number(self, e):
x = self.dom.createElement(self.mathml_tag(e))
x.appendChild(self.dom.createTextNode(str(e)))
return x
def _print_AccumulationBounds(self, i):
brac = self.dom.createElement('mfenced')
brac.setAttribute('close', u'\u27e9')
brac.setAttribute('open', u'\u27e8')
brac.appendChild(self._print(i.min))
brac.appendChild(self._print(i.max))
return brac
def _print_Derivative(self, e):
if requires_partial(e.expr):
d = '∂'
else:
d = self.mathml_tag(e)
# Determine denominator
m = self.dom.createElement('mrow')
dim = 0 # Total diff dimension, for numerator
for sym, num in reversed(e.variable_count):
dim += num
if num >= 2:
x = self.dom.createElement('msup')
xx = self.dom.createElement('mo')
xx.appendChild(self.dom.createTextNode(d))
x.appendChild(xx)
x.appendChild(self._print(num))
else:
x = self.dom.createElement('mo')
x.appendChild(self.dom.createTextNode(d))
m.appendChild(x)
y = self._print(sym)
m.appendChild(y)
mnum = self.dom.createElement('mrow')
if dim >= 2:
x = self.dom.createElement('msup')
xx = self.dom.createElement('mo')
xx.appendChild(self.dom.createTextNode(d))
x.appendChild(xx)
x.appendChild(self._print(dim))
else:
x = self.dom.createElement('mo')
x.appendChild(self.dom.createTextNode(d))
mnum.appendChild(x)
mrow = self.dom.createElement('mrow')
frac = self.dom.createElement('mfrac')
frac.appendChild(mnum)
frac.appendChild(m)
mrow.appendChild(frac)
# Print function
mrow.appendChild(self._print(e.expr))
return mrow
def _print_Function(self, e):
mrow = self.dom.createElement('mrow')
x = self.dom.createElement('mi')
if self.mathml_tag(e) == 'log' and self._settings["ln_notation"]:
x.appendChild(self.dom.createTextNode('ln'))
else:
x.appendChild(self.dom.createTextNode(self.mathml_tag(e)))
y = self.dom.createElement('mfenced')
for arg in e.args:
y.appendChild(self._print(arg))
mrow.appendChild(x)
mrow.appendChild(y)
return mrow
def _print_Float(self, expr):
# Based off of that in StrPrinter
dps = prec_to_dps(expr._prec)
str_real = mlib.to_str(expr._mpf_, dps, strip_zeros=True)
# Must always have a mul symbol (as 2.5 10^{20} just looks odd)
# thus we use the number separator
separator = self._settings['mul_symbol_mathml_numbers']
mrow = self.dom.createElement('mrow')
if 'e' in str_real:
(mant, exp) = str_real.split('e')
if exp[0] == '+':
exp = exp[1:]
mn = self.dom.createElement('mn')
mn.appendChild(self.dom.createTextNode(mant))
mrow.appendChild(mn)
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode(separator))
mrow.appendChild(mo)
msup = self.dom.createElement('msup')
mn = self.dom.createElement('mn')
mn.appendChild(self.dom.createTextNode("10"))
msup.appendChild(mn)
mn = self.dom.createElement('mn')
mn.appendChild(self.dom.createTextNode(exp))
msup.appendChild(mn)
mrow.appendChild(msup)
return mrow
elif str_real == "+inf":
return self._print_Infinity(None)
elif str_real == "-inf":
return self._print_NegativeInfinity(None)
else:
mn = self.dom.createElement('mn')
mn.appendChild(self.dom.createTextNode(str_real))
return mn
def _print_polylog(self, expr):
mrow = self.dom.createElement('mrow')
m = self.dom.createElement('msub')
mi = self.dom.createElement('mi')
mi.appendChild(self.dom.createTextNode('Li'))
m.appendChild(mi)
m.appendChild(self._print(expr.args[0]))
mrow.appendChild(m)
brac = self.dom.createElement('mfenced')
brac.appendChild(self._print(expr.args[1]))
mrow.appendChild(brac)
return mrow
def _print_Basic(self, e):
mrow = self.dom.createElement('mrow')
mi = self.dom.createElement('mi')
mi.appendChild(self.dom.createTextNode(self.mathml_tag(e)))
mrow.appendChild(mi)
brac = self.dom.createElement('mfenced')
for arg in e.args:
brac.appendChild(self._print(arg))
mrow.appendChild(brac)
return mrow
def _print_Tuple(self, e):
mrow = self.dom.createElement('mrow')
x = self.dom.createElement('mfenced')
for arg in e.args:
x.appendChild(self._print(arg))
mrow.appendChild(x)
return mrow
def _print_Interval(self, i):
mrow = self.dom.createElement('mrow')
brac = self.dom.createElement('mfenced')
if i.start == i.end:
# Most often, this type of Interval is converted to a FiniteSet
brac.setAttribute('close', '}')
brac.setAttribute('open', '{')
brac.appendChild(self._print(i.start))
else:
if i.right_open:
brac.setAttribute('close', ')')
else:
brac.setAttribute('close', ']')
if i.left_open:
brac.setAttribute('open', '(')
else:
brac.setAttribute('open', '[')
brac.appendChild(self._print(i.start))
brac.appendChild(self._print(i.end))
mrow.appendChild(brac)
return mrow
def _print_Abs(self, expr, exp=None):
mrow = self.dom.createElement('mrow')
x = self.dom.createElement('mfenced')
x.setAttribute('close', '|')
x.setAttribute('open', '|')
x.appendChild(self._print(expr.args[0]))
mrow.appendChild(x)
return mrow
_print_Determinant = _print_Abs
def _print_re_im(self, c, expr):
mrow = self.dom.createElement('mrow')
mi = self.dom.createElement('mi')
mi.setAttribute('mathvariant', 'fraktur')
mi.appendChild(self.dom.createTextNode(c))
mrow.appendChild(mi)
brac = self.dom.createElement('mfenced')
brac.appendChild(self._print(expr))
mrow.appendChild(brac)
return mrow
def _print_re(self, expr, exp=None):
return self._print_re_im('R', expr.args[0])
def _print_im(self, expr, exp=None):
return self._print_re_im('I', expr.args[0])
def _print_AssocOp(self, e):
mrow = self.dom.createElement('mrow')
mi = self.dom.createElement('mi')
mi.appendChild(self.dom.createTextNode(self.mathml_tag(e)))
mrow.appendChild(mi)
for arg in e.args:
mrow.appendChild(self._print(arg))
return mrow
def _print_SetOp(self, expr, symbol, prec):
mrow = self.dom.createElement('mrow')
mrow.appendChild(self.parenthesize(expr.args[0], prec))
for arg in expr.args[1:]:
x = self.dom.createElement('mo')
x.appendChild(self.dom.createTextNode(symbol))
y = self.parenthesize(arg, prec)
mrow.appendChild(x)
mrow.appendChild(y)
return mrow
def _print_Union(self, expr):
prec = PRECEDENCE_TRADITIONAL['Union']
return self._print_SetOp(expr, '∪', prec)
def _print_Intersection(self, expr):
prec = PRECEDENCE_TRADITIONAL['Intersection']
return self._print_SetOp(expr, '∩', prec)
def _print_Complement(self, expr):
prec = PRECEDENCE_TRADITIONAL['Complement']
return self._print_SetOp(expr, '∖', prec)
def _print_SymmetricDifference(self, expr):
prec = PRECEDENCE_TRADITIONAL['SymmetricDifference']
return self._print_SetOp(expr, '∆', prec)
def _print_ProductSet(self, expr):
prec = PRECEDENCE_TRADITIONAL['ProductSet']
return self._print_SetOp(expr, '×', prec)
def _print_FiniteSet(self, s):
return self._print_set(s.args)
def _print_set(self, s):
items = sorted(s, key=default_sort_key)
brac = self.dom.createElement('mfenced')
brac.setAttribute('close', '}')
brac.setAttribute('open', '{')
for item in items:
brac.appendChild(self._print(item))
return brac
_print_frozenset = _print_set
def _print_LogOp(self, args, symbol):
mrow = self.dom.createElement('mrow')
if args[0].is_Boolean and not args[0].is_Not:
brac = self.dom.createElement('mfenced')
brac.appendChild(self._print(args[0]))
mrow.appendChild(brac)
else:
mrow.appendChild(self._print(args[0]))
for arg in args[1:]:
x = self.dom.createElement('mo')
x.appendChild(self.dom.createTextNode(symbol))
if arg.is_Boolean and not arg.is_Not:
y = self.dom.createElement('mfenced')
y.appendChild(self._print(arg))
else:
y = self._print(arg)
mrow.appendChild(x)
mrow.appendChild(y)
return mrow
def _print_BasisDependent(self, expr):
from sympy.vector import Vector
if expr == expr.zero:
# Not clear if this is ever called
return self._print(expr.zero)
if isinstance(expr, Vector):
items = expr.separate().items()
else:
items = [(0, expr)]
mrow = self.dom.createElement('mrow')
for system, vect in items:
inneritems = list(vect.components.items())
inneritems.sort(key = lambda x:x[0].__str__())
for i, (k, v) in enumerate(inneritems):
if v == 1:
if i: # No + for first item
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('+'))
mrow.appendChild(mo)
mrow.appendChild(self._print(k))
elif v == -1:
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('-'))
mrow.appendChild(mo)
mrow.appendChild(self._print(k))
else:
if i: # No + for first item
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('+'))
mrow.appendChild(mo)
mbrac = self.dom.createElement('mfenced')
mbrac.appendChild(self._print(v))
mrow.appendChild(mbrac)
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('⁢'))
mrow.appendChild(mo)
mrow.appendChild(self._print(k))
return mrow
def _print_And(self, expr):
args = sorted(expr.args, key=default_sort_key)
return self._print_LogOp(args, '∧')
def _print_Or(self, expr):
args = sorted(expr.args, key=default_sort_key)
return self._print_LogOp(args, '∨')
def _print_Xor(self, expr):
args = sorted(expr.args, key=default_sort_key)
return self._print_LogOp(args, '⊻')
def _print_Implies(self, expr):
return self._print_LogOp(expr.args, '⇒')
def _print_Equivalent(self, expr):
args = sorted(expr.args, key=default_sort_key)
return self._print_LogOp(args, '⇔')
def _print_Not(self, e):
mrow = self.dom.createElement('mrow')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('¬'))
mrow.appendChild(mo)
if (e.args[0].is_Boolean):
x = self.dom.createElement('mfenced')
x.appendChild(self._print(e.args[0]))
else:
x = self._print(e.args[0])
mrow.appendChild(x)
return mrow
def _print_bool(self, e):
mi = self.dom.createElement('mi')
mi.appendChild(self.dom.createTextNode(self.mathml_tag(e)))
return mi
_print_BooleanTrue = _print_bool
_print_BooleanFalse = _print_bool
def _print_NoneType(self, e):
mi = self.dom.createElement('mi')
mi.appendChild(self.dom.createTextNode(self.mathml_tag(e)))
return mi
def _print_Range(self, s):
dots = u"\u2026"
brac = self.dom.createElement('mfenced')
brac.setAttribute('close', '}')
brac.setAttribute('open', '{')
if s.start.is_infinite and s.stop.is_infinite:
if s.step.is_positive:
printset = dots, -1, 0, 1, dots
else:
printset = dots, 1, 0, -1, dots
elif s.start.is_infinite:
printset = dots, s[-1] - s.step, s[-1]
elif s.stop.is_infinite:
it = iter(s)
printset = next(it), next(it), dots
elif len(s) > 4:
it = iter(s)
printset = next(it), next(it), dots, s[-1]
else:
printset = tuple(s)
for el in printset:
if el == dots:
mi = self.dom.createElement('mi')
mi.appendChild(self.dom.createTextNode(dots))
brac.appendChild(mi)
else:
brac.appendChild(self._print(el))
return brac
def _hprint_variadic_function(self, expr):
args = sorted(expr.args, key=default_sort_key)
mrow = self.dom.createElement('mrow')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode((str(expr.func)).lower()))
mrow.appendChild(mo)
brac = self.dom.createElement('mfenced')
for symbol in args:
brac.appendChild(self._print(symbol))
mrow.appendChild(brac)
return mrow
_print_Min = _print_Max = _hprint_variadic_function
def _print_exp(self, expr):
msup = self.dom.createElement('msup')
msup.appendChild(self._print_Exp1(None))
msup.appendChild(self._print(expr.args[0]))
return msup
def _print_Relational(self, e):
mrow = self.dom.createElement('mrow')
mrow.appendChild(self._print(e.lhs))
x = self.dom.createElement('mo')
x.appendChild(self.dom.createTextNode(self.mathml_tag(e)))
mrow.appendChild(x)
mrow.appendChild(self._print(e.rhs))
return mrow
def _print_int(self, p):
dom_element = self.dom.createElement(self.mathml_tag(p))
dom_element.appendChild(self.dom.createTextNode(str(p)))
return dom_element
def _print_BaseScalar(self, e):
msub = self.dom.createElement('msub')
index, system = e._id
mi = self.dom.createElement('mi')
mi.setAttribute('mathvariant', 'bold')
mi.appendChild(self.dom.createTextNode(system._variable_names[index]))
msub.appendChild(mi)
mi = self.dom.createElement('mi')
mi.setAttribute('mathvariant', 'bold')
mi.appendChild(self.dom.createTextNode(system._name))
msub.appendChild(mi)
return msub
def _print_BaseVector(self, e):
msub = self.dom.createElement('msub')
index, system = e._id
mover = self.dom.createElement('mover')
mi = self.dom.createElement('mi')
mi.setAttribute('mathvariant', 'bold')
mi.appendChild(self.dom.createTextNode(system._vector_names[index]))
mover.appendChild(mi)
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('^'))
mover.appendChild(mo)
msub.appendChild(mover)
mi = self.dom.createElement('mi')
mi.setAttribute('mathvariant', 'bold')
mi.appendChild(self.dom.createTextNode(system._name))
msub.appendChild(mi)
return msub
def _print_VectorZero(self, e):
mover = self.dom.createElement('mover')
mi = self.dom.createElement('mi')
mi.setAttribute('mathvariant', 'bold')
mi.appendChild(self.dom.createTextNode("0"))
mover.appendChild(mi)
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('^'))
mover.appendChild(mo)
return mover
def _print_Cross(self, expr):
mrow = self.dom.createElement('mrow')
vec1 = expr._expr1
vec2 = expr._expr2
mrow.appendChild(self.parenthesize(vec1, PRECEDENCE['Mul']))
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('×'))
mrow.appendChild(mo)
mrow.appendChild(self.parenthesize(vec2, PRECEDENCE['Mul']))
return mrow
def _print_Curl(self, expr):
mrow = self.dom.createElement('mrow')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('∇'))
mrow.appendChild(mo)
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('×'))
mrow.appendChild(mo)
mrow.appendChild(self.parenthesize(expr._expr, PRECEDENCE['Mul']))
return mrow
def _print_Divergence(self, expr):
mrow = self.dom.createElement('mrow')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('∇'))
mrow.appendChild(mo)
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('·'))
mrow.appendChild(mo)
mrow.appendChild(self.parenthesize(expr._expr, PRECEDENCE['Mul']))
return mrow
def _print_Dot(self, expr):
mrow = self.dom.createElement('mrow')
vec1 = expr._expr1
vec2 = expr._expr2
mrow.appendChild(self.parenthesize(vec1, PRECEDENCE['Mul']))
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('·'))
mrow.appendChild(mo)
mrow.appendChild(self.parenthesize(vec2, PRECEDENCE['Mul']))
return mrow
def _print_Gradient(self, expr):
mrow = self.dom.createElement('mrow')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('∇'))
mrow.appendChild(mo)
mrow.appendChild(self.parenthesize(expr._expr, PRECEDENCE['Mul']))
return mrow
def _print_Laplacian(self, expr):
mrow = self.dom.createElement('mrow')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('∆'))
mrow.appendChild(mo)
mrow.appendChild(self.parenthesize(expr._expr, PRECEDENCE['Mul']))
return mrow
def _print_Integers(self, e):
x = self.dom.createElement('mi')
x.setAttribute('mathvariant', 'normal')
x.appendChild(self.dom.createTextNode('ℤ'))
return x
def _print_Complexes(self, e):
x = self.dom.createElement('mi')
x.setAttribute('mathvariant', 'normal')
x.appendChild(self.dom.createTextNode('ℂ'))
return x
def _print_Reals(self, e):
x = self.dom.createElement('mi')
x.setAttribute('mathvariant', 'normal')
x.appendChild(self.dom.createTextNode('ℝ'))
return x
def _print_Naturals(self, e):
x = self.dom.createElement('mi')
x.setAttribute('mathvariant', 'normal')
x.appendChild(self.dom.createTextNode('ℕ'))
return x
def _print_Naturals0(self, e):
sub = self.dom.createElement('msub')
x = self.dom.createElement('mi')
x.setAttribute('mathvariant', 'normal')
x.appendChild(self.dom.createTextNode('ℕ'))
sub.appendChild(x)
sub.appendChild(self._print(S.Zero))
return sub
def _print_SingularityFunction(self, expr):
shift = expr.args[0] - expr.args[1]
power = expr.args[2]
sup = self.dom.createElement('msup')
brac = self.dom.createElement('mfenced')
brac.setAttribute('close', u'\u27e9')
brac.setAttribute('open', u'\u27e8')
brac.appendChild(self._print(shift))
sup.appendChild(brac)
sup.appendChild(self._print(power))
return sup
def _print_NaN(self, e):
x = self.dom.createElement('mi')
x.appendChild(self.dom.createTextNode('NaN'))
return x
def _print_number_function(self, e, name):
        # Print the name subscripted by args[0] when there is a single argument,
        # or name_{args[0]}(args[1], ...) when there are additional arguments
sub = self.dom.createElement('msub')
mi = self.dom.createElement('mi')
mi.appendChild(self.dom.createTextNode(name))
sub.appendChild(mi)
sub.appendChild(self._print(e.args[0]))
if len(e.args) == 1:
return sub
# TODO: copy-pasted from _print_Function: can we do better?
mrow = self.dom.createElement('mrow')
y = self.dom.createElement('mfenced')
for arg in e.args[1:]:
y.appendChild(self._print(arg))
mrow.appendChild(sub)
mrow.appendChild(y)
return mrow
def _print_bernoulli(self, e):
return self._print_number_function(e, 'B')
_print_bell = _print_bernoulli
def _print_catalan(self, e):
return self._print_number_function(e, 'C')
def _print_euler(self, e):
return self._print_number_function(e, 'E')
def _print_fibonacci(self, e):
return self._print_number_function(e, 'F')
def _print_lucas(self, e):
return self._print_number_function(e, 'L')
def _print_stieltjes(self, e):
return self._print_number_function(e, 'γ')
def _print_tribonacci(self, e):
return self._print_number_function(e, 'T')
def _print_ComplexInfinity(self, e):
x = self.dom.createElement('mover')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('∞'))
x.appendChild(mo)
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('~'))
x.appendChild(mo)
return x
def _print_EmptySet(self, e):
x = self.dom.createElement('mo')
x.appendChild(self.dom.createTextNode('∅'))
return x
def _print_UniversalSet(self, e):
x = self.dom.createElement('mo')
x.appendChild(self.dom.createTextNode('𝕌'))
return x
def _print_Adjoint(self, expr):
from sympy.matrices import MatrixSymbol
mat = expr.arg
sup = self.dom.createElement('msup')
if not isinstance(mat, MatrixSymbol):
brac = self.dom.createElement('mfenced')
brac.appendChild(self._print(mat))
sup.appendChild(brac)
else:
sup.appendChild(self._print(mat))
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('†'))
sup.appendChild(mo)
return sup
def _print_Transpose(self, expr):
from sympy.matrices import MatrixSymbol
mat = expr.arg
sup = self.dom.createElement('msup')
if not isinstance(mat, MatrixSymbol):
brac = self.dom.createElement('mfenced')
brac.appendChild(self._print(mat))
sup.appendChild(brac)
else:
sup.appendChild(self._print(mat))
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('T'))
sup.appendChild(mo)
return sup
def _print_Inverse(self, expr):
from sympy.matrices import MatrixSymbol
mat = expr.arg
sup = self.dom.createElement('msup')
if not isinstance(mat, MatrixSymbol):
brac = self.dom.createElement('mfenced')
brac.appendChild(self._print(mat))
sup.appendChild(brac)
else:
sup.appendChild(self._print(mat))
sup.appendChild(self._print(-1))
return sup
def _print_MatMul(self, expr):
from sympy import MatMul
x = self.dom.createElement('mrow')
args = expr.args
if isinstance(args[0], Mul):
args = args[0].as_ordered_factors() + list(args[1:])
else:
args = list(args)
if isinstance(expr, MatMul) and _coeff_isneg(expr):
if args[0] == -1:
args = args[1:]
else:
args[0] = -args[0]
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('-'))
x.appendChild(mo)
for arg in args[:-1]:
x.appendChild(self.parenthesize(arg, precedence_traditional(expr),
False))
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('⁢'))
x.appendChild(mo)
x.appendChild(self.parenthesize(args[-1], precedence_traditional(expr),
False))
return x
def _print_MatPow(self, expr):
from sympy.matrices import MatrixSymbol
base, exp = expr.base, expr.exp
sup = self.dom.createElement('msup')
if not isinstance(base, MatrixSymbol):
brac = self.dom.createElement('mfenced')
brac.appendChild(self._print(base))
sup.appendChild(brac)
else:
sup.appendChild(self._print(base))
sup.appendChild(self._print(exp))
return sup
def _print_HadamardProduct(self, expr):
x = self.dom.createElement('mrow')
args = expr.args
for arg in args[:-1]:
x.appendChild(
self.parenthesize(arg, precedence_traditional(expr), False))
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('∘'))
x.appendChild(mo)
x.appendChild(
self.parenthesize(args[-1], precedence_traditional(expr), False))
return x
def _print_ZeroMatrix(self, Z):
x = self.dom.createElement('mn')
x.appendChild(self.dom.createTextNode('𝟘'))
return x
def _print_OneMatrix(self, Z):
x = self.dom.createElement('mn')
x.appendChild(self.dom.createTextNode('𝟙'))
return x
def _print_Identity(self, I):
x = self.dom.createElement('mi')
x.appendChild(self.dom.createTextNode('𝕀'))
return x
def _print_floor(self, e):
mrow = self.dom.createElement('mrow')
x = self.dom.createElement('mfenced')
x.setAttribute('close', u'\u230B')
x.setAttribute('open', u'\u230A')
x.appendChild(self._print(e.args[0]))
mrow.appendChild(x)
return mrow
def _print_ceiling(self, e):
mrow = self.dom.createElement('mrow')
x = self.dom.createElement('mfenced')
x.setAttribute('close', u'\u2309')
x.setAttribute('open', u'\u2308')
x.appendChild(self._print(e.args[0]))
mrow.appendChild(x)
return mrow
def _print_Lambda(self, e):
x = self.dom.createElement('mfenced')
mrow = self.dom.createElement('mrow')
symbols = e.args[0]
if len(symbols) == 1:
symbols = self._print(symbols[0])
else:
symbols = self._print(symbols)
mrow.appendChild(symbols)
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('↦'))
mrow.appendChild(mo)
mrow.appendChild(self._print(e.args[1]))
x.appendChild(mrow)
return x
def _print_tuple(self, e):
x = self.dom.createElement('mfenced')
for i in e:
x.appendChild(self._print(i))
return x
def _print_IndexedBase(self, e):
return self._print(e.label)
def _print_Indexed(self, e):
x = self.dom.createElement('msub')
x.appendChild(self._print(e.base))
if len(e.indices) == 1:
x.appendChild(self._print(e.indices[0]))
return x
x.appendChild(self._print(e.indices))
return x
def _print_MatrixElement(self, e):
x = self.dom.createElement('msub')
x.appendChild(self.parenthesize(e.parent, PRECEDENCE["Atom"], strict = True))
brac = self.dom.createElement('mfenced')
brac.setAttribute("close", "")
brac.setAttribute("open", "")
for i in e.indices:
brac.appendChild(self._print(i))
x.appendChild(brac)
return x
def _print_elliptic_f(self, e):
x = self.dom.createElement('mrow')
mi = self.dom.createElement('mi')
mi.appendChild(self.dom.createTextNode('𝖥'))
x.appendChild(mi)
y = self.dom.createElement('mfenced')
y.setAttribute("separators", "|")
for i in e.args:
y.appendChild(self._print(i))
x.appendChild(y)
return x
def _print_elliptic_e(self, e):
x = self.dom.createElement('mrow')
mi = self.dom.createElement('mi')
mi.appendChild(self.dom.createTextNode('𝖤'))
x.appendChild(mi)
y = self.dom.createElement('mfenced')
y.setAttribute("separators", "|")
for i in e.args:
y.appendChild(self._print(i))
x.appendChild(y)
return x
def _print_elliptic_pi(self, e):
x = self.dom.createElement('mrow')
mi = self.dom.createElement('mi')
mi.appendChild(self.dom.createTextNode('𝛱'))
x.appendChild(mi)
y = self.dom.createElement('mfenced')
if len(e.args) == 2:
y.setAttribute("separators", "|")
else:
y.setAttribute("separators", ";|")
for i in e.args:
y.appendChild(self._print(i))
x.appendChild(y)
return x
def _print_Ei(self, e):
x = self.dom.createElement('mrow')
mi = self.dom.createElement('mi')
mi.appendChild(self.dom.createTextNode('Ei'))
x.appendChild(mi)
x.appendChild(self._print(e.args))
return x
def _print_expint(self, e):
x = self.dom.createElement('mrow')
y = self.dom.createElement('msub')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('E'))
y.appendChild(mo)
y.appendChild(self._print(e.args[0]))
x.appendChild(y)
x.appendChild(self._print(e.args[1:]))
return x
def _print_jacobi(self, e):
x = self.dom.createElement('mrow')
y = self.dom.createElement('msubsup')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('P'))
y.appendChild(mo)
y.appendChild(self._print(e.args[0]))
y.appendChild(self._print(e.args[1:3]))
x.appendChild(y)
x.appendChild(self._print(e.args[3:]))
return x
def _print_gegenbauer(self, e):
x = self.dom.createElement('mrow')
y = self.dom.createElement('msubsup')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('C'))
y.appendChild(mo)
y.appendChild(self._print(e.args[0]))
y.appendChild(self._print(e.args[1:2]))
x.appendChild(y)
x.appendChild(self._print(e.args[2:]))
return x
def _print_chebyshevt(self, e):
x = self.dom.createElement('mrow')
y = self.dom.createElement('msub')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('T'))
y.appendChild(mo)
y.appendChild(self._print(e.args[0]))
x.appendChild(y)
x.appendChild(self._print(e.args[1:]))
return x
def _print_chebyshevu(self, e):
x = self.dom.createElement('mrow')
y = self.dom.createElement('msub')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('U'))
y.appendChild(mo)
y.appendChild(self._print(e.args[0]))
x.appendChild(y)
x.appendChild(self._print(e.args[1:]))
return x
def _print_legendre(self, e):
x = self.dom.createElement('mrow')
y = self.dom.createElement('msub')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('P'))
y.appendChild(mo)
y.appendChild(self._print(e.args[0]))
x.appendChild(y)
x.appendChild(self._print(e.args[1:]))
return x
def _print_assoc_legendre(self, e):
x = self.dom.createElement('mrow')
y = self.dom.createElement('msubsup')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('P'))
y.appendChild(mo)
y.appendChild(self._print(e.args[0]))
y.appendChild(self._print(e.args[1:2]))
x.appendChild(y)
x.appendChild(self._print(e.args[2:]))
return x
def _print_laguerre(self, e):
x = self.dom.createElement('mrow')
y = self.dom.createElement('msub')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('L'))
y.appendChild(mo)
y.appendChild(self._print(e.args[0]))
x.appendChild(y)
x.appendChild(self._print(e.args[1:]))
return x
def _print_assoc_laguerre(self, e):
x = self.dom.createElement('mrow')
y = self.dom.createElement('msubsup')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('L'))
y.appendChild(mo)
y.appendChild(self._print(e.args[0]))
y.appendChild(self._print(e.args[1:2]))
x.appendChild(y)
x.appendChild(self._print(e.args[2:]))
return x
def _print_hermite(self, e):
x = self.dom.createElement('mrow')
y = self.dom.createElement('msub')
mo = self.dom.createElement('mo')
mo.appendChild(self.dom.createTextNode('H'))
y.appendChild(mo)
y.appendChild(self._print(e.args[0]))
x.appendChild(y)
x.appendChild(self._print(e.args[1:]))
return x
def mathml(expr, printer='content', **settings):
"""Returns the MathML representation of expr. If printer is presentation
then prints Presentation MathML else prints content MathML.
"""
if printer == 'presentation':
return MathMLPresentationPrinter(settings).doprint(expr)
else:
return MathMLContentPrinter(settings).doprint(expr)
def print_mathml(expr, printer='content', **settings):
"""
Prints a pretty representation of the MathML code for expr. If printer is
presentation then prints Presentation MathML else prints content MathML.
Examples
========
>>> ##
>>> from sympy.printing.mathml import print_mathml
>>> from sympy.abc import x
>>> print_mathml(x+1) #doctest: +NORMALIZE_WHITESPACE
<apply>
<plus/>
<ci>x</ci>
<cn>1</cn>
</apply>
>>> print_mathml(x+1, printer='presentation')
<mrow>
<mi>x</mi>
<mo>+</mo>
<mn>1</mn>
</mrow>
"""
if printer == 'presentation':
s = MathMLPresentationPrinter(settings)
else:
s = MathMLContentPrinter(settings)
xml = s._print(sympify(expr))
s.apply_patch()
pretty_xml = xml.toprettyxml()
s.restore_patch()
print(pretty_xml)
# For backward compatibility
MathMLPrinter = MathMLContentPrinter
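# A minimal usage sketch of the two entry points above (illustrative only; it
# assumes a standard SymPy environment where ``sympy.abc`` is importable):
#
#     from sympy.abc import x
#     from sympy import Integral
#
#     s_content = mathml(Integral(x, (x, 0, 1)))                       # Content MathML string
#     s_pres = mathml(Integral(x, (x, 0, 1)), printer='presentation')  # Presentation MathML string
#     print_mathml(x + 1, printer='presentation')                      # pretty-printed to stdout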
|
|
# Standard library
import os
import warnings
# Third-party
from astropy.table.meta import get_header_from_yaml, get_yaml_from_table
from astropy.io.misc.hdf5 import _encode_mixins, meta_path
from astropy.utils.exceptions import AstropyUserWarning
from astropy.utils import metadata
from thejoker.thejoker import validate_prepare_data
def _custom_tbl_dtype_compare(dtype1, dtype2):
"""This is a custom equality operator for comparing table data types that
is less strict about units when unit is missing in one and dimensionless in
the other.
"""
for d1, d2 in zip(dtype1, dtype2):
for k in set(list(d1.keys()) + list(d2.keys())):
if k == 'unit':
if d1.get(k, '') != '' and k not in d2:
return False
if d2.get(k, '') != '' and k not in d1:
return False
if d1.get(k, '') != d2.get(k, ''):
return False
else:
if d1.get(k, '1') != d2.get(k, '2'):
return False
return True
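# Illustrative sketch of the comparison semantics above (the dtype dicts are
# hypothetical, not taken from a real YAML header):
#
#     d_a = [{'name': 'rv', 'datatype': 'float64', 'unit': ''}]
#     d_b = [{'name': 'rv', 'datatype': 'float64'}]
#     d_c = [{'name': 'rv', 'datatype': 'float64', 'unit': 'km / s'}]
#
#     _custom_tbl_dtype_compare(d_a, d_b)   # True: empty unit vs. missing unit is tolerated
#     _custom_tbl_dtype_compare(d_a, d_c)   # False: the units genuinely differ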
def write_table_hdf5(table, output, path=None, compression=False,
append=False, overwrite=False, serialize_meta=False,
metadata_conflicts='error', **create_dataset_kwargs):
"""
Write a Table object to an HDF5 file
This requires `h5py <http://www.h5py.org/>`_ to be installed.
Parameters
----------
table : `~astropy.table.Table`
Data table that is to be written to file.
output : str or :class:`h5py:File` or :class:`h5py:Group`
If a string, the filename to write the table to. If an h5py object,
either the file or the group object to write the table to.
path : str
The path to which to write the table inside the HDF5 file.
This should be relative to the input file or group.
If not specified, defaults to ``__astropy_table__``.
compression : bool or str or int
Whether to compress the table inside the HDF5 file. If set to `True`,
``'gzip'`` compression is used. If a string is specified, it should be
one of ``'gzip'``, ``'szip'``, or ``'lzf'``. If an integer is
specified (in the range 0-9), ``'gzip'`` compression is used, and the
integer denotes the compression level.
append : bool
Whether to append the table to an existing HDF5 file.
overwrite : bool
Whether to overwrite any existing file without warning.
If ``append=True`` and ``overwrite=True`` then only the dataset will be
replaced; the file/group will not be overwritten.
    serialize_meta : bool
        Whether to serialize rich table meta-data (e.g. column units, formats,
        descriptions) to a separate dataset inside the HDF5 file. This is
        required if the table is to be appended to later, because the append
        path compares against the stored metadata header.
    metadata_conflicts : str
        How to proceed with metadata conflicts when appending. This should be
        one of:
            * ``'silent'``: silently pick the last conflicting meta-data value
            * ``'warn'``: pick the last conflicting meta-data value, but emit a
              warning
            * ``'error'``: raise an exception (default).
**create_dataset_kwargs
Additional keyword arguments are passed to `h5py.File.create_dataset`.
"""
from astropy.table import meta
try:
import h5py
except ImportError:
raise Exception("h5py is required to read and write HDF5 files")
if path is None:
# table is just an arbitrary, hardcoded string here.
path = '__astropy_table__'
elif path.endswith('/'):
raise ValueError("table path should end with table name, not /")
if '/' in path:
group, name = path.rsplit('/', 1)
else:
group, name = None, path
if isinstance(output, (h5py.File, h5py.Group)):
if len(list(output.keys())) > 0 and name == '__astropy_table__':
raise ValueError("table path should always be set via the "
"path= argument when writing to existing "
"files")
elif name == '__astropy_table__':
warnings.warn("table path was not set via the path= argument; "
"using default path {}".format(path))
if group:
try:
output_group = output[group]
except (KeyError, ValueError):
output_group = output.create_group(group)
else:
output_group = output
elif isinstance(output, str):
if os.path.exists(output) and not append:
if overwrite and not append:
os.remove(output)
else:
raise OSError(f"File exists: {output}")
# Open the file for appending or writing
f = h5py.File(output, 'a' if append else 'w')
# Recursively call the write function
try:
            return write_table_hdf5(table, f, path=path,
                                    compression=compression, append=append,
                                    overwrite=overwrite,
                                    serialize_meta=serialize_meta,
                                    metadata_conflicts=metadata_conflicts,
                                    **create_dataset_kwargs)
finally:
f.close()
else:
raise TypeError('output should be a string or an h5py File or '
'Group object')
# Check whether table already exists
existing_header = None
if name in output_group:
if append and overwrite:
# Delete only the dataset itself
del output_group[name]
elif append:
# Data table exists, so we interpret "append" to mean "extend
# existing table with the table passed in". However, this requires
# the table to have been written by this function in the past, so it
# should have a metadata header
if meta_path(name) not in output_group:
raise ValueError("No metadata exists for existing table. We "
"can only append tables if metadata "
"is consistent for all tables")
# Load existing table header:
existing_header = get_header_from_yaml(
h.decode('utf-8') for h in output_group[meta_path(name)])
else:
raise OSError(f"Table {path} already exists")
# Encode any mixin columns as plain columns + appropriate metadata
table = _encode_mixins(table)
# Table with numpy unicode strings can't be written in HDF5 so
# to write such a table a copy of table is made containing columns as
# bytestrings. Now this copy of the table can be written in HDF5.
if any(col.info.dtype.kind == 'U' for col in table.itercols()):
table = table.copy(copy_data=False)
table.convert_unicode_to_bytestring()
# Warn if information will be lost when serialize_meta=False. This is
# hardcoded to the set difference between column info attributes and what
# HDF5 can store natively (name, dtype) with no meta.
if serialize_meta is False:
for col in table.itercols():
for attr in ('unit', 'format', 'description', 'meta'):
if getattr(col.info, attr, None) not in (None, {}):
warnings.warn("table contains column(s) with defined 'unit', 'format',"
" 'description', or 'meta' info attributes. These will"
" be dropped since serialize_meta=False.",
AstropyUserWarning)
if existing_header is None: # Just write the table and metadata
# Write the table to the file
if compression:
if compression is True:
compression = 'gzip'
dset = output_group.create_dataset(name, data=table.as_array(),
compression=compression,
**create_dataset_kwargs)
else:
dset = output_group.create_dataset(name, data=table.as_array(),
**create_dataset_kwargs)
if serialize_meta:
header_yaml = meta.get_yaml_from_table(table)
header_encoded = [h.encode('utf-8') for h in header_yaml]
output_group.create_dataset(meta_path(name),
data=header_encoded)
else:
# Write the Table meta dict key:value pairs to the file as HDF5
# attributes. This works only for a limited set of scalar data types
# like numbers, strings, etc., but not any complex types. This path
# also ignores column meta like unit or format.
for key in table.meta:
val = table.meta[key]
try:
dset.attrs[key] = val
except TypeError:
warnings.warn("Attribute `{}` of type {} cannot be written to "
"HDF5 files - skipping. (Consider specifying "
"serialize_meta=True to write all meta data)"
.format(key, type(val)), AstropyUserWarning)
else: # We need to append the tables!
try:
# FIXME: do something with the merged metadata!
metadata.merge(existing_header['meta'],
table.meta,
metadata_conflicts=metadata_conflicts)
except metadata.MergeConflictError:
raise metadata.MergeConflictError(
"Cannot append table to existing file because "
"the existing file table metadata and this "
"table object's metadata do not match. If you "
"want to ignore this issue, or change to a "
"warning, set metadata_conflicts='silent' or 'warn'.")
# Now compare datatype of this object and on disk
this_header = get_header_from_yaml(get_yaml_from_table(table))
if not _custom_tbl_dtype_compare(existing_header['datatype'],
this_header['datatype']):
raise ValueError(
"Cannot append table to existing file because "
"the existing file table datatype and this "
"object's table datatype do not match. "
f"{existing_header['datatype']} vs. {this_header['datatype']}")
# If we got here, we can now try to append:
current_size = len(output_group[name])
output_group[name].resize((current_size + len(table), ))
output_group[name][current_size:] = table.as_array()
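# A minimal write/append sketch for the function above (illustrative only;
# 'samples.hdf5' and the table contents are hypothetical). The first write
# should use serialize_meta=True, because the append path requires a stored
# metadata header, and maxshape=(None,) so h5py creates a resizable dataset:
#
#     from astropy.table import Table
#
#     t1 = Table({'a': [1., 2.], 'b': [3., 4.]})
#     t2 = Table({'a': [5.], 'b': [6.]})
#
#     write_table_hdf5(t1, 'samples.hdf5', path='samples',
#                      serialize_meta=True, maxshape=(None,))
#     write_table_hdf5(t2, 'samples.hdf5', path='samples', append=True)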
def inferencedata_to_samples(joker_prior, inferencedata, data,
prune_divergences=True):
"""
Create a ``JokerSamples`` instance from an arviz object.
Parameters
----------
joker_prior : `thejoker.JokerPrior`
inferencedata : `arviz.InferenceData`
data : `thejoker.RVData`
    prune_divergences : bool (optional)
        If `True` (default), drop samples drawn during divergent transitions,
        as flagged by ``inferencedata.sample_stats.diverging``.
    """
from thejoker.samples import JokerSamples
import exoplanet.units as xu
if hasattr(inferencedata, 'posterior'):
posterior = inferencedata.posterior
else:
posterior = inferencedata
inferencedata = None
data, *_ = validate_prepare_data(data,
joker_prior.poly_trend,
joker_prior.n_offsets)
samples = JokerSamples(poly_trend=joker_prior.poly_trend,
n_offsets=joker_prior.n_offsets,
t_ref=data.t_ref)
names = joker_prior.par_names
for name in names:
if name in joker_prior.pars:
par = joker_prior.pars[name]
unit = getattr(par, xu.UNIT_ATTR_NAME)
samples[name] = posterior[name].values.ravel() * unit
else:
samples[name] = posterior[name].values.ravel()
if hasattr(posterior, 'logp'):
samples['ln_posterior'] = posterior.logp.values.ravel()
for name in ['ln_likelihood', 'ln_prior']:
if hasattr(posterior, name):
samples[name] = getattr(posterior, name).values.ravel()
if prune_divergences:
if inferencedata is None:
            raise ValueError(
                "If you want to remove divergences, you must pass in the "
                "root-level inferencedata object (instead of, e.g., "
                "inferencedata.posterior).")
divergences = inferencedata.sample_stats.diverging.values.ravel()
samples = samples[~divergences]
return samples
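# Hedged usage sketch (``prior``, ``idata`` and ``rv_data`` are assumed to be a
# thejoker.JokerPrior, an arviz.InferenceData from a pymc3 run, and a
# thejoker.RVData instance, respectively; none of them are defined in this
# module):
#
#     samples = inferencedata_to_samples(prior, idata, rv_data)
#     with_divergences = inferencedata_to_samples(prior, idata, rv_data,
#                                                 prune_divergences=False)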
def trace_to_samples(self, trace, data, names=None):
"""
Create a ``JokerSamples`` instance from a pymc3 trace object.
Parameters
----------
    trace : `~pymc3.backends.base.MultiTrace`
    data : `thejoker.RVData`
    names : list (optional)
        Parameter names to extract from the trace; defaults to all parameter
        names defined by the prior.
    """
import pymc3 as pm
import exoplanet.units as xu
from thejoker.samples import JokerSamples
df = pm.trace_to_dataframe(trace)
data, *_ = validate_prepare_data(data,
self.prior.poly_trend,
self.prior.n_offsets)
samples = JokerSamples(poly_trend=self.prior.poly_trend,
n_offsets=self.prior.n_offsets,
t_ref=data.t_ref)
if names is None:
names = self.prior.par_names
for name in names:
par = self.prior.pars[name]
unit = getattr(par, xu.UNIT_ATTR_NAME)
samples[name] = df[name].values * unit
return samples
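# Hedged usage sketch (this function takes ``self``, so it is assumed to be
# bound as a method of an object exposing a ``prior`` attribute, e.g. a
# TheJoker sampler instance ``joker``; ``trace`` and ``rv_data`` are
# hypothetical pymc3 MultiTrace and RVData objects):
#
#     samples = trace_to_samples(joker, trace, rv_data)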
|
|
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
import os,re,imp,sys
from waflib import Utils,Errors,Logs
import waflib.Node
HEXVERSION=0x1081300
WAFVERSION="1.8.19"
WAFREVISION="f14a6d43092d3419d90c1ce16b9d3c700309d7b3"
ABI=98
DBFILE='.wafpickle-%s-%d-%d'%(sys.platform,sys.hexversion,ABI)
APPNAME='APPNAME'
VERSION='VERSION'
TOP='top'
OUT='out'
WSCRIPT_FILE='wscript'
launch_dir=''
run_dir=''
top_dir=''
out_dir=''
waf_dir=''
local_repo=''
remote_repo='https://raw.githubusercontent.com/waf-project/waf/master/'
remote_locs=['waflib/extras','waflib/Tools']
g_module=None
STDOUT=1
STDERR=-1
BOTH=0
classes=[]
def create_context(cmd_name,*k,**kw):
global classes
for x in classes:
if x.cmd==cmd_name:
return x(*k,**kw)
ctx=Context(*k,**kw)
ctx.fun=cmd_name
return ctx
class store_context(type):
def __init__(cls,name,bases,dict):
super(store_context,cls).__init__(name,bases,dict)
name=cls.__name__
if name=='ctx'or name=='Context':
return
try:
cls.cmd
except AttributeError:
raise Errors.WafError('Missing command for the context class %r (cmd)'%name)
if not getattr(cls,'fun',None):
cls.fun=cls.cmd
global classes
classes.insert(0,cls)
ctx=store_context('ctx',(object,),{})
class Context(ctx):
errors=Errors
tools={}
def __init__(self,**kw):
try:
rd=kw['run_dir']
except KeyError:
global run_dir
rd=run_dir
self.node_class=type("Nod3",(waflib.Node.Node,),{})
self.node_class.__module__="waflib.Node"
self.node_class.ctx=self
self.root=self.node_class('',None)
self.cur_script=None
self.path=self.root.find_dir(rd)
self.stack_path=[]
self.exec_dict={'ctx':self,'conf':self,'bld':self,'opt':self}
self.logger=None
def __hash__(self):
return id(self)
def finalize(self):
try:
logger=self.logger
except AttributeError:
pass
else:
Logs.free_logger(logger)
delattr(self,'logger')
def load(self,tool_list,*k,**kw):
tools=Utils.to_list(tool_list)
path=Utils.to_list(kw.get('tooldir',''))
with_sys_path=kw.get('with_sys_path',True)
for t in tools:
module=load_tool(t,path,with_sys_path=with_sys_path)
fun=getattr(module,kw.get('name',self.fun),None)
if fun:
fun(self)
def execute(self):
global g_module
self.recurse([os.path.dirname(g_module.root_path)])
def pre_recurse(self,node):
self.stack_path.append(self.cur_script)
self.cur_script=node
self.path=node.parent
def post_recurse(self,node):
self.cur_script=self.stack_path.pop()
if self.cur_script:
self.path=self.cur_script.parent
def recurse(self,dirs,name=None,mandatory=True,once=True,encoding=None):
try:
cache=self.recurse_cache
except AttributeError:
cache=self.recurse_cache={}
for d in Utils.to_list(dirs):
if not os.path.isabs(d):
d=os.path.join(self.path.abspath(),d)
WSCRIPT=os.path.join(d,WSCRIPT_FILE)
WSCRIPT_FUN=WSCRIPT+'_'+(name or self.fun)
node=self.root.find_node(WSCRIPT_FUN)
if node and(not once or node not in cache):
cache[node]=True
self.pre_recurse(node)
try:
function_code=node.read('rU',encoding)
exec(compile(function_code,node.abspath(),'exec'),self.exec_dict)
finally:
self.post_recurse(node)
elif not node:
node=self.root.find_node(WSCRIPT)
tup=(node,name or self.fun)
if node and(not once or tup not in cache):
cache[tup]=True
self.pre_recurse(node)
try:
wscript_module=load_module(node.abspath(),encoding=encoding)
user_function=getattr(wscript_module,(name or self.fun),None)
if not user_function:
if not mandatory:
continue
raise Errors.WafError('No function %s defined in %s'%(name or self.fun,node.abspath()))
user_function(self)
finally:
self.post_recurse(node)
elif not node:
if not mandatory:
continue
try:
os.listdir(d)
except OSError:
raise Errors.WafError('Cannot read the folder %r'%d)
raise Errors.WafError('No wscript file in directory %s'%d)
def exec_command(self,cmd,**kw):
subprocess=Utils.subprocess
kw['shell']=isinstance(cmd,str)
Logs.debug('runner: %r'%(cmd,))
Logs.debug('runner_env: kw=%s'%kw)
if self.logger:
self.logger.info(cmd)
if'stdout'not in kw:
kw['stdout']=subprocess.PIPE
if'stderr'not in kw:
kw['stderr']=subprocess.PIPE
if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]):
raise Errors.WafError("Program %s not found!"%cmd[0])
wargs={}
if'timeout'in kw:
if kw['timeout']is not None:
wargs['timeout']=kw['timeout']
del kw['timeout']
if'input'in kw:
if kw['input']:
wargs['input']=kw['input']
kw['stdin']=Utils.subprocess.PIPE
del kw['input']
try:
if kw['stdout']or kw['stderr']:
p=subprocess.Popen(cmd,**kw)
(out,err)=p.communicate(**wargs)
ret=p.returncode
else:
out,err=(None,None)
ret=subprocess.Popen(cmd,**kw).wait(**wargs)
		except Exception as e:
raise Errors.WafError('Execution failure: %s'%str(e),ex=e)
if out:
if not isinstance(out,str):
out=out.decode(sys.stdout.encoding or'iso8859-1')
if self.logger:
self.logger.debug('out: %s'%out)
else:
Logs.info(out,extra={'stream':sys.stdout,'c1':''})
if err:
if not isinstance(err,str):
err=err.decode(sys.stdout.encoding or'iso8859-1')
if self.logger:
self.logger.error('err: %s'%err)
else:
Logs.info(err,extra={'stream':sys.stderr,'c1':''})
return ret
def cmd_and_log(self,cmd,**kw):
subprocess=Utils.subprocess
kw['shell']=isinstance(cmd,str)
Logs.debug('runner: %r'%(cmd,))
if'quiet'in kw:
quiet=kw['quiet']
del kw['quiet']
else:
quiet=None
if'output'in kw:
to_ret=kw['output']
del kw['output']
else:
to_ret=STDOUT
if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]):
raise Errors.WafError("Program %s not found!"%cmd[0])
kw['stdout']=kw['stderr']=subprocess.PIPE
if quiet is None:
self.to_log(cmd)
wargs={}
if'timeout'in kw:
if kw['timeout']is not None:
wargs['timeout']=kw['timeout']
del kw['timeout']
if'input'in kw:
if kw['input']:
wargs['input']=kw['input']
kw['stdin']=Utils.subprocess.PIPE
del kw['input']
try:
p=subprocess.Popen(cmd,**kw)
(out,err)=p.communicate(**wargs)
		except Exception as e:
raise Errors.WafError('Execution failure: %s'%str(e),ex=e)
if not isinstance(out,str):
out=out.decode(sys.stdout.encoding or'iso8859-1')
if not isinstance(err,str):
err=err.decode(sys.stdout.encoding or'iso8859-1')
if out and quiet!=STDOUT and quiet!=BOTH:
self.to_log('out: %s'%out)
if err and quiet!=STDERR and quiet!=BOTH:
self.to_log('err: %s'%err)
if p.returncode:
e=Errors.WafError('Command %r returned %r'%(cmd,p.returncode))
e.returncode=p.returncode
e.stderr=err
e.stdout=out
raise e
if to_ret==BOTH:
return(out,err)
elif to_ret==STDERR:
return err
return out
def fatal(self,msg,ex=None):
if self.logger:
self.logger.info('from %s: %s'%(self.path.abspath(),msg))
try:
msg='%s\n(complete log in %s)'%(msg,self.logger.handlers[0].baseFilename)
except Exception:
pass
raise self.errors.ConfigurationError(msg,ex=ex)
def to_log(self,msg):
if not msg:
return
if self.logger:
self.logger.info(msg)
else:
sys.stderr.write(str(msg))
sys.stderr.flush()
def msg(self,*k,**kw):
try:
msg=kw['msg']
except KeyError:
msg=k[0]
self.start_msg(msg,**kw)
try:
result=kw['result']
except KeyError:
result=k[1]
color=kw.get('color',None)
if not isinstance(color,str):
color=result and'GREEN'or'YELLOW'
self.end_msg(result,color,**kw)
def start_msg(self,*k,**kw):
if kw.get('quiet',None):
return
msg=kw.get('msg',None)or k[0]
try:
if self.in_msg:
self.in_msg+=1
return
except AttributeError:
self.in_msg=0
self.in_msg+=1
try:
self.line_just=max(self.line_just,len(msg))
except AttributeError:
self.line_just=max(40,len(msg))
for x in(self.line_just*'-',msg):
self.to_log(x)
Logs.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='')
def end_msg(self,*k,**kw):
if kw.get('quiet',None):
return
self.in_msg-=1
if self.in_msg:
return
result=kw.get('result',None)or k[0]
defcolor='GREEN'
if result==True:
msg='ok'
elif result==False:
msg='not found'
defcolor='YELLOW'
else:
msg=str(result)
self.to_log(msg)
try:
color=kw['color']
except KeyError:
if len(k)>1 and k[1]in Logs.colors_lst:
color=k[1]
else:
color=defcolor
Logs.pprint(color,msg)
def load_special_tools(self,var,ban=[]):
global waf_dir
if os.path.isdir(waf_dir):
lst=self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
for x in lst:
if not x.name in ban:
load_tool(x.name.replace('.py',''))
else:
from zipfile import PyZipFile
waflibs=PyZipFile(waf_dir)
lst=waflibs.namelist()
for x in lst:
if not re.match("waflib/extras/%s"%var.replace("*",".*"),var):
continue
f=os.path.basename(x)
doban=False
for b in ban:
r=b.replace("*",".*")
if re.match(r,f):
doban=True
if not doban:
f=f.replace('.py','')
load_tool(f)
cache_modules={}
def load_module(path,encoding=None):
try:
return cache_modules[path]
except KeyError:
pass
module=imp.new_module(WSCRIPT_FILE)
try:
code=Utils.readf(path,m='rU',encoding=encoding)
except EnvironmentError:
raise Errors.WafError('Could not read the file %r'%path)
module_dir=os.path.dirname(path)
sys.path.insert(0,module_dir)
try:exec(compile(code,path,'exec'),module.__dict__)
finally:sys.path.remove(module_dir)
cache_modules[path]=module
return module
def load_tool(tool,tooldir=None,ctx=None,with_sys_path=True):
if tool=='java':
tool='javaw'
else:
tool=tool.replace('++','xx')
origSysPath=sys.path
if not with_sys_path:sys.path=[]
try:
if tooldir:
assert isinstance(tooldir,list)
sys.path=tooldir+sys.path
try:
__import__(tool)
finally:
for d in tooldir:
sys.path.remove(d)
ret=sys.modules[tool]
Context.tools[tool]=ret
return ret
else:
if not with_sys_path:sys.path.insert(0,waf_dir)
try:
for x in('waflib.Tools.%s','waflib.extras.%s','waflib.%s','%s'):
try:
__import__(x%tool)
break
except ImportError:
x=None
if x is None:
__import__(tool)
finally:
if not with_sys_path:sys.path.remove(waf_dir)
ret=sys.modules[x%tool]
Context.tools[tool]=ret
return ret
finally:
if not with_sys_path:sys.path+=origSysPath
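# --- Hedged usage sketch (not part of the waf sources above) ------------------
# The store_context metaclass registers every Context subclass that declares a
# ``cmd`` attribute, and create_context() later instantiates the class whose
# ``cmd`` matches the requested command name. A minimal sketch, assuming this
# module is importable as ``waflib.Context``:
#
#     from waflib import Context
#
#     class hello_ctx(Context.Context):
#         cmd = 'hello'   # command name looked up by create_context()
#         fun = 'hello'   # wscript function that recurse() will execute
#
#     ctx = Context.create_context('hello')   # returns a hello_ctx instance
#     # an unknown command falls back to a plain Context with ctx.fun set to it
# -------------------------------------------------------------------------------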
|
|
# -*- coding: utf-8 -*-
from copy import deepcopy
import inspect
import sys
from six import iteritems
from six import iterkeys
from six import add_metaclass
from .common import *
from .datastructures import OrderedDict as OrderedDictWithSort
from .exceptions import (
BaseError, DataError, MockCreationError,
MissingValueError, UnknownFieldError
)
from .types import BaseType
from .types.serializable import Serializable
from .undefined import Undefined
try:
unicode #PY2
except:
import codecs
unicode = str #PY3
class FieldDescriptor(object):
"""
``FieldDescriptor`` instances serve as field accessors on models.
"""
def __init__(self, name):
"""
:param name:
The field's name
"""
self.name = name
def __get__(self, instance, cls):
"""
For a model instance, returns the field's current value.
For a model class, returns the field's type object.
"""
if instance is None:
return cls._fields[self.name]
else:
value = instance._data[self.name]
if value is Undefined:
raise MissingValueError
else:
return value
def __set__(self, instance, value):
"""
Sets the field's value.
"""
field = instance._fields[self.name]
value = field.pre_setattr(value)
instance._data[self.name] = value
def __delete__(self, instance):
"""
Deletes the field's value.
"""
instance._data[self.name] = Undefined
class ModelOptions(object):
"""
This class is a container for all model configuration options. Its
primary purpose is to create an independent instance of a model's
options for every class.
"""
def __init__(self, klass, namespace=None, roles=None, export_level=DEFAULT,
serialize_when_none=None, fields_order=None):
"""
:param klass:
The class which this options instance belongs to.
:param namespace:
A namespace identifier that can be used with persistence layers.
:param roles:
Allows to specify certain subsets of the model's fields for
serialization.
:param serialize_when_none:
When ``False``, serialization skips fields that are None.
Default: ``True``
:param fields_order:
List of field names that dictates the order in which keys will
appear in a serialized dictionary.
"""
self.klass = klass
self.namespace = namespace
self.roles = roles or {}
self.export_level = export_level
if serialize_when_none is True:
self.export_level = DEFAULT
elif serialize_when_none is False:
self.export_level = NONEMPTY
self.fields_order = fields_order
class ModelMeta(type):
"""
Metaclass for Models.
"""
def __new__(mcs, name, bases, attrs):
"""
This metaclass adds four attributes to host classes: mcs._fields,
mcs._serializables, mcs._validator_functions, and mcs._options.
This function creates those attributes like this:
        ``mcs._fields`` is a list of fields that are Schematics types
``mcs._serializables`` is a list of ``Serializable`` objects
``mcs._validator_functions`` are class-level validation functions
``mcs._options`` is the end result of parsing the ``Options`` class
"""
# Structures used to accumulate meta info
fields = OrderedDictWithSort()
serializables = {}
validator_functions = {} # Model level
        # Accumulate meta info from parent classes
for base in reversed(bases):
if hasattr(base, '_fields'):
fields.update(deepcopy(base._fields))
if hasattr(base, '_serializables'):
serializables.update(deepcopy(base._serializables))
if hasattr(base, '_validator_functions'):
validator_functions.update(base._validator_functions)
# Parse this class's attributes into meta structures
for key, value in iteritems(attrs):
if key.startswith('validate_') and callable(value):
validator_functions[key[9:]] = prepare_validator(value, 4)
if isinstance(value, BaseType):
fields[key] = value
if isinstance(value, Serializable):
serializables[key] = value
# Parse meta options
options = mcs._read_options(name, bases, attrs)
# Convert list of types into fields for new klass
fields.sort(key=lambda i: i[1]._position_hint)
for key, field in iteritems(fields):
attrs[key] = FieldDescriptor(key)
for key, serializable in iteritems(serializables):
attrs[key] = serializable
# Ready meta data to be klass attributes
attrs['_fields'] = fields
attrs['_serializables'] = serializables
attrs['_validator_functions'] = validator_functions
attrs['_options'] = options
klass = type.__new__(mcs, name, bases, attrs)
# Register class on ancestor models
klass._subclasses = []
for base in klass.__mro__[1:]:
if isinstance(base, ModelMeta):
base._subclasses.append(klass)
# Finalize fields
for field_name, field in fields.items():
field._setup(field_name, klass)
for field_name, field in serializables.items():
field._setup(field_name, klass)
return klass
@classmethod
def _read_options(mcs, name, bases, attrs):
"""
Parses `ModelOptions` instance into the options value attached to
`Model` instances.
"""
options_members = {}
for base in reversed(bases):
if hasattr(base, "_options"):
for key, value in inspect.getmembers(base._options):
if not key.startswith("_") and not key == "klass":
options_members[key] = value
options_class = attrs.get('__optionsclass__', ModelOptions)
if 'Options' in attrs:
for key, value in inspect.getmembers(attrs['Options']):
if not key.startswith("_"):
if key == "roles":
roles = options_members.get("roles", {}).copy()
roles.update(value)
options_members["roles"] = roles
else:
options_members[key] = value
return options_class(mcs, **options_members)
@property
def fields(cls):
return cls._fields
class NonDictModelMeta(ModelMeta):
def __new__(mcs, name, bases, attrs):
klass = ModelMeta.__new__(mcs, name, bases, attrs)
if name != 'NonDictModel' and len(klass._fields) != 1:
            raise Exception('Class should contain exactly one field')
return klass
@add_metaclass(ModelMeta)
class Model(object):
"""
Enclosure for fields and validation. Same pattern deployed by Django
models, SQLAlchemy declarative extension and other developer friendly
libraries.
:param Mapping raw_data:
The data to be imported into the model instance.
:param Mapping deserialize_mapping:
Can be used to provide alternative input names for fields. Values may be
strings or lists of strings, keyed by the actual field name.
:param bool partial:
Allow partial data to validate. Essentially drops the ``required=True``
settings from field definitions. Default: True
:param bool strict:
Complain about unrecognized keys. Default: True
"""
__optionsclass__ = ModelOptions
def __init__(self, raw_data=None, trusted_data=None, deserialize_mapping=None,
init=True, partial=True, strict=True, validate=False, app_data=None,
**kwargs):
self._initial = raw_data or {}
kwargs.setdefault('init_values', init)
kwargs.setdefault('apply_defaults', init)
self._data = self.convert(raw_data,
trusted_data=trusted_data, mapping=deserialize_mapping,
partial=partial, strict=strict, validate=validate, new=True,
app_data=app_data, **kwargs)
def validate(self, partial=False, convert=True, app_data=None, **kwargs):
"""
Validates the state of the model. If the data is invalid, raises a ``DataError``
with error messages.
:param bool partial:
Allow partial data to validate. Essentially drops the ``required=True``
settings from field definitions. Default: False
:param convert:
Controls whether to perform import conversion before validating.
Can be turned off to skip an unnecessary conversion step if all values
are known to have the right datatypes (e.g., when validating immediately
after the initial import). Default: True
"""
data = validate(self.__class__, self._data, partial=partial, convert=convert,
app_data=app_data, **kwargs)
if convert:
self._data.update(**data)
def import_data(self, raw_data, **kw):
"""
Converts and imports the raw data into an existing model instance.
:param raw_data:
The data to be imported.
"""
data = self.convert(raw_data, **kw)
del_keys = [k for k in data.keys() if data[k] is Undefined]
for k in del_keys:
del data[k]
self._data.update(data)
return self
def convert(self, raw_data, **kw):
"""
Converts the raw data into richer Python constructs according to the
fields on the model
:param raw_data:
The data to be converted
"""
_validate = getattr(kw.get('context'), 'validate', kw.get('validate', False))
if _validate:
return validate(self.__class__, raw_data, **kw)
else:
return convert(self.__class__, raw_data, **kw)
@classmethod
def _convert(cls, obj, context):
if context.new or not isinstance(obj, Model):
return cls(obj, context=context)
else:
data = obj.convert(obj._data, context=context)
if context.convert:
obj._data.update(data)
return obj
def export(self, format, field_converter=None, role=None, app_data=None, **kwargs):
data = export_loop(self.__class__, self, field_converter=field_converter,
role=role, app_data=app_data, **kwargs)
if format == NATIVE:
return self.__class__(data, init=False)
else:
return data
def to_native(self, role=None, app_data=None, **kwargs):
data = to_native(self.__class__, self, role=role, app_data=app_data, **kwargs)
return self.__class__(data, init=False)
def to_dict(self, role=None, app_data=None, **kwargs):
return to_dict(self.__class__, self, role=role, app_data=app_data, **kwargs)
def to_primitive(self, role=None, app_data=None, **kwargs):
return to_primitive(self.__class__, self, role=role, app_data=app_data, **kwargs)
def serialize(self, role=None, app_data=None, **kwargs):
return self.to_primitive(role=role, app_data=app_data, **kwargs)
def flatten(self, role=None, prefix="", app_data=None, context=None):
"""
Return data as a pure key-value dictionary, where the values are
primitive types (string, bool, int, long).
:param role:
Filter output by a specific role
:param prefix:
A prefix to use for keynames during flattening.
"""
return flatten(self.__class__, self, role=role, prefix=prefix,
app_data=app_data, context=context)
@classmethod
def from_flat(cls, data):
return cls(expand(data))
def atoms(self):
"""
Iterator for the atomic components of a model definition and relevant
data that creates a 3-tuple of the field's name, its type instance and
its value.
"""
return atoms(self.__class__, self)
def __iter__(self):
return self.iter()
def iter(self):
return iter(self.keys())
def keys(self):
return [k for k in self._fields if self._data[k] is not Undefined]
def items(self):
return [(k, self._data[k]) for k in self.keys()]
def values(self):
return [self._data[k] for k in self.keys()]
def get(self, key, default=None):
return getattr(self, key, default)
@classmethod
def get_mock_object(cls, context=None, overrides=None):
"""Get a mock object.
:param dict context:
:param dict overrides: overrides for the model
"""
if overrides is None:
overrides = {}
values = {}
for name, field in cls.fields.items():
if name not in overrides:
try:
values[name] = field.mock(context)
except MockCreationError as exc:
                    raise MockCreationError('%s: %s' % (name, exc))
values.update(overrides)
return cls(values)
def __getitem__(self, name):
if name in self._fields or name in self._serializables:
return getattr(self, name)
else:
raise UnknownFieldError
def __setitem__(self, name, value):
if name in self._fields:
return setattr(self, name, value)
else:
raise UnknownFieldError
def __delitem__(self, name):
if name in self._fields:
return delattr(self, name)
else:
raise UnknownFieldError
def __contains__(self, name):
return name in self.keys() or name in self._serializables
def __len__(self):
return len(self.keys())
def __eq__(self, other):
if isinstance(other, self.__class__):
for k in self._fields:
if self._data[k] != other._data[k]:
return False
return True
return NotImplemented
def __ne__(self, other):
return not self == other
def __repr__(self):
try:
obj = unicode(self)
except (UnicodeEncodeError, UnicodeDecodeError):
obj = '[Bad Unicode data]'
try:
class_name = unicode(self.__class__.__name__)
except (UnicodeEncodeError, UnicodeDecodeError):
class_name = '[Bad Unicode class name]'
return u"<%s: %s>" % (class_name, obj)
def __str__(self):
return '%s object' % self.__class__.__name__
def __unicode__(self):
return '%s object' % self.__class__.__name__
@add_metaclass(NonDictModelMeta)
class NonDictModel(Model):
def convert(self, raw_data, **kw):
"""
Converts the raw data into richer Python constructs according to the
fields on the model
:param raw_data:
The data to be converted
"""
        if not isinstance(raw_data, dict):
            raw_data = {list(self._fields)[0]: raw_data}
        elif raw_data:
            raise Exception('raw_data should be an empty dict')
return Model.convert(self, raw_data, **kw)
def to_primitive(self, role=None, context=None):
res = Model.to_primitive(self, role, context)
        return list(res.values())[0]
def to_native(self, role=None, context=None):
res = Model.to_native(self, role, context)
        return list(res.values())[0]
from .transforms import (
atoms, export_loop,
convert, to_native, to_dict, to_primitive,
flatten, expand,
)
from .validate import validate, prepare_validator
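# --- Hedged usage sketch (illustration only) -----------------------------------
# ModelMeta collects every BaseType attribute into ``_fields`` and installs a
# FieldDescriptor for it, so subclasses behave like typed records. A minimal
# sketch, assuming a ``StringType`` field type is available from the package's
# types module (the concrete type name is an assumption here):
#
#     from schematics.models import Model
#     from schematics.types import StringType
#
#     class Person(Model):
#         name = StringType(required=True)
#
#     p = Person({'name': 'Ada'})
#     p.validate()                # raises DataError on invalid input
#     print(p.to_primitive())     # {'name': 'Ada'}
# --------------------------------------------------------------------------------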
|
|
#! /usr/bin/env python
# Copyright (c) 2015 Samuel Merritt <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import json
import mock
import unittest
from StringIO import StringIO
from test.unit import with_tempdir
from swift.cli.ring_builder_analyzer import parse_scenario, run_scenario
class TestRunScenario(unittest.TestCase):
@with_tempdir
def test_it_runs(self, tempdir):
builder_path = os.path.join(tempdir, 'test.builder')
scenario = {
'replicas': 3, 'part_power': 8, 'random_seed': 123, 'overload': 0,
'rounds': [[['add', 'r1z2-3.4.5.6:7/sda8', 100],
['add', 'z2-3.4.5.6:7/sda9', 200],
['add', 'z2-3.4.5.6:7/sda10', 200],
['add', 'z2-3.4.5.6:7/sda11', 200]],
[['set_weight', 0, 150]],
[['remove', 1]],
[['save', builder_path]]]}
parsed = parse_scenario(json.dumps(scenario))
fake_stdout = StringIO()
with mock.patch('sys.stdout', fake_stdout):
run_scenario(parsed)
# Just test that it produced some output as it ran; the fact that
# this doesn't crash and produces output that resembles something
# useful is good enough.
self.assertTrue('Rebalance' in fake_stdout.getvalue())
self.assertTrue(os.path.exists(builder_path))
class TestParseScenario(unittest.TestCase):
def test_good(self):
scenario = {
'replicas': 3, 'part_power': 8, 'random_seed': 123, 'overload': 0,
'rounds': [[['add', 'r1z2-3.4.5.6:7/sda8', 100],
['add', 'z2-3.4.5.6:7/sda9', 200]],
[['set_weight', 0, 150]],
[['remove', 1]]]}
parsed = parse_scenario(json.dumps(scenario))
self.assertEqual(parsed['replicas'], 3)
self.assertEqual(parsed['part_power'], 8)
self.assertEqual(parsed['random_seed'], 123)
self.assertEqual(parsed['overload'], 0)
self.assertEqual(parsed['rounds'], [
[['add', {'device': 'sda8',
'ip': '3.4.5.6',
'meta': '',
'port': 7,
'region': 1,
'replication_ip': '3.4.5.6',
'replication_port': 7,
'weight': 100.0,
'zone': 2}],
['add', {'device': u'sda9',
'ip': u'3.4.5.6',
'meta': '',
'port': 7,
'region': 1,
'replication_ip': '3.4.5.6',
'replication_port': 7,
'weight': 200.0,
'zone': 2}]],
[['set_weight', 0, 150.0]],
[['remove', 1]]])
# The rest of this test class is just a catalog of the myriad ways that
# the input can be malformed.
def test_invalid_json(self):
self.assertRaises(ValueError, parse_scenario, "{")
def test_json_not_object(self):
self.assertRaises(ValueError, parse_scenario, "[]")
self.assertRaises(ValueError, parse_scenario, "\"stuff\"")
def test_bad_replicas(self):
working_scenario = {
'replicas': 3, 'part_power': 8, 'random_seed': 123, 'overload': 0,
'rounds': [[['add', 'r1z2-3.4.5.6:7/sda8', 100]]]}
busted = dict(working_scenario)
del busted['replicas']
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
busted = dict(working_scenario, replicas='blahblah')
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
busted = dict(working_scenario, replicas=-1)
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
def test_bad_part_power(self):
working_scenario = {
'replicas': 3, 'part_power': 8, 'random_seed': 123, 'overload': 0,
'rounds': [[['add', 'r1z2-3.4.5.6:7/sda8', 100]]]}
busted = dict(working_scenario)
del busted['part_power']
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
busted = dict(working_scenario, part_power='blahblah')
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
busted = dict(working_scenario, part_power=0)
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
busted = dict(working_scenario, part_power=33)
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
def test_bad_random_seed(self):
working_scenario = {
'replicas': 3, 'part_power': 8, 'random_seed': 123, 'overload': 0,
'rounds': [[['add', 'r1z2-3.4.5.6:7/sda8', 100]]]}
busted = dict(working_scenario)
del busted['random_seed']
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
busted = dict(working_scenario, random_seed='blahblah')
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
def test_bad_overload(self):
working_scenario = {
'replicas': 3, 'part_power': 8, 'random_seed': 123, 'overload': 0,
'rounds': [[['add', 'r1z2-3.4.5.6:7/sda8', 100]]]}
busted = dict(working_scenario)
del busted['overload']
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
busted = dict(working_scenario, overload='blahblah')
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
busted = dict(working_scenario, overload=-0.01)
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
def test_bad_rounds(self):
base = {
'replicas': 3, 'part_power': 8, 'random_seed': 123, 'overload': 0}
self.assertRaises(ValueError, parse_scenario, json.dumps(base))
busted = dict(base, rounds={})
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
busted = dict(base, rounds=[{}])
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
busted = dict(base, rounds=[[['bork']]])
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
def test_bad_add(self):
base = {
'replicas': 3, 'part_power': 8, 'random_seed': 123, 'overload': 0}
# no dev
busted = dict(base, rounds=[[['add']]])
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
# no weight
busted = dict(base, rounds=[[['add', 'r1z2-1.2.3.4:6000/d7']]])
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
# too many fields
busted = dict(base, rounds=[[['add', 'r1z2-1.2.3.4:6000/d7', 1, 2]]])
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
# can't parse
busted = dict(base, rounds=[[['add', 'not a good value', 100]]])
# N.B. the ValueError's coming out of ring.utils.parse_add_value
# are already pretty good
expected = "Invalid device specifier (round 0, command 0): " \
"Invalid add value: not a good value"
try:
parse_scenario(json.dumps(busted))
except ValueError as err:
self.assertEqual(str(err), expected)
# negative weight
busted = dict(base, rounds=[[['add', 'r1z2-1.2.3.4:6000/d7', -1]]])
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
def test_bad_remove(self):
base = {
'replicas': 3, 'part_power': 8, 'random_seed': 123, 'overload': 0}
# no dev
busted = dict(base, rounds=[[['remove']]])
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
# bad dev id
busted = dict(base, rounds=[[['remove', 'not an int']]])
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
# too many fields
busted = dict(base, rounds=[[['remove', 1, 2]]])
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
def test_bad_set_weight(self):
base = {
'replicas': 3, 'part_power': 8, 'random_seed': 123, 'overload': 0}
# no dev
busted = dict(base, rounds=[[['set_weight']]])
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
# no weight
busted = dict(base, rounds=[[['set_weight', 0]]])
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
# bad dev id
busted = dict(base, rounds=[[['set_weight', 'not an int', 90]]])
expected = "Invalid device ID in set_weight (round 0, command 0): " \
"invalid literal for int() with base 10: 'not an int'"
try:
parse_scenario(json.dumps(busted))
except ValueError as e:
self.assertEqual(str(e), expected)
# negative weight
busted = dict(base, rounds=[[['set_weight', 1, -1]]])
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
# bogus weight
busted = dict(base, rounds=[[['set_weight', 1, 'bogus']]])
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
def test_bad_save(self):
base = {
'replicas': 3, 'part_power': 8, 'random_seed': 123, 'overload': 0}
# no builder name
busted = dict(base, rounds=[[['save']]])
self.assertRaises(ValueError, parse_scenario, json.dumps(busted))
|
|
# Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import memcache
from oslo_log import log as logging
import six
from stackalytics.processor import utils
LOG = logging.getLogger(__name__)
BULK_READ_SIZE = 64
BULK_DELETE_SIZE = 4096
RECORD_ID_PREFIX = 'record:'
UPDATE_ID_PREFIX = 'update:'
MEMCACHED_URI_PREFIX = r'^memcached:\/\/'
class RuntimeStorage(object):
def __init__(self, uri):
pass
def set_records(self, records_iterator):
pass
def apply_corrections(self, corrections_iterator):
pass
def get_by_key(self, key):
pass
def set_by_key(self, key, value):
pass
def get_update(self, pid):
pass
def active_pids(self, pids):
pass
def get_all_records(self):
pass
class MemcachedStorage(RuntimeStorage):
def __init__(self, uri):
super(MemcachedStorage, self).__init__(uri)
stripped = re.sub(MEMCACHED_URI_PREFIX, '', uri)
if stripped:
storage_uri = stripped.split(',')
self.memcached = memcache.Client(storage_uri)
self._init_user_count()
self.record_index = {}
else:
raise Exception('Invalid storage uri %s' % uri)
def _build_index_lazily(self):
if self.record_index:
return
for record in self.get_all_records():
self.record_index[record['primary_key']] = record['record_id']
def set_records(self, records_iterator, merge_handler=None):
self._build_index_lazily()
for record in records_iterator:
if record['primary_key'] in self.record_index:
# update
record_id = self.record_index[record['primary_key']]
if not merge_handler:
record['record_id'] = record_id
LOG.debug('Update record %s', record)
self.set_by_key(self._get_record_name(record_id), record)
else:
original = self.get_by_key(self._get_record_name(
record_id))
if merge_handler(original, record):
LOG.debug('Update record with merge %s', record)
self.set_by_key(self._get_record_name(record_id),
original)
else:
# insert record
record_id = self._get_record_count()
record['record_id'] = record_id
self.record_index[record['primary_key']] = record_id
LOG.debug('Insert new record %s', record)
self.set_by_key(self._get_record_name(record_id), record)
self._set_record_count(record_id + 1)
self._commit_update(record_id)
def apply_corrections(self, corrections_iterator):
self._build_index_lazily()
for correction in corrections_iterator:
if correction['primary_key'] not in self.record_index:
continue
record_id = self.record_index[correction['primary_key']]
original = self.get_by_key(self._get_record_name(record_id))
need_update = False
for field, value in six.iteritems(correction):
if (field not in original) or (original[field] != value):
need_update = True
original[field] = value
if need_update:
self.set_by_key(self._get_record_name(record_id), original)
self._commit_update(record_id)
def inc_user_count(self):
return self.memcached.incr('user:count')
def get_all_users(self):
for n in six.moves.range(0, self.get_by_key('user:count') + 1):
user = self.get_by_key('user:%s' % n)
if user:
yield user
def get_by_key(self, key):
if six.PY2:
key = key.encode('utf8')
return self.memcached.get(key)
def set_by_key(self, key, value):
if six.PY2:
key = key.encode('utf8')
if not self.memcached.set(key, value):
LOG.critical('Failed to store data in memcached: '
'key %(key)s, value %(value)s',
{'key': key, 'value': value})
raise Exception('Memcached set failed')
def delete_by_key(self, key):
if six.PY2:
key = key.encode('utf8')
if not self.memcached.delete(key):
LOG.critical('Failed to delete data from memcached: key %s', key)
raise Exception('Memcached delete failed')
def get_update(self, pid):
last_update = self.get_by_key('pid:%s' % pid)
update_count = self._get_update_count()
self.set_by_key('pid:%s' % pid, update_count)
self._set_pids(pid)
if not last_update:
for i in self.get_all_records():
yield i
else:
for update_id_set in utils.make_range(last_update, update_count,
BULK_READ_SIZE):
update_set = self.memcached.get_multi(
update_id_set, UPDATE_ID_PREFIX).values()
for i in self.memcached.get_multi(
update_set, RECORD_ID_PREFIX).values():
yield i
def active_pids(self, pids):
stored_pids = self.get_by_key('pids') or set()
for pid in stored_pids:
if pid not in pids:
LOG.debug('Purge dead uwsgi pid %s from pids list', pid)
self.delete_by_key('pid:%s' % pid)
self.set_by_key('pids', pids)
# remove unneeded updates
min_update = self._get_update_count()
for pid in pids:
n = self.get_by_key('pid:%s' % pid)
if n:
if n < min_update:
min_update = n
first_valid_update = self.get_by_key('first_valid_update') or 0
LOG.debug('Purge polled updates from %(first)s to %(min)s',
{'first': first_valid_update, 'min': min_update})
for delete_id_set in utils.make_range(first_valid_update, min_update,
BULK_DELETE_SIZE):
if not self.memcached.delete_multi(delete_id_set,
key_prefix=UPDATE_ID_PREFIX):
LOG.critical('Failed to delete_multi from memcached')
raise Exception('Failed to delete_multi from memcached')
self.set_by_key('first_valid_update', min_update)
def _get_update_count(self):
return self.get_by_key('update:count') or 0
def _set_pids(self, pid):
pids = self.get_by_key('pids') or set()
if pid in pids:
return
pids.add(pid)
self.set_by_key('pids', pids)
def _get_record_name(self, record_id):
return RECORD_ID_PREFIX + str(record_id)
def _get_record_count(self):
return self.get_by_key('record:count') or 0
def _set_record_count(self, count):
self.set_by_key('record:count', count)
def get_all_records(self):
for record_id_set in utils.make_range(0, self._get_record_count(),
BULK_READ_SIZE):
for i in self.memcached.get_multi(
record_id_set, RECORD_ID_PREFIX).values():
yield i
def _commit_update(self, record_id):
count = self._get_update_count()
self.set_by_key(UPDATE_ID_PREFIX + str(count), record_id)
self.set_by_key('update:count', count + 1)
def _init_user_count(self):
if not self.get_by_key('user:count'):
self.set_by_key('user:count', 1)
def get_runtime_storage(uri):
LOG.debug('Runtime storage is requested for uri %s', uri)
match = re.search(MEMCACHED_URI_PREFIX, uri)
if match:
return MemcachedStorage(uri)
else:
raise Exception('Unknown runtime storage uri %s' % uri)
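# --- Hedged usage sketch (illustration only) -----------------------------------
# get_runtime_storage() selects the backend from the URI scheme; records are
# plain dicts carrying a 'primary_key' and get incremental record ids assigned.
# A minimal sketch, assuming a memcached instance listens on localhost:11211:
#
#     storage = get_runtime_storage('memcached://127.0.0.1:11211')
#     storage.set_records(iter([{'primary_key': 'commit:abc', 'author': 'jdoe'}]))
#     for record in storage.get_all_records():
#         print(record['record_id'], record['primary_key'])
# --------------------------------------------------------------------------------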
|
|
from antlr4_generated.CListener import CListener
from antlr4_generated.CParser import CParser
from AbstractSyntaxTree import *
from antlr4 import tree
from antlr4 import ParserRuleContext
import sys
class Listener(CListener):
def __init__(self, tree):
super(Listener, self).__init__()
self.ast = tree
self.currentNode = self.ast.root
self.createdNode = []
def enterProgram(self, ctx:CParser.ProgramContext):
self.ast.root = ASTProgramNode()
self.currentNode = self.ast.root
def exitProgram(self, ctx:CParser.ProgramContext):
pass
def enterStdInclude(self, ctx:CParser.StdIncludeContext):
self.currentNode = self.currentNode.addChildNode(ASTIncludeNode(True, ctx.getText()))
def exitStdInclude(self, ctx:CParser.StdIncludeContext):
self.currentNode = self.currentNode.parent
def enterCustomInclude(self, ctx:CParser.CustomIncludeContext):
self.currentNode = self.currentNode.addChildNode(ASTIncludeNode(False, ctx.getText()[1:-1]))
def exitCustomInclude(self, ctx:CParser.CustomIncludeContext):
self.currentNode.children = []
self.currentNode = self.currentNode.parent
def enterStatements(self, ctx:CParser.StatementsContext):
self.currentNode = self.currentNode.addChildNode(ASTStatementsNode())
def exitStatements(self, ctx:CParser.StatementsContext):
self.currentNode = self.currentNode.parent
def enterStatement(self, ctx:CParser.StatementContext):
self.currentNode = self.currentNode.addChildNode(ASTStatementNode(ctx=ctx))
def exitStatement(self, ctx:CParser.StatementContext):
self.currentNode = self.currentNode.parent
def enterReturnStmt(self, ctx:CParser.ReturnStmtContext):
self.currentNode = self.currentNode.addChildNode(ASTReturnNode(ctx))
def exitReturnStmt(self, ctx:CParser.ReturnStmtContext):
self.currentNode = self.currentNode.parent
def enterBreakStmt(self, ctx:CParser.BreakStmtContext):
self.currentNode = self.currentNode.addChildNode(ASTBreakNode(ctx))
def exitBreakStmt(self, ctx:CParser.BreakStmtContext):
self.currentNode = self.currentNode.parent
def enterContinueStmt(self, ctx:CParser.ContinueStmtContext):
self.currentNode = self.currentNode.addChildNode(ASTContinueNode(ctx))
def exitContinueStmt(self, ctx:CParser.ContinueStmtContext):
self.currentNode = self.currentNode.parent
def enterVariableDeclaration(self, ctx:CParser.VariableDeclarationContext):
self.currentNode = self.currentNode.addChildNode(ASTVariableDeclarationNode())
def exitVariableDeclaration(self, ctx:CParser.VariableDeclarationContext):
self.currentNode = self.currentNode.parent
def enterDeclarationSpecifier(self, ctx:CParser.DeclarationSpecifierContext):
pass
def exitDeclarationSpecifier(self, ctx:CParser.DeclarationSpecifierContext):
pass
def enterCvQualifier(self, ctx:CParser.CvQualifierContext):
self.currentNode.isConstant = True
def exitCvQualifier(self, ctx:CParser.CvQualifierContext):
pass
def enterDeclarator1(self, ctx:CParser.Declarator1Context):
children = list(ctx.getChildren())
for child in children:
if isinstance(child, CParser.PointerPartContext):
self.currentNode.indirections.append((False, child.getChildCount() == 2)) # if child count == 2, there is a const node
for i in range(len(children) - 1, -1, -1):
if isinstance(children[i], CParser.ArrayPartContext):
self.currentNode.indirections.append((True, False))
def exitDeclarator1(self, ctx:CParser.Declarator1Context):
children = list(ctx.getChildren())
def enterDeclaratorInitializer(self, ctx:CParser.DeclaratorInitializerContext):
self.currentNode = self.currentNode.addChildNode(ASTDeclaratorInitializerNode(ctx))
def exitDeclaratorInitializer(self, ctx:CParser.DeclaratorInitializerContext):
self.currentNode = self.currentNode.parent
def enterVariable(self, ctx:CParser.VariableContext):
self.currentNode = self.currentNode.addChildNode(ASTVariableNode(ctx.getText(), ctx))
def exitVariable(self, ctx:CParser.VariableContext):
self.currentNode = self.currentNode.parent
def enterFunctionDeclaration(self, ctx:CParser.FunctionDeclarationContext):
self.currentNode = self.currentNode.addChildNode(ASTFunctionDeclarationNode(ctx=ctx))
def exitFunctionDeclaration(self, ctx:CParser.FunctionDeclarationContext):
self.currentNode = self.currentNode.parent
def enterFunctionDefinition(self, ctx:CParser.FunctionDefinitionContext):
child = ctx.getChild(0, CParser.IdentifierContext)
if child is not None and child.getText() == "main":
self.currentNode = self.currentNode.addChildNode(ASTMainFunctionNode(ctx=ctx))
else:
self.currentNode = self.currentNode.addChildNode(ASTFunctionDefinitionNode(ctx=ctx))
def exitFunctionDefinition(self, ctx:CParser.FunctionDefinitionContext):
self.currentNode = self.currentNode.parent
def enterParameters(self, ctx:CParser.ParametersContext):
self.currentNode = self.currentNode.addChildNode(ASTParametersNode())
def exitParameters(self, ctx:CParser.ParametersContext):
self.currentNode = self.currentNode.parent
def enterParameter(self, ctx:CParser.ParameterContext):
self.currentNode = self.currentNode.addChildNode(ASTParameterNode(ctx=ctx))
def exitParameter(self, ctx:CParser.ParameterContext):
# self.currentNode.children = []
self.currentNode = self.currentNode.parent
def enterParamDeclarator1(self, ctx:CParser.ParamDeclaratorContext):
self.enterDeclarator1(ctx)
def exitParamDeclarator1(self, ctx:CParser.ParamDeclaratorContext):
self.exitDeclarator1(ctx)
def enterArrayPart(self, ctx:CParser.ArrayPartContext):
newNode = ASTArrayPartNode(ctx=ctx)
self.currentNode.arrayLengths.append(newNode)
self.currentNode = self.currentNode.addChildNode(newNode)
def exitArrayPart(self, ctx:CParser.ArrayPartContext):
self.currentNode = self.currentNode.parent
def enterTypeDeclaration(self, ctx:CParser.TypeDeclarationContext):
self.currentNode.baseType = ctx.getText()
self.currentNode.typeSpecifierPresent = True
def exitTypeDeclaration(self, ctx:CParser.TypeDeclarationContext):
pass
def enterFunctionCall(self, ctx:CParser.FunctionCallContext):
self.currentNode = self.currentNode.addChildNode(ASTFunctionCallNode(ctx))
def exitFunctionCall(self, ctx:CParser.FunctionCallContext):
self.currentNode = self.currentNode.parent
def enterArguments(self, ctx:CParser.ArgumentsContext):
self.currentNode = self.currentNode.addChildNode(ASTArgumentsNode(ctx))
def exitArguments(self, ctx:CParser.ArgumentsContext):
self.currentNode = self.currentNode.parent
def enterArrayInitializer(self, ctx:CParser.ArrayInitializerContext):
initializerList = ASTInitializerListNode(ctx)
self.currentNode.initializerList = initializerList
self.currentNode = self.currentNode.addChildNode(initializerList)
self.currentNode.isArray = True
def exitArrayInitializer(self, ctx:CParser.ArrayInitializerContext):
self.currentNode = self.currentNode.parent
def enterExpressionInitializer(self, ctx:CParser.ExpressionInitializerContext):
initializerList = ASTInitializerListNode(ctx)
self.currentNode.initializerList = initializerList
self.currentNode = self.currentNode.addChildNode(initializerList)
def exitExpressionInitializer(self, ctx:CParser.ExpressionInitializerContext):
self.currentNode = self.currentNode.parent
def enterIfCond(self, ctx:CParser.IfCondContext):
self.currentNode = self.currentNode.addChildNode(ASTIfNode(ctx))
def exitIfCond(self, ctx:CParser.IfCondContext):
self.currentNode = self.currentNode.parent
def enterElseCond(self, ctx:CParser.ElseCondContext):
self.currentNode = self.currentNode.addChildNode(ASTElseNode(ctx))
def exitElseCond(self, ctx:CParser.ElseCondContext):
self.currentNode = self.currentNode.parent
def enterForLoop(self, ctx:CParser.ForLoopContext):
self.currentNode = self.currentNode.addChildNode(ASTForNode(ctx))
def exitForLoop(self, ctx:CParser.ForLoopContext):
self.currentNode = self.currentNode.parent
def enterForLoopInitStatement(self, ctx:CParser.ForLoopInitStatementContext):
self.currentNode = self.currentNode.dummies[0]
def exitForLoopInitStatement(self, ctx:CParser.ForLoopInitStatementContext):
if self.currentNode.children:
self.currentNode.parent.initializer = self.currentNode.children[0]
self.currentNode.parent.initializer.parent = self.currentNode.parent
self.currentNode = self.currentNode.parent
def enterForLoopCondition(self, ctx:CParser.ForLoopConditionContext):
self.currentNode = self.currentNode.dummies[1]
def exitForLoopCondition(self, ctx:CParser.ForLoopConditionContext):
if self.currentNode.children:
self.currentNode.parent.condition = self.currentNode.children[0]
self.currentNode.parent.condition.parent = self.currentNode.parent
self.currentNode = self.currentNode.parent
def enterForLoopIterationExpression(self, ctx:CParser.ForLoopIterationExpressionContext):
self.currentNode = self.currentNode.dummies[2]
def exitForLoopIterationExpression(self, ctx:CParser.ForLoopIterationExpressionContext):
if self.currentNode.children:
self.currentNode.parent.iteration = self.currentNode.children[0]
self.currentNode.parent.iteration.parent = self.currentNode.parent
self.currentNode = self.currentNode.parent
def enterWhileCond(self, ctx:CParser.WhileCondContext):
self.currentNode = self.currentNode.addChildNode(ASTWhileNode(ctx))
def exitWhileCond(self, ctx:CParser.WhileCondContext):
self.currentNode = self.currentNode.parent
def enterDoWhileCond(self, ctx:CParser.DoWhileCondContext):
self.currentNode = self.currentNode.addChildNode(ASTDoWhileNode(ctx))
def exitDoWhileCond(self, ctx:CParser.DoWhileCondContext):
self.currentNode = self.currentNode.parent
def enterFloatLiteral(self, ctx:CParser.FloatLiteralContext):
self.currentNode = self.currentNode.addChildNode(ASTFloatLiteralNode(float(ctx.getText()), ctx))
def exitFloatLiteral(self, ctx:CParser.FloatLiteralContext):
self.currentNode = self.currentNode.parent
def enterIntegerLiteral(self, ctx:CParser.IntegerLiteralContext):
self.currentNode = self.currentNode.addChildNode(ASTIntegerLiteralNode(int(ctx.getText()), ctx))
def exitIntegerLiteral(self, ctx:CParser.IntegerLiteralContext):
self.currentNode = self.currentNode.parent
def enterCharacterLiteral(self, ctx:CParser.CharacterLiteralContext):
self.currentNode = self.currentNode.addChildNode(ASTCharacterLiteralNode(ctx.getText(), ctx))
def exitCharacterLiteral(self, ctx:CParser.CharacterLiteralContext):
self.currentNode = self.currentNode.parent
def enterStringLiteral(self, ctx:CParser.StringLiteralContext):
self.currentNode = self.currentNode.addChildNode(ASTStringLiteralNode(ctx.getText()[1:-1], ctx))
def exitStringLiteral(self, ctx:CParser.StringLiteralContext):
self.currentNode = self.currentNode.parent
def enterIdentifier(self, ctx:CParser.IdentifierContext):
if hasattr(self.currentNode, "identifier"):
self.currentNode.identifier = ctx.getText()
def exitIdentifier(self, ctx:CParser.IdentifierContext):
pass
def enterPointerPart(self, ctx:CParser.PointerPartContext):
if isinstance(self.currentNode, (ASTFunctionDeclarationNode, ASTTypeCastNode)):
self.currentNode.indirections.append((False, ctx.getChildCount() == 2)) # if child count == 2, there is a const node
def exitPointerPart(self, ctx:CParser.PointerPartContext):
pass
def enterOplevel15(self, ctx:CParser.Oplevel15Context):
children = list(ctx.getChildren())
if len(children) > 1:
self.currentNode = self.currentNode.addChildNode(ASTCommaOperatorNode(ctx))
self.createdNode.append(True)
else:
self.createdNode.append(False)
def exitOplevel15(self, ctx:CParser.Oplevel15Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
def enterOplevel14(self, ctx:CParser.Oplevel14Context):
children = list(ctx.getChildren())
if len(children) == 3:
symbol = children[1].getText()
if symbol == "=":
self.currentNode = self.currentNode.addChildNode(ASTSimpleAssignmentOperatorNode(ctx))
self.createdNode.append(True)
return
self.createdNode.append(False)
def exitOplevel14(self, ctx:CParser.Oplevel14Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
def enterOplevel13(self, ctx:CParser.Oplevel13Context):
children = list(ctx.getChildren())
if len(children) == 5:
symbol1 = children[1].getText()
symbol2 = children[3].getText()
if symbol1 == "?" and symbol2 == ":":
self.currentNode = self.currentNode.addChildNode(ASTTernaryConditionalOperatorNode(ctx))
self.createdNode.append(True)
return
self.createdNode.append(False)
def exitOplevel13(self, ctx:CParser.Oplevel13Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
def enterOplevel12(self, ctx:CParser.Oplevel12Context):
children = list(ctx.getChildren())
if len(children) == 3:
symbol = children[1].getText()
if symbol == "||":
self.currentNode = self.currentNode.addChildNode(ASTLogicOperatorNode(ASTLogicOperatorNode.LogicOperatorType["disj"], ctx))
self.createdNode.append(True)
return
self.createdNode.append(False)
def exitOplevel12(self, ctx:CParser.Oplevel12Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
def enterOplevel11(self, ctx:CParser.Oplevel11Context):
children = list(ctx.getChildren())
if len(children) == 3:
symbol = children[1].getText()
if symbol == "&&":
self.currentNode = self.currentNode.addChildNode(ASTLogicOperatorNode(ASTLogicOperatorNode.LogicOperatorType["conj"], ctx))
self.createdNode.append(True)
return
self.createdNode.append(False)
def exitOplevel11(self, ctx:CParser.Oplevel11Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
def enterOplevel10(self, ctx:CParser.Oplevel10Context):
self.createdNode.append(False)
def exitOplevel10(self, ctx:CParser.Oplevel10Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
def enterOplevel9(self, ctx:CParser.Oplevel9Context):
self.createdNode.append(False)
def exitOplevel9(self, ctx:CParser.Oplevel9Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
def enterOplevel8(self, ctx:CParser.Oplevel8Context):
self.createdNode.append(False)
def exitOplevel8(self, ctx:CParser.Oplevel8Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
def enterOplevel7(self, ctx:CParser.Oplevel7Context):
children = list(ctx.getChildren())
if len(children) == 3:
symbol = children[1].getSymbol().text
self.currentNode = self.currentNode.addChildNode(ASTComparisonOperatorNode( \
ASTComparisonOperatorNode.ComparisonType["inequal"] if symbol == "!=" else ASTComparisonOperatorNode.ComparisonType["equal"], ctx))
self.createdNode.append(True)
else:
self.createdNode.append(False)
def exitOplevel7(self, ctx:CParser.Oplevel7Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
def enterOplevel6(self, ctx:CParser.Oplevel6Context):
children = list(ctx.getChildren())
if len(children) == 3:
symbol = children[1].getSymbol().text
if symbol in ["<", ">", "<=", ">="]:
comparisonType = None
if symbol == "<": comparisonType = ASTComparisonOperatorNode.ComparisonType["lt"]
elif symbol == ">": comparisonType = ASTComparisonOperatorNode.ComparisonType["gt"]
elif symbol == "<=": comparisonType = ASTComparisonOperatorNode.ComparisonType["le"]
elif symbol == ">=": comparisonType = ASTComparisonOperatorNode.ComparisonType["ge"]
self.currentNode = self.currentNode.addChildNode(ASTComparisonOperatorNode(comparisonType, ctx))
self.createdNode.append(True)
return
self.createdNode.append(False)
def exitOplevel6(self, ctx:CParser.Oplevel6Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
def enterOplevel5(self, ctx:CParser.Oplevel5Context):
self.createdNode.append(False)
def exitOplevel5(self, ctx:CParser.Oplevel5Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
def enterOplevel4(self, ctx:CParser.Oplevel4Context):
children = list(ctx.getChildren())
if len(children) == 3:
symbol = children[1].getText()
if self.addBinaryArithmeticOperator(symbol, ctx):
self.createdNode.append(True)
return
self.createdNode.append(False)
def exitOplevel4(self, ctx:CParser.Oplevel4Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
def addBinaryArithmeticOperator(self, symbol, ctx):
arithmeticType = None
if symbol == "+": arithmeticType = ASTBinaryArithmeticOperatorNode.ArithmeticType["add"]
elif symbol == "-": arithmeticType = ASTBinaryArithmeticOperatorNode.ArithmeticType["sub"]
elif symbol == "*": arithmeticType = ASTBinaryArithmeticOperatorNode.ArithmeticType["mul"]
elif symbol == "/": arithmeticType = ASTBinaryArithmeticOperatorNode.ArithmeticType["div"]
elif symbol == "%": arithmeticType = ASTBinaryArithmeticOperatorNode.ArithmeticType["modulo"]
else: return False
self.currentNode = self.currentNode.addChildNode(ASTBinaryArithmeticOperatorNode(arithmeticType, ctx))
return True
def enterOplevel3(self, ctx:CParser.Oplevel3Context):
children = list(ctx.getChildren())
if len(children) == 3:
symbol = children[1].getText()
if self.addBinaryArithmeticOperator(symbol, ctx):
self.createdNode.append(True)
return
self.createdNode.append(False)
def exitOplevel3(self, ctx:CParser.Oplevel3Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
def enterOplevel2(self, ctx:CParser.Oplevel2Context):
children = list(ctx.getChildren())
if len(children) == 2:
symbol = children[0].getText()
if symbol == "++": self.currentNode = self.currentNode.addChildNode(ASTUnaryArithmeticOperatorNode(ASTUnaryArithmeticOperatorNode.ArithmeticType["increment"], ASTUnaryOperatorNode.Type["prefix"], ctx))
elif symbol == "--": self.currentNode = self.currentNode.addChildNode(ASTUnaryArithmeticOperatorNode(ASTUnaryArithmeticOperatorNode.ArithmeticType["decrement"], ASTUnaryOperatorNode.Type["prefix"], ctx))
elif symbol == "+": self.currentNode = self.currentNode.addChildNode(ASTUnaryArithmeticOperatorNode(ASTUnaryArithmeticOperatorNode.ArithmeticType["plus"], ASTUnaryOperatorNode.Type["prefix"], ctx))
elif symbol == "-": self.currentNode = self.currentNode.addChildNode(ASTUnaryArithmeticOperatorNode(ASTUnaryArithmeticOperatorNode.ArithmeticType["minus"], ASTUnaryOperatorNode.Type["prefix"], ctx))
elif symbol == "&": self.currentNode = self.currentNode.addChildNode(ASTAddressOfOperatorNode(ctx))
elif symbol == "*": self.currentNode = self.currentNode.addChildNode(ASTDereferenceOperatorNode(ctx))
elif symbol == "!": self.currentNode = self.currentNode.addChildNode(ASTLogicalNotOperatorNode(ctx))
else:
self.createdNode.append(False)
return;
self.createdNode.append(True)
return
self.createdNode.append(False)
def exitOplevel2(self, ctx:CParser.Oplevel2Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
def enterOplevel1(self, ctx:CParser.Oplevel1Context):
children = list(ctx.getChildren())
if len(children) == 2:
symbol = children[1].getText()
if symbol == "++": self.currentNode = self.currentNode.addChildNode(ASTUnaryArithmeticOperatorNode(ASTUnaryArithmeticOperatorNode.ArithmeticType["increment"], ASTUnaryOperatorNode.Type["postfix"], ctx))
elif symbol == "--": self.currentNode = self.currentNode.addChildNode(ASTUnaryArithmeticOperatorNode(ASTUnaryArithmeticOperatorNode.ArithmeticType["decrement"], ASTUnaryOperatorNode.Type["postfix"], ctx))
self.createdNode.append(True)
return
elif len(children) == 4:
symbol1 = children[1].getText()
symbol2 = children[3].getText()
if symbol1 == "[" and symbol2 == "]":
self.currentNode = self.currentNode.addChildNode(ASTArraySubscriptNode(ctx))
self.createdNode.append(True)
return
self.createdNode.append(False)
def exitOplevel1(self, ctx:CParser.Oplevel1Context):
if self.createdNode.pop(): self.currentNode = self.currentNode.parent
    def enterTypeCast(self, ctx:CParser.TypeCastContext):
self.currentNode = self.currentNode.addChildNode(ASTTypeCastNode(ctx))
    def exitTypeCast(self, ctx:CParser.TypeCastContext):
self.currentNode = self.currentNode.parent
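# --- Hedged usage sketch (illustration only) -----------------------------------
# The Listener builds the AST while antlr4 walks the parse tree: every enter*
# method pushes a node and the matching exit* method pops back to its parent.
# A minimal sketch, assuming a generated CLexer sits next to CParser and that
# AbstractSyntaxTree provides an ``AbstractSyntaxTree`` class with a ``root``:
#
#     from antlr4 import FileStream, CommonTokenStream, ParseTreeWalker
#     from antlr4_generated.CLexer import CLexer
#
#     parser = CParser(CommonTokenStream(CLexer(FileStream("program.c"))))
#     ast = AbstractSyntaxTree()
#     ParseTreeWalker.DEFAULT.walk(Listener(ast), parser.program())
#     # ast.root now holds the ASTProgramNode assembled by the listener
# --------------------------------------------------------------------------------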
|
|
# ----------------------------------------------------------------------
# Copyright (c) 2014 Rafael Gonzalez.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ----------------------------------------------------------------------
# ========================== DESIGN NOTES ==============================
# EMA sends messages between () brackets, so after the tiny asynchronous
# server framework was set up to get input activity, the next thing to do
# was to reassemble messages from a continuous stream of bytes, so that the
# "upper processing layers" dealt with one complete message at a time.
#
# Also, output to EMA should be done at a moderate pace, so as not to
# overload EMA with a bunch of messages with the probability of many of
# them being lost.
#
# So this is the purpose of the SerialDriver.
# Responsibilities:
# 1) Get bytes from SerialPort and reassemble them into whole messages
# 2) Register upper layer callbacks and invoke them.
# 3) Enqueue output messages to EMA and transmit them at a moderate pace
# 4) Hold/resume the transmission of output messages from the queue.
#
#
# SerialDriver inherits from Lazy so that we can use a periodic work()
# method to pop messages from the queue and transmit them to EMA.
#
# Enqueuing output messages from "upper layers" adds delay to messages
# and this is relevant to timeout & retry procedures. However,
# its effect was easy to take into account and the upper layers can add
# the delay to be introduced by the last element in the queue.
#
# Peculiar to EMA is the need to suspend/resume the transmission of output
# messages at certain times.
#
# I have never had the need to unregister a handler,
# so there is no delHandler()
# ======================================================================
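# --- Hedged sketch of the reassembly idea (illustration only) -------------------
# The driver buffers raw bytes and repeatedly cuts complete '(...)' messages off
# the front of the buffer, which is exactly what extract() does below:
#
#     import re
#     patt = re.compile(r'\([^)]+\)')
#     buf = '(T 21.3)(H 5'               # second message still incomplete
#     m = patt.search(buf)
#     if m:
#         message = m.group()            # '(T 21.3)'
#         buf = buf[m.end():]            # '(H 5' waits for more bytes
# ---------------------------------------------------------------------------------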
import serial
import re
import logging
from server import Lazy
log = logging.getLogger('serdriver')
class SerialDriver(Lazy):
NSTATS = 1000 # Print number of reads each NSTATs times
def __init__(self, port, baud, **kargs):
Lazy.__init__(self)
self.__nreads = 0
self.__nwrites = 0
self.__buffer = ''
self.__handlers = []
self.__outqueue = []
self.__stopped = False
		# An EMA message, surrounded by brackets
		self.__patt = re.compile(r'\([^)]+\)')
self.__serial = serial.Serial()
self.__serial.port = port
self.__serial.baudrate = baud
try:
self.__serial.open()
self.__serial.flushInput()
self.__serial.flushOutput()
		except serial.SerialException as e:
log.error("Could not open serial port %s: %s", self.__serial.name, e)
raise
log.info("Opened %s at %s bps", self.__serial.port, self.__serial.baudrate)
# ----------------------------------------
# Public interface exposed to upper layers
# -----------------------------------------
def write(self, message):
'''
Enqueues message to output queue
'''
self.__outqueue.append(message)
def queueDelay(self):
'''returns the max wait time in multiples of Server.TIMEOUT'''
return 1+len(self.__outqueue)
def hold(self, flag):
'''
Stop/Resume dequeuing messages from the output queue
and transmitting to serial port.
'''
self.__stopped = flag
log.debug("on hold = %s", flag)
def addHandler(self, object):
'''Registers an object implementing a handle(message) method'''
self.__handlers.append(object)
# --------------
# Helper methods
# --------------
def work(self):
'''
Writes data to serial port configured at init.
Called periodically from a Server object.
Write blocking behaviour.
'''
qlen = len(self.__outqueue)
if self.__stopped:
return
if qlen:
try:
log.debug("Tx %s", self.__outqueue[0])
self.__nwrites += 1
self.__serial.write(self.__outqueue.pop(0))
except serial.SerialException as e:
log.error("%s: %s" , self.__serial.portstr, e)
raise
def read(self):
'''
Reads from serial port.
Return all available data in buffer.
'''
try:
return self.__serial.read(self.__serial.inWaiting())
except serial.SerialException as e:
log.error("%s: %s" , self.__serial.portstr, e)
raise
def extract(self):
'''
Extracts a complete EMA message
Returns whole message if available or null string if not.
'''
matched = self.__patt.search(self.__buffer)
message = ''
if matched:
message = matched.group()
self.__buffer = self.__buffer[matched.end():]
self.__nreads += 1
log.debug("Rx %s", message)
return message
def show(self):
'''Print read/write message statistics every NSTATS operations'''
n = max(self.__nreads, self.__nwrites) % SerialDriver.NSTATS
if not n:
log.info("nreads = %d, nwrites = %d , queue = %d", self.__nreads, self.__nwrites, len(self.__outqueue))
def onInput(self):
'''
Read from message buffer and notify handlers if message complete.
Called from Server object
'''
self.__buffer += self.read() # accumulate reading
message = self.extract() # extract whole message
if message:
for handler in self.__handlers:
handler.onSerialMessage(message)
def fileno(self):
'''Implement this interface to be added in select() system call'''
return self.__serial.fileno()
if __name__ == "__main__":
import server
from utils import setDebug
class Sample(object):
def onSerialMessage(self, message):
log.info(message.split())
setDebug()
options = {'serial_baud': '9600', 'serial_port': '/dev/ttyAMA0'}
driver = SerialDriver('/dev/ttyAMA0', 9600, **options)
driver.addHandler( Sample() )
driver.write('( )')
s = server.Server()
s.addReadable(driver)
s.run()
|
|
import collections
import numpy
import six
import chainer
from chainer import backend
from chainer.backends import cuda
def to_device(device, x):
"""Send an array to a given device.
This method sends a given array to a given device. This method is used in
:func:`~chainer.dataset.concat_examples`.
You can also use this method in a custom converter method used in
:class:`~chainer.training.Updater` and :class:`~chainer.training.Extension`
such as :class:`~chainer.training.updaters.StandardUpdater` and
:class:`~chainer.training.extensions.Evaluator`.
See also :func:`chainer.dataset.concat_examples`.
Args:
device (None or int or device specifier): A device to which an array
is sent. If it is a negative integer, an array is sent to CPU.
If it is a positive integer, an array is sent to GPU with the
given ID. If it is ``None``, an array is left in the original
device. Also, any of device specifiers described at
:class:`~chainer.backend.DeviceId` is accepted.
x (numpy.ndarray, cupy.ndarray, or chainerx.ndarray): An array to send.
Returns:
Converted array.
"""
if device is None:
return x
# For backward compatibilities
if isinstance(device, six.integer_types):
if device < 0:
device = backend.CpuDevice()
else:
device = backend.get_device(cuda.Device(device))
else:
device = backend.get_device(device)
return device.send(x)
# TODO(hvy): Write unit tests where batch elements contain Python lists.
def concat_examples(batch, device=None, padding=None):
"""Concatenates a list of examples into array(s).
This function converts an "array of tuples" into a "tuple of arrays".
Specifically, given a list of examples each of which consists of
a list of elements, this function first makes an array
by taking the element in the same position from each example
and concatenates them along the newly-inserted first axis
(called `batch dimension`) into one array.
It repeats this for all positions and returns the resulting arrays.
The output type depends on the type of examples in ``batch``.
For instance, consider each example consists of two arrays ``(x, y)``.
Then, this function concatenates ``x`` 's into one array, and ``y`` 's
into another array, and returns a tuple of these two arrays. Another
example: consider each example is a dictionary of two entries whose keys
are ``'x'`` and ``'y'``, respectively, and values are arrays. Then, this
function concatenates ``x`` 's into one array, and ``y`` 's into another
array, and returns a dictionary with two entries ``x`` and ``y`` whose
values are the concatenated arrays.
When the arrays to concatenate have different shapes, the behavior depends
on the ``padding`` value. If ``padding`` is ``None`` (default), it raises
an error. Otherwise, it builds an array of the minimum shape that the
contents of all arrays fit into. The padding value is then
used to fill the extra elements of the resulting arrays.
.. admonition:: Example
>>> import numpy as np
>>> from chainer import dataset
>>> x = [([1, 2], 1),
... ([3, 4], 2),
... ([5, 6], 3)]
>>> dataset.concat_examples(x)
(array([[1, 2],
[3, 4],
[5, 6]]), array([1, 2, 3]))
>>>
>>> y = [(np.array([1, 2]), 0),
... (np.array([3]), 1),
... (np.array([]), 2)]
>>> dataset.concat_examples(y, padding=100)
(array([[ 1, 2],
[ 3, 100],
[100, 100]]), array([0, 1, 2]))
>>>
>>> z = [(np.array([1, 2]), np.array([0])),
... (np.array([3]), np.array([])),
... (np.array([]), np.array([2]))]
>>> dataset.concat_examples(z, padding=(100, 200))
(array([[ 1, 2],
[ 3, 100],
[100, 100]]), array([[ 0],
[200],
[ 2]]))
>>> w = [{'feature': np.array([1, 2]), 'label': 0},
... {'feature': np.array([3, 4]), 'label': 1},
... {'feature': np.array([5, 6]), 'label': 2}]
>>> dataset.concat_examples(w) # doctest: +SKIP
{'feature': array([[1, 2],
[3, 4],
[5, 6]]), 'label': array([0, 1, 2])}
Args:
batch (list): A list of examples. This is typically given by a dataset
iterator.
device (device specifier): A device to which each array is sent.
If it is omitted, all arrays are left in their original devices.
See :meth:`~chainer.dataset.convert.to_device` for more details.
padding: Scalar value for extra elements. If this is None (default),
an error is raised on shape mismatch. Otherwise, an array of
minimum dimensionalities that can accommodate all arrays is
created, and elements outside of the examples are padded by this
value.
Returns:
Array, a tuple of arrays, or a dictionary of arrays. The type depends
on the type of each example in the batch.
"""
if len(batch) == 0:
raise ValueError('batch is empty')
first_elem = batch[0]
if isinstance(first_elem, tuple):
result = []
if not isinstance(padding, tuple):
padding = [padding] * len(first_elem)
for i in six.moves.range(len(first_elem)):
result.append(to_device(device, _concat_arrays(
[example[i] for example in batch], padding[i])))
return tuple(result)
elif isinstance(first_elem, dict):
result = {}
if not isinstance(padding, dict):
padding = {key: padding for key in first_elem}
for key in first_elem:
result[key] = to_device(device, _concat_arrays(
[example[key] for example in batch], padding[key]))
return result
else:
return to_device(device, _concat_arrays(batch, padding))
def _concat_arrays(arrays, padding):
# Convert `arrays` to numpy.ndarray if `arrays` consists of the built-in
# types such as int, float or list.
if not isinstance(arrays[0], chainer.get_array_types()):
arrays = numpy.asarray(arrays)
if padding is not None:
arr_concat = _concat_arrays_with_padding(arrays, padding)
else:
device = backend.get_device_from_array(arrays[0])
with chainer.using_device(device):
arr_concat = device.xp.concatenate(
[array[None] for array in arrays])
return arr_concat
def _concat_arrays_with_padding(arrays, padding):
shape = numpy.array(arrays[0].shape, dtype=int)
for array in arrays[1:]:
if numpy.any(shape != array.shape):
numpy.maximum(shape, array.shape, shape)
shape = tuple(numpy.insert(shape, 0, len(arrays)))
device = backend.get_device_from_array(arrays[0])
with chainer.using_device(device):
result = device.xp.full(shape, padding, dtype=arrays[0].dtype)
for i in six.moves.range(len(arrays)):
src = arrays[i]
slices = tuple(slice(dim) for dim in src.shape)
result[(i,) + slices] = src
return result
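# A minimal illustration (numpy only, hypothetical values) of the padding
# strategy implemented above: the result shape is the elementwise maximum of
# the input shapes, prefixed by the batch dimension, and cells not covered by
# an input array keep the padding value.
#
#   >>> import numpy as np
#   >>> arrays = [np.array([1, 2]), np.array([3])]
#   >>> shape = tuple(np.maximum(arrays[0].shape, arrays[1].shape))  # (2,)
#   >>> out = np.full((len(arrays),) + shape, 100, dtype=arrays[0].dtype)
#   >>> for i, a in enumerate(arrays):
#   ...     out[(i,) + tuple(slice(d) for d in a.shape)] = a
#   >>> out
#   array([[  1,   2],
#          [  3, 100]])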
class ConcatWithAsyncTransfer(object):
"""Interface to concatenate data and transfer them to GPU asynchronously.
It enables the next batch of input data to be transferred to the GPU
while the GPU is running kernels for training on the current batch.
An instance of this class is mainly intended to be used as a converter
function of an updater like below.
.. doctest::
from chainer.dataset import convert
...
updater = chainer.training.updaters.StandardUpdater(
...,
converter=convert.ConcatWithAsyncTransfer(),
...)
Args:
stream (cupy.cuda.Stream): CUDA stream. If ``None``, a stream is
automatically created on the first call. Data transfer operation
is launched asynchronously using the stream.
compute_stream(cupy.cuda.Stream): CUDA stream used for compute kernels.
If not ``None``, CUDA events are created/used to avoid global
synchronization and overlap execution of compute kernels and data
transfers as much as possible. If ``None``, global synchronization
is used instead.
"""
def __init__(self, stream=None, compute_stream=None):
self._stream = stream
self.compute_stream = compute_stream
self._device = None
self._conveyor = collections.defaultdict(
lambda: Conveyor(self._device, self._stream))
if compute_stream is not None:
# * event1 prevents a CPU thread to update arrays that might be
# still being used by GPU kernels.
# * event2 prevents a GPU kernel to read arrays that might be
# still being transferred to GPU.
self._event1 = cuda.Event()
self._event2 = cuda.Event()
self._sync_get = False
else:
self._sync_get = True
def __call__(self, batch, device=None, padding=None):
"""Concatenate data and transfer them to GPU asynchronously.
See also :func:`chainer.dataset.concat_examples`.
Args:
batch (list): A list of examples.
device (int): Device ID to which each array is sent.
padding: Scalar value for extra elements.
Returns:
Array, a tuple of arrays, or a dictionary of arrays.
The type depends on the type of each example in the batch.
"""
if len(batch) == 0:
raise ValueError('batch is empty')
first_elem = batch[0]
if len(self._conveyor) == 0:
self._device = device # device is set at first call
if device is not None and device >= 0 and self._stream is None:
with cuda.get_device_from_id(device):
self._stream = cuda.Stream(non_blocking=True)
if device != self._device:
raise ValueError('device is different')
if self.compute_stream is not None:
self._event1.synchronize()
self._event1.record(stream=self.compute_stream)
with cuda.get_device_from_id(device):
if isinstance(first_elem, tuple):
result = []
if not isinstance(padding, tuple):
padding = [padding] * len(first_elem)
for i in six.moves.range(len(first_elem)):
self._conveyor[i].put(_concat_arrays(
[example[i] for example in batch], padding[i]))
for i in six.moves.range(len(first_elem)):
result.append(self._conveyor[i].get(sync=self._sync_get))
if self.compute_stream is not None:
self._event2.record(stream=self._stream)
self.compute_stream.wait_event(self._event2)
return tuple(result)
elif isinstance(first_elem, dict):
result = {}
if not isinstance(padding, dict):
padding = {key: padding for key in first_elem}
for key in first_elem:
self._conveyor[key].put(_concat_arrays(
[example[key] for example in batch], padding[key]))
for key in first_elem:
result[key] = self._conveyor[key].get(sync=self._sync_get)
if self.compute_stream is not None:
self._event2.record(stream=self._stream)
self.compute_stream.wait_event(self._event2)
return result
else:
return to_device(device, _concat_arrays(batch, padding))
class Conveyor(object):
"""Interface to handle asynchronous data transfer using double buffering.
An asynchronous data transfer is initiated by :meth:`put`, and the result,
the array transferred to a target device, is obtained by :meth:`get`.
You should call :meth:`put` followed by :meth:`get`.
Args:
device (int): Device ID to which an array is sent. Negative value
indicates the host memory (CPU). If it is omitted, the array is
left in the original device. Asynchronous data transfer is used
only when device ID >= 0.
stream (cupy.cuda.Stream): CUDA stream. An array is sent to GPU
asynchronously using this stream. If ``None``, asynchronous data
transfer is not used.
"""
def __init__(self, device=None, stream=None):
self._device = device
self._stream = stream
self._array_set = [[None, None], [None, None]]
self._ret_array = []
def put(self, array):
"""Initiates asynchronous transfer of an array to a target device.
This method assumes that the input array is a numpy array located in
host memory that is not page-locked. So, it first copies the data
to page-locked host memory (so-called pinned memory), then initiates
an asynchronous data transfer to the target device.
The intermediate arrays on pinned memory and the cupy arrays on the
target device are retained in self._array_set in order to reduce the
number of memory allocations and releases, and they are reused for
subsequent data transfers as long as the sizes are the same.
A double buffering scheme is used here, so you can safely initiate the
next data transfer even while the current data is still in use on the
target device.
"""
if self._device is None or self._device < 0 or self._stream is None:
self._ret_array.append(to_device(self._device, array))
return
pin_array, cp_array = self._array_set.pop(0)
if pin_array is not None:
if pin_array.nbytes != array.nbytes:
pin_array = None
with cuda.get_device_from_id(self._device):
if pin_array is None:
# The global synchronization below is necessary to ensure ALL
# operations including compute and data transfer submitted
# to GPU so far have been completed, in order to avoid possible
# memory corruption due to race condition among operations that
# use different CUDA streams.
# You can also solve this sort of race condition by preparing a
# memory pool for each CUDA stream and using it carefully.
cuda.cupy.cuda.runtime.deviceSynchronize()
pin_mem = cuda.cupy.cuda.alloc_pinned_memory(array.nbytes)
pin_array = numpy.frombuffer(pin_mem,
array.dtype,
array.size
).reshape(array.shape)
cp_array = cuda.cupy.empty_like(array)
pin_array[...] = array # copy(CPU): paged -> pinned
cp_array.set(pin_array, self._stream) # copy: CPU to GPU
self._array_set.append([pin_array, cp_array])
self._ret_array.append(cp_array)
def get(self, sync=True):
"""Returns the array of data transferred to a target device asynchronously.
If sync is ``True``, the data of the returned array is ready to be used
by GPU kernels. If sync is ``False``, the data of the returned array
might still be in transfer to the GPU, so synchronization must be
handled carefully by the calling function.
Args:
sync (bool): If ``True``, global synchronization is used to ensure
completion of the asynchronous data transfer, which is safer.
If ``False``, it is assumed that the calling function handles
synchronization correctly, hence no global synchronization
is used.
"""
if (self._device is not None and self._device >= 0 and
self._stream is not None):
if sync:
cuda.cupy.cuda.runtime.deviceSynchronize()
return self._ret_array.pop(0)
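# Minimal usage sketch (hypothetical device id and stream): a Conveyor is fed
# with put() and drained with get() in strict alternation, which lets the next
# host-to-GPU copy be initiated while the previous batch is still being used.
#
#   conveyor = Conveyor(device=0, stream=cuda.Stream(non_blocking=True))
#   conveyor.put(cpu_batch_0)      # async copy via pinned memory
#   gpu_batch_0 = conveyor.get()   # synchronized; safe to use in kernels
#   conveyor.put(cpu_batch_1)      # overlaps with compute on gpu_batch_0
#   gpu_batch_1 = conveyor.get()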
|
|
import os
import sys
import shutil
import logging
import webbrowser
import traceback
import socket
import django.conf
from cactus import ui as ui_module
from cactus.config.router import ConfigRouter
from cactus.deployment import get_deployment_engine_class
from cactus.i18n.commands import MessageMaker, MessageCompiler
from cactus.plugin.builtin.cache import CacheDurationPlugin
from cactus.plugin.builtin.context import ContextPlugin
from cactus.plugin.builtin.ignore import IgnorePatternsPlugin
from cactus.plugin.loader import CustomPluginsLoader, ObjectsPluginLoader
from cactus.plugin.manager import PluginManager
from cactus.static.external.manager import ExternalManager
from cactus.compat.paths import SiteCompatibilityLayer
from cactus.compat.page import PageContextCompatibilityPlugin
from cactus.utils.file import fileSize
from cactus.utils.filesystem import fileList
from cactus.utils.helpers import memoize, map_apply
from cactus.utils.network import internetWorking
from cactus.utils.parallel import multiMap, PARALLEL_DISABLED, PARALLEL_CONSERVATIVE, PARALLEL_AGGRESSIVE
from cactus.utils.url import is_external
from cactus.page import Page
from cactus.static import Static
from cactus.listener import Listener
# from cactus.server import Server, RequestHandler
from cactus.server import WebServer
from cactus.browser import browserReload, browserReloadCSS
from cactus.utils import ipc
logger = logging.getLogger(__name__)
DEFAULT_PROVIDER = "aws"
class Site(SiteCompatibilityLayer):
_path = None
_parallel = PARALLEL_CONSERVATIVE #TODO: Test me
_static = None
def __init__(self, path, config_paths=None, ui=None,
PluginManagerClass=None, ExternalManagerClass=None, DeploymentEngineClass=None):
# Load the config engine
if config_paths is None:
config_paths = []
self.config = ConfigRouter(config_paths)
# Load site-specific config values
self.prettify_urls = self.config.get('prettify', False)
self.compress_extensions = self.config.get('compress', ['html', 'css', 'js', 'txt', 'xml'])
self.fingerprint_extensions = self.config.get('fingerprint', [])
self.locale = self.config.get("locale", None)
# Verify our location looks correct
self.path = path
self.verify_path()
# Load Managers
if ui is None:
ui = ui_module
self.ui = ui
if PluginManagerClass is None:
PluginManagerClass = PluginManager
self.plugin_manager = PluginManagerClass(self,
[
CustomPluginsLoader(self.plugin_path), # User plugins
ObjectsPluginLoader([ # Builtin plugins
ContextPlugin(), CacheDurationPlugin(),
IgnorePatternsPlugin(), PageContextCompatibilityPlugin(),
])
]
)
if ExternalManagerClass is None:
ExternalManagerClass = ExternalManager
self.external_manager = ExternalManagerClass(self)
if DeploymentEngineClass is None:
hosting_provider = self.config.get("provider", DEFAULT_PROVIDER)
DeploymentEngineClass = get_deployment_engine_class(hosting_provider)
assert DeploymentEngineClass is not None, \
"Could not load Deployment for Provider: {0}".format(hosting_provider)
self.deployment_engine = DeploymentEngineClass(self)
# Load Django settings
self.setup()
@property
def url(self):
return self.config.get('site-url')
@url.setter
def url(self, value):
self.config.set('site-url', value)
self.config.write()
def verify_url(self):
"""
We need the site url to generate the sitemap.
"""
#TODO: Make a "required" option in the config.
#TODO: Use URL tags in the sitemap
# if self.url is None:
# self.url = self.ui.prompt_url("Enter your site URL (e.g. http://example.com/)")
@property
def path(self):
return self._path
@path.setter
def path(self, path):
self._path = path
self.build_path = os.path.join(path, '.build')
self.deploy_path = os.path.join(path, '.deploy')
self.template_path = os.path.join(path, 'templates')
self.page_path = os.path.join(path, 'pages')
self.plugin_path = os.path.join(path, 'plugins')
self.static_path = os.path.join(path, 'static')
self.script_path = os.path.join(os.getcwd(), __file__)
self.locale_path = os.path.join(path, "locale")
def setup(self):
"""
Configure django to use both our template and pages folder as locations
to look for included templates.
"""
settings = {
"TEMPLATE_DIRS": [self.template_path, self.page_path],
"INSTALLED_APPS": ['django_markwhat'],
}
if self.locale is not None:
settings.update({
"USE_I18N": True,
"USE_L10N": False,
"LANGUAGE_CODE": self.locale,
"LOCALE_PATHS": [self.locale_path],
})
django.conf.settings.configure(**settings)
# - Importing here instead of the top-level makes it work on Python 3.x (!)
# - loading add_to_builtins from loader implicitly loads the loader_tags built-in
# - Injecting our tags using add_to_builtins ensures that Cactus tags don't require an import
from django.template.loader import add_to_builtins
add_to_builtins('cactus.template_tags')
def verify_path(self):
"""
Check if this path looks like a Cactus website
"""
required_subfolders = ['pages', 'static', 'templates', 'plugins']
if self.locale is not None:
required_subfolders.append('locale')
for p in required_subfolders:
if not os.path.isdir(os.path.join(self.path, p)):
logger.error('This does not look like a (complete) cactus project (missing "%s" subfolder)', p)
sys.exit(1)
@memoize
def context(self):
"""
Base context for the site: all the html pages.
"""
ctx = {
'CACTUS': {
'pages': [p for p in self.pages() if p.is_html()],
'static': [p for p in self.static()]
},
'__CACTUS_SITE__': self,
}
# Also make lowercase work
ctx['cactus'] = ctx['CACTUS']
return ctx
def make_messages(self):
"""
Generate the .po files for the site.
"""
if self.locale is None:
logger.error("You should set a locale in your configuration file before running this command.")
return
message_maker = MessageMaker(self)
message_maker.execute()
def compile_messages(self):
"""
Remove pre-existing compiled language files, and re-compile.
"""
#TODO: Make this cleaner
mo_path = os.path.join(self.locale_path, self.locale, "LC_MESSAGES", "django.mo")
try:
os.remove(mo_path)
except OSError:
# No .mo file yet
pass
message_compiler = MessageCompiler(self)
message_compiler.execute()
def clean(self):
"""
Remove all build files.
"""
logger.debug("*** CLEAN %s", self.path)
if os.path.isdir(self.build_path):
shutil.rmtree(self.build_path)
def build(self):
"""
Generate fresh site from templates.
"""
logger.debug("*** BUILD %s", self.path)
self.verify_url()
# Reset the static content
self._static = None
#TODO: Facility to reset the site, and reload config.
#TODO: Currently, we can't build a site instance multiple times
self.plugin_manager.reload() # Reload in case we're running on the server # We're still loading twice!
self.plugin_manager.preBuild(self)
logger.debug('Plugins: %s', ', '.join([p.plugin_name for p in self.plugin_manager.plugins]))
logger.debug('Processors: %s', ', '.join([p.__name__ for p in self.external_manager.processors]))
logger.debug('Optimizers: %s', ', '.join([p.__name__ for p in self.external_manager.optimizers]))
# Make sure the build path exists
if not os.path.exists(self.build_path):
os.mkdir(self.build_path)
# Prepare translations
if self.locale is not None:
self.compile_messages()
#TODO: Check the command actually completes (msgfmt might not be on the PATH!)
# Copy the static files
self.buildStatic()
# Always clean out the pages
build_static_path = os.path.join(self.build_path, "static")
for path in os.listdir(self.build_path):
path = os.path.join(self.build_path, path)
if path != build_static_path:
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
# Render the pages to their output files
mapper = multiMap if self._parallel >= PARALLEL_AGGRESSIVE else map_apply
mapper(lambda p: p.build(), self.pages())
self.plugin_manager.postBuild(self)
for static in self.static():
if os.path.isdir(static.pre_dir):
shutil.rmtree(static.pre_dir)
def static(self):
"""
Retrieve a list of static files for the site
"""
if self._static is None:
self._static = []
for path in fileList(self.static_path, relative=True):
full_path = os.path.join(self.static_path, path)
if os.path.islink(full_path):
if not os.path.exists(os.path.realpath(full_path)):
logger.warning("Skipping symlink that points to unexisting file:\n%s", full_path)
continue
self._static.append(Static(self, path))
return self._static
def _get_resource(self, src_url, resources):
if is_external(src_url):
return src_url
for split_char in ["#", "?"]:
if split_char in src_url:
src_url = src_url.split(split_char)[0]
resources_dict = dict((resource.link_url, resource) for resource in resources)
if src_url in resources_dict:
return resources_dict[src_url].final_url
return None
def _get_url(self, src_url, resources):
return self._get_resource(src_url, resources)
def get_url_for_static(self, src_path):
return self._get_url(src_path, self.static())
def get_url_for_page(self, src_path):
return self._get_url(src_path, self.pages())
def buildStatic(self):
"""
Build static files (pre-process, copy to static folder)
"""
mapper = multiMap if self._parallel > PARALLEL_DISABLED else map_apply
mapper(lambda s: s.build(), self.static())
def pages(self):
"""
List of pages.
"""
if not hasattr(self, "_page_cache"):
self._page_cache = {}
pages = []
for path in fileList(self.page_path, relative=True):
if path.endswith("~"):
continue
if path not in self._page_cache:
logger.debug("Found page: %s", path)
self._page_cache[path] = Page(self, path)
pages.append(self._page_cache[path])
return pages
def _rebuild_should_ignore(self, file_path):
file_relative_path = os.path.relpath(file_path, self.path)
# Ignore anything in a hidden folder like .git
for path_part in file_relative_path.split(os.path.sep):
if path_part.startswith("."):
return True
if file_path.startswith(self.page_path):
return False
if file_path.startswith(self.template_path):
return False
if file_path.startswith(self.static_path):
return False
if file_path.startswith(self.plugin_path):
return False
return True
def _rebuild(self, changes):
logger.debug("*** REBUILD %s", self.path)
logger.info('*** Rebuilding (%s changed)' % self.path)
# We will pause the listener while building so scripts that alter the output
# like coffeescript and less don't trigger the listener again immediately.
self.listener.pause()
try:
#TODO: Fix this.
#TODO: The static files should handle collection of their static folder on their own
#TODO: The static files should not run everything on __init__
#TODO: Only rebuild static files that changed
# We need to "clear out" the list of static files first; otherwise, processors will not run again.
# They run on __init__ so that they execute before fingerprinting, and the "built" static files themselves,
# which live in a temporary folder, have already been deleted!
# self._static = None
self.build()
except Exception as e:
logger.info('*** Error while building\n%s', e)
traceback.print_exc(file=sys.stdout)
changed_file_extension = set(map(lambda x: os.path.splitext(x)[1], changes["changed"]))
reload_css_file_extensions = set([".css", ".sass", ".scss", ".styl"])
# When we have changes, we want to refresh the browser tabs with the updates.
# Mostly we just refresh the browser except when there are just css changes,
# then we reload the css in place.
local_hosts = [
"http://127.0.0.1:%s" % self._port,
"http://localhost:%s" % self._port,
"http://0.0.0.0:%s" % self._port
]
if len(changes["added"]) == 0 and len(changes["deleted"]) == 0 and changed_file_extension.issubset(reload_css_file_extensions):
# browserReloadCSS(local_hosts)
self.server.reloadCSS()
else:
# browserReload(local_hosts)
self.server.reloadPage()
self.listener.resume()
def serve(self, browser=True, port=8000):
"""
Start a http server and rebuild on changes.
"""
self._parallel = PARALLEL_DISABLED
self._port = port
self.clean()
self.build()
logger.info('Running webserver at http://127.0.0.1:%s for %s' % (port, self.build_path))
ipc.signal("server.didstart")
logger.info('Type control-c to exit')
os.chdir(self.build_path)
self.listener = Listener(self.path, self._rebuild, ignore=self._rebuild_should_ignore)
self.listener.run()
self.server = WebServer(self.build_path, port=port)
try:
self.server.start()
# if browser is True:
# webbrowser.open('http://127.0.0.1:%s' % port)
except (KeyboardInterrupt, SystemExit):
self.server.stop()
logger.info("Bye")
def upload(self):
# Make sure we have internet
if not internetWorking():
logger.info('There does not seem to be internet here, check your connection')
return
logger.debug('Start upload')
self.build_path = self.deploy_path
self.clean()
self.build()
self.plugin_manager.preDeploy(self)
totalFiles = self.deployment_engine.deploy()
changedFiles = [r for r in totalFiles if r['changed']]
self.plugin_manager.postDeploy(self)
# Display done message and some statistics
logger.info('\nDone\n')
logger.info('%s total files with a size of %s' %
(len(totalFiles), fileSize(sum([r['size'] for r in totalFiles]))))
logger.info('%s changed files with a size of %s' %
(len(changedFiles), fileSize(sum([r['size'] for r in changedFiles]))))
logger.info('\nhttp://%s\n' % self.config.get('aws-bucket-website')) #TODO: Fix
def domain_setup(self):
# Make sure we have internet
if not internetWorking():
logger.info('There does not seem to be internet here, check your connection')
return
self.deployment_engine.domain_setup()
self.domain_list()
def domain_list(self):
self.deployment_engine.domain_list()
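# Minimal usage sketch (hypothetical project path); this mirrors what the
# cactus command line does when building or serving a site:
#
#   site = Site('/path/to/project')
#   site.clean()
#   site.build()             # render pages and static files into .build/
#   # site.serve(port=8000)  # or serve locally and rebuild on changes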
|
|
import logging
import re
import sys
import functools
import json
from django.utils import six
try:
str = unicode  # noqa: F821 -- Python 2: treat text as unicode
except NameError:
pass  # Python 3: str is already unicode
logger = logging.getLogger(__name__)
URL_PARAM_RE = re.compile('(?P<k>[^(=|&)]+)=(?P<v>[^&]+)(&|$)')
URL_PARAM_NO_VALUE_RE = re.compile('(?P<k>[^(&|?)]+)(&|$)')
def import_statsd():
'''
Import only the statsd by wolph, not the mozilla statsd
TODO: Move to mozilla statsd, which is more widely used
'''
try:
# check to see if the django_statsd we found
# supports start (stop) timing.
import django_statsd
is_wolphs_statsd = hasattr(
django_statsd, 'start') and hasattr(django_statsd, 'stop')
if not is_wolphs_statsd:
django_statsd = None
except ImportError:
django_statsd = None
return django_statsd
django_statsd = import_statsd()
def start_statsd(path):
'''
Simple wrapper to save some typing
'''
if django_statsd:
django_statsd.start(path)
def stop_statsd(path):
if django_statsd:
django_statsd.stop(path)
def base64_url_decode_php_style(inp):
'''
PHP follows a slightly different protocol for base64 url decode.
For a full explanation see:
http://stackoverflow.com/questions/3302946/how-to-base64-url-decode-in-python
and
http://sunilarora.org/parsing-signedrequest-parameter-in-python-bas
'''
import base64
padding_factor = (4 - len(inp) % 4) % 4
inp += "=" * padding_factor
return base64.b64decode(str(inp).translate(
dict(list(zip(list(map(ord, '-_')), '+/')))))
def encode_params(params_dict):
'''
Take the dictionary of params and encode keys and
values to ascii if they are unicode
'''
encoded = [(smart_str(k), smart_str(v)) for k, v in list(params_dict.items())]
encoded_dict = dict(encoded)
return encoded_dict
def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
"""
Adapted from django, needed for urlencoding
Returns a bytestring version of 's', encoded as specified in 'encoding'.
If strings_only is True, don't convert (some) non-string-like objects.
"""
import types
if strings_only and isinstance(s, (type(None), int)):
return s
elif not isinstance(s, six.string_types):
try:
return str(s)
except UnicodeEncodeError:
if isinstance(s, Exception):
# An Exception subclass containing non-ASCII data that doesn't
# know how to print itself properly. We shouldn't raise a
# further exception.
return ' '.join([smart_str(arg, encoding, strings_only,
errors) for arg in s])
return str(s).encode(encoding, errors)
elif isinstance(s, str):
return s.encode(encoding, errors)
elif s and encoding != 'utf-8':
return s.decode('utf-8', errors).encode(encoding, errors)
else:
return s
def send_warning(message, request=None, e=None, **extra_data):
'''
Uses the logging system to send a message to logging and sentry
'''
username = None
if request and request.user.is_authenticated():
username = request.user.username
error_message = None
if e:
error_message = str(e)
data = {
'username': username,
'body': error_message,
}
data.update(extra_data)
logger.warning(message,
exc_info=sys.exc_info(), extra={
'request': request,
'data': data
})
def merge_urls(generated_url, human_url):
'''
Merge the generated_url with the human_url following these rules:
params introduced by generated_url are kept
final param order comes from generated_url
there's a hack to support things like this: http://url?param&param=value
>>> gen = "http://mysite.com?p1=a&p2=b&p3=c&p4=d"
>>> hum = "http://mysite.com?p4=D&p3=C&p2=B"
>>> merge_urls(gen, hum)
u'http://mysite.com?p1=a&p2=B&p3=C&p4=D'
>>> gen = "http://mysite.com?id=a&id_s=b&p_id=d"
>>> hum = "http://mysite.com?id=A&id_s=B&p_id=D"
>>> merge_urls(gen, hum)
u'http://mysite.com?id=A&id_s=B&p_id=D'
>>> gen = "http://mysite.com?p1=a&p2=b&p3=c&p4=d"
>>> hum = "http://mysite.com"
>>> merge_urls(gen, hum)
u'http://mysite.com'
>>> gen = "http://ad.zanox.com/ppc/?18595160C2000463397T&zpar4=scrapbook&zpar0=e2494344_c4385641&zpar1=not_authenticated&zpar2=unknown_campaign&zpar3=unknown_ref&ULP=http://www.asos.com/ASOS/ASOS-MARS-Loafer-Shoes/Prod/pgeproduct.aspx?iid=1703516&cid=4172&sh=0&pge=2&pgesize=20&sort=-1&clr=Black&affId=2441"
>>> hum = "http://ad.zanox.com/ppc/?18595160C2000463397T&zpar3=scrapbook&ULP=http://www.asos.com/ASOS/ASOS-MARS-Loafer-Shoes/Prod/pgeproduct.aspx?iid=1703516&cid=4172&sh=0&pge=2&pgesize=20&sort=-1&clr=Black&affId=2441"
>>> merge_urls(gen, hum)
u'http://ad.zanox.com/ppc/?18595160C2000463397T&zpar4=scrapbook&zpar0=e2494344_c4385641&zpar1=not_authenticated&zpar2=unknown_campaign&zpar3=scrapbook&ULP=http://www.asos.com/ASOS/ASOS-MARS-Loafer-Shoes/Prod/pgeproduct.aspx?iid=1703516&cid=4172&sh=0&pge=2&pgesize=20&sort=-1&clr=Black&affId=2441'
>>> gen = "http://mysite.com?invalidparam&p=2"
>>> hum = "http://mysite.com?p=1"
>>> merge_urls(gen, hum)
u'http://mysite.com?invalidparam&p=1'
'''
if '?' not in human_url:
return '%s' % human_url
gen_path, gen_args = generated_url.split('?', 1)
hum_path, hum_args = human_url.split('?', 1)
get_args = lambda args: [(m.group('k'), m.group('v'))
for m in URL_PARAM_RE.finditer(args)]
get_novalues_args = lambda args: [m.group('k')
for m in URL_PARAM_NO_VALUE_RE.finditer(
args) if "=" not in m.group('k')]
hum_dict = dict(get_args(hum_args))
out_args = []
# prepend crazy param w/o values
for param in get_novalues_args(gen_args):
out_args.append('%s' % param)
# replace gen url params
for k, v in get_args(gen_args):
out_args.append('%s=%s' % (k, hum_dict.get(k, v)))
return '%s?%s' % (gen_path, '&'.join(out_args))
class memoized(object):
'''Decorator. Caches a function's return value each time it is called.
If called later with the same arguments, the cached value is returned
(not reevaluated).
'''
def __init__(self, func):
self.func = func
self.cache = {}
def __call__(self, *args):
try:
return self.cache[args]
except KeyError:
value = self.func(*args)
self.cache[args] = value
return value
except TypeError:
# uncachable -- for instance, passing a list as an argument.
# Better to not cache than to blow up entirely.
return self.func(*args)
def __repr__(self):
'''Return the function's docstring.'''
return self.func.__doc__
def __get__(self, obj, objtype):
'''Support instance methods.'''
return functools.partial(self.__call__, obj)
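# Usage sketch: cache an expensive pure function by its positional arguments.
#
#   @memoized
#   def fib(n):
#       return n if n < 2 else fib(n - 1) + fib(n - 2)
#
#   fib(30)  # computed once; later calls with the same argument hit the cache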
def camel_to_underscore(name):
'''Convert camelcase style naming to underscore style naming
e.g. SpamEggs -> spam_eggs '''
import string
for c in string.ascii_uppercase:
name = name.replace(c, '_%c' % c)
return name.strip('_').lower()
def validate_is_instance(instance, classes):
'''
Usage
validate_is_instance(10, int)
validate_is_instance('a', (str, unicode))
'''
if not isinstance(classes, tuple):
classes = (classes,)
correct_instance = isinstance(instance, classes)
if not correct_instance:
raise ValueError(
'Expected instance type %s found %s' % (classes, type(instance)))
def is_json(content):
'''
Unfortunately facebook returns 500s which mean they are down,
or 500s with a nice error message because you used open graph wrong.
So we have to figure out which is which :)
'''
try:
json.loads(content)
is_json = True
except (TypeError, ValueError):
is_json = False
return is_json
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Script to extract the strings from Event Log message resource files."""
import argparse
import logging
import os
import re
import sys
from dfvfs.helpers import command_line as dfvfs_command_line
from dfvfs.helpers import volume_scanner as dfvfs_volume_scanner
from dfvfs.lib import errors as dfvfs_errors
from winevtrc import database
from winevtrc import definitions
from winevtrc import extractor
class SQLite3OutputWriter(object):
"""SQLite3 output writer."""
EVENT_PROVIDERS_DATABASE_FILENAME = 'winevt-kb.db'
def __init__(self, databases_path):
"""Initializes an output writer object.
Args:
databases_path (str): path to the database files.
"""
super(SQLite3OutputWriter, self).__init__()
self._databases_path = databases_path
self._database_writer = None
def Close(self):
"""Closes the output writer object."""
self._database_writer.Close()
self._database_writer = None
def Open(self):
"""Opens the output writer object.
Returns:
bool: True if successful or False if not.
"""
if not os.path.isdir(self._databases_path):
logging.warning('Invalid path to the database files: not a directory.')
return False
event_providers_database_path = os.path.join(
self._databases_path, self.EVENT_PROVIDERS_DATABASE_FILENAME)
if os.path.exists(event_providers_database_path):
logging.warning('event providers database: {0:s} already exists.'.format(
event_providers_database_path))
return False
self._database_writer = database.EventProvidersSQLite3DatabaseWriter()
self._database_writer.Open(event_providers_database_path)
return True
def WriteEventLogProvider(self, event_log_provider):
"""Writes the Event Log provider.
Args:
event_log_provider (EventLogProvider): Event Log provider.
"""
self._database_writer.WriteEventLogProvider(event_log_provider)
def WriteMessageResourceFile(
self, event_log_provider, message_resource_file, message_filename,
message_file_type):
"""Writes the Windows Message Resource file.
Args:
event_log_provider (EventLogProvider): Event Log provider.
message_resource_file (MessageResourceFile): message resource file.
message_filename (str): message filename.
message_file_type (str): message file type.
"""
database_filename = message_resource_file.windows_path
_, _, database_filename = database_filename.rpartition('\\')
database_filename = '{0:s}.db'.format(database_filename.lower())
database_filename = re.sub(r'\.mui', '', database_filename)
database_writer = database.MessageResourceFileSQLite3DatabaseWriter(
message_resource_file)
database_writer.Open(
os.path.join(self._databases_path, database_filename))
database_writer.WriteResources()
database_writer.Close()
self._database_writer.WriteMessageFile(message_filename, database_filename)
# TODO: write the relationship between the event log provider and
# the message file and the Windows version?
self._database_writer.WriteMessageFilesPerEventLogProvider(
event_log_provider, message_filename, message_file_type)
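# Illustrative derivation (hypothetical path) of the per-file database name
# built above: 'C:\Windows\System32\wevtapi.dll.mui' -> rpartition('\\')
# keeps 'wevtapi.dll.mui' -> lowercase plus '.db' -> '.mui' stripped ->
# 'wevtapi.dll.db'.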
class StdoutOutputWriter(object):
"""Stdout output writer."""
def _WriteMessageTable(self, message_table):
"""Writes the Windows Message Resource file message table.
Args:
message_table (pywrc.message_table): message table resource.
"""
try:
number_of_languages = message_table.get_number_of_languages()
except IOError as exception:
number_of_languages = 0
logging.warning(
'Unable to retrieve number of languages with error: {0:s}.'.format(
exception))
if number_of_languages > 0:
for language_identifier in message_table.language_identifiers:
number_of_messages = message_table.get_number_of_messages(
language_identifier)
if number_of_messages > 0:
print('Message table:')
print('LCID\t\t: 0x{0:08x}'.format(language_identifier))
for message_index in range(0, number_of_messages):
message_identifier = message_table.get_message_identifier(
language_identifier, message_index)
message_string = message_table.get_string(
language_identifier, message_index)
output_string = '0x{0:08x}\t: {1:s}'.format(
message_identifier, message_string)
print(output_string.encode('utf8'))
print('')
def Close(self):
"""Closes the output writer object."""
return
def Open(self):
"""Opens the output writer object.
Returns:
bool: True if successful or False if not.
"""
return True
def WriteEventLogProvider(self, event_log_provider):
"""Writes the Event Log provider.
Args:
event_log_provider (EventLogProvider): Event Log provider.
"""
print('Source\t\t: {0:s}'.format(
event_log_provider.log_sources[0]))
print('Event Log type\t: {0:s}'.format(
event_log_provider.log_types[0]))
print('Categories\t: {0:s}'.format(
event_log_provider.category_message_files))
print('Messages\t: {0:s}'.format(
event_log_provider.event_message_files))
print('Parameters\t: {0:s}'.format(
event_log_provider.parameter_message_files))
print('')
# pylint: disable=unused-argument
def WriteMessageResourceFile(
self, event_log_provider, message_resource_file, message_filename,
message_file_type):
"""Writes the Windows Message Resource file.
Args:
event_log_provider (EventLogProvider): Event Log provider.
message_resource_file (MessageResourceFile): message resource file.
message_filename (str): message filename.
message_file_type (str): message file type.
"""
file_version = getattr(message_resource_file, 'file_version', '')
product_version = getattr(message_resource_file, 'product_version', '')
print('Message file:')
print('Path\t\t: {0:s}'.format(message_resource_file.windows_path))
print('File version\t: {0:s}'.format(file_version))
print('Product version\t: {0:s}'.format(product_version))
message_table = message_resource_file.GetMessageTableResource()
self._WriteMessageTable(message_table)
def Main():
"""The main program function.
Returns:
bool: True if successful or False if not.
"""
argument_parser = argparse.ArgumentParser(description=(
'Extract strings from message resource files for Event Log sources.'))
argument_parser.add_argument(
'-d', '--debug', dest='debug', action='store_true', default=False,
help='enable debug output.')
argument_parser.add_argument(
'--db', '--database', dest='database', action='store',
metavar='./winevt-kb/', default=None, help=(
'directory to write the sqlite3 databases to.'))
argument_parser.add_argument(
'-w', '--windows_version', '--windows-version',
dest='windows_version', action='store', metavar='Windows XP',
default=None, help='string that identifies the Windows version.')
argument_parser.add_argument(
'source', nargs='?', action='store', metavar='/mnt/c/',
default=None, help=(
'path of the volume containing C:\\Windows or the filename of '
'a storage media image containing the C:\\Windows directory.'))
options = argument_parser.parse_args()
if not options.source:
print('Source value is missing.')
print('')
argument_parser.print_help()
print('')
return False
logging.basicConfig(
level=logging.INFO, format='[%(levelname)s] %(message)s')
if options.database:
if not os.path.exists(options.database):
os.mkdir(options.database)
if not os.path.isdir(options.database):
print('{0:s} must be a directory'.format(options.database))
print('')
return False
output_writer = SQLite3OutputWriter(options.database)
else:
output_writer = StdoutOutputWriter()
mediator = dfvfs_command_line.CLIVolumeScannerMediator()
extractor_object = extractor.EventMessageStringExtractor(
debug=options.debug, mediator=mediator)
volume_scanner_options = dfvfs_volume_scanner.VolumeScannerOptions()
volume_scanner_options.partitions = ['all']
volume_scanner_options.snapshots = ['none']
volume_scanner_options.volumes = ['none']
try:
result = extractor_object.ScanForWindowsVolume(
options.source, options=volume_scanner_options)
except dfvfs_errors.ScannerError:
result = False
if not result:
print(('Unable to retrieve the volume with the Windows directory from: '
'{0:s}.').format(options.source))
print('')
return False
if not extractor_object.windows_version:
if not options.windows_version:
print('Unable to determine Windows version.')
if options.database:
print('Database output requires a Windows version, specify one with '
'--windows-version.')
print('')
return False
extractor_object.windows_version = options.windows_version
if not output_writer.Open():
print('Unable to open output writer.')
print('')
return False
try:
logging.info('Detected Windows version: {0:s}'.format(
extractor_object.windows_version))
extractor_object.CollectSystemEnvironmentVariables()
# TODO: handle $(runtime.X) notation
for event_log_provider in extractor_object.CollectEventLogProviders():
logging.info('Processing event log provider: {0:s}'.format(
event_log_provider.log_source))
output_writer.WriteEventLogProvider(event_log_provider)
if event_log_provider.event_message_files:
for message_filename in event_log_provider.event_message_files:
message_resource_file = extractor_object.GetMessageResourceFile(
event_log_provider, message_filename)
if message_resource_file:
logging.info('Processing event message file: {0:s}'.format(
message_filename))
output_writer.WriteMessageResourceFile(
event_log_provider, message_resource_file,
message_resource_file.windows_path,
definitions.MESSAGE_FILE_TYPE_EVENT)
message_resource_file.Close()
if event_log_provider.category_message_files:
for message_filename in event_log_provider.category_message_files:
message_resource_file = extractor_object.GetMessageResourceFile(
event_log_provider, message_filename)
if message_resource_file:
logging.info('Processing category message file: {0:s}'.format(
message_filename))
output_writer.WriteMessageResourceFile(
event_log_provider, message_resource_file,
message_resource_file.windows_path,
definitions.MESSAGE_FILE_TYPE_CATEGORY)
message_resource_file.Close()
if event_log_provider.parameter_message_files:
for message_filename in event_log_provider.parameter_message_files:
message_resource_file = extractor_object.GetMessageResourceFile(
event_log_provider, message_filename)
if message_resource_file:
logging.info('Processing parameter message file: {0:s}'.format(
message_filename))
output_writer.WriteMessageResourceFile(
event_log_provider, message_resource_file,
message_resource_file.windows_path,
definitions.MESSAGE_FILE_TYPE_PARAMETER)
message_resource_file.Close()
finally:
output_writer.Close()
if extractor_object.missing_message_filenames:
print('')
print('Message resource files not found or without resource section:')
for message_filename in extractor_object.missing_message_filenames:
print('{0:s}'.format(message_filename))
if extractor_object.missing_resources_message_filenames:
print('')
print('Message resource files without a string and message table resource:')
for message_filename in (
extractor_object.missing_resources_message_filenames):
print('{0:s}'.format(message_filename))
print('')
return True
if __name__ == '__main__':
if not Main():
sys.exit(1)
else:
sys.exit(0)
|
|
#!/usr/bin/env python
# requirements_to_rst.py
# Copyright (c) 2013-2020 Pablo Acosta-Serafini
# See LICENSE for details
# pylint: disable=C0103,C0111,R0912,R0914,R0915,R1717,R1718
import os
import textwrap
from pypkg.functions import get_supported_interps, json_load
###
# Global variables
###
LINE_WIDTH = 72
###
# Functions
###
def def_links(mobj):
"""Define Sphinx requirements links."""
fdict = json_load(os.path.join("data", "requirements.json"))
sdeps = sorted(fdict.keys())
olines = []
for item in sdeps:
olines.append(
".. _{name}: {url}\n".format(
name=fdict[item]["name"], url=fdict[item]["url"]
)
)
ret = []
for line in olines:
wobj = textwrap.wrap(line, width=LINE_WIDTH, subsequent_indent=" ")
ret.append("\n".join(item for item in wobj))
mobj.out("\n".join(ret))
def make_common_entry(plist, pyver, suffix, req_ver):
"""Generate Python interpreter version entries for 2.x or 3.x series."""
prefix = "Python {pyver}.x{suffix}".format(pyver=pyver, suffix=suffix)
plist.append("{prefix}{ver}".format(prefix=prefix, ver=ops_to_words(req_ver)))
def make_multi_entry(plist, pkg_pyvers, ver_dict):
"""Generate Python interpreter version entries."""
for pyver in pkg_pyvers:
pver = pyver[2] + "." + pyver[3:]
plist.append("Python {0}: {1}".format(pver, ops_to_words(ver_dict[pyver])))
def op_to_words(item):
"""Translate >=, ==, <= to words."""
sdicts = [
{"==": ""},
{">=": " or newer"},
{">": "newer than "},
{"<=": " or older"},
{"<": "older than "},
{"!=": "except "},
]
for sdict in sdicts:
prefix = list(sdict.keys())[0]
suffix = sdict[prefix]
if item.startswith(prefix):
if prefix == "==":
return item[2:]
if prefix == "!=":
return suffix + item[2:]
if prefix in [">", "<"]:
return suffix + item[1:]
return item[2:] + suffix
raise RuntimeError("Inequality not supported")
def ops_to_words(item):
"""Translate requirement specification to words."""
unsupp_ops = ["~=", "==="]
# Ordered for "pleasant" word specification
supp_ops = [">=", ">", "==", "<=", "<", "!="]
tokens = sorted(item.split(","), reverse=True)
actual_tokens = []
for req in tokens:
for op in unsupp_ops:
if req.startswith(op):
raise RuntimeError("Unsupported version specification: {0}".format(op))
for op in supp_ops:
if req.startswith(op):
actual_tokens.append(op)
break
else:
raise RuntimeError("Illegal comparison operator: {0}".format(op))
if len(list(set(actual_tokens))) != len(actual_tokens):
raise RuntimeError("Multiple comparison operators of the same type")
if "!=" in actual_tokens:
return (
" and ".join([op_to_words(token) for token in tokens[:-1]])
+ " "
+ op_to_words(tokens[-1])
)
return " and ".join([op_to_words(token) for token in tokens])
def proc_requirements(mobj):
"""Get requirements in reStructuredText format."""
pyvers = ["py{0}".format(item.replace(".", "")) for item in get_supported_interps()]
py2vers = sorted([item for item in pyvers if item.startswith("py2")])
py3vers = sorted([item for item in pyvers if item.startswith("py3")])
fdict = json_load(os.path.join("data", "requirements.json"))
olines = [""]
sdict = dict([(item["name"], item) for item in fdict.values()])
for real_name in sorted(sdict.keys()):
pkg_dict = sdict[real_name]
if pkg_dict["cat"] == ["rtd"]:
continue
plist = [] if not pkg_dict["optional"] else ["optional"]
# Convert instances that have a single version for all Python
# interpreters into a full dictionary of Python interpreter and
# package versions, so as to apply the same algorithm in all cases
if isinstance(pkg_dict["ver"], str):
pkg_dict["ver"] = dict([(pyver, pkg_dict["ver"]) for pyver in pyvers])
pkg_pyvers = sorted(pkg_dict["ver"].keys())
pkg_py2vers = sorted(
[item for item in pkg_dict["ver"].keys() if item.startswith("py2")]
)
req_vers = list(set(pkg_dict["ver"].values()))
req_py2vers = list(
set([pkg_dict["ver"][item] for item in py2vers if item in pkg_dict["ver"]])
)
req_py3vers = list(
set([pkg_dict["ver"][item] for item in py3vers if item in pkg_dict["ver"]])
)
if (len(req_vers) == 1) and (pkg_pyvers == pyvers):
plist.append(ops_to_words(req_vers[0]))
elif (
(pkg_pyvers == pyvers)
and (len(req_py2vers) == 1)
and (len(req_py3vers) == 1)
):
make_common_entry(plist, "2", ": ", req_py2vers[0])
make_common_entry(plist, "3", ": ", req_py3vers[0])
elif (
(pkg_pyvers == pyvers)
and (len(req_py2vers) == len(py2vers))
and (len(req_py3vers) == 1)
and (pkg_dict["ver"][pkg_py2vers[-1]] == req_py3vers[0])
):
py2dict = dict(
[
(key, value)
for key, value in pkg_dict["ver"].items()
if key.startswith("py2") and (key != pkg_py2vers[-1])
]
)
make_multi_entry(plist, py2vers[:-1], py2dict)
pver = pkg_py2vers[-1][2] + "." + pkg_py2vers[-1][3:]
plist.append(
"Python {pyver} or newer: {ver}".format(
pyver=pver, ver=ops_to_words(req_py3vers[0])
)
)
elif (
(pkg_pyvers == pyvers)
and (len(req_py2vers) == len(py2vers))
and (len(req_py3vers) == 1)
):
py2dict = dict(
[
(key, value)
for key, value in pkg_dict["ver"].items()
if key.startswith("py2")
]
)
make_multi_entry(plist, py2vers, py2dict)
make_common_entry(plist, "3", ": ", req_py3vers[0])
elif (
(pkg_pyvers == pyvers)
and (len(req_py3vers) == len(py3vers))
and (len(req_py2vers) == 1)
):
py3dict = dict(
[
(key, value)
for key, value in pkg_dict["ver"].items()
if key.startswith("py3")
]
)
make_common_entry(plist, "2", ": ", req_py2vers[0])
make_multi_entry(plist, py3vers, py3dict)
elif (len(req_vers) == 1) and (pkg_pyvers == py2vers):
make_common_entry(plist, "2", " only, ", req_vers[0])
elif (len(req_vers) == 1) and (pkg_pyvers == py3vers):
make_common_entry(plist, "3", " only, ", req_vers[0])
else:
make_multi_entry(plist, pkg_pyvers, pkg_dict["ver"])
olines.append(
" * `{name}`_ ({par})".format(
name=pkg_dict["name"], par=", ".join(plist)
)
)
ret = []
for line in olines:
wobj = textwrap.wrap(line, width=LINE_WIDTH, subsequent_indent=" ")
ret.append("\n".join(item for item in wobj))
mobj.out("\n\n".join(ret) + "\n\n")
|
|
# (c) Copyright 2018 SUSE LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import policy
from datetime import datetime
from datetime import timedelta
from flask import abort
from flask import Blueprint
from flask import jsonify
from flask import request
import json
from monascaclient.client import Client as Mon_client
from oslo_config import cfg
from oslo_log import log as logging
import requests
LOG = logging.getLogger(__name__)
bp = Blueprint('monasca', __name__)
CONF = cfg.CONF
# STATUS constants for server and service status
STATUS_UP = 'up'
STATUS_DOWN = 'down'
STATUS_UNKNOWN = 'unknown'
def get_monasca_endpoint():
"""Get the keystone endpoint for Monasca
the client in Pike won't self-discover, and
the endpoint is used for passthru calls as well
"""
# load the service catalog listing out of the headers inserted
# by the keystone middleware
service_cat = json.loads(request.headers['X-Service-Catalog'])
for service in service_cat:
if service['name'] == 'monasca':
# the endpoints object is a list of size 1 with the endpoint
# dictionary inside of it
endpoints = service['endpoints'][0]
return endpoints['internalURL']
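# Illustrative (abridged, hypothetical values) shape of the X-Service-Catalog
# entry the loop above looks for:
#
#   [
#     {"name": "monasca",
#      "endpoints": [{"internalURL": "http://10.0.0.5:8070/v2.0", ...}]},
#     ...
#   ]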
def get_monasca_client():
"""Instantiates and returns an instance of the monasca python client"""
monasca_endpoint = get_monasca_endpoint()
# Monasca client v1.7.1 used in pike is old, so get its client via
# old-fashioned way (credentials)
# the pike version also cannot reliably discover its own endpoint,
# so it is specified here
mon_client = Mon_client(
api_version="2_0",
endpoint=monasca_endpoint,
auth_url=CONF.keystone_authtoken.auth_url,
username=CONF.keystone_authtoken.username,
password=CONF.keystone_authtoken.password,
project_name=CONF.keystone_authtoken.project_name,
project_domain_name=CONF.keystone_authtoken.project_domain_name,
user_domain_name=CONF.keystone_authtoken.user_domain_name,
insecure=CONF.keystone_authtoken.insecure
)
return mon_client
@bp.route("/api/v2/monasca/service_status", methods=['GET'])
@policy.enforce('lifecycle:get_measurements')
def get_service_statuses():
"""Get the latest monasca http_statuses for all available services
Provides a list of monasca services that have the http_status metric. It
gets the last measurement in the list of measurements and uses that value
as the status for the service.
.. :quickref: monasca; Get a list of service statuses
**Example Request**:
.. sourcecode:: http
GET /api/v2/monasca/service_status HTTP/1.1
Content-Type: application/json
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
[
{
"name": "ardana",
"status": "up"
},
{
"name": "backup",
"status": "down"
},
{
"name": "block-storage",
"status": "unknown"
},
... <and so on>
]
"""
# We'll collect the statuses for the service in a list.
# Note: increasing the "minutes" value will reduce the chances of
# getting no status, but might also yield a stale result
client = get_monasca_client()
parms = {
"name": "http_status",
"start_time":
(datetime.utcnow() - timedelta(minutes=1)).isoformat(),
"group_by": "service"
}
measurements = None
try:
measurements = client.metrics.list_measurements(**parms)
if not measurements:
LOG.error("Empty measurements from Monasca")
abort(404, "Unable to retrieve any statuses")
except Exception as e:
LOG.error("Unable to access Monasca: %s" % e)
abort(503, "Monasca service unavailable")
statuses = []
for m in measurements:
service = m['dimensions']['service']
# we get the last measurement value, which is also the latest
val_idx = m['columns'].index('value')
if not m['measurements']:
status = "unknown"
else:
value = m['measurements'][-1][val_idx]
if value == 0:
status = "up"
else:
status = "down"
statuses.append({
'name': service,
'status': status
})
return jsonify(statuses)
@bp.route("/api/v2/monasca/is_installed", methods=['GET'])
@policy.enforce('lifecycle:get_measurements')
def is_monasca_installed():
"""Checks to see if Monasca is installed on the environment
this check can be used to evaluate whether further
monasca calls are useful
"""
return jsonify({'installed': get_monasca_endpoint() is not None})
def get_parse_host_measurements_for_status(params, client):
"""Makes the query to Monasca for the specified measurement
requires a set of parameters to define the metric and dimension
being queried, but assumes that the metric is compatible with
the ping_check/host_alive_status. Assumes the metric specified
is compatible with a ping_status check (validates against
0.0 for 'up' , 1.0 for 'down', consistent with Monasca ping checks)
the monasca client may optionally be provided to avoid loading
a fresh monasca client instance for each call in a loop
"""
status = STATUS_UNKNOWN
if not client:
client = get_monasca_client()
ping_measurements = client.metrics.list_measurements(**params)
for per_host_meas in ping_measurements:
# check if there are any valid measurements
# and if they show the host to be up
if len(per_host_meas['measurements']) > 0:
(time, ping_value, value_meta) = per_host_meas['measurements'][-1]
if ping_value == 0.0:
status = STATUS_UP
elif ping_value == 1.0 and status == STATUS_UNKNOWN:
# if a previous check found the host to be up,
# don't change it to down
status = STATUS_DOWN
return status
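# For illustration only: a hypothetical params dict for the helper above,
# mirroring the host_alive_status query built in get_server_status() below.
#
#   params = {
#       'name': 'host_alive_status',
#       'start_time': '2018-01-01T00:00:00Z',  # hypothetical timestamp
#       'group_by': '*',
#       'dimensions': {'test_type': 'ping', 'hostname': 'host001'},
#   }
#   status = get_parse_host_measurements_for_status(params, client)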
@bp.route("/api/v2/monasca/server_status/<path:name>", methods=['GET'])
@policy.enforce('lifecycle:get_measurements')
def get_server_status(name):
"""Get the latest monasca host_alive_status for the specified host
    Provides the result of the most recent host_alive_status for the host. It
    takes the last measurement in the list of measurements and uses that value
    as the status for the host. If the host has no status as a ping target,
    a fallback check is made to see whether the host successfully observed
    any other hosts for ping status.
.. :quickref: monasca; Get a single host status
**Example Request**:
.. sourcecode:: http
GET /api/v2/monasca/server_status/host001 HTTP/1.1
Content-Type: application/json
**Example Response**:
.. sourcecode:: http
HTTP/1.1 200 OK
{
"status": "up"
}
"""
if not name:
return jsonify({})
client = get_monasca_client()
# get the ping measurements for the host in question
# for the last 5 minutes
start_time = (datetime.utcnow() - timedelta(minutes=5)) \
.strftime("%Y-%m-%dT%H:%M:%SZ")
meas_parms = {
'name': 'host_alive_status',
"start_time": start_time,
'group_by': "*",
'dimensions': {
'test_type': 'ping',
'hostname': name
}
}
status = get_parse_host_measurements_for_status(meas_parms, client)
    # if the host didn't have direct ping checks, see if
    # it observed any other hosts, since successfully observing
    # others implies the host itself is up
meas_parms = {
'name': 'host_alive_status',
"start_time": start_time,
'group_by': "*",
'dimensions': {
'test_type': 'ping',
'observer_host': name
}
}
if status == STATUS_UNKNOWN:
status = get_parse_host_measurements_for_status(meas_parms, client)
return jsonify({'status': status})
@bp.route("/api/v2/monasca/passthru/<path:url>",
methods=['GET', 'POST', 'PUT', 'DELETE'])
@policy.enforce('lifecycle:get_measurements')
def passthru(url):
"""Passes thru the request directly to monasca
.. :quickref: monasca; passthru endpoint to monasca
**Example Request**:
.. sourcecode:: http
GET /api/v2/monasca/passthru/alarms/count HTTP/1.1
"""
# populate monasca_endpoint in case it has not yet been populated
monasca_endpoint = get_monasca_endpoint()
req_url = monasca_endpoint + "/" + url
req = requests.Request(method=request.method, url=req_url,
params=request.args, headers=request.headers,
data=request.data)
resp = requests.Session().send(req.prepare(),
verify=not CONF.keystone_authtoken.insecure)
return (resp.text, resp.status_code, resp.headers.items())
|
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import glob
import os
import xml.dom.minidom as DOM
from textwrap import dedent
import coverage
from mock import patch
from pants.backend.python.tasks.pytest_run import PytestRun
from pants.base.exceptions import TestFailedTaskError
from pants.util.contextutil import pushd
from pants.util.timeout import TimeoutReached
from pants_test.backend.python.tasks.python_task_test_base import PythonTaskTestBase
class PythonTestBuilderTestBase(PythonTaskTestBase):
@classmethod
def task_type(cls):
return PytestRun
def run_tests(self, targets, **options):
test_options = {
'colors': False,
'level': 'info' # When debugging a test failure it may be helpful to set this to 'debug'.
}
test_options.update(options)
self.set_options(**test_options)
context = self.context(target_roots=targets)
pytest_run_task = self.create_task(context)
with pushd(self.build_root):
pytest_run_task.execute()
def run_failing_tests(self, targets, failed_targets, **options):
with self.assertRaises(TestFailedTaskError) as cm:
self.run_tests(targets=targets, **options)
self.assertEqual(set(failed_targets), set(cm.exception.failed_targets))
class PythonTestBuilderTestEmpty(PythonTestBuilderTestBase):
def test_empty(self):
self.run_tests(targets=[])
class PythonTestBuilderTest(PythonTestBuilderTestBase):
def setUp(self):
super(PythonTestBuilderTest, self).setUp()
self.create_file(
'lib/core.py',
dedent("""
def one(): # line 1
return 1 # line 2
# line 3
# line 4
def two(): # line 5
return 2 # line 6
""").strip())
self.add_to_build_file(
'lib',
dedent("""
python_library(
name='core',
sources=[
'core.py'
]
)
"""))
self.create_file(
'tests/test_core_green.py',
dedent("""
import unittest2 as unittest
import core
class CoreGreenTest(unittest.TestCase):
def test_one(self):
self.assertEqual(1, core.one())
"""))
self.create_file(
'tests/test_core_red.py',
dedent("""
import core
def test_two():
assert 1 == core.two()
"""))
self.create_file(
'tests/test_core_red_in_class.py',
dedent("""
import unittest2 as unittest
import core
class CoreRedClassTest(unittest.TestCase):
def test_one_in_class(self):
self.assertEqual(1, core.two())
"""))
self.create_file(
'tests/test_core_sleep.py',
dedent("""
import core
def test_three():
assert 1 == core.one()
"""))
self.add_to_build_file(
'tests',
dedent("""
python_tests(
name='green',
sources=[
'test_core_green.py'
],
dependencies=[
'lib:core'
],
coverage=[
'core'
]
)
python_tests(
name='red',
sources=[
'test_core_red.py',
],
dependencies=[
'lib:core'
],
coverage=[
'core'
]
)
python_tests(
name='red_in_class',
sources=[
'test_core_red_in_class.py',
],
dependencies=[
'lib:core'
],
coverage=[
'core'
]
)
python_tests(
name='sleep_no_timeout',
sources=[
'test_core_sleep.py',
],
timeout = 0,
dependencies=[
'lib:core'
],
coverage=[
'core'
]
)
python_tests(
name='sleep_timeout',
sources=[
'test_core_sleep.py',
],
timeout = 1,
dependencies=[
'lib:core'
],
coverage=[
'core'
]
)
python_tests(
name='all',
sources=[
'test_core_green.py',
'test_core_red.py',
],
dependencies=[
'lib:core'
]
)
python_tests(
name='all-with-coverage',
sources=[
'test_core_green.py',
'test_core_red.py'
],
dependencies=[
'lib:core'
],
coverage=[
'core'
]
)
"""))
self.green = self.target('tests:green')
self.red = self.target('tests:red')
self.red_in_class = self.target('tests:red_in_class')
self.sleep_no_timeout = self.target('tests:sleep_no_timeout')
self.sleep_timeout = self.target('tests:sleep_timeout')
self.all = self.target('tests:all')
self.all_with_coverage = self.target('tests:all-with-coverage')
def test_green(self):
self.run_tests(targets=[self.green])
def test_red(self):
self.run_failing_tests(targets=[self.red], failed_targets=[self.red])
def test_red_test_in_class(self):
# for test in a class, the failure line is in the following format
# F testprojects/tests/python/pants/constants_only/test_fail.py::TestClassName::test_boom
self.run_failing_tests(targets=[self.red_in_class], failed_targets=[self.red_in_class])
def test_mixed(self):
self.run_failing_tests(targets=[self.green, self.red], failed_targets=[self.red])
def test_one_timeout(self):
"""When we have two targets, any of them doesn't have a timeout, and we have no default, then no timeout is set."""
with patch('pants.backend.core.tasks.test_task_mixin.Timeout') as mock_timeout:
self.run_tests(targets=[self.sleep_no_timeout, self.sleep_timeout])
mock_timeout.assert_called_with(None)
def test_timeout(self):
"""Check that a failed timeout returns the right results."""
with patch('pants.backend.core.tasks.test_task_mixin.Timeout') as mock_timeout:
mock_timeout().__exit__.side_effect = TimeoutReached(1)
self.run_failing_tests(targets=[self.sleep_timeout],
failed_targets=[self.sleep_timeout])
mock_timeout.assert_called_with(1)
def test_junit_xml_option(self):
# We expect xml of the following form:
# <testsuite errors=[Ne] failures=[Nf] skips=[Ns] tests=[Nt] ...>
# <testcase classname="..." name="..." .../>
# <testcase classname="..." name="..." ...>
# <failure ...>...</failure>
# </testcase>
# </testsuite>
report_basedir = os.path.join(self.build_root, 'dist', 'junit_option')
self.run_failing_tests(targets=[self.green, self.red], failed_targets=[self.red],
junit_xml_dir=report_basedir)
files = glob.glob(os.path.join(report_basedir, '*.xml'))
self.assertEqual(1, len(files), 'Expected 1 file, found: {}'.format(files))
junit_xml = files[0]
root = DOM.parse(junit_xml).documentElement
self.assertEqual(2, len(root.childNodes))
self.assertEqual(2, int(root.getAttribute('tests')))
self.assertEqual(1, int(root.getAttribute('failures')))
self.assertEqual(0, int(root.getAttribute('errors')))
self.assertEqual(0, int(root.getAttribute('skips')))
children_by_test_name = dict((elem.getAttribute('name'), elem) for elem in root.childNodes)
self.assertEqual(0, len(children_by_test_name['test_one'].childNodes))
self.assertEqual(1, len(children_by_test_name['test_two'].childNodes))
self.assertEqual('failure', children_by_test_name['test_two'].firstChild.nodeName)
def coverage_data_file(self):
return os.path.join(self.build_root, '.coverage')
def load_coverage_data(self, path):
data_file = self.coverage_data_file()
self.assertTrue(os.path.isfile(data_file))
coverage_data = coverage.coverage(data_file=data_file)
coverage_data.load()
_, all_statements, not_run_statements, _ = coverage_data.analysis(path)
return all_statements, not_run_statements
def test_coverage_simple_option(self):
# TODO(John Sirois): Consider eliminating support for "simple" coverage or at least formalizing
# the coverage option value that turns this on to "1" or "all" or "simple" = anything formal.
simple_coverage_kwargs = {'coverage': '1'}
self.assertFalse(os.path.isfile(self.coverage_data_file()))
covered_file = os.path.join(self.build_root, 'lib', 'core.py')
self.run_tests(targets=[self.green], **simple_coverage_kwargs)
all_statements, not_run_statements = self.load_coverage_data(covered_file)
self.assertEqual([1, 2, 5, 6], all_statements)
self.assertEqual([6], not_run_statements)
self.run_failing_tests(targets=[self.red], failed_targets=[self.red], **simple_coverage_kwargs)
all_statements, not_run_statements = self.load_coverage_data(covered_file)
self.assertEqual([1, 2, 5, 6], all_statements)
self.assertEqual([2], not_run_statements)
self.run_failing_tests(targets=[self.green, self.red], failed_targets=[self.red],
**simple_coverage_kwargs)
all_statements, not_run_statements = self.load_coverage_data(covered_file)
self.assertEqual([1, 2, 5, 6], all_statements)
self.assertEqual([], not_run_statements)
# The all target has no coverage attribute and the code under test does not follow the
# auto-discover pattern so we should get no coverage.
self.run_failing_tests(targets=[self.all], failed_targets=[self.all], **simple_coverage_kwargs)
all_statements, not_run_statements = self.load_coverage_data(covered_file)
self.assertEqual([1, 2, 5, 6], all_statements)
self.assertEqual([1, 2, 5, 6], not_run_statements)
self.run_failing_tests(targets=[self.all_with_coverage],
failed_targets=[self.all_with_coverage],
**simple_coverage_kwargs)
all_statements, not_run_statements = self.load_coverage_data(covered_file)
self.assertEqual([1, 2, 5, 6], all_statements)
self.assertEqual([], not_run_statements)
def test_coverage_modules_dne_option(self):
self.assertFalse(os.path.isfile(self.coverage_data_file()))
covered_file = os.path.join(self.build_root, 'lib', 'core.py')
# modules: should trump .coverage
self.run_failing_tests(targets=[self.green, self.red], failed_targets=[self.red],
coverage='modules:does_not_exist,nor_does_this')
all_statements, not_run_statements = self.load_coverage_data(covered_file)
self.assertEqual([1, 2, 5, 6], all_statements)
self.assertEqual([1, 2, 5, 6], not_run_statements)
def test_coverage_modules_option(self):
self.assertFalse(os.path.isfile(self.coverage_data_file()))
covered_file = os.path.join(self.build_root, 'lib', 'core.py')
self.run_failing_tests(targets=[self.all], failed_targets=[self.all], coverage='modules:core')
all_statements, not_run_statements = self.load_coverage_data(covered_file)
self.assertEqual([1, 2, 5, 6], all_statements)
self.assertEqual([], not_run_statements)
def test_coverage_paths_dne_option(self):
self.assertFalse(os.path.isfile(self.coverage_data_file()))
covered_file = os.path.join(self.build_root, 'lib', 'core.py')
# paths: should trump .coverage
self.run_failing_tests(targets=[self.green, self.red], failed_targets=[self.red],
coverage='paths:does_not_exist/,nor_does_this/')
all_statements, not_run_statements = self.load_coverage_data(covered_file)
self.assertEqual([1, 2, 5, 6], all_statements)
self.assertEqual([1, 2, 5, 6], not_run_statements)
def test_coverage_paths_option(self):
self.assertFalse(os.path.isfile(self.coverage_data_file()))
covered_file = os.path.join(self.build_root, 'lib', 'core.py')
self.run_failing_tests(targets=[self.all], failed_targets=[self.all], coverage='paths:core.py')
all_statements, not_run_statements = self.load_coverage_data(covered_file)
self.assertEqual([1, 2, 5, 6], all_statements)
self.assertEqual([], not_run_statements)
def test_sharding(self):
self.run_failing_tests(targets=[self.red, self.green], failed_targets=[self.red], shard='0/2')
self.run_tests(targets=[self.red, self.green], shard='1/2')
def test_sharding_single(self):
self.run_failing_tests(targets=[self.red], failed_targets=[self.red], shard='0/1')
def test_sharding_invalid_shard_too_small(self):
with self.assertRaises(PytestRun.InvalidShardSpecification):
self.run_tests(targets=[self.green], shard='-1/1')
def test_sharding_invalid_shard_too_big(self):
with self.assertRaises(PytestRun.InvalidShardSpecification):
self.run_tests(targets=[self.green], shard='1/1')
def test_sharding_invalid_shard_bad_format(self):
with self.assertRaises(PytestRun.InvalidShardSpecification):
self.run_tests(targets=[self.green], shard='1')
with self.assertRaises(PytestRun.InvalidShardSpecification):
self.run_tests(targets=[self.green], shard='1/2/3')
with self.assertRaises(PytestRun.InvalidShardSpecification):
self.run_tests(targets=[self.green], shard='1/a')
|
|
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import TestSCons
import sys
if sys.platform == 'win32':
_exe = '.exe'
bar_lib = 'bar.lib'
else:
_exe = ''
bar_lib = 'libbar.a'
test = TestSCons.TestSCons()
test.subdir('sub1', 'sub2')
foo1_exe = test.workpath('foo1' + _exe)
foo2_exe = test.workpath('foo2' + _exe)
foo3_exe = test.workpath('foo3' + _exe)
foo4_exe = test.workpath('foo4' + _exe)
foo5_exe = test.workpath('foo5' + _exe)
slprog_exe = test.workpath('slprog' + _exe)
test.write('SConstruct', """
env = Environment(LIBS=['bar'], LIBPATH = '.')
env.Program(target='foo1', source='foo1.c')
env2 = Environment(LIBS=[File(r'%s')], LIBPATH = '.')
env2.Program(target='foo2', source='foo2.c')
env3 = Environment(LIBS='bar', LIBPATH = '.')
env3.Program(target='foo3', source='foo3.c')
env4 = Environment(LIBS=File(r'%s'), LIBPATH = '.')
env4.Program(target='foo4', source='foo4.c')
env5 = Environment(LIBS=['bar', '$UNSPECIFIED'], LIBPATH = '.')
env5.Program(target='foo5', source='foo5.c')
sl = env.StaticLibrary('sl.c')
env.Program(target='slprog.c', LIBS=[sl])
SConscript('sub1/SConscript', 'env')
SConscript('sub2/SConscript', 'env')
""" % (bar_lib, bar_lib))
test.write(['sub1', 'SConscript'], r"""
Import('env')
lib = env.Library(target='bar', source=Split('bar.c baz.c'))
env.Install('..', lib)
""")
test.write(['sub2', 'SConscript'], r"""
Import('env')
lib = env.Library(target='baz', source='baz.c')
env.Install('..', lib)
""")
foo_contents = r"""
void bar();
void baz();
int main(void)
{
bar();
baz();
return 0;
}
"""
test.write('foo1.c', foo_contents)
test.write('foo2.c', foo_contents)
test.write('foo3.c', foo_contents)
test.write('foo4.c', foo_contents)
test.write('foo5.c', foo_contents)
test.write('sl.c', """\
#include <stdio.h>
void
sl(void)
{
printf("sl.c\\n");
}
""")
test.write('slprog.c', """\
#include <stdio.h>
int
main(int argc, char *argv[])
{
sl();
printf("slprog.c\\n");
exit (0);
}
""")
test.write(['sub1', 'bar.c'], r"""
#include <stdio.h>
void bar()
{
printf("sub1/bar.c\n");
}
""")
test.write(['sub1', 'baz.c'], r"""
#include <stdio.h>
void baz()
{
printf("sub1/baz.c\n");
}
""")
test.write(['sub2', 'baz.c'], r"""
#include <stdio.h>
void baz()
{
printf("sub2/baz.c\n");
}
""")
# ar sometimes produces a "warning" on stderr -- ar: creating sub1/libbar.a
test.run(arguments = '.', stderr=None)
test.run(program=foo1_exe, stdout='sub1/bar.c\nsub1/baz.c\n')
test.run(program=foo2_exe, stdout='sub1/bar.c\nsub1/baz.c\n')
test.run(program=foo3_exe, stdout='sub1/bar.c\nsub1/baz.c\n')
test.run(program=foo4_exe, stdout='sub1/bar.c\nsub1/baz.c\n')
test.run(program=foo5_exe, stdout='sub1/bar.c\nsub1/baz.c\n')
test.run(program=slprog_exe, stdout='sl.c\nslprog.c\n')
#
test.write('SConstruct', """
env = Environment(LIBS=['baz'])
env.Program(target='foo1', source='foo1.c', LIBS=['$LIBS', 'bar'], LIBPATH = '.')
SConscript('sub1/SConscript', 'env')
SConscript('sub2/SConscript', 'env')
""")
test.run(arguments = '.')
test.run(program=foo1_exe, stdout='sub1/bar.c\nsub2/baz.c\n')
#
test.write('SConstruct', """
env = Environment(LIBS=['bar', 'baz'], LIBPATH = '.')
env.Program(target='foo1', source='foo1.c')
SConscript('sub1/SConscript', 'env')
SConscript('sub2/SConscript', 'env')
""")
# on IRIX, ld32 prints out a warning saying that libbaz.a isn't used
sw = 'ld32: WARNING 84 : ./libbaz.a is not used for resolving any symbol.\n'
test.run(arguments = '.',
stderr='(%s|%s'%(sw, TestSCons.noisy_ar[1:]),
match=TestSCons.match_re_dotall)
#test.fail_test(not test.stderr() in ['', sw])
test.run(program=foo1_exe, stdout='sub1/bar.c\nsub1/baz.c\n')
#
test.write('SConstruct', """
env = Environment()
env.Program(target='foo1', source='foo1.c', LIBS=['bar', 'baz'], LIBPATH = '.')
SConscript('sub1/SConscript', 'env')
SConscript('sub2/SConscript', 'env')
""")
test.run(arguments = '.')
test.run(program=foo1_exe, stdout='sub1/bar.c\nsub1/baz.c\n')
test.write(['sub1', 'baz.c'], r"""
#include <stdio.h>
void baz()
{
printf("sub1/baz.c 2\n");
}
""")
test.run(arguments = '.',
stderr='(%s|%s'%(sw, TestSCons.noisy_ar[1:]),
match=TestSCons.match_re_dotall)
#test.fail_test(not test.stderr() in ['', sw, TestSCons.noisy_ar])
test.run(program=foo1_exe, stdout='sub1/bar.c\nsub1/baz.c 2\n')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2015 Shigeru Kitazaki
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Simple command line script.
TODO: Write description on your own.
"""
import argparse
import configparser
import csv
import datetime
import gzip
import hashlib
import json
import logging
import logging.config
import os
import sqlite3
import sys
import time
import traceback
from collections import Counter
from functools import partial
from pathlib import Path
__version__ = '0.1.0'
__author__ = 'Shigeru Kitazaki'
APPNAME = Path(__file__).stem
BASEDIR = Path(__file__).parent.resolve()
DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'
CONFIG_FILE_ENCODING = 'utf8'
DEFAULT_SQLITE_FILE = ':memory:'
DEFAULT_INPUT_FILE_ENCODING = 'utf8'
DEFAULT_OUTPUT_FILE_ENCODING = 'utf8'
DEFAULT_LOG_DIRECTORY = Path.cwd()
logging.config.dictConfig({
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'datefmt': '%Y-%m-%d %H:%M:%S',
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
'detailed': {
'datefmt': '%Y-%m-%d %H:%M:%S',
'format': '%(asctime)s [%(levelname)s] %(name)s '
'%(filename)s:L%(lineno)-4d: %(message)s'
}
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'level': 'INFO',
'stream': 'ext://sys.stderr',
'formatter': 'standard'
},
'file': {
'class': 'logging.handlers.RotatingFileHandler',
'level': 'DEBUG',
'formatter': 'detailed',
'filename': DEFAULT_LOG_DIRECTORY / (APPNAME + '.log'),
'mode': 'a',
'maxBytes': 10485760,
'backupCount': 5,
'encoding': 'utf8'
}
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'INFO',
'propagate': True
},
APPNAME: {
'handlers': ['console', 'file'],
'level': 'WARN',
'propagate': False
}
}
})
def parse_arguments():
"""Parse arguments and set up logging verbosity.
:rtype: parsed arguments as Namespace object.
"""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-V', '--version', action='version',
version='%(prog)s ' + __version__)
parser.add_argument('-c', '--config', dest='config', required=False,
help='configuration file', metavar='FILE')
parser.add_argument('-s', '--sqlite', dest='sqlite', required=False,
help='local SQLite3 file', metavar='FILE')
parser.add_argument('-M', '--monitor-output', dest='monitor_out',
help='progress monitor dump file', metavar='FILE')
parser.add_argument('-o', '--output', dest='output',
help='output path', metavar='FILE')
parser.add_argument('-n', '--dryrun', dest='dryrun',
help='dry run', default=False, action='store_true')
parser.add_argument('-e', '--encoding', dest='encoding',
help='input file encoding',
default=DEFAULT_INPUT_FILE_ENCODING)
parser.add_argument('-E', '--output-encoding', dest='encoding_out',
help='output file encoding',
default=DEFAULT_OUTPUT_FILE_ENCODING)
parser.add_argument('-r', '--recursive', dest='recursive', default=False,
help='search recursive', action='store_true')
parser.add_argument('files', nargs='*',
help='input files', metavar='FILE')
header = parser.add_mutually_exclusive_group()
header.add_argument('--with-header', dest='header',
action='store_const', const=True,
help='input file has header line (default)')
header.add_argument('--without-header', dest='header',
action='store_const', const=False,
help='input file does not have header line')
parser.set_defaults(header=True)
loglevel = parser.add_mutually_exclusive_group()
loglevel.add_argument('-v', '--verbose', dest='verbose',
action='count', default=0,
help='increase logging verbosity')
loglevel.add_argument('-q', '--quiet', dest='quiet',
default=False, action='store_true',
help='set logging to quiet mode')
try:
args = parser.parse_args()
except IOError:
e = sys.exc_info()[1]
parser.error('File not found: %s' % (e, ))
# Set up logging verbosity level.
logger = logging.getLogger(APPNAME)
if args.quiet:
logger.setLevel(logging.CRITICAL)
elif args.verbose >= 3:
logger.setLevel(logging.DEBUG)
elif args.verbose >= 2:
logger.setLevel(logging.ERROR)
elif args.verbose >= 1:
logger.setLevel(logging.WARN)
else:
logger.setLevel(logging.INFO)
return args
def collect_files(inputs, recursive=False):
'''Collect file paths from input arguments.
    Directories whose names start with "." and files whose names end with
    "~" are skipped.
'''
logger = logging.getLogger(APPNAME + '.setup')
files = []
if inputs is None or len(inputs) == 0:
return files
for path in inputs:
if os.path.isfile(path):
logger.debug('Target file exists: %s', path)
files.append(path)
elif os.path.isdir(path) and recursive:
logger.debug('Target directory exists: %s', path)
for root, ds, fs in os.walk(path):
# Prune hidden directory.
ds[:] = [d for d in sorted(ds) if not d.startswith('.')]
for f in sorted(filter(lambda f: not f.endswith('~'), fs)):
p = os.path.join(root, f)
files.append(p)
else:
logger.fatal('File not found: %s', path)
sys.exit(1)
logger.debug('Collect {:,} files.'.format(len(files)))
return files
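# For illustration only (hypothetical layout): with files
#   data/report.csv, data/.cache/tmp.csv and data/old.csv~
# a call such as collect_files(['data'], recursive=True) returns only
# ['data/report.csv'], because hidden directories are pruned and "~" backup
# files are filtered out.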
def md5sum(path):
# Calculate MD5 sum value.
chunk_size = 4096
md5 = hashlib.md5()
with open(path, 'rb') as fp:
for buf in iter(partial(fp.read, chunk_size), b''):
md5.update(buf)
return md5.hexdigest()
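# For illustration only (hypothetical path): md5sum('data/report.csv') reads
# the file in 4096-byte chunks and returns the hex digest as a string, e.g.
# 'd41d8cd98f00b204e9800998ecf8427e' for an empty file.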
class ConfigLoader(object):
"""Configuration file loader to support multiple file types.
Supported file types are:
* ini/cfg
* json
    :param path: path of the configuration file to load.
        The file type is detected automatically from the file extension.
"""
def __init__(self, path):
assert path
self.path = path
self.logger = logging.getLogger(APPNAME + '.config')
def _load(self, extension):
self.logger.debug('Config file extension is "%s".', extension)
if extension == '.json':
with open(self.path, encoding=CONFIG_FILE_ENCODING) as fp:
return json.load(fp)
elif extension in (".ini", ".cfg"):
            parser = configparser.ConfigParser()
            with open(self.path, encoding=CONFIG_FILE_ENCODING) as fp:
                parser.read_file(fp)
config = {}
for s in parser.sections():
config[s] = dict(parser.items(s))
return config
else:
self.logger.warn('Unknown file type extension: %s', extension)
def load(self, env=None):
""" Load a section values of given environment.
If nothing to specified, use environmental variable.
If unknown environment was specified, warn it on logger.
:param env: environment key to load in a coercive manner
:type env: string
:rtype: dict
"""
self.logger.debug('Loading config "%s" ...', self.path)
ext = os.path.splitext(self.path)[-1].lower()
if len(ext) == 0:
self.logger.warning('missing file extension: %s', self.path)
return
elif not ext.startswith('.'):
self.logger.warning("file extension doesn't start with dot: %s",
self.path)
return
return self._load(ext)
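# Minimal usage sketch for ConfigLoader (the file name is hypothetical):
#
#   loader = ConfigLoader('settings.json')
#   config = loader.load()           # dict parsed from the JSON file
#   db_conf = config['database']     # assuming the file defines that section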
class ProgressMonitor(object):
TABLE_NAME = '_monitor'
SCHEMA = {
'fields': (
{'name': 'seq', 'type': 'integer',
'constraints': {'required': True, 'unique': True}},
{'name': 'path', 'type': 'string',
'constraints': {'required': True}},
{'name': 'size', 'type': 'integer',
'constraints': {'required': True}},
{'name': 'start_at', 'type': 'float',
'constraints': {'required': True}},
{'name': 'finish_at', 'type': 'float'},
{'name': 'digest', 'type': 'string',
'constraints': {'required': True, 'unique': True}},
{'name': 'result', 'type': 'string'}, # Anything encoded by JSON
),
'primaryKey': ['seq']
}
def __init__(self, db, dump=None):
self.logger = logging.getLogger(APPNAME + '.monitor')
self.db = db
self.create_table()
self.dump = dump
self.current = None
def create_table(self):
# Check whether monitor table already exists.
r = self.fetch_one('sql',
(('type', '=', 'table'),
('tbl_name', '=', ProgressMonitor.TABLE_NAME)),
'sqlite_master')
if r:
self.logger.info('Monitor table is already created.')
self.logger.debug(r[0])
return
d = []
        # Map JSON Table Schema field types onto SQLite column types
for s in ProgressMonitor.SCHEMA['fields']:
t = 'TEXT'
if s['type'] == 'integer':
t = 'INTEGER'
elif s['type'] == 'float':
t = 'REAL'
f = '{} {}'.format(s['name'], t)
c = s.get('constraints')
if c:
if c.get('required'):
f += ' NOT NULL'
if c.get('unique'):
f += ' UNIQUE'
d.append(f)
f = 'PRIMARY KEY ('
f += ','.join(k for k in ProgressMonitor.SCHEMA['primaryKey'])
f += ')'
d.append(f)
ddl = """CREATE TABLE {} ({})""".format(
ProgressMonitor.TABLE_NAME, ','.join(d))
self.logger.debug('Create monitor table: %s', ddl)
cur = self.db.cursor()
cur.execute(ddl)
cur.close()
self.logger.info('Created monitor table.')
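    # For reference, the DDL generated from SCHEMA above looks like this
    # (line breaks added for readability):
    #   CREATE TABLE _monitor (seq INTEGER NOT NULL UNIQUE,
    #       path TEXT NOT NULL, size INTEGER NOT NULL,
    #       start_at REAL NOT NULL, finish_at REAL,
    #       digest TEXT NOT NULL UNIQUE, result TEXT,
    #       PRIMARY KEY (seq))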
def terminate(self, fields):
if self.dump is None:
return
if os.path.isfile(self.dump):
self.logger.warn('Overwrite dump file: %s', self.dump)
self.logger.info('Dump monitor records as tab-delimited values.')
fp = open(self.dump, 'w')
writer = csv.writer(fp, delimiter='\t', lineterminator='\n')
columns = [f['name'] for f in ProgressMonitor.SCHEMA['fields']]
q = 'SELECT {} FROM {} ORDER BY seq'.format(
','.join(columns), ProgressMonitor.TABLE_NAME)
cur = self.db.cursor()
cur.execute(q)
dumper = Tabular(fields)
writer.writerow(dumper.header())
for r in cur:
t = dict(zip(columns, r))
t['path'] = t['path'].replace('\\', '/')
t['basename'] = os.path.basename(t['path'])
t['extension'] = os.path.splitext(t['path'])[-1].lower()
if t['finish_at'] is not None:
t['elapsed'] = t['finish_at'] - t['start_at']
t['finish_at'] = datetime.datetime.fromtimestamp(t['finish_at'])
t['start_at'] = datetime.datetime.fromtimestamp(t['start_at'])
if t['result']:
result = json.loads(t['result'])
for k in result:
t[k] = result[k]
writer.writerow(dumper(t))
cur.close()
fp.close()
def fetch_one(self, columns, conditions, table=None):
        # `columns` may be either a single string or a list of names.
if type(columns) == str:
columns = [columns, ]
d = []
values = []
# Column name, Operator, Value tuples.
for c, o, v in conditions:
# "?" is placement holder.
d.append("{} {} ?".format(c, o))
values.append(v)
q = """SELECT {} FROM {} WHERE {}""".format(
','.join(columns),
table or ProgressMonitor.TABLE_NAME,
' AND '.join(d))
self.logger.debug('Fetch one record: %s; %s', q, values)
cur = self.db.cursor()
cur.execute(q, values)
r = cur.fetchone()
cur.close()
return r
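    # For example, the lookup in create_table() above,
    #   fetch_one('sql', (('type', '=', 'table'),
    #                     ('tbl_name', '=', '_monitor')), 'sqlite_master')
    # builds the parameterized query
    #   SELECT sql FROM sqlite_master WHERE type = ? AND tbl_name = ?
    # and executes it with the values ['table', '_monitor'].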
def start(self, path):
md5 = md5sum(path)
r = self.fetch_one(['seq', 'path', 'size', 'start_at', 'finish_at'], (
('digest', '=', md5),
))
if r:
msg = 'Already processed "{}": [{}] {} -> {}'
self.logger.info(msg.format(r[1], r[0], r[2], r[3]))
return r
size = os.path.getsize(path)
self.logger.info('Start monitoring: {} ({}) {:,}bytes'.format(
path, md5, size))
columns = ('path', 'size', 'start_at', 'digest')
values = (path, size, time.time(), md5)
q = """INSERT INTO {} ({}) VALUES ({})""".format(
ProgressMonitor.TABLE_NAME,
','.join(columns),
','.join(['?' for i in range(len(values))])
)
self.logger.debug('Insert one record: %s; %s', q, values)
cur = self.db.cursor()
cur.execute(q, values)
cur.close()
self.current = md5
def finish(self, result=None):
if self.current is None:
            self.logger.fatal('Nothing is being monitored, but `finish()` was called.')
return
r = self.fetch_one(['seq', 'path', 'start_at'], (
('digest', '=', self.current),
))
if r is None:
self.logger.fatal('Monitor "%s", but removed.', self.current)
return
now = time.time()
if result:
values = (now, json.dumps(result), r[0])
columns = ('finish_at = ?', 'result = ?')
else:
values = (now, r[0])
columns = ('finish_at = ?', )
q = """UPDATE {} SET {} WHERE seq = ?""".format(
ProgressMonitor.TABLE_NAME,
','.join(columns)
)
self.logger.debug('Update one record: %s; %s', q, values)
cur = self.db.cursor()
cur.execute(q, values)
cur.close()
self.current = None
self.logger.info('Finish processing: {} [{}] {:,.03f}sec'.format(
r[1], r[0], now - r[2]))
class Tabular(object):
'''JSON Table Schema based record class.
FIELDS = (
{'name': 'id', 'type': 'string'},
{'name': 'updated', 'type': 'datetime', 'format': '%Y-%m-%dT%H:%M:%SZ'},
{'name': 'name', 'type': 'string'},
{'name': 'latitude', 'type': 'float'},
{'name': 'longitude', 'type': 'float'},
{'name': 'zipcode', 'type': 'string'},
{'name': 'kind', 'type': 'string', 'default': 'UNKNOWN'},
{'name': 'update_type', 'type': 'integer'}
)
'''
def __init__(self, fields):
self.fields = fields
def header(self):
return [f['name'] for f in self.fields]
def __call__(self, dt):
out = []
for f in self.fields:
k, t = f['name'], f['type']
v = dt.get(k, f.get('default', ''))
if v is None:
val = ''
elif t == 'string':
val = v
elif t == 'datetime':
val = v.strftime(f['format'])
elif t == 'integer':
val = str(v)
elif t in ('float', 'numeric'):
if 'precision' in f:
v = round(v, f['precision'])
val = str(v)
elif t == 'boolean':
m = f.get('mapping', {})
if v in m:
val = m[v]
else:
val = str(v)
else:
raise ValueError('Unknown type "{}" for "{}"'.format(t, k))
out.append(val)
return out
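# Minimal usage sketch for Tabular, mirroring ProgressMonitor.terminate():
#
#   dumper = Tabular(MONITOR_DUMP_FIELDS)
#   writer.writerow(dumper.header())  # column names
#   writer.writerow(dumper(record))   # `record` is a dict keyed by field name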
# Default monitor dump schema. If you add more fields to dump, add it here.
MONITOR_DUMP_FIELDS = (
{'name': 'seq', 'type': 'integer'},
{'name': 'path', 'type': 'string'},
{'name': 'basename', 'type': 'string'},
{'name': 'extension', 'type': 'string'},
{'name': 'size', 'type': 'integer'},
{'name': 'columns', 'type': 'integer'},
{'name': 'lines', 'type': 'integer'},
{'name': 'start_at', 'type': 'datetime', 'format': DATETIME_FORMAT},
{'name': 'finish_at', 'type': 'datetime', 'format': DATETIME_FORMAT},
{'name': 'elapsed', 'type': 'float', 'precision': 4},
{'name': 'digest', 'type': 'string'},
)
class App(object):
"""Main application class.
# TODO: Implement your logic.
"""
def __init__(self, db):
self.logger = logging.getLogger(APPNAME + '.app')
self.db = db
def process(self, fp, header):
lines = 0
if header: # skip header line
next(fp)
lines += 1
# TODO: Implement your logic.
reader = map(str.rstrip, fp)
for l in reader:
lines += 1
return {'lines': lines}
class MainProcess(object):
"""Main process class for wrapping setup/termination.
"""
def __init__(self, dryrun):
self.dryrun = dryrun
self.logger = logging.getLogger(APPNAME + '.main')
def configure(self, configfile):
if not os.path.isfile(configfile):
self.logger.fatal('Configuration file is not found: %s', configfile)
return
loader = ConfigLoader(configfile)
config = loader.load()
if config is None:
self.logger.warn('Nothing to be loaded.')
return
for k in sorted(config):
self.logger.debug('config key: %s', k)
# TODO: Implement your logic.
def initialize(self, config, output, output_encoding,
sqlite=None, monitor_dump=None):
if config:
self.configure(config)
if output:
if os.path.isfile(output):
self.logger.warn('Overwrite output file: %s', output)
self.output = open(output, 'w', encoding=output_encoding)
else:
self.output = sys.stdout
if sqlite and os.path.isfile(sqlite):
self.logger.info('Reuse local SQLite3 file: %s', sqlite)
self.localdb = sqlite3.connect(sqlite or DEFAULT_SQLITE_FILE)
self.monitor = ProgressMonitor(self.localdb, monitor_dump)
def terminate(self):
self.monitor.terminate(MONITOR_DUMP_FIELDS)
if not self.output.isatty():
self.output.close()
self.localdb.commit()
self.logger.info('Terminated the process.')
def run(self, files, encoding, header):
app = App(self.localdb)
if not files:
app.process(sys.stdin, header)
return
counter = Counter()
for path in files:
counter['total'] += 1
canskip = self.monitor.start(path)
if canskip:
counter['skip'] += 1
self.logger.info('Skip to process: %s', path)
continue
_, suffix = os.path.splitext(path)
if suffix == '.gz':
opener = gzip.open
open_mode = 'rt'
else:
opener = open
open_mode = 'r'
with opener(path, open_mode, encoding=encoding) as fp:
r = app.process(fp, header)
self.monitor.finish(r)
if r is None:
counter['ignore'] += 1
else:
counter['process'] += 1
self.logger.info('show summary:')
for k in sorted(counter):
self.logger.info(' - {:20s} : {:,}'.format(k, counter[k]))
CONFIGURATION = """Start running with following configurations.
==============================================================================
Base directory : {basedir}
Current working dir: {cwd}
Configuration file : {configfile}
Local SQLite3 file : {sqlite}
Monitor dump file : {monitordumpfile}
Dry-run : {dryrun}
Input encoding : {encoding}
Input has header : {header}
Input #files : {nfiles}
Search recursive : {recursive}
Output path : {output}
Output encoding : {encoding_out}
==============================================================================
""".rstrip()
def main():
# Parse command line arguments.
args = parse_arguments()
files = collect_files(args.files, args.recursive)
encoding = args.encoding
configfile = os.path.abspath(args.config) if args.config else None
logger = logging.getLogger(APPNAME + '.setup')
logger.info(CONFIGURATION.format(basedir=BASEDIR, cwd=os.getcwd(),
configfile=configfile, dryrun=args.dryrun,
encoding=encoding, nfiles=len(files or []),
recursive=args.recursive, header=args.header,
sqlite=args.sqlite, monitordumpfile=args.monitor_out,
output=args.output, encoding_out=args.encoding_out))
# Initialize main class.
processor = MainProcess(args.dryrun)
processor.initialize(configfile, args.output, args.encoding_out,
args.sqlite, args.monitor_out)
# Dispatch main process, and catch unknown error.
try:
processor.run(files, encoding, args.header)
except Exception:
e = sys.exc_info()[1]
logger.error(e)
traceback.print_exc(file=sys.stderr)
finally:
processor.terminate()
if __name__ == '__main__':
main()
# Test suites to bundle as one file script.
# To run the tests, invoke this script using "-m unittest" option.
# i.e. `python3 -m unittest -v boilerplate.py`
import tempfile
import unittest
class TabularTest(unittest.TestCase):
def setUp(self):
fields = (
{'name': 'id', 'type': 'string'},
{'name': 'updated', 'type': 'datetime',
'format': DATETIME_FORMAT},
{'name': 'name', 'type': 'string'},
{'name': 'latitude', 'type': 'float'},
{'name': 'longitude', 'type': 'float'},
{'name': 'zipcode', 'type': 'string'},
{'name': 'kind', 'type': 'string', 'default': 'UNKNOWN'},
{'name': 'update_type', 'type': 'integer'}
)
self.tabular = Tabular(fields)
def test_header(self):
expected = ['id', 'updated', 'name', 'latitude', 'longitude',
'zipcode', 'kind', 'update_type']
self.assertEqual(expected, self.tabular.header())
def test_call(self):
data = {
'id': '1234567890',
'updated': datetime.datetime(2000, 1, 1, 12, 34, 56),
'name': 'somewhere',
'latitude': 12.34,
'longitude': -123.45678,
'zipcode': 'ABCDEF',
'update_type': 0
}
expected = ['1234567890', '2000-01-01 12:34:56', 'somewhere',
'12.34', '-123.45678', 'ABCDEF', 'UNKNOWN', '0']
self.assertEqual(expected, self.tabular(data))
class ConfigLoaderTest(unittest.TestCase):
def setUp(self):
config = '''
{
"database": {
"host": "127.0.0.1",
"port": 5432,
"name": "t",
"user": "app",
"password": "secret"
}
}
'''
        fd, path = tempfile.mkstemp('.json')
        os.close(fd)  # close the low-level handle; the file is reopened below
with open(path, 'w', encoding=CONFIG_FILE_ENCODING) as fp:
fp.write(config)
self.loader = ConfigLoader(path)
def tearDown(self):
os.unlink(self.loader.path)
def test_load(self):
expected = {
'host': '127.0.0.1',
'port': 5432,
'name': 't',
'user': 'app',
'password': 'secret'
}
config = self.loader.load()
self.assertEqual(expected, config['database'])
def test_initialize_failure(self):
        with self.assertRaises(AssertionError):
            ConfigLoader(None)
        with self.assertRaises(AssertionError):
            ConfigLoader('')
def test_invalid_path(self):
loader = ConfigLoader('notfound')
self.assertIsNone(loader.load())
|
|
# runhandler.py
# Author: Richard Gibson
#
# A base class for the submit.Submit and deleterun.DeleteRun classes. Most of
# the functions in this class update memcache upon deletion / insertion of
# runs into the database. If we didn't care about datastore reads/writes,
# this class would not be necessary. However, I would like to stay within
# GAE's free tier usage as long as possible, hence these optimization
# routines to stay off the database as much as possible.
#
# GAE now offers the NDB datastore, which sounds like it is a much better
# option than the DB datastore employed by this app as NDB does auto-caching.
# If we ever migrate to NDB, this class is likely not needed.
#
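# The general caching pattern used throughout this class (a rough sketch, not
# a literal excerpt): write the entity to the datastore, then either patch the
# corresponding memcache entry in place or clear it so that the next read
# rebuilds it from the datastore, e.g.
#
#   game_model.put( )
#   self.update_cache_game_model( game_code, game_model )  # patch in place
#   ...
#   self.update_cache_gamelist( None )                      # or invalidate
#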
import handler
import util
import games
import runs
import logging
import json
from operator import itemgetter
from google.appengine.ext import db
from google.appengine.runtime import apiproxy_errors
class RunHandler( handler.Handler ):
def num_runs( self, username, game, category, limit ):
try:
q = db.Query( runs.Runs, keys_only=True )
q.ancestor( runs.key() )
q.filter( 'username =', username )
q.filter( 'game =', game )
q.filter( 'category =', category )
return q.count( limit=limit )
except apiproxy_errors.OverQuotaError, msg:
logging.error( msg )
return 0
def update_runner( self, runner, delta_num_pbs ):
if delta_num_pbs != 0:
runner.num_pbs += delta_num_pbs
runner.put( )
self.update_cache_runner( util.get_code( runner.username ),
runner )
def update_games_put( self, params, delta_num_pbs ):
user = params['user']
game_model = params['game_model']
game = params['game']
category = params['category']
game_code = params['game_code']
category_found = params['category_found']
seconds = params['seconds']
datestr = params['datestr']
video = params['video']
is_bkt = params['is_bkt']
if game_model is None:
# Add a new game to the database
d = dict( category=category, bk_runner=None, bk_seconds=None,
bk_datestr=None, bk_video=None, bk_updater=None )
if is_bkt:
d['bk_runner'] = user.username
d['bk_seconds'] = seconds
d['bk_datestr'] = datestr
d['bk_video'] = video
d['bk_updater'] = user.username
game_model = games.Games( game = game,
info = json.dumps( [ d ] ),
num_pbs = 1,
parent = games.key(),
key_name = game_code )
game_model.put( )
logging.warning( "Put new game " + game + " with "
+ " category " + category + " in database." )
# Update memcache
self.update_cache_game_model( game_code, game_model )
categories = self.get_categories( no_refresh=True )
if categories is not None and categories != self.OVER_QUOTA_ERROR:
categories[ str( game ) ] = [ str( category ) ]
self.update_cache_categories( categories )
return
game_model.num_pbs += delta_num_pbs
if not category_found:
# Add a new category for this game in the database
info = json.loads( game_model.info )
            d = dict( category=category, bk_runner=None, bk_seconds=None,
                      bk_datestr=None, bk_video=None, bk_updater=None )
if is_bkt:
d['bk_runner'] = user.username
d['bk_seconds'] = seconds
d['bk_datestr'] = datestr
d['bk_video'] = video
d['bk_updater'] = user.username
info.append( d )
game_model.info = json.dumps( info )
game_model.put( )
logging.debug( "Added category " + category + " to game "
+ game + " in database." )
# Update memcache
self.update_cache_game_model( game_code, game_model )
categories = self.get_categories( no_refresh=True )
if categories is not None and categories != self.OVER_QUOTA_ERROR:
categories[ str( game ) ].append( str( category ) )
categories[ str( game ) ].sort( )
self.update_cache_categories( categories )
return
if is_bkt:
# Update the best known time for this game, category
gameinfolist = json.loads( game_model.info )
for gameinfo in gameinfolist:
if gameinfo['category'] == category:
gameinfo['bk_runner'] = user.username
gameinfo['bk_seconds'] = seconds
gameinfo['bk_datestr'] = datestr
gameinfo['bk_video'] = video
gameinfo['bk_updater'] = user.username
game_model.info = json.dumps( gameinfolist )
logging.debug( "Updated best known time for game "
+ game + ", category " + category
+ " in database" )
break
if is_bkt or delta_num_pbs != 0:
# We made some changes, so store in db and update memcache
game_model.put( )
self.update_cache_game_model( game_code, game_model )
def update_games_delete( self, game_model, category, delta_num_pbs ):
# Check if any runs exist now for this category
num_category_runs = 1
try:
q = db.Query( runs.Runs, keys_only=True )
q.ancestor( runs.key() )
q.filter( 'game =', game_model.game )
q.filter( 'category =', category )
num_category_runs = q.count( limit=1 )
except apiproxy_errors.OverQuotaError, msg:
logging.error( msg )
if num_category_runs <= 0:
# Check if any runs exist now for this game at all
num_runs = 1
try:
q = db.Query( runs.Runs, keys_only=True )
q.ancestor( runs.key() )
q.filter( 'game =', game_model.game )
num_runs = q.count( limit=1 )
except apiproxy_errors.OverQuotaError, msg:
logging.error( msg )
if num_runs <= 0:
# No runs exist. Delete this game from the db
game = game_model.game
game_model.delete( )
logging.info( game + " deleted" )
self.update_cache_game_model( util.get_code( game ), None )
# From gamelist in memcache too
cached_gamelists = self.get_cached_gamelists( )
if cached_gamelists is not None:
done = False
for page_num, res in cached_gamelists.iteritems( ):
if done:
break
for i, d in enumerate( res['gamelist'] ):
if d['game'] == game:
del cached_gamelists[ page_num ]['gamelist'][ i ]
done = True
break
self.update_cache_gamelist( cached_gamelists )
return
else:
# Just delete the category from this game
gameinfolist = json.loads( game_model.info )
for i, gameinfo in enumerate( gameinfolist ):
if category == gameinfo['category']:
del gameinfolist[ i ]
logging.info( 'Removed ' + category
+ ' from ' + game_model.game )
game_model.info = json.dumps( gameinfolist )
if num_category_runs <= 0 or delta_num_pbs != 0:
game_model.num_pbs += delta_num_pbs
game_model.put( )
self.update_cache_game_model( util.get_code( game_model.game ),
game_model )
def update_pblist_put( self, params ):
user = params[ 'user' ]
game = params[ 'game' ]
category = params[ 'category' ]
seconds = params[ 'seconds' ]
time = params[ 'time' ]
video = params[ 'video' ]
game_code = params[ 'game_code' ]
date = params[ 'date' ]
version = params[ 'version' ]
# Update pblist in memcache
cached_pblists = self.get_cached_pblists( user.username )
if cached_pblists is None:
return
for page_num, res in cached_pblists.iteritems( ):
pblist = res['pblist']
for pb in pblist:
if( pb['game'] == game ):
pb['num_runs'] += 1
for i, info in enumerate( pb['infolist'] ):
if( info['category'] == category ):
info['num_runs'] += 1
info['avg_seconds'] += ( ( 1.0 / info['num_runs'] )
* ( seconds - info['avg_seconds'] ) )
info['avg_time'] = util.seconds_to_timestr(
info['avg_seconds'], dec_places=0 )
if( info['pb_seconds'] is None
or info['pb_seconds'] > seconds ):
# Update pb
info['pb_seconds'] = seconds
info['pb_time'] = time
info['pb_date'] = date
info['video'] = video
info['version'] = version
pb['infolist'].sort( key=itemgetter('category') )
pb['infolist'].sort( key=itemgetter('num_runs'),
reverse=True )
self.update_cache_pblist( user.username,
cached_pblists )
return
if res['show_all']:
# Found the game, but not the category and we are
# showing all runs. Add the run.
info = dict( username=user.username,
username_code=util.get_code(
user.username ),
category=category,
category_code=util.get_code( category ),
pb_seconds=seconds,
pb_time=time,
pb_date=date,
num_runs=1,
avg_seconds=seconds,
avg_time=time,
video=video,
version=version )
pb['infolist'].append( info )
pb['infolist'].sort( key=itemgetter('category') )
pb['infolist'].sort( key=itemgetter('num_runs'),
reverse=True )
self.update_cache_pblist( user.username,
cached_pblists )
return
if res['show_all']:
# Could not find the game and we are showing all runs. Add
# the game/run.
info = dict( username=user.username,
username_code=util.get_code( user.username ),
category=category,
category_code=util.get_code( category ),
pb_seconds=seconds,
pb_time=time,
pb_date=date,
num_runs=1,
avg_seconds=seconds,
avg_time=time,
video=video,
version=version )
pb = dict( game=game,
game_code=util.get_code( game ),
num_runs=1,
infolist=[ info ] )
pblist.append( pb )
self.update_cache_pblist( user.username, cached_pblists )
return
# Couldn't find this game, category combination, so we must nullify
# memcache. We can't just add the run since we may not have all of
# the pblist pages in memcache, so we don't know if it is the only
# run for this game, category or not.
self.update_cache_pblist( user.username, None )
def update_pblist_delete( self, user, old_run ):
# Update pblist with the removal of the old run
cached_pblists = self.get_cached_pblists( user.username )
if cached_pblists is None:
return
for page_num, res in cached_pblists.iteritems( ):
pblist = res['pblist']
for i, pb in enumerate( pblist ):
if( pb['game'] == old_run['game'] ):
pb['num_runs'] -= 1
for j, info in enumerate( pb['infolist'] ):
if( info['category'] == old_run['category'] ):
if info['num_runs'] <= 1:
# No other runs for game, category combo
del pb[ 'infolist' ][ j ]
if len( pb[ 'infolist' ] ) <= 0:
del cached_pblists[ page_num ]['pblist'][ i ]
self.update_cache_pblist( user.username,
cached_pblists )
return
else:
new_avg = ( ( info['avg_seconds']
* info['num_runs'] )
- old_run['seconds'] )
info['num_runs'] -= 1
info['avg_seconds'] = ( 1.0 * new_avg
/ info['num_runs'] )
info['avg_time'] = util.seconds_to_timestr(
info['avg_seconds'], dec_places=0 )
if info['pb_seconds'] >= old_run['seconds']:
# Update our PB for this game, category
q = db.Query( runs.Runs,
projection=['seconds',
'date',
'video',
'version'] )
q.ancestor( runs.key( ) )
q.filter( 'username =', user.username )
q.filter( 'game =', old_run['game'] )
q.filter( 'category =',
old_run['category'] )
q.order( 'seconds' )
for run in q.run( limit = 1 ):
info['pb_seconds'] = run.seconds
info['pb_time'] = util.seconds_to_timestr( run.seconds )
info['pb_date'] = run.date
info['video'] = run.video
info['version'] = run.version
break
else:
logging.error( 'Failed to update PB for '
+ user.username + ', '
+ old_run['game'] + ', '
+ old_run['category']
+ ' on pblist_delete' )
self.update_cache_pblist(
user.username, None )
return
pb['infolist'][ j ] = info
pb['infolist'].sort( key=itemgetter('category') )
pb['infolist'].sort( key=itemgetter('num_runs'),
reverse=True )
self.update_cache_pblist( user.username,
cached_pblists )
return
# Couldn't find this game, category in memcache, so nothing
# to update
return
def update_gamepage_put( self, params ):
# Update gamepage in memcache
game = params['game']
category = params[ 'category' ]
category_code = util.get_code( category )
self.update_cache_gamepage( game, category_code, None )
def update_gamepage_delete( self, user, old_run ):
# Update gamepage in memcache
game = old_run['game']
category = old_run['category']
category_code = util.get_code( category )
self.update_cache_gamepage( game, category_code, None )
def update_runlist_for_runner_put( self, params ):
user = params[ 'user' ]
game = params[ 'game' ]
game_code = params[ 'game_code' ]
category = params[ 'category' ]
time = params[ 'time' ]
video = params[ 'video' ]
version = params[ 'version' ]
notes = params[ 'notes' ]
date = params[ 'date' ]
datetime_created = params[ 'datetime_created' ]
run_id = params[ 'run_id' ]
# Update runlist for runner in memcache
cached_runlists = self.get_cached_runlists_for_runner( user.username )
if cached_runlists is not None:
res = cached_runlists.get( 1 )
if res is not None:
res['runlist'].insert(
0,
dict( run_id = run_id,
game = game,
game_code = game_code,
category = category,
category_code = util.get_code( category ),
time = time,
date = date,
datetime_created = datetime_created,
video = video,
version = version,
notes = notes ) )
res['runlist'].sort( key=lambda x: util.get_valid_date(
x['date'] ), reverse=True )
self.update_cache_runlist_for_runner( user.username,
cached_runlists )
def update_gamelist_put( self, params ):
game_code = params[ 'game_code' ]
game = params[ 'game' ]
# Update gamelists in memcache if necessary
cached_gamelists = self.get_cached_gamelists( )
if cached_gamelists is None:
return
for page_num, res in cached_gamelists.iteritems( ):
for gamedict in res['gamelist']:
if( gamedict['game_code'] == game_code ):
gamedict['num_pbs'] += 1
res['gamelist'].sort( key=itemgetter('num_pbs'),
reverse=True )
self.update_cache_gamelist( cached_gamelists )
return
# This game wasn't found in the gamelists, so we'll just clear
# the cached gamelists
self.update_cache_gamelist( None )
def update_gamelist_delete( self, old_run ):
# Fix the gamelist with the removal of the old run
cached_gamelists = self.get_cached_gamelists( )
if cached_gamelists is None:
return
for page_num, res in cached_gamelists.iteritems( ):
for i, d in enumerate( res['gamelist'] ):
if d['game'] == old_run['game']:
d['num_pbs'] -= 1
if d['num_pbs'] <= 0:
del cached_gamelists[ page_num ]['gamelist'][ i ]
res['gamelist'].sort( key=itemgetter('num_pbs'),
reverse=True )
self.update_cache_gamelist( cached_gamelists )
return
# Failed to find game
self.update_cache_gamelist( None )
def update_runnerlist_put( self, params ):
user = params[ 'user' ]
# Update runnerlist in memcache if necessary
cached_runnerlists = self.get_cached_runnerlists( )
if cached_runnerlists is not None:
for page_num, res in cached_runnerlists.iteritems( ):
for runnerdict in res['runnerlist']:
if( runnerdict['username'] == user.username ):
runnerdict['num_pbs'] += 1
res['runnerlist'].sort( key=itemgetter('username') )
res['runnerlist'].sort( key=itemgetter('num_pbs'),
reverse=True )
self.update_cache_runnerlist( cached_runnerlists )
return
# Clear the cache
self.update_cache_runnerlist( None )
def update_runnerlist_delete( self, user ):
# Fix the runnerlist with the removal of the old run
cached_runnerlists = self.get_cached_runnerlists( )
if cached_runnerlists is not None:
for page_num, res in cached_runnerlists.iteritems( ):
for runnerdict in res['runnerlist']:
if( runnerdict['username'] == user.username ):
runnerdict['num_pbs'] -= 1
res['runnerlist'].sort( key=itemgetter('username') )
res['runnerlist'].sort( key=itemgetter('num_pbs'),
reverse=True )
self.update_cache_runnerlist( cached_runnerlists )
return
# Failed to find runner
self.update_cache_runnerlist( None )
def update_user_has_run_delete( self, user, old_run ):
# This refresh is so cheap, let's just kill the old value
self.update_cache_user_has_run( user.username, old_run['game'], None )
# Returns True if putting new run succeeded, False otherwise
def put_new_run( self, params ):
user = params.get( 'user' )
game = params.get( 'game' )
category = params.get( 'category' )
seconds = params.get( 'seconds' )
time = params.get( 'time' )
video = params.get( 'video' )
version = params.get( 'version' )
notes = params.get( 'notes' )
valid = params.get( 'valid' )
# Add a new run to the database
try:
new_run = runs.Runs( username = user.username,
game = game,
category = category,
seconds = seconds,
date = params[ 'date' ],
version = version,
notes = notes,
parent = runs.key() )
try:
if video:
new_run.video = video
except db.BadValueError:
params[ 'video_error' ] = "Invalid video URL"
valid = False
except db.BadValueError:
valid = False
if not valid:
return False
new_run.put( )
params[ 'run_id' ] = str( new_run.key().id() )
params[ 'datetime_created' ] = new_run.datetime_created
logging.debug( "Put new run for runner " + user.username
+ ", game = " + game + ", category = " + category
+ ", time = " + time )
# Check whether this is the first run for this username, game,
# category combination. This will determine whether we need to update
# the gamelist and runnerlist, as well as update the num_pbs
# for the game and runner.
delta_num_pbs = 0
num_runs = self.num_runs( user.username, game, category, 2 )
if num_runs == 1:
delta_num_pbs = 1
# Update games.Games, runners.Runners
self.update_runner( user, delta_num_pbs )
self.update_games_put( params, delta_num_pbs )
# Update memcache
self.update_cache_run_by_id( new_run.key().id(), new_run )
# Must update runinfo before updating pblist, gamepage since these
# both rely on runinfo being up to date
self.update_pblist_put( params )
self.update_gamepage_put( params )
self.update_runlist_for_runner_put( params )
self.update_cache_user_has_run( user.username, game, True )
self.update_cache_last_run( user.username, new_run )
if num_runs <= 0:
logging.error( "Unexpected count [" + str( num_runs )
+ "] for number of runs for "
                           + user.username + ", " + game + ", " + category )
self.update_cache_gamelist( None, get_num_pbs=True )
self.update_cache_gamelist( None, get_num_pbs=False )
self.update_cache_runnerlist( None )
if delta_num_pbs == 1:
self.update_gamelist_put( params )
self.update_runnerlist_put( params )
return True
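    # Worked example for the num_runs logic above (hypothetical values): if
    # this is the runner's first run for the (game, category) pair, num_runs
    # comes back as 1, so delta_num_pbs == 1 and the gamelist / runnerlist
    # caches are updated; a later run for the same pair yields num_runs == 2,
    # so delta_num_pbs stays 0 and those caches are left untouched.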
# Returns True on success, False otherwise. Note that params['user']
# is volatile
def put_existing_run( self, params ):
user = params[ 'user' ]
game = params[ 'game' ]
game_code = params[ 'game_code' ]
category = params[ 'category' ]
seconds = params[ 'seconds' ]
time = params[ 'time' ]
video = params[ 'video' ]
version = params[ 'version' ]
notes = params[ 'notes' ]
valid = params[ 'valid' ]
run_id = params[ 'run_id' ]
# Grab the old run, which we will update to be the new run
new_run = self.get_run_by_id( run_id )
if new_run == self.OVER_QUOTA_ERROR:
return False
if ( new_run is None
or ( not user.is_mod and new_run.username != user.username ) ):
return False
# Get the owner of this run
if new_run.username != user.username:
runner = self.get_runner( util.get_code( new_run.username ) )
if runner == self.OVER_QUOTA_ERROR:
return False
params['user'] = runner
else:
runner = user
# Store the contents of the old run
old_run = dict( game = new_run.game,
category = new_run.category,
seconds = new_run.seconds )
old_game_model = self.get_game_model(
util.get_code( old_run['game'] ) )
if old_game_model == self.OVER_QUOTA_ERROR:
return False
# Update the run
try:
new_run.game = game
new_run.category = category
new_run.seconds = seconds
new_run.date = params['date']
new_run.version = version
new_run.notes = notes
except db.BadValueError:
valid = False
if video:
try:
new_run.video = video
except db.BadValueError:
params['video_error'] = "Invalid video URL"
valid = False
elif new_run.video:
new_run.video = None
if not valid:
return False
new_run.put( )
logging.debug( "Put updated run for runner " + runner.username
+ ", game = " + game + ", category = " + category
+ ", time= " + time + ", run_id = " + run_id )
# Figure out the change in num_pbs for the old and new game, as well
# as the runner
delta_num_pbs_old = 0
delta_num_pbs_new = 0
if game != old_run['game'] or category != old_run['category']:
num_runs = self.num_runs( runner.username, old_run[ 'game' ],
old_run[ 'category' ], 1 )
if num_runs == 0:
delta_num_pbs_old = -1
num_runs = self.num_runs( runner.username, game, category, 2 )
if num_runs == 1:
delta_num_pbs_new = 1
# Update games.Games and runners.Runners
self.update_runner( runner, delta_num_pbs_old + delta_num_pbs_new )
if game == old_run['game']:
self.update_games_delete( params['game_model'],
old_run['category'], delta_num_pbs_old )
else:
self.update_games_delete( old_game_model, old_run['category'],
delta_num_pbs_old )
self.update_games_put( params, delta_num_pbs_new )
# Update memcache with the removal of the old run and addition of the
# new run.
self.update_cache_run_by_id( run_id, new_run )
self.update_pblist_delete( runner, old_run )
self.update_pblist_put( params )
self.update_gamepage_delete( runner, old_run )
self.update_gamepage_put( params )
self.update_user_has_run_delete( runner, old_run )
self.update_cache_user_has_run( runner.username, game, True )
# Update gamelist and runnerlist in memcache
if delta_num_pbs_old == -1:
self.update_gamelist_delete( old_run )
self.update_runnerlist_delete( runner )
if delta_num_pbs_new == 1:
self.update_gamelist_put( params )
self.update_runnerlist_put( params )
# Replace the old run in the runlist for runner in memcache
cached_runlists = self.get_cached_runlists_for_runner(
runner.username )
if cached_runlists is not None:
found_run = False
for page_num, res in cached_runlists.iteritems( ):
if found_run:
break
for run in res['runlist']:
if run[ 'run_id' ] == run_id:
run[ 'game' ] = game
run[ 'game_code' ] = game_code
run[ 'category' ] = category
run[ 'category_code' ] = util.get_code( category )
run[ 'time' ] = time
run[ 'date' ] = new_run.date
run[ 'video' ] = video
run[ 'version' ] = version
run[ 'notes' ] = notes
res['runlist'].sort( key=lambda x: util.get_valid_date(
x['date'] ), reverse=True )
self.update_cache_runlist_for_runner( runner.username,
cached_runlists )
found_run = True
break
# Check to see if we need to replace the last run for this user
last_run = self.get_last_run( runner.username, no_refresh=True )
if last_run == self.OVER_QUOTA_ERROR:
self.update_cache_last_run( runner.username, None )
elif( last_run is not None
and new_run.key().id() == last_run.key().id() ):
self.update_cache_last_run( runner.username, new_run )
return True
# Returns True on success, False on failure
def put_new_game( self, game ):
# Add a new game to the database
try:
game_model = games.Games( game = game,
info = json.dumps( [ ] ),
parent = games.key( ),
key_name = util.get_code( game ) )
except db.BadValueError:
return False
game_model.put( )
logging.warning( "Put new game " + game + " in database." )
return True
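# Illustrative sketch (not part of the original module): the shape of the
# `params` mapping consumed by put_new_run() / put_existing_run() above.
# All values and the stand-in user object are hypothetical.
class _ExampleUser(object):
    username = 'example_runner'
    is_mod = False

_example_put_params = dict(user=_ExampleUser(),
                           game='Example Game',
                           category='Any%',
                           seconds=2594,
                           time='0:43:14',
                           date=None,
                           video=None,
                           version='1.0',
                           notes='',
                           valid=True)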
|
|
# Copyright 2014 IBM Corp.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Mohammad Banikazemi, IBM Corp.
import socket
import time
import eventlet
from oslo.config import cfg
from neutron.agent.linux import ip_lib
from neutron.agent.linux import ovs_lib
from neutron.agent import rpc as agent_rpc
from neutron.common import config as logging_config
from neutron.common import legacy
from neutron.common import topics
from neutron.common import utils as q_utils
from neutron import context
from neutron.openstack.common import log as logging
from neutron.openstack.common.rpc import dispatcher
from neutron.plugins.ibm.common import config # noqa
from neutron.plugins.ibm.common import constants
LOG = logging.getLogger(__name__)
class SdnvePluginApi(agent_rpc.PluginApi):
def sdnve_info(self, context, info):
return self.call(context,
self.make_msg('sdnve_info', info=info),
topic=self.topic)
class SdnveNeutronAgent():
RPC_API_VERSION = '1.1'
def __init__(self, integ_br, interface_mappings,
info, root_helper, polling_interval,
controller_ip, reset_br, out_of_band):
'''The agent initialization.
Sets the following parameters and sets up the integration
bridge and physical interfaces if need be.
:param integ_br: name of the integration bridge.
:param interface_mappings: interfaces to physical networks.
:param info: local IP address of this hypervisor.
:param root_helper: utility to use when running shell cmds.
:param polling_interval: interval (secs) to poll DB.
:param controller_ip: IP address of the SDN-VE controller.
:param reset_br: A boolean; reset the integration bridge if True.
:param out_of_band: A boolean; the controller is out of band if True.
'''
self.root_helper = root_helper
self.int_bridge_name = integ_br
self.controller_ip = controller_ip
self.interface_mappings = interface_mappings
self.polling_interval = polling_interval
self.info = info
self.reset_br = reset_br
self.out_of_band = out_of_band
if self.int_bridge_name:
self.int_br = self.setup_integration_br(integ_br, reset_br,
out_of_band,
self.controller_ip)
self.setup_physical_interfaces(self.interface_mappings)
else:
self.int_br = None
self.setup_rpc()
def setup_rpc(self):
if self.int_br:
mac = self.int_br.get_local_port_mac()
self.agent_id = '%s%s' % ('sdnve', (mac.replace(":", "")))
else:
nameaddr = socket.gethostbyname(socket.gethostname())
self.agent_id = '%s%s' % ('sdnve_', (nameaddr.replace(".", "_")))
self.topic = topics.AGENT
self.plugin_rpc = SdnvePluginApi(topics.PLUGIN)
self.state_rpc = agent_rpc.PluginReportStateAPI(topics.PLUGIN)
self.context = context.get_admin_context_without_session()
self.dispatcher = self.create_rpc_dispatcher()
consumers = [[constants.INFO, topics.UPDATE]]
self.connection = agent_rpc.create_consumers(self.dispatcher,
self.topic,
consumers)
# Plugin calls the agents through the following
def info_update(self, context, **kwargs):
LOG.debug(_("info_update received"))
info = kwargs.get('info', {})
new_controller = info.get('new_controller')
out_of_band = info.get('out_of_band')
if self.int_br and new_controller:
LOG.debug(_("info_update received. New controller"
"is to be set to: %s"), new_controller)
self.int_br.run_vsctl(["set-controller",
self.int_bridge_name,
"tcp:" + new_controller])
if out_of_band:
LOG.debug(_("info_update received. New controller"
"is set to be out of band"))
self.int_br.set_db_attribute("controller",
self.int_bridge_name,
"connection-mode",
"out-of-band")
def create_rpc_dispatcher(self):
return dispatcher.RpcDispatcher([self])
def setup_integration_br(self, bridge_name, reset_br, out_of_band,
controller_ip=None):
'''Sets up the integration bridge.
Create the bridge and remove all existing flows if reset_br is True.
Otherwise, creates the bridge if not already existing.
:param bridge_name: the name of the integration bridge.
:param reset_br: A boolean to reset the bridge if True.
:param out_of_band: A boolean indicating controller is out of band.
:param controller_ip: IP address to use as the bridge controller.
:returns: the integration bridge
'''
int_br = ovs_lib.OVSBridge(bridge_name, self.root_helper)
if reset_br:
int_br.reset_bridge()
int_br.remove_all_flows()
else:
int_br.create()
# set the controller
if controller_ip:
int_br.run_vsctl(
["set-controller", bridge_name, "tcp:" + controller_ip])
if out_of_band:
int_br.set_db_attribute("controller", bridge_name,
"connection-mode", "out-of-band")
return int_br
def setup_physical_interfaces(self, interface_mappings):
'''Sets up the physical network interfaces.
Link physical interfaces to the integration bridge.
:param interface_mappings: map physical net names to interface names.
'''
for physical_network, interface in interface_mappings.iteritems():
LOG.info(_("Mapping physical network %(physical_network)s to "
"interface %(interface)s"),
{'physical_network': physical_network,
'interface': interface})
# Connect the physical interface to the bridge
if not ip_lib.device_exists(interface, self.root_helper):
LOG.error(_("Interface %(interface)s for physical network "
"%(physical_network)s does not exist. Agent "
"terminated!"),
{'physical_network': physical_network,
'interface': interface})
raise SystemExit(1)
self.int_br.add_port(interface)
def sdnve_info(self):
details = self.plugin_rpc.sdnve_info(
self.context,
{'info': self.info})
return details
def rpc_loop(self):
while True:
start = time.time()
LOG.debug(_("Agent in the rpc loop."))
# sleep till end of polling interval
elapsed = (time.time() - start)
if (elapsed < self.polling_interval):
time.sleep(self.polling_interval - elapsed)
else:
LOG.info(_("Loop iteration exceeded interval "
"(%(polling_interval)s vs. %(elapsed)s)!"),
{'polling_interval': self.polling_interval,
'elapsed': elapsed})
def daemon_loop(self):
self.rpc_loop()
def create_agent_config_map(config):
interface_mappings = q_utils.parse_mappings(
config.SDNVE.interface_mappings)
controller_ips = config.SDNVE.controller_ips
LOG.info(_("Controller IPs: %s"), controller_ips)
controller_ip = controller_ips[0]
return {
'integ_br': config.SDNVE.integration_bridge,
'interface_mappings': interface_mappings,
'controller_ip': controller_ip,
'info': config.SDNVE.info,
'root_helper': config.SDNVE_AGENT.root_helper,
'polling_interval': config.SDNVE_AGENT.polling_interval,
'reset_br': config.SDNVE.reset_bridge,
'out_of_band': config.SDNVE.out_of_band}
def main():
eventlet.monkey_patch()
cfg.CONF.register_opts(ip_lib.OPTS)
cfg.CONF(project='neutron')
logging_config.setup_logging(cfg.CONF)
legacy.modernize_quantum_config(cfg.CONF)
try:
agent_config = create_agent_config_map(cfg.CONF)
except ValueError as e:
LOG.exception(_("%s Agent terminated!"), e)
raise SystemExit(1)
plugin = SdnveNeutronAgent(**agent_config)
# Start everything.
LOG.info(_("Agent initialized successfully, now running... "))
plugin.daemon_loop()
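# Illustrative sketch (not part of the original module): the mapping returned by
# create_agent_config_map() and unpacked into SdnveNeutronAgent(**agent_config)
# in main() above. All values below are hypothetical examples.
_example_agent_config = {
    'integ_br': 'br-int',
    'interface_mappings': {'physnet1': 'eth1'},
    'controller_ip': '192.0.2.10',
    'info': '192.0.2.20',
    'root_helper': 'sudo',
    'polling_interval': 2,
    'reset_br': False,
    'out_of_band': True,
}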
|
|
# This file is part of the MicroPython project, http://micropython.org/
#
# The MIT License (MIT)
#
# Copyright (c) 2016 Glenn Ruben Bakke
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
MicroPython Seeedstudio TFT Shield V2 driver, SPI interfaces, Analog GPIO
Contains SD-card reader, LCD and Touch sensor
The pca10040 pin layout is used as reference.
Example usage of LCD:
from seeedstudio_tft_shield_v2 import ILI9341
lcd = ILI9341(240, 320)
lcd.text("Hello World!", 32, 32)
lcd.show()
Example usage of SD card reader:
import os
from seeedstudio_tft_shield_v2 import mount_tf
tf = mount_tf()
os.listdir()
"""
import os
import time
import framebuf
from machine import SPI, Pin
from sdcard import SDCard
def mount_tf(mount_point="/"):
sd = SDCard(SPI(0), Pin("P15", mode=Pin.OUT))
os.mount(sd, mount_point)
return sd
class ILI9341:
def __init__(self, width, height):
self.width = width
self.height = height
self.pages = self.height // 8
self.buffer = bytearray(self.pages * self.width)
self.framebuf = framebuf.FrameBuffer(
self.buffer, self.width, self.height, framebuf.MONO_VLSB
)
self.spi = SPI(0)
# chip select
self.cs = Pin("P16", mode=Pin.OUT, pull=Pin.PULL_UP)
# command
self.dc = Pin("P17", mode=Pin.OUT, pull=Pin.PULL_UP)
# initialize all pins high
self.cs.high()
self.dc.high()
self.spi.init(baudrate=8000000, phase=0, polarity=0)
self.init_display()
def init_display(self):
time.sleep_ms(500)
self.write_cmd(0x01)
time.sleep_ms(200)
self.write_cmd(0xCF)
self.write_data(bytearray([0x00, 0x8B, 0x30]))
self.write_cmd(0xED)
self.write_data(bytearray([0x67, 0x03, 0x12, 0x81]))
self.write_cmd(0xE8)
self.write_data(bytearray([0x85, 0x10, 0x7A]))
self.write_cmd(0xCB)
self.write_data(bytearray([0x39, 0x2C, 0x00, 0x34, 0x02]))
self.write_cmd(0xF7)
self.write_data(bytearray([0x20]))
self.write_cmd(0xEA)
self.write_data(bytearray([0x00, 0x00]))
# Power control
self.write_cmd(0xC0)
# VRH[5:0]
self.write_data(bytearray([0x1B]))
# Power control
self.write_cmd(0xC1)
# SAP[2:0];BT[3:0]
self.write_data(bytearray([0x10]))
# VCM control
self.write_cmd(0xC5)
self.write_data(bytearray([0x3F, 0x3C]))
# VCM control2
self.write_cmd(0xC7)
self.write_data(bytearray([0xB7]))
# Memory Access Control
self.write_cmd(0x36)
self.write_data(bytearray([0x08]))
self.write_cmd(0x3A)
self.write_data(bytearray([0x55]))
self.write_cmd(0xB1)
self.write_data(bytearray([0x00, 0x1B]))
# Display Function Control
self.write_cmd(0xB6)
self.write_data(bytearray([0x0A, 0xA2]))
# 3Gamma Function Disable
self.write_cmd(0xF2)
self.write_data(bytearray([0x00]))
# Gamma curve selected
self.write_cmd(0x26)
self.write_data(bytearray([0x01]))
# Set Gamma
self.write_cmd(0xE0)
self.write_data(
bytearray(
[
0x0F,
0x2A,
0x28,
0x08,
0x0E,
0x08,
0x54,
0xA9,
0x43,
0x0A,
0x0F,
0x00,
0x00,
0x00,
0x00,
]
)
)
# Set Gamma
self.write_cmd(0xE1)
self.write_data(
bytearray(
[
0x00,
0x15,
0x17,
0x07,
0x11,
0x06,
0x2B,
0x56,
0x3C,
0x05,
0x10,
0x0F,
0x3F,
0x3F,
0x0F,
]
)
)
# Exit Sleep
self.write_cmd(0x11)
time.sleep_ms(120)
# Display on
self.write_cmd(0x29)
time.sleep_ms(500)
self.fill(0)
def show(self):
# set col
self.write_cmd(0x2A)
self.write_data(bytearray([0x00, 0x00]))
self.write_data(bytearray([0x00, 0xEF]))
# set page
self.write_cmd(0x2B)
self.write_data(bytearray([0x00, 0x00]))
self.write_data(bytearray([0x01, 0x3F]))
self.write_cmd(0x2C)
num_of_pixels = self.height * self.width
for row in range(0, self.pages):
for pixel_pos in range(0, 8):
for col in range(0, self.width):
compressed_pixel = self.buffer[row * self.width + col]  # avoid hard-coding the 240-pixel width
if ((compressed_pixel >> pixel_pos) & 0x1) == 0:
self.write_data(bytearray([0x00, 0x00]))
else:
self.write_data(bytearray([0xFF, 0xFF]))
def fill(self, col):
self.framebuf.fill(col)
def pixel(self, x, y, col):
self.framebuf.pixel(x, y, col)
def scroll(self, dx, dy):
self.framebuf.scroll(dx, dy)
def text(self, string, x, y, col=1):
self.framebuf.text(string, x, y, col)
def write_cmd(self, cmd):
self.dc.low()
self.cs.low()
self.spi.write(bytearray([cmd]))
self.cs.high()
def write_data(self, buf):
self.dc.high()
self.cs.low()
self.spi.write(buf)
self.cs.high()
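# Illustrative sketch (not part of the original module): the LCD usage from the
# module docstring, runnable only with the shield attached (pca10040 pin layout).
if __name__ == '__main__':
    lcd = ILI9341(240, 320)
    lcd.text('Hello World!', 32, 32)
    lcd.show()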
|
|
import re
from calendar import monthrange
from datetime import date
from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.utils.translation import ugettext_lazy as _
from oscar.apps.address.forms import AbstractAddressForm
from oscar.core.loading import get_model
from oscar.forms.mixins import PhoneNumberMixin
from . import bankcards
Country = get_model('address', 'Country')
BillingAddress = get_model('order', 'BillingAddress')
Bankcard = get_model('payment', 'Bankcard')
# List of card names for all the card types supported in payment.bankcards
VALID_CARDS = set([card_type[0] for card_type in bankcards.CARD_TYPES])
class BankcardNumberField(forms.CharField):
def __init__(self, *args, **kwargs):
_kwargs = {
'max_length': 20,
'widget': forms.TextInput(attrs={'autocomplete': 'off'}),
'label': _("Card number")
}
if 'types' in kwargs:
self.accepted_cards = set(kwargs.pop('types'))
difference = self.accepted_cards - VALID_CARDS
if difference:
raise ImproperlyConfigured('The following accepted_cards are '
'unknown: %s' % difference)
_kwargs.update(kwargs)
super(BankcardNumberField, self).__init__(*args, **_kwargs)
def clean(self, value):
"""
Check if given CC number is valid and one of the
card types we accept
"""
non_decimal = re.compile(r'\D+')
value = non_decimal.sub('', (value or '').strip())
if value and not bankcards.luhn(value):
raise forms.ValidationError(
_("Please enter a valid credit card number."))
if hasattr(self, 'accepted_cards'):
card_type = bankcards.bankcard_type(value)
if card_type not in self.accepted_cards:
raise forms.ValidationError(
_("%s cards are not accepted." % card_type))
return super(BankcardNumberField, self).clean(value)
class BankcardMonthWidget(forms.MultiWidget):
"""
Widget containing two select boxes for selecting the month and year
"""
def decompress(self, value):
return [value.month, value.year] if value else [None, None]
def format_output(self, rendered_widgets):
html = u' '.join(rendered_widgets)
return u'<span style="white-space: nowrap">%s</span>' % html
class BankcardMonthField(forms.MultiValueField):
"""
A modified version of the snippet: http://djangosnippets.org/snippets/907/
"""
default_error_messages = {
'invalid_month': _('Enter a valid month.'),
'invalid_year': _('Enter a valid year.'),
}
num_years = 5
def __init__(self, *args, **kwargs):
# Allow the number of years to be specified
if 'num_years' in kwargs:
self.num_years = kwargs.pop('num_years')
errors = self.default_error_messages.copy()
if 'error_messages' in kwargs:
errors.update(kwargs['error_messages'])
fields = (
forms.ChoiceField(
choices=self.month_choices(),
error_messages={'invalid': errors['invalid_month']}),
forms.ChoiceField(
choices=self.year_choices(),
error_messages={'invalid': errors['invalid_year']}),
)
if 'widget' not in kwargs:
kwargs['widget'] = BankcardMonthWidget(
widgets=[fields[0].widget, fields[1].widget])
super(BankcardMonthField, self).__init__(fields, *args, **kwargs)
def month_choices(self):
return []
def year_choices(self):
return []
class BankcardExpiryMonthField(BankcardMonthField):
num_years = 10
def __init__(self, *args, **kwargs):
today = date.today()
_kwargs = {
'required': True,
'label': _("Valid to"),
'initial': ["%.2d" % today.month, today.year]
}
_kwargs.update(kwargs)
super(BankcardExpiryMonthField, self).__init__(*args, **_kwargs)
def month_choices(self):
return [("%.2d" % x, "%.2d" % x) for x in range(1, 13)]
def year_choices(self):
return [(x, x) for x in range(
date.today().year,
date.today().year + self.num_years)]
def clean(self, value):
expiry_date = super(BankcardExpiryMonthField, self).clean(value)
if expiry_date and date.today() > expiry_date:
raise forms.ValidationError(
_("The expiration date you entered is in the past."))
return expiry_date
def compress(self, data_list):
if data_list:
if data_list[1] in forms.fields.EMPTY_VALUES:
error = self.error_messages['invalid_year']
raise forms.ValidationError(error)
if data_list[0] in forms.fields.EMPTY_VALUES:
error = self.error_messages['invalid_month']
raise forms.ValidationError(error)
year = int(data_list[1])
month = int(data_list[0])
# find last day of the month
day = monthrange(year, month)[1]
return date(year, month, day)
return None
class BankcardStartingMonthField(BankcardMonthField):
def __init__(self, *args, **kwargs):
_kwargs = {
'required': False,
'label': _("Valid from"),
}
_kwargs.update(kwargs)
super(BankcardStartingMonthField, self).__init__(*args, **_kwargs)
def month_choices(self):
months = [("%.2d" % x, "%.2d" % x) for x in range(1, 13)]
months.insert(0, ("", "--"))
return months
def year_choices(self):
today = date.today()
years = [(x, x) for x in range(
today.year - self.num_years,
today.year + 1)]
years.insert(0, ("", "--"))
return years
def clean(self, value):
starting_date = super(BankcardStartingMonthField, self).clean(value)
if starting_date and date.today() < starting_date:
raise forms.ValidationError(
_("The starting date you entered is in the future."))
return starting_date
def compress(self, data_list):
if data_list:
if data_list[1] in forms.fields.EMPTY_VALUES:
error = self.error_messages['invalid_year']
raise forms.ValidationError(error)
if data_list[0] in forms.fields.EMPTY_VALUES:
error = self.error_messages['invalid_month']
raise forms.ValidationError(error)
year = int(data_list[1])
month = int(data_list[0])
return date(year, month, 1)
return None
class BankcardCCVField(forms.RegexField):
def __init__(self, *args, **kwargs):
_kwargs = {
'required': True,
'label': _("CCV number"),
'widget': forms.TextInput(attrs={'size': '5'}),
'error_messages': {
'invalid': _("Please enter a 3 or 4 digit number")},
'help_text': _("This is the 3 or 4 digit security number "
"on the back of your bankcard")
}
_kwargs.update(kwargs)
super(BankcardCCVField, self).__init__(
r'^\d{3,4}$', *args, **_kwargs)
def clean(self, value):
if value is not None:
value = value.strip()
return super(BankcardCCVField, self).clean(value)
class BankcardForm(forms.ModelForm):
# By default, this number field will accept any number. The only validation
# is whether it passes the luhn check. If you wish to only accept certain
# types of card, you can pass a types kwarg to BankcardNumberField, e.g.
#
# BankcardNumberField(types=[bankcards.VISA, bankcards.VISA_ELECTRON,])
number = BankcardNumberField()
ccv = BankcardCCVField()
start_month = BankcardStartingMonthField()
expiry_month = BankcardExpiryMonthField()
class Meta:
model = Bankcard
fields = ('number', 'start_month', 'expiry_month', 'ccv')
def clean(self):
data = self.cleaned_data
number, ccv = data.get('number'), data.get('ccv')
if number and ccv:
if bankcards.is_amex(number) and len(ccv) != 4:
raise forms.ValidationError(_(
"American Express cards use a 4 digit security code"))
return data
def save(self, *args, **kwargs):
# It doesn't really make sense to save directly from the form as saving
# will obfuscate some of the card details which you normally need to
# pass to a payment gateway. Better to use the bankcard property below
# to get the cleaned up data, then once you've used the sensitive
# details, you can save.
raise RuntimeError("Don't save bankcards directly from form")
@property
def bankcard(self):
"""
Return an instance of the Bankcard model (unsaved)
"""
return Bankcard(number=self.cleaned_data['number'],
expiry_date=self.cleaned_data['expiry_month'],
start_date=self.cleaned_data['start_month'],
ccv=self.cleaned_data['ccv'])
class BillingAddressForm(PhoneNumberMixin, AbstractAddressForm):
def __init__(self, *args, **kwargs):
super(BillingAddressForm, self).__init__(*args, **kwargs)
self.set_country_queryset()
def set_country_queryset(self):
self.fields['country'].queryset = Country._default_manager.all()
class Meta:
model = BillingAddress
fields = [
'title', 'first_name', 'last_name',
'line1', 'line2', 'line3', 'line4',
'state', 'postcode', 'country',
]
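# Illustrative sketch (not part of the original module): restricting the accepted
# card types by overriding the `number` field, as described in the comment on
# BankcardForm above. The card-type constants are assumed to exist in `bankcards`.
class VisaOnlyBankcardForm(BankcardForm):
    number = BankcardNumberField(types=[bankcards.VISA, bankcards.VISA_ELECTRON])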
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Unit tests for Superset"""
import json
import unittest
import yaml
from superset import db
from superset.connectors.druid.models import (
DruidColumn, DruidDatasource, DruidMetric,
)
from superset.connectors.sqla.models import SqlaTable, SqlMetric, TableColumn
from superset.utils.core import get_main_database
from .base_tests import SupersetTestCase
DBREF = 'dict_import__export_test'
NAME_PREFIX = 'dict_'
ID_PREFIX = 20000
class DictImportExportTests(SupersetTestCase):
"""Testing export import functionality for dashboards"""
def __init__(self, *args, **kwargs):
super(DictImportExportTests, self).__init__(*args, **kwargs)
@classmethod
def delete_imports(cls):
# Imported data clean up
session = db.session
for table in session.query(SqlaTable):
if DBREF in table.params_dict:
session.delete(table)
for datasource in session.query(DruidDatasource):
if DBREF in datasource.params_dict:
session.delete(datasource)
session.commit()
@classmethod
def setUpClass(cls):
cls.delete_imports()
@classmethod
def tearDownClass(cls):
cls.delete_imports()
def create_table(
self, name, schema='', id=0, cols_names=[], metric_names=[]):
database_name = 'main'
name = '{0}{1}'.format(NAME_PREFIX, name)
params = {DBREF: id, 'database_name': database_name}
dict_rep = {
'database_id': get_main_database(db.session).id,
'table_name': name,
'schema': schema,
'id': id,
'params': json.dumps(params),
'columns': [{'column_name': c}
for c in cols_names],
'metrics': [{'metric_name': c} for c in metric_names],
}
table = SqlaTable(
id=id,
schema=schema,
table_name=name,
params=json.dumps(params),
)
for col_name in cols_names:
table.columns.append(TableColumn(column_name=col_name))
for metric_name in metric_names:
table.metrics.append(SqlMetric(metric_name=metric_name))
return table, dict_rep
def create_druid_datasource(
self, name, id=0, cols_names=[], metric_names=[]):
name = '{0}{1}'.format(NAME_PREFIX, name)
cluster_name = 'druid_test'
params = {DBREF: id, 'database_name': cluster_name}
dict_rep = {
'cluster_name': cluster_name,
'datasource_name': name,
'id': id,
'params': json.dumps(params),
'columns': [{'column_name': c} for c in cols_names],
'metrics': [{'metric_name': c} for c in metric_names],
}
datasource = DruidDatasource(
id=id,
datasource_name=name,
cluster_name=cluster_name,
params=json.dumps(params),
)
for col_name in cols_names:
datasource.columns.append(DruidColumn(column_name=col_name))
for metric_name in metric_names:
datasource.metrics.append(DruidMetric(metric_name=metric_name))
return datasource, dict_rep
def get_datasource(self, datasource_id):
return db.session.query(DruidDatasource).filter_by(
id=datasource_id).first()
def get_table_by_name(self, name):
return db.session.query(SqlaTable).filter_by(
table_name=name).first()
def yaml_compare(self, obj_1, obj_2):
obj_1_str = yaml.safe_dump(obj_1, default_flow_style=False)
obj_2_str = yaml.safe_dump(obj_2, default_flow_style=False)
self.assertEquals(obj_1_str, obj_2_str)
def assert_table_equals(self, expected_ds, actual_ds):
self.assertEquals(expected_ds.table_name, actual_ds.table_name)
self.assertEquals(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
self.assertEquals(expected_ds.schema, actual_ds.schema)
self.assertEquals(len(expected_ds.metrics), len(actual_ds.metrics))
self.assertEquals(len(expected_ds.columns), len(actual_ds.columns))
self.assertEquals(
set([c.column_name for c in expected_ds.columns]),
set([c.column_name for c in actual_ds.columns]))
self.assertEquals(
set([m.metric_name for m in expected_ds.metrics]),
set([m.metric_name for m in actual_ds.metrics]))
def assert_datasource_equals(self, expected_ds, actual_ds):
self.assertEquals(
expected_ds.datasource_name, actual_ds.datasource_name)
self.assertEquals(expected_ds.main_dttm_col, actual_ds.main_dttm_col)
self.assertEquals(len(expected_ds.metrics), len(actual_ds.metrics))
self.assertEquals(len(expected_ds.columns), len(actual_ds.columns))
self.assertEquals(
set([c.column_name for c in expected_ds.columns]),
set([c.column_name for c in actual_ds.columns]))
self.assertEquals(
set([m.metric_name for m in expected_ds.metrics]),
set([m.metric_name for m in actual_ds.metrics]))
def test_import_table_no_metadata(self):
table, dict_table = self.create_table('pure_table', id=ID_PREFIX + 1)
new_table = SqlaTable.import_from_dict(db.session, dict_table)
db.session.commit()
imported_id = new_table.id
imported = self.get_table(imported_id)
self.assert_table_equals(table, imported)
self.yaml_compare(table.export_to_dict(), imported.export_to_dict())
def test_import_table_1_col_1_met(self):
table, dict_table = self.create_table(
'table_1_col_1_met', id=ID_PREFIX + 2,
cols_names=['col1'], metric_names=['metric1'])
imported_table = SqlaTable.import_from_dict(db.session, dict_table)
db.session.commit()
imported = self.get_table(imported_table.id)
self.assert_table_equals(table, imported)
self.assertEquals(
{DBREF: ID_PREFIX + 2, 'database_name': 'main'},
json.loads(imported.params))
self.yaml_compare(table.export_to_dict(), imported.export_to_dict())
def test_import_table_2_col_2_met(self):
table, dict_table = self.create_table(
'table_2_col_2_met', id=ID_PREFIX + 3, cols_names=['c1', 'c2'],
metric_names=['m1', 'm2'])
imported_table = SqlaTable.import_from_dict(db.session, dict_table)
db.session.commit()
imported = self.get_table(imported_table.id)
self.assert_table_equals(table, imported)
self.yaml_compare(table.export_to_dict(), imported.export_to_dict())
def test_import_table_override_append(self):
table, dict_table = self.create_table(
'table_override', id=ID_PREFIX + 3,
cols_names=['col1'],
metric_names=['m1'])
imported_table = SqlaTable.import_from_dict(db.session, dict_table)
db.session.commit()
table_over, dict_table_over = self.create_table(
'table_override', id=ID_PREFIX + 3,
cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported_over_table = SqlaTable.import_from_dict(
db.session,
dict_table_over)
db.session.commit()
imported_over = self.get_table(imported_over_table.id)
self.assertEquals(imported_table.id, imported_over.id)
expected_table, _ = self.create_table(
'table_override', id=ID_PREFIX + 3,
metric_names=['new_metric1', 'm1'],
cols_names=['col1', 'new_col1', 'col2', 'col3'])
self.assert_table_equals(expected_table, imported_over)
self.yaml_compare(expected_table.export_to_dict(),
imported_over.export_to_dict())
def test_import_table_override_sync(self):
table, dict_table = self.create_table(
'table_override', id=ID_PREFIX + 3,
cols_names=['col1'],
metric_names=['m1'])
imported_table = SqlaTable.import_from_dict(db.session, dict_table)
db.session.commit()
table_over, dict_table_over = self.create_table(
'table_override', id=ID_PREFIX + 3,
cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported_over_table = SqlaTable.import_from_dict(
session=db.session,
dict_rep=dict_table_over,
sync=['metrics', 'columns'])
db.session.commit()
imported_over = self.get_table(imported_over_table.id)
self.assertEquals(imported_table.id, imported_over.id)
expected_table, _ = self.create_table(
'table_override', id=ID_PREFIX + 3,
metric_names=['new_metric1'],
cols_names=['new_col1', 'col2', 'col3'])
self.assert_table_equals(expected_table, imported_over)
self.yaml_compare(
expected_table.export_to_dict(),
imported_over.export_to_dict())
def test_import_table_override_identical(self):
table, dict_table = self.create_table(
'copy_cat', id=ID_PREFIX + 4,
cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported_table = SqlaTable.import_from_dict(db.session, dict_table)
db.session.commit()
copy_table, dict_copy_table = self.create_table(
'copy_cat', id=ID_PREFIX + 4,
cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported_copy_table = SqlaTable.import_from_dict(db.session,
dict_copy_table)
db.session.commit()
self.assertEquals(imported_table.id, imported_copy_table.id)
self.assert_table_equals(copy_table, self.get_table(imported_table.id))
self.yaml_compare(imported_copy_table.export_to_dict(),
imported_table.export_to_dict())
def test_import_druid_no_metadata(self):
datasource, dict_datasource = self.create_druid_datasource(
'pure_druid', id=ID_PREFIX + 1)
imported_cluster = DruidDatasource.import_from_dict(db.session,
dict_datasource)
db.session.commit()
imported = self.get_datasource(imported_cluster.id)
self.assert_datasource_equals(datasource, imported)
def test_import_druid_1_col_1_met(self):
datasource, dict_datasource = self.create_druid_datasource(
'druid_1_col_1_met', id=ID_PREFIX + 2,
cols_names=['col1'], metric_names=['metric1'])
imported_cluster = DruidDatasource.import_from_dict(db.session,
dict_datasource)
db.session.commit()
imported = self.get_datasource(imported_cluster.id)
self.assert_datasource_equals(datasource, imported)
self.assertEquals(
{DBREF: ID_PREFIX + 2, 'database_name': 'druid_test'},
json.loads(imported.params))
def test_import_druid_2_col_2_met(self):
datasource, dict_datasource = self.create_druid_datasource(
'druid_2_col_2_met', id=ID_PREFIX + 3, cols_names=['c1', 'c2'],
metric_names=['m1', 'm2'])
imported_cluster = DruidDatasource.import_from_dict(db.session,
dict_datasource)
db.session.commit()
imported = self.get_datasource(imported_cluster.id)
self.assert_datasource_equals(datasource, imported)
def test_import_druid_override_append(self):
datasource, dict_datasource = self.create_druid_datasource(
'druid_override', id=ID_PREFIX + 3, cols_names=['col1'],
metric_names=['m1'])
imported_cluster = DruidDatasource.import_from_dict(db.session,
dict_datasource)
db.session.commit()
table_over, table_over_dict = self.create_druid_datasource(
'druid_override', id=ID_PREFIX + 3,
cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported_over_cluster = DruidDatasource.import_from_dict(
db.session,
table_over_dict)
db.session.commit()
imported_over = self.get_datasource(imported_over_cluster.id)
self.assertEquals(imported_cluster.id, imported_over.id)
expected_datasource, _ = self.create_druid_datasource(
'druid_override', id=ID_PREFIX + 3,
metric_names=['new_metric1', 'm1'],
cols_names=['col1', 'new_col1', 'col2', 'col3'])
self.assert_datasource_equals(expected_datasource, imported_over)
def test_import_druid_override_sync(self):
datasource, dict_datasource = self.create_druid_datasource(
'druid_override', id=ID_PREFIX + 3, cols_names=['col1'],
metric_names=['m1'])
imported_cluster = DruidDatasource.import_from_dict(
db.session,
dict_datasource)
db.session.commit()
table_over, table_over_dict = self.create_druid_datasource(
'druid_override', id=ID_PREFIX + 3,
cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported_over_cluster = DruidDatasource.import_from_dict(
session=db.session,
dict_rep=table_over_dict,
sync=['metrics', 'columns']) # syncing metrics and columns
db.session.commit()
imported_over = self.get_datasource(imported_over_cluster.id)
self.assertEquals(imported_cluster.id, imported_over.id)
expected_datasource, _ = self.create_druid_datasource(
'druid_override', id=ID_PREFIX + 3,
metric_names=['new_metric1'],
cols_names=['new_col1', 'col2', 'col3'])
self.assert_datasource_equals(expected_datasource, imported_over)
def test_import_druid_override_identical(self):
datasource, dict_datasource = self.create_druid_datasource(
'copy_cat', id=ID_PREFIX + 4,
cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported = DruidDatasource.import_from_dict(session=db.session,
dict_rep=dict_datasource)
db.session.commit()
copy_datasource, dict_cp_datasource = self.create_druid_datasource(
'copy_cat', id=ID_PREFIX + 4,
cols_names=['new_col1', 'col2', 'col3'],
metric_names=['new_metric1'])
imported_copy = DruidDatasource.import_from_dict(db.session,
dict_cp_datasource)
db.session.commit()
self.assertEquals(imported.id, imported_copy.id)
self.assert_datasource_equals(
copy_datasource, self.get_datasource(imported.id))
if __name__ == '__main__':
unittest.main()
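# Illustrative sketch (not part of the original tests): the dictionary shape
# passed to SqlaTable.import_from_dict() above, mirroring create_table().
# All values are hypothetical.
_example_table_dict = {
    'database_id': 1,
    'table_name': NAME_PREFIX + 'example_table',
    'schema': '',
    'id': ID_PREFIX + 99,
    'params': json.dumps({DBREF: ID_PREFIX + 99, 'database_name': 'main'}),
    'columns': [{'column_name': 'col1'}],
    'metrics': [{'metric_name': 'metric1'}],
}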
|
|
"""Omniglot - Speaking all commonly-used version control systems.
At present, this plugin is still under development, so not all features are
fully implemented.
Omniglot first scans the project directory looking for the hallmarks of a VCS
(such as the .hg or .git directory). It also looks for these in parent
directories in case DXR is only parsing a fraction of the repository. Once this
information is found, it attempts to extract upstream information about the
repository. From this information, it builds the necessary information to
reproduce the links.
Currently supported VCSes and upstream views:
- git (github)
- mercurial (hgweb)
Todos:
- add gitweb support for git
- add cvs, svn, bzr support
- produce in-DXR blame information using VCSs
- check if the mercurial paths are specific to Mozilla's customization or not.
"""
import marshal
import os
import subprocess
import urlparse
import dxr.plugins
# Global variables
tree = None
source_repositories = {}
class VCS(object):
"""A class representing an abstract notion of a version-control system.
In general, all path arguments to query methods should be normalized to be
relative to the root directory of the VCS.
"""
def __init__(self, root):
self.root = root
self.untracked_files = set()
def get_root_dir(self):
"""Return the directory that is at the root of the VCS."""
return self.root
def get_vcs_name(self):
"""Return a recognizable name for the VCS."""
return type(self).__name__
def invoke_vcs(self, args):
"""Return the result of invoking said command on the repository, with
the current working directory set to the root directory.
"""
return subprocess.check_output(args, cwd=self.get_root_dir())
def is_tracked(self, path):
"""Does the repository track this file?"""
return path not in self.untracked_files
def get_rev(self, path):
"""Return a human-readable revision identifier for the repository."""
raise NotImplementedError
def generate_log(self, path):
"""Return a URL for a page that lists revisions for this file."""
raise NotImplementedError
def generate_blame(self, path):
"""Return a URL for a page that lists source annotations for lines in
this file.
"""
raise NotImplementedError
def generate_diff(self, path):
"""Return a URL for a page that shows the last change made to this file.
"""
raise NotImplementedError
def generate_raw(self, path):
"""Return a URL for a page that returns a raw copy of this file."""
raise NotImplementedError
class Mercurial(VCS):
def __init__(self, root):
super(Mercurial, self).__init__(root)
# Find the revision
self.revision = self.invoke_vcs(['hg', 'id', '-i']).strip()
# Sometimes hg id returns + at the end.
if self.revision.endswith("+"):
self.revision = self.revision[:-1]
# Make and normalize the upstream URL
upstream = urlparse.urlparse(self.invoke_vcs(['hg', 'paths', 'default']).strip())
recomb = list(upstream)
if upstream.scheme == 'ssh':
recomb[0] = 'http'  # rewrite the ssh scheme to http (assignment, not comparison)
recomb[1] = upstream.hostname # Eliminate any username stuff
recomb[2] = '/' + recomb[2].lstrip('/') # strip all leading '/', add one back
if not upstream.path.endswith('/'):
recomb[2] += '/' # Make sure we have a '/' on the end
recomb[3] = recomb[4] = recomb[5] = '' # Just those three
self.upstream = urlparse.urlunparse(recomb)
# Find all untracked files
self.untracked_files = set(line.split()[1] for line in
self.invoke_vcs(['hg', 'status', '-u', '-i']).split('\n')[:-1])
@staticmethod
def claim_vcs_source(path, dirs):
if '.hg' in dirs:
dirs.remove('.hg')
return Mercurial(path)
return None
def get_rev(self, path):
return self.revision
def generate_log(self, path):
return self.upstream + 'filelog/' + self.revision + '/' + path
def generate_blame(self, path):
return self.upstream + 'annotate/' + self.revision + '/' + path
def generate_diff(self, path):
return self.upstream + 'diff/' + self.revision + '/' + path
def generate_raw(self, path):
return self.upstream + 'raw-file/' + self.revision + '/' + path
class Git(VCS):
def __init__(self, root):
super(Git, self).__init__(root)
self.untracked_files = set(line for line in
self.invoke_vcs(['git', 'ls-files', '-o']).split('\n')[:-1])
self.revision = self.invoke_vcs(['git', 'rev-parse', 'HEAD']).strip()
source_urls = self.invoke_vcs(['git', 'remote', '-v']).split('\n')
for src_url in source_urls:
name, url, _ = src_url.split()
if name == 'origin':
self.upstream = self.synth_web_url(url)
break
@staticmethod
def claim_vcs_source(path, dirs):
if '.git' in dirs:
dirs.remove('.git')
return Git(path)
return None
def get_rev(self, path):
return self.revision[:10]
def generate_log(self, path):
return self.upstream + "/commits/" + self.revision + "/" + path
def generate_blame(self, path):
return self.upstream + "/blame/" + self.revision + "/" + path
def generate_diff(self, path):
# I really want to make this anchor on the file in question, but github
# doesn't seem to do that nicely
return self.upstream + "/commit/" + self.revision
def generate_raw(self, path):
return self.upstream + "/raw/" + self.revision + "/" + path
def synth_web_url(self, repo):
if repo.startswith("[email protected]:"):
self._is_github = True
return "https://github.com/" + repo[len("[email protected]:"):]
elif repo.startswith("git://github.com/"):
self._is_github = True
if repo.endswith(".git"):
repo = repo[:-len(".git")]
return "https" + repo[len("git"):]
raise Exception("Unrecognized git remote URL: %s" % repo)
class Perforce(VCS):
def __init__(self, root):
super(Perforce, self).__init__(root)
have = self._p4run(['have'])
self.have = dict((x['path'][len(root) + 1:], x) for x in have)
try:
self.upstream = tree.plugin_omniglot_p4web
except AttributeError:
self.upstream = "http://p4web/"
@staticmethod
def claim_vcs_source(path, dirs):
if 'P4CONFIG' not in os.environ:
return None
if os.path.exists(os.path.join(path, os.environ['P4CONFIG'])):
return Perforce(path)
return None
def _p4run(self, args):
ret = []
env = os.environ
env["PWD"] = self.root
proc = subprocess.Popen(['p4', '-G'] + args,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
cwd=self.root,
env=env)
while True:
try:
x = marshal.load(proc.stdout)
except EOFError:
break
ret.append(x)
return ret
def is_tracked(self, path):
return path in self.have
def get_rev(self, path):
info = self.have[path]
return '#' + info['haveRev']
def generate_log(self, path):
info = self.have[path]
return self.upstream + info['depotFile'] + '?ac=22#' + info['haveRev']
def generate_blame(self, path):
info = self.have[path]
return self.upstream + info['depotFile'] + '?ac=193'
def generate_diff(self, path):
info = self.have[path]
haveRev = info['haveRev']
prevRev = str(int(haveRev) - 1)
return (self.upstream + info['depotFile'] + '?ac=19&rev1=' + prevRev +
'&rev2=' + haveRev)
def generate_raw(self, path):
info = self.have[path]
return self.upstream + info['depotFile'] + '?ac=98&rev1=' + info['haveRev']
every_vcs = [Mercurial, Git, Perforce]
# Load global variables
def load(tree_, conn):
global tree, lookup_order
tree = tree_
# Find all of the VCS's in the source directory
for cwd, dirs, files in os.walk(tree.source_folder):
for vcs in every_vcs:
attempt = vcs.claim_vcs_source(cwd, dirs)
if attempt is not None:
source_repositories[attempt.root] = attempt
# It's possible that the root of the tree is not a VCS by itself, so walk up
# the hierarchy until we find a parent folder that is a VCS. If we can't
# find any, then no VCSs exist for the top-level of this repository.
directory = tree.source_folder
while directory != '/' and directory not in source_repositories:
directory = os.path.dirname(directory)
for vcs in every_vcs:
attempt = vcs.claim_vcs_source(directory, os.listdir(directory))
if attempt is not None:
source_repositories[directory] = attempt
# Note: we want to make sure that we look up source repositories by deepest
# directory first.
lookup_order = source_repositories.keys()
lookup_order.sort(key=len, reverse=True)
def find_vcs_for_file(path):
"""Given an absolute path, find a source repository we know about that
claims to track that file.
"""
for directory in lookup_order:
# This seems to be the easiest way to find "is path in the subtree
# rooted at directory?"
if os.path.relpath(path, directory).startswith('..'):
continue
vcs = source_repositories[directory]
if vcs.is_tracked(os.path.relpath(path, vcs.get_root_dir())):
return vcs
return None
class LinksHtmlifier(object):
"""Htmlifier which adds blame and external links to VCS web utilities."""
def __init__(self, path):
if not os.path.isabs(path):
path = os.path.join(tree.source_folder, path)
self.vcs = find_vcs_for_file(path)
if self.vcs is not None:
self.path = os.path.relpath(path, self.vcs.get_root_dir())
self.name = self.vcs.get_vcs_name()
def refs(self):
return []
def regions(self):
return []
def annotations(self):
return []
def links(self):
if self.vcs is None:
yield 5, 'Untracked file', []
return
def items():
yield 'log', "Log", self.vcs.generate_log(self.path)
yield 'blame', "Blame", self.vcs.generate_blame(self.path)
yield 'diff', "Diff", self.vcs.generate_diff(self.path)
yield 'raw', "Raw", self.vcs.generate_raw(self.path)
yield 5, '%s (%s)' % (self.name, self.vcs.get_rev(self.path)), items()
def htmlify(path, text):
return LinksHtmlifier(path)
__all__ = dxr.plugins.htmlifier_exports()
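# Illustrative sketch (not part of the original plugin): how the pieces above fit
# together once DXR has called load() with a real tree. The path is hypothetical;
# with no repositories discovered, the block below simply does nothing.
if __name__ == '__main__':
    example_path = '/path/to/tree/source/file.py'
    if source_repositories:
        vcs = find_vcs_for_file(example_path)
        if vcs is not None:
            rel = os.path.relpath(example_path, vcs.get_root_dir())
            print('%s %s %s' % (vcs.get_vcs_name(), vcs.get_rev(rel),
                                vcs.generate_log(rel)))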
|
|
# $Id: __init__.py 6141 2009-09-25 18:50:30Z milde $
# Author: David Goodger <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
This is ``docutils.parsers.rst`` package. It exports a single class, `Parser`,
the reStructuredText parser.
Usage
=====
1. Create a parser::
parser = docutils.parsers.rst.Parser()
Several optional arguments may be passed to modify the parser's behavior.
Please see `Customizing the Parser`_ below for details.
2. Gather input (a multi-line string), by reading a file or the standard
input::
input = sys.stdin.read()
3. Create a new empty `docutils.nodes.document` tree::
document = docutils.utils.new_document(source, settings)
See `docutils.utils.new_document()` for parameter details.
4. Run the parser, populating the document tree::
parser.parse(input, document)
Parser Overview
===============
The reStructuredText parser is implemented as a state machine, examining its
input one line at a time. To understand how the parser works, please first
become familiar with the `docutils.statemachine` module, then see the
`states` module.
Customizing the Parser
----------------------
Anything that isn't already customizable is that way simply because that type
of customizability hasn't been implemented yet. Patches welcome!
When instantiating an object of the `Parser` class, two parameters may be
passed: ``rfc2822`` and ``inliner``. Pass ``rfc2822=1`` to enable an initial
RFC-2822 style header block, parsed as a "field_list" element (with "class"
attribute set to "rfc2822"). Currently this is the only body-level element
which is customizable without subclassing. (Tip: subclass `Parser` and change
its "state_classes" and "initial_state" attributes to refer to new classes.
Contact the author if you need more details.)
The ``inliner`` parameter takes an instance of `states.Inliner` or a subclass.
It handles inline markup recognition. A common extension is the addition of
further implicit hyperlinks, like "RFC 2822". This can be done by subclassing
`states.Inliner`, adding a new method for the implicit markup, and adding a
``(pattern, method)`` pair to the "implicit_dispatch" attribute of the
subclass. See `states.Inliner.implicit_inline()` for details. Explicit
inline markup can be customized in a `states.Inliner` subclass via the
``patterns.initial`` and ``dispatch`` attributes (and new methods as
appropriate).
"""
__docformat__ = 'reStructuredText'
import docutils.parsers
import docutils.statemachine
from docutils.parsers.rst import states
from docutils import frontend, nodes
class Parser(docutils.parsers.Parser):
"""The reStructuredText parser."""
supported = ('restructuredtext', 'rst', 'rest', 'restx', 'rtxt', 'rstx')
"""Aliases this parser supports."""
settings_spec = (
'reStructuredText Parser Options',
None,
(('Recognize and link to standalone PEP references (like "PEP 258").',
['--pep-references'],
{'action': 'store_true', 'validator': frontend.validate_boolean}),
('Base URL for PEP references '
'(default "http://www.python.org/dev/peps/").',
['--pep-base-url'],
{'metavar': '<URL>', 'default': 'http://www.python.org/dev/peps/',
'validator': frontend.validate_url_trailing_slash}),
('Template for PEP file part of URL. (default "pep-%04d")',
['--pep-file-url-template'],
{'metavar': '<URL>', 'default': 'pep-%04d'}),
('Recognize and link to standalone RFC references (like "RFC 822").',
['--rfc-references'],
{'action': 'store_true', 'validator': frontend.validate_boolean}),
('Base URL for RFC references (default "http://www.faqs.org/rfcs/").',
['--rfc-base-url'],
{'metavar': '<URL>', 'default': 'http://www.faqs.org/rfcs/',
'validator': frontend.validate_url_trailing_slash}),
('Set number of spaces for tab expansion (default 8).',
['--tab-width'],
{'metavar': '<width>', 'type': 'int', 'default': 8,
'validator': frontend.validate_nonnegative_int}),
('Remove spaces before footnote references.',
['--trim-footnote-reference-space'],
{'action': 'store_true', 'validator': frontend.validate_boolean}),
('Leave spaces before footnote references.',
['--leave-footnote-reference-space'],
{'action': 'store_false', 'dest': 'trim_footnote_reference_space'}),
('Disable directives that insert the contents of external file '
'("include" & "raw"); replaced with a "warning" system message.',
['--no-file-insertion'],
{'action': 'store_false', 'default': 1,
'dest': 'file_insertion_enabled',
'validator': frontend.validate_boolean}),
('Enable directives that insert the contents of external file '
'("include" & "raw"). Enabled by default.',
['--file-insertion-enabled'],
{'action': 'store_true'}),
('Disable the "raw" directives; replaced with a "warning" '
'system message.',
['--no-raw'],
{'action': 'store_false', 'default': 1, 'dest': 'raw_enabled',
'validator': frontend.validate_boolean}),
('Enable the "raw" directive. Enabled by default.',
['--raw-enabled'],
{'action': 'store_true'}),))
config_section = 'restructuredtext parser'
config_section_dependencies = ('parsers',)
def __init__(self, rfc2822=None, inliner=None):
if rfc2822:
self.initial_state = 'RFC2822Body'
else:
self.initial_state = 'Body'
self.state_classes = states.state_classes
self.inliner = inliner
def parse(self, inputstring, document):
"""Parse `inputstring` and populate `document`, a document tree."""
self.setup_parse(inputstring, document)
self.statemachine = states.RSTStateMachine(
state_classes=self.state_classes,
initial_state=self.initial_state,
debug=document.reporter.debug_flag)
inputlines = docutils.statemachine.string2lines(
inputstring, tab_width=document.settings.tab_width,
convert_whitespace=1)
self.statemachine.run(inputlines, document, inliner=self.inliner)
self.finish_parse()
class DirectiveError(Exception):
"""
Store a message and a system message level.
To be thrown from inside directive code.
Do not instantiate directly -- use `Directive.directive_error()`
instead!
"""
def __init__(self, level, message, source, line):
"""
Initialize with message `message`. `level` is a system message level.
"""
Exception.__init__(self)
self.level = level
self.msg = message
self.source = source
self.line = line
class Directive(object):
"""
Base class for reStructuredText directives.
The following attributes may be set by subclasses. They are
interpreted by the directive parser (which runs the directive
class):
- `required_arguments`: The number of required arguments (default:
0).
- `optional_arguments`: The number of optional arguments (default:
0).
- `final_argument_whitespace`: A boolean, indicating if the final
argument may contain whitespace (default: False).
- `option_spec`: A dictionary, mapping known option names to
conversion functions such as `int` or `float` (default: {}, no
options). Several conversion functions are defined in the
directives/__init__.py module.
Option conversion functions take a single parameter, the option
argument (a string or ``None``), validate it and/or convert it
to the appropriate form. Conversion functions may raise
`ValueError` and `TypeError` exceptions.
- `has_content`: A boolean; True if content is allowed. Client
code must handle the case where content is required but not
supplied (an empty content list will be supplied).
Arguments are normally single whitespace-separated words. The
final argument may contain whitespace and/or newlines if
`final_argument_whitespace` is True.
If the form of the arguments is more complex, specify only one
argument (either required or optional) and set
`final_argument_whitespace` to True; the client code must do any
context-sensitive parsing.
When a directive implementation is being run, the directive class
is instantiated, and the `run()` method is executed. During
instantiation, the following instance variables are set:
- ``name`` is the directive type or name (string).
- ``arguments`` is the list of positional arguments (strings).
- ``options`` is a dictionary mapping option names (strings) to
values (type depends on option conversion functions; see
`option_spec` above).
- ``content`` is a list of strings, the directive content line by line.
- ``lineno`` is the line number of the first line of the directive.
- ``content_offset`` is the line offset of the first line of the content from
the beginning of the current input. Used when initiating a nested parse.
- ``block_text`` is a string containing the entire directive.
- ``state`` is the state which called the directive function.
- ``state_machine`` is the state machine which controls the state which called
the directive function.
Directive functions return a list of nodes which will be inserted
into the document tree at the point where the directive was
encountered. This can be an empty list if there is nothing to
insert.
For ordinary directives, the list must contain body elements or
structural elements. Some directives are intended specifically
for substitution definitions, and must return a list of `Text`
nodes and/or inline elements (suitable for inline insertion, in
place of the substitution reference). Such directives must verify
substitution definition context, typically using code like this::
if not isinstance(state, states.SubstitutionDef):
error = state_machine.reporter.error(
'Invalid context: the "%s" directive can only be used '
'within a substitution definition.' % (name),
nodes.literal_block(block_text, block_text), line=lineno)
return [error]
"""
# There is a "Creating reStructuredText Directives" how-to at
# <http://docutils.sf.net/docs/howto/rst-directives.html>. If you
# update this docstring, please update the how-to as well.
required_arguments = 0
"""Number of required directive arguments."""
optional_arguments = 0
"""Number of optional arguments after the required arguments."""
final_argument_whitespace = False
"""May the final argument contain whitespace?"""
option_spec = None
"""Mapping of option names to validator functions."""
has_content = False
"""May the directive have content?"""
def __init__(self, name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
self.name = name
self.arguments = arguments
self.options = options
self.content = content
self.lineno = lineno
self.content_offset = content_offset
self.block_text = block_text
self.state = state
self.state_machine = state_machine
def run(self):
raise NotImplementedError('Must override run() in subclass.')
# Directive errors:
def directive_error(self, level, message):
"""
Return a DirectiveError suitable for being thrown as an exception.
Call "raise self.directive_error(level, message)" from within
a directive implementation to return one single system message
at level `level`, which automatically gets the directive block
and the line number added.
You'd often use self.error(message) instead, which will
generate an ERROR-level directive error.
"""
# source = self.state_machine.get_source(self.lineno - 1)
try:
(source, line) = self.state_machine.input_lines.info(self.lineno)
except IndexError:
source = self.state_machine.get_source(self.lineno - 1)
line = self.lineno
return DirectiveError(level, message, source, line)
def debug(self, message):
return self.directive_error(0, message)
def info(self, message):
return self.directive_error(1, message)
def warning(self, message):
return self.directive_error(2, message)
def error(self, message):
return self.directive_error(3, message)
def severe(self, message):
return self.directive_error(4, message)
# Convenience methods:
def assert_has_content(self):
"""
Throw an ERROR-level DirectiveError if the directive doesn't
have contents.
"""
if not self.content:
raise self.error('Content block expected for the "%s" directive; '
'none found.' % self.name)
def convert_directive_function(directive_fn):
"""
Define & return a directive class generated from `directive_fn`.
`directive_fn` uses the old-style, functional interface.
"""
class FunctionalDirective(Directive):
option_spec = getattr(directive_fn, 'options', None)
has_content = getattr(directive_fn, 'content', False)
_argument_spec = getattr(directive_fn, 'arguments', (0, 0, False))
required_arguments, optional_arguments, final_argument_whitespace \
= _argument_spec
def run(self):
return directive_fn(
self.name, self.arguments, self.options, self.content,
self.lineno, self.content_offset, self.block_text,
self.state, self.state_machine)
# Return new-style directive.
return FunctionalDirective
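# Illustrative sketch (not part of docutils itself): a minimal directive
# subclass following the contract documented above.  It declares its argument
# and content spec via the class attributes and returns a list of body
# elements from run().  The use of `nodes.paragraph` assumes the standard
# docutils.nodes API.
class _ExampleNoteDirective(Directive):
    required_arguments = 0
    optional_arguments = 1
    final_argument_whitespace = True
    has_content = True

    def run(self):
        from docutils import nodes  # local import; assumed available
        self.assert_has_content()
        text = '\n'.join(self.content)
        # Prepend the optional argument as a short title, if one was given.
        if self.arguments:
            text = '%s: %s' % (self.arguments[0], text)
        return [nodes.paragraph(text=text)]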
|
|
#!/usr/bin/env python
from __future__ import print_function
from __future__ import absolute_import
from builtins import str
from builtins import range
from builtins import object
# standard library
import itertools
# third party
import matplotlib.pyplot as plt
from matplotlib import cm as CM
from matplotlib.colors import hex2color
import numpy as np
# treeCl
from .distance_matrix import CoordinateMatrix, DistanceMatrix
from .partition import Partition
from .utils import flatten_list
from .colours import ggColorSlice
import logging
logger = logging.getLogger(__name__)
# Define some default sets of colours
SET2 = ["#66c2a5","#fc8d62","#8da0cb","#e78ac3","#a6d854","#ffd92f","#e5c494","#b3b3b3"]
SET3 = ["#8dd3c7","#ffffb3","#bebada","#fb8072","#80b1d3","#fdb462","#b3de69","#fccde5","#d9d9d9","#bc80bd","#ccebc5","#ffed6f"]
def heatmap(dm, partition=None, cmap=CM.Blues, fontsize=10):
""" heatmap(dm, partition=None, cmap=CM.Blues, fontsize=10)
Produce a 2D plot of the distance matrix, with values encoded by
coloured cells.
Args:
partition: treeCl.Partition object - if supplied, will reorder
rows and columns of the distance matrix to reflect
the groups defined by the partition
cmap: matplotlib colourmap object - the colour palette to use
fontsize: int or None - sets the size of the locus labels
Returns:
matplotlib plottable object
"""
assert isinstance(dm, DistanceMatrix)
datamax = float(np.abs(dm.values).max())
length = dm.shape[0]
if partition:
sorting = np.array(flatten_list(partition.get_membership()))
new_dm = dm.reorder(dm.df.columns[sorting])
else:
new_dm = dm
fig = plt.figure()
ax = fig.add_subplot(111)
ax.xaxis.tick_top()
ax.grid(False)
tick_positions = np.array(list(range(length))) + 0.5
if fontsize is not None:
ax.set_yticks(tick_positions)
ax.set_xticks(tick_positions)
ax.set_xticklabels(new_dm.df.columns, rotation=90, fontsize=fontsize, ha='center')
ax.set_yticklabels(new_dm.df.index, fontsize=fontsize, va='center')
cbar_ticks_at = [0, 0.5 * datamax, datamax]
cax = ax.imshow(
new_dm.values,
interpolation='nearest',
extent=[0., length, length, 0.],
vmin=0,
vmax=datamax,
cmap=cmap,
)
cbar = fig.colorbar(cax, ticks=cbar_ticks_at, format='%1.2g')
cbar.set_label('Distance')
return fig
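# Usage sketch (illustrative only): `dm` is assumed to be an existing
# treeCl.DistanceMatrix and `part` an optional treeCl.Partition.
#
#     fig = heatmap(dm, partition=part, cmap=CM.Purples, fontsize=8)
#     fig.savefig('distances.png')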
def _plotly_3d_scatter(coords, partition=None):
""" _plotly_3d_scatter(coords, partition=None)
Make a scatterplot of treeCl.CoordinateMatrix using the Plotly
plotting engine
"""
from plotly.graph_objs import Scatter3d, Data, Figure, Layout, Line, Margin, Marker
# auto sign-in with credentials or use py.sign_in()
colourmap = {
'A':'#1f77b4',
'B':'#ff7f0e',
'C':'#2ca02c',
'D':'#d62728',
'E':'#9467bd',
1:'#1f77b4',
2:'#ff7f0e',
3:'#2ca02c',
4:'#d62728',
5:'#9467bd'
}
df = coords.df
if partition:
assert len(partition.partition_vector) == df.shape[0]
labels = [x+1 for x in partition.partition_vector]
else:
labels = [1 for _ in range(df.shape[0])]
x, y, z = df.columns[:3]
df['Label'] = labels
colours = [colourmap[lab] for lab in df['Label']]
trace = Scatter3d(x=df[x], y=df[y], z=df[z], mode='markers',
marker=Marker(size=9, color=colours,
line=Line(color=colours, width=0.5), opacity=0.8),
text=[str(ix) for ix in df.index])
data = Data([trace])
layout = Layout(
margin=Margin(l=0, r=0, b=0, t=0 ),
hovermode='x',
)
fig = Figure(data=data, layout=layout)
return fig
def _add_sphere(ax):
""" _add_sphere(ax)
Add a wireframe unit sphere onto matplotlib 3D axes
Args:
ax - matplotlib 3D axes object
Returns:
updated matplotlib 3D axes
"""
(u, v) = np.mgrid[0:2 * np.pi:20j, 0:np.pi:10j]
x = np.cos(u) * np.sin(v)
y = np.sin(u) * np.sin(v)
z = np.cos(v)
ax.plot_wireframe(x, y, z, color='grey', linewidth=0.2)
return ax
def plot_embedding(coordinates, partition=None, add_sphere=False, point_size=8,
colours=None, labels=None, legend=True, outfile=False, **kwargs):
""" plot_embedding(coordinates, partition=None, add_sphere=False, point_size=8,
colours=None, labels=None, legend=True, outfile=False, **kwargs):
Plot a 2D / 3D scatterplot of coordinates, optionally
coloured by group membership.
Args:
coordinates: numpy array or treeCl.CoordinateMatrix -
The coordinates of the points to plot. The number
of columns determines the number of dimensions in
the plot.
add_sphere: bool -
Add a wireframe sphere to a 3D plot. Spectral clustering
places points on the surface of a unit sphere.
colours: list of rgb hexes, or 'auto', or None -
Colours to use to colour the points, as a list of
RGB hex values. If None, defaults
(colorbrewer set3). If 'auto', generates a set
of colours equally spaced from the colour wheel.
labels: Tuple(xlab, ylab, title, zlab) -
Plot labels. Must be given in the above order.
Missing options will be replaced by None. E.g.
to set the title: (None, None, "Some points")
outfile: str -
Save figure to this filename
"""
if isinstance(coordinates, CoordinateMatrix):
coordinates = coordinates.values
dimensions = min(3, coordinates.shape[1])
partition = (partition or
Partition(tuple([0] * len(coordinates))))
ngrp = partition.num_groups()
if colours is None:
colours = SET2
elif colours == 'auto':
colours = ggColorSlice(ngrp)
colour_cycle = itertools.cycle(colours)
colours = np.array([hex2color(c) for c in itertools.islice(colour_cycle, ngrp)])
if labels is None:
xlab, ylab, zlab, title = None, None, None, None
else:
if isinstance(labels, (tuple, list)):
labels = list(labels[:4])
labels.extend([None]*(4-len(labels)))
xlab, ylab, title, zlab = labels
fig = plt.figure()
if dimensions == 3:
ax = fig.add_subplot(111, projection='3d')
if add_sphere:
ax = _add_sphere(ax)
else:
ax = fig.add_subplot(111)
members = partition.get_membership()
for grp in range(ngrp):
index = np.array(members[grp])
points = coordinates[index,:dimensions].T
ax.scatter(*points, s=point_size, c=colours[grp], edgecolor=None, label='Group {}'.format(grp+1), **kwargs)
if xlab:
ax.set_xlabel(xlab)
if ylab:
ax.set_ylabel(ylab)
if zlab:
ax.set_zlabel(zlab)
if title:
ax.set_title(title)
if legend:
plt.legend()
if outfile:
fig.savefig('{0}.pdf'.format(outfile))
return fig
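# Usage sketch (illustrative only): `coords` is assumed to be a
# treeCl.CoordinateMatrix from an embedding and `part` a treeCl.Partition
# describing group membership.
#
#     fig = plot_embedding(coords, partition=part, add_sphere=True,
#                          colours='auto',
#                          labels=('dim 1', 'dim 2', 'Spectral embedding'),
#                          outfile='embedding')  # writes embedding.pdf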
class Plotter(object):
""" DEPRECATED
"""
def __init__(self, *args, **kwargs):
logger.warn("Plotter class is deprecated. Use module level functions\n"
"heatmap(...) and plot_embedding(...) instead.")
def sphere(self, ax):
(u, v) = np.mgrid[0:2 * np.pi:20j, 0:np.pi:10j]
x = np.cos(u) * np.sin(v)
y = np.sin(u) * np.sin(v)
z = np.cos(v)
ax.plot_wireframe(x, y, z, color='grey', linewidth=0.2)
return ax
def heatmap(self, partition=None, cmap=CM.Blues):
""" Plots a visual representation of a distance matrix """
if isinstance(self.dm, DistanceMatrix):
length = self.dm.values.shape[0]
else:
length = self.dm.shape[0]
datamax = float(np.abs(self.dm).max())
fig = plt.figure()
ax = fig.add_subplot(111)
ticks_at = [0, 0.5 * datamax, datamax]
if partition:
sorting = flatten_list(partition.get_membership())
self.dm = self.dm.reorder(sorting)
cax = ax.imshow(
self.dm.values,
interpolation='nearest',
origin='lower',
extent=[0., length, 0., length],
vmin=0,
vmax=datamax,
cmap=cmap,
)
cbar = fig.colorbar(cax, ticks=ticks_at, format='%1.2g')
cbar.set_label('Distance')
return fig
def embedding_plotter(
self, coordinates, partition=None, add_sphere=False, point_size=8,
colours=None, labels=None, legend=True, outfile=False, **kwargs
):
"""
Plot a 2D / 3D scatterplot of the coordinates, optionally
coloured by group membership.
Parameters
==========
coordinates [numpy array|treeCl.CoordinateMatrix] -
The coordinates of the points to plot. The number
of columns determines the number of dimensions in
the plot.
add_sphere [bool] -
Add a wireframe sphere
colours [None|list of rgb hexes|'auto'] -
Colours to use to colour the points, as a list of
RGB hex values. If None, defaults
(colorbrewer set3). If 'auto', generates a set
of colours similar to ggplot.
labels [Tuple(xlab, ylab, title)] -
Plot labels
outfile [str] -
Save figure
"""
if isinstance(coordinates, CoordinateMatrix):
coordinates = coordinates.values
dimensions = min(3, coordinates.shape[1])
partition = (partition or
Partition(tuple([0] * len(coordinates))))
ngrp = partition.num_groups()
if colours is None:
colours = SET2
elif colours == 'auto':
colours = ggColorSlice(ngrp)
colour_cycle = itertools.cycle(colours)
colours = np.array([hex2color(c) for c in itertools.islice(colour_cycle, ngrp)])
if labels is None:
xlab, ylab, zlab, title = None, None, None, None
else:
if isinstance(labels, (tuple, list)):
labels = list(labels[:4])
labels.extend([None]*(4-len(labels)))
xlab, ylab, zlab, title = labels
fig = plt.figure()
if dimensions == 3:
ax = fig.add_subplot(111, projection='3d')
if add_sphere:
ax = self.sphere(ax)
else:
ax = fig.add_subplot(111)
members = partition.get_membership()
for grp in range(ngrp):
index = np.array(members[grp])
points = coordinates[index,:dimensions].T
ax.scatter(*points, s=point_size, c=colours[grp], edgecolor=colours[grp], label='Group {}'.format(grp+1), **kwargs)
if xlab:
ax.set_xlabel(xlab)
if ylab:
ax.set_ylabel(ylab)
if zlab:
ax.set_zlabel(zlab)
if title:
ax.set_title(title)
if legend:
plt.legend()
if outfile:
fig.savefig('{0}.pdf'.format(outfile))
return fig
|
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from copy import deepcopy
import logging
import re
import string
from file_system import FileNotFoundError
def _ClassifySchemaNode(node_name, api):
"""Attempt to classify |node_name| in an API, determining whether |node_name|
refers to a type, function, event, or property in |api|.
"""
if '.' in node_name:
node_name, rest = node_name.split('.', 1)
else:
rest = None
for key, group in [('types', 'type'),
('functions', 'method'),
('events', 'event'),
('properties', 'property')]:
for item in api.get(key, []):
if item['name'] == node_name:
if rest is not None:
ret = _ClassifySchemaNode(rest, item)
if ret is not None:
return ret
else:
return group, node_name
return None
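# Illustrative example (the |api| contents are hypothetical): given an API
# dict with a type 'StorageArea' that defines a function 'get',
#   _ClassifySchemaNode('StorageArea.get', api) -> ('method', 'get')
#   _ClassifySchemaNode('StorageArea', api)     -> ('type', 'StorageArea')
#   _ClassifySchemaNode('nonexistent', api)     -> None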
def _MakeKey(namespace, ref):
key = '%s/%s' % (namespace, ref)
# AppEngine doesn't like keys > 500, but there will be some other stuff
# that goes into this key, so truncate it earlier. This shouldn't be
# happening anyway unless there's a bug, such as http://crbug.com/314102.
max_size = 256
if len(key) > max_size:
logging.error('Key was >%s characters: %s' % (max_size, key))
key = key[:max_size]
return key
class ReferenceResolver(object):
"""Resolves references to $ref's by searching through the APIs to find the
correct node.
$ref's have two forms:
$ref:api.node - Replaces the $ref with a link to node on the API page. The
title is set to the name of the node.
$ref:[api.node The Title] - Same as the previous form but title is set to
"The Title".
"""
# Matches after a $ref: that doesn't have []s.
_bare_ref = re.compile(r'\w+(\.\w+)*')
class Factory(object):
def __init__(self,
api_data_source_factory,
api_models,
object_store_creator):
self._api_data_source_factory = api_data_source_factory
self._api_models = api_models
self._object_store_creator = object_store_creator
def Create(self):
return ReferenceResolver(
self._api_data_source_factory.Create(None),
self._api_models,
self._object_store_creator.Create(ReferenceResolver))
def __init__(self, api_data_source, api_models, object_store):
self._api_data_source = api_data_source
self._api_models = api_models
self._object_store = object_store
def _GetRefLink(self, ref, api_list, namespace):
# Check nodes within each API the ref might refer to.
parts = ref.split('.')
for i, part in enumerate(parts):
api_name = '.'.join(parts[:i])
if api_name not in api_list:
continue
try:
api = self._api_data_source.get(api_name, disable_refs=True)
except FileNotFoundError:
continue
name = '.'.join(parts[i:])
# Attempt to find |name| in the API.
node_info = _ClassifySchemaNode(name, api)
if node_info is None:
# Check to see if this ref is a property. If it is, we want the ref to
# the underlying type the property is referencing.
for prop in api.get('properties', []):
# If the name of this property is in the ref text, replace the
# property with its type, and attempt to classify it.
if prop['name'] in name and 'link' in prop:
name_as_prop_type = name.replace(prop['name'], prop['link']['name'])
node_info = _ClassifySchemaNode(name_as_prop_type, api)
if node_info is not None:
name = name_as_prop_type
text = ref.replace(prop['name'], prop['link']['name'])
break
if node_info is None:
continue
else:
text = ref
category, node_name = node_info
if namespace is not None and text.startswith('%s.' % namespace):
text = text[len('%s.' % namespace):]
return {
'href': '%s.html#%s-%s' % (api_name, category, name.replace('.', '-')),
'text': text,
'name': node_name
}
# If it's not a reference to an API node it might just be a reference to an
# API. Check this last so that links within APIs take precedence over links
# to other APIs.
if ref in api_list:
return {
'href': '%s.html' % ref,
'text': ref,
'name': ref
}
return None
def GetLink(self, ref, namespace=None, title=None):
"""Resolve $ref |ref| in namespace |namespace| if not None, returning None
if it cannot be resolved.
"""
db_key = _MakeKey(namespace, ref)
link = self._object_store.Get(db_key).Get()
if link is None:
api_list = self._api_models.GetNames()
link = self._GetRefLink(ref, api_list, namespace)
if link is None and namespace is not None:
# Try to resolve the ref in the current namespace if there is one.
link = self._GetRefLink('%s.%s' % (namespace, ref), api_list, namespace)
if link is None:
return None
self._object_store.Set(db_key, link)
else:
link = deepcopy(link)
if title is not None:
link['text'] = title
return link
def SafeGetLink(self, ref, namespace=None, title=None):
"""Resolve $ref |ref| in namespace |namespace|, or globally if None. If it
cannot be resolved, pretend like it is a link to a type.
"""
ref_data = self.GetLink(ref, namespace=namespace, title=title)
if ref_data is not None:
return ref_data
logging.error('$ref %s could not be resolved in namespace %s.' %
(ref, namespace))
type_name = ref.rsplit('.', 1)[-1]
return {
'href': '#type-%s' % type_name,
'text': title or ref,
'name': ref
}
def ResolveAllLinks(self, text, namespace=None):
"""This method will resolve all $ref links in |text| using namespace
|namespace| if not None. Any links that cannot be resolved will be replaced
using the default link format that |SafeGetLink| uses.
"""
if text is None or '$ref:' not in text:
return text
split_text = text.split('$ref:')
# |split_text| is an array of text chunks that all start with the
# argument to '$ref:'.
formatted_text = [split_text[0]]
for ref_and_rest in split_text[1:]:
title = None
if ref_and_rest.startswith('[') and ']' in ref_and_rest:
# Text was '$ref:[foo.bar maybe title] other stuff'.
ref_with_title, rest = ref_and_rest[1:].split(']', 1)
ref_with_title = ref_with_title.split(None, 1)
if len(ref_with_title) == 1:
# Text was '$ref:[foo.bar] other stuff'.
ref = ref_with_title[0]
else:
# Text was '$ref:[foo.bar title] other stuff'.
ref, title = ref_with_title
else:
# Text was '$ref:foo.bar other stuff'.
match = self._bare_ref.match(ref_and_rest)
if match is None:
ref = ''
rest = ref_and_rest
else:
ref = match.group()
rest = ref_and_rest[match.end():]
ref_dict = self.SafeGetLink(ref, namespace=namespace, title=title)
formatted_text.append('<a href="%(href)s">%(text)s</a>%(rest)s' %
{ 'href': ref_dict['href'], 'text': ref_dict['text'], 'rest': rest })
return ''.join(formatted_text)
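# Illustrative example of the two $ref forms handled above (API names and
# hrefs are hypothetical; the anchor depends on how the node is classified):
#   'See $ref:storage.get for details.'
#     -> 'See <a href="storage.html#method-get">storage.get</a> for details.'
#   'Read $ref:[storage.get the storage docs] first.'
#     -> 'Read <a href="storage.html#method-get">the storage docs</a> first.'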
|
|
"""Module to run the phaser rotation search"""
__author__ = "Adam Simpkin & Felix Simkovic"
__date__ = "15 April 2018"
__version__ = "0.4"
import glob
import logging
import os
import shutil
import uuid
logger = logging.getLogger(__name__)
from pyjob.script import ScriptCollector, Script
import simbad.db
import simbad.mr
import simbad.rotsearch
import simbad.core.dat_score
import simbad.core.phaser_score
import simbad.parsers.phaser_parser
import simbad.parsers.refmac_parser
import simbad.parsers.rotsearch_parser
import simbad.util
import simbad.util.pdb_util
import simbad.util.matthews_prob
from simbad.util import EXPORT, CMD_PREFIX, CCP4_SOURCE, CCP4_SCRATCH, MKDIR_CMD, RM_CMD
class PhaserRotationSearch(simbad.rotsearch._RotationSearch):
"""A class to perform the phaser rotation search
Attributes
----------
mtz : str
The path to the input MTZ
i : str
Column label for I
sigi : str
Column label for SIGI
work_dir : str
The path to the working directory
max_to_keep : int
The maximum number of results to keep [default: 20]
eid : int, optional
The estimated sequence identity from which to calculate ermsd
Examples
--------
>>> from simbad.rotsearch.phaser_search import PhaserRotationSearch
>>> rotation_search = PhaserRotationSearch('<mtz>', '<mr_program>', '<tmp_dir>', '<work_dir>', '<max_to_keep>',
... '<skip_mr>', '<eid>', '<process_all>')
>>> rotation_search.run(
... '<models_dir>', '<nproc>', '<min_solvent_content>', '<submit_qtype>',
... '<submit_queue>', '<chunk_size>'
... )
>>> rotation_search.summarize()
>>> search_results = rotation_search.search_results
If any results are found, an object is returned containing the pdb_code, and the various associated scores
from phaser.
"""
def __init__(self, mtz, mr_program, tmp_dir, work_dir, max_to_keep=20, skip_mr=False, eid=70, process_all=False, **kwargs):
super(PhaserRotationSearch, self).__init__(mtz, mr_program, tmp_dir, work_dir,
max_to_keep=max_to_keep, skip_mr=skip_mr, process_all=process_all)
self.eid = eid
self.ccp4_scr = None
self.script_log_dir = None
self.columns = ['llg', 'rfz']
self.progress = -5
self.score_column = 'rfz'
self.template_model = None
self.template_tmp_dir = None
def run(
self,
models_dir,
nproc=2,
min_solvent_content=20,
submit_qtype=None,
submit_queue=None,
chunk_size=0,
**kwargs
):
"""Run phaser rotation function on a directory of models
Parameters
----------
models_dir : str
The directory containing the models to run the rotation search on
nproc : int, optional
The number of processors to run the job on
min_solvent_content : int, float, optional
The minimum solvent content present in the unit cell with the input model [default: 20]
submit_qtype : str
The cluster submission queue type - currently supports SGE and LSF
submit_queue : str
The queue to submit to on the cluster
chunk_size : int, optional
The number of jobs to submit at the same time
Returns
-------
file
log file for each model in the models_dir
"""
from phaser import InputMR_DAT, runMR_DAT, InputCCA, runCCA
self.submit_qtype = submit_qtype
self.submit_queue = submit_queue
self.simbad_dat_files = simbad.db.find_simbad_dat_files(models_dir)
i = InputMR_DAT()
i.setHKLI(self.mtz)
i.setMUTE(True)
run_mr_data = runMR_DAT(i)
mat_prob = simbad.util.matthews_prob.MatthewsProbability(self.mtz_obj.cell.volume_per_image())
dir_name = "simbad-tmp-" + str(uuid.uuid1())
self.script_log_dir = os.path.join(self.work_dir, dir_name)
os.mkdir(self.script_log_dir)
self.ccp4_scr = os.environ["CCP4_SCR"]
default_tmp_dir = os.path.join(self.work_dir, "tmp")
if self.tmp_dir:
self.template_tmp_dir = os.path.join(self.tmp_dir, dir_name + "-{0}")
else:
self.template_tmp_dir = os.path.join(default_tmp_dir, dir_name + "-{0}")
predicted_molecular_weight = 0
if run_mr_data.Success():
i = InputCCA()
i.setSPAC_HALL(run_mr_data.getSpaceGroupHall())
i.setCELL6(run_mr_data.getUnitCell())
i.setMUTE(True)
run_cca = runCCA(i)
if run_cca.Success():
predicted_molecular_weight = run_cca.getAssemblyMW()
dat_models = []
for dat_model in self.simbad_dat_files:
name = os.path.basename(dat_model.replace(".dat", ""))
try:
pdb_struct = simbad.util.pdb_util.PdbStructure.from_file(dat_model)
except Exception: # Catch all issues here
msg = "Skipping %s: Problem with dat file"
logger.debug(msg, name)
continue
solvent_fraction, n_copies = mat_prob.calculate_from_struct(pdb_struct)
solvent_content = solvent_fraction * 100
if solvent_content < min_solvent_content:
msg = "Skipping %s: solvent content is predicted to be less than %.2f"
logger.debug(msg, name, min_solvent_content)
continue
mw_diff = abs(predicted_molecular_weight - pdb_struct.molecular_weight)
info = simbad.core.dat_score.DatModelScore(name, dat_model, mw_diff, None, None, None, None,
solvent_fraction, n_copies)
dat_models.append(info)
sorted_dat_models = sorted(dat_models, key=lambda x: float(x.mw_diff), reverse=False)
n_files = len(sorted_dat_models)
chunk_size = simbad.rotsearch.get_chunk_size(n_files, chunk_size)
total_chunk_cycles = simbad.rotsearch.get_total_chunk_cycles(n_files, chunk_size)
results = []
iteration_range = range(0, n_files, chunk_size)
for cycle, i in enumerate(iteration_range):
logger.info("Working on chunk %d out of %d", cycle + 1, total_chunk_cycles)
if self.solution:
logger.info("Early termination criteria met, skipping chunk %d", cycle + 1)
continue
self.template_model = os.path.join(CCP4_SCRATCH, "{0}.pdb")
collector = ScriptCollector(None)
phaser_files = []
for dat_model in sorted_dat_models[i: i + chunk_size]:
script, run_file = self.generate_script(dat_model)
collector.add(script)
phaser_files.append(run_file)
if len(phaser_files) > 0:
logger.info("Running PHASER rotation functions")
phaser_logs, dat_models = zip(*phaser_files)
simbad.util.submit_chunk(
collector, self.script_log_dir, nproc, "simbad_phaser", submit_qtype, submit_queue, True,
self.progress_monitor,
self.rot_succeeded_log
)
for dat_model, phaser_log in zip(dat_models, phaser_logs):
base = os.path.basename(phaser_log)
pdb_code = base.replace("phaser_", "").replace(".log", "")
try:
phaser_rotation_parser = simbad.parsers.rotsearch_parser.PhaserRotsearchParser(phaser_log)
if phaser_rotation_parser.rfact:
phaser_rotation_parser.llg = 100
phaser_rotation_parser.rfz = 10
score = simbad.core.phaser_score.PhaserRotationScore(
pdb_code, dat_model, phaser_rotation_parser.llg, phaser_rotation_parser.rfz
)
if phaser_rotation_parser.rfz:
results += [score]
except IOError:
pass
else:
logger.critical("No structures to be trialled")
self._search_results = results
shutil.rmtree(self.script_log_dir)
if os.path.isdir(default_tmp_dir):
shutil.rmtree(default_tmp_dir)
def generate_script(self, dat_model):
logger.debug("Generating script to perform PHASER rotation " + "function on %s", dat_model.pdb_code)
pdb_model = self.template_model.format(dat_model.pdb_code)
template_rot_log = os.path.join(CCP4_SCRATCH, "{0}_rot.log")
conv_py = "\"from simbad.db import convert_dat_to_pdb; convert_dat_to_pdb(r'{}', r'{}')\""
conv_py = conv_py.format(dat_model.dat_path, pdb_model)
rot_log = template_rot_log.format(dat_model.pdb_code)
tmp_dir = self.template_tmp_dir.format(dat_model.pdb_code)
phaser_cmd = [
"simbad.rotsearch.phaser_rotation_search",
"-eid",
self.eid,
"-hklin",
self.mtz,
"-f",
self.mtz_obj.f,
"-sigf",
self.mtz_obj.sigf,
"-i",
self.mtz_obj.i,
"-sigi",
self.mtz_obj.sigi,
"-pdbin",
pdb_model,
"-logfile",
rot_log,
"-solvent",
dat_model.solvent,
"-nmol",
dat_model.nmol,
"-work_dir",
tmp_dir,
]
phaser_cmd = " ".join(str(e) for e in phaser_cmd)
source = simbad.util.source_ccp4()
cmd = [
[source],
[EXPORT, "CCP4_SCR=" + tmp_dir],
[MKDIR_CMD, CCP4_SCRATCH, os.linesep],
[CMD_PREFIX, CCP4_SOURCE + "/bin/ccp4-python", "-c", conv_py, os.linesep],
[CMD_PREFIX, CCP4_SOURCE + "/bin/ccp4-python", "-m", phaser_cmd, os.linesep],
[RM_CMD, CCP4_SCRATCH, os.linesep],
[EXPORT, "CCP4_SCR=" + self.ccp4_scr],
]
phaser_script = Script(directory=self.script_log_dir, prefix="phaser_", stem=dat_model.pdb_code)
for c in cmd:
phaser_script.append(" ".join(map(str, c)))
phaser_log = phaser_script.path.rsplit(".", 1)[0] + ".log"
phaser_files = (phaser_log, dat_model.dat_path)
phaser_script.write()
return phaser_script, phaser_files
@staticmethod
def _rot_job_succeeded(phaser_rfz_score):
"""Check values for job success"""
return phaser_rfz_score > 7
def rot_succeeded_log(self, log):
"""Check a rotation search job for it's success
Parameters
----------
log : str
The path to a log file
Returns
-------
bool
Success status of the rot run
"""
if self.skip_mr or self.process_all:
return False
rot_prog, pdb = os.path.basename(log).replace(".log", "").split("_", 1)
rotsearch_parser = simbad.parsers.rotsearch_parser.PhaserRotsearchParser(log)
dat_model = [s for s in self.simbad_dat_files if pdb in s][0]
score = simbad.core.phaser_score.PhaserRotationScore(pdb, dat_model, rotsearch_parser.llg, rotsearch_parser.rfz)
results = [score]
if self._rot_job_succeeded(rotsearch_parser.rfz) or rotsearch_parser.rfact:
if pdb not in self.tested:
self.tested.append(pdb)
output_dir = os.path.join(self.work_dir, "mr_search")
mr = simbad.mr.MrSubmit(
mtz=self.mtz,
mr_program=self.mr_program,
refine_program="refmac5",
refine_type=None,
refine_cycles=0,
output_dir=output_dir,
sgalternative="none",
tmp_dir=self.tmp_dir,
timeout=30,
)
mr.mute = True
mr.submit_jobs(results, nproc=1, process_all=True, submit_qtype=self.submit_qtype,
submit_queue=self.submit_queue)
mr_log = os.path.join(output_dir, pdb, "mr", self.mr_program, pdb + "_mr.log")
refmac_log = os.path.join(output_dir, pdb, "mr", self.mr_program, "refine", pdb + "_ref.log")
if os.path.isfile(refmac_log):
refmac_parser = simbad.parsers.refmac_parser.RefmacParser(refmac_log)
if simbad.mr._refinement_succeeded(refmac_parser.final_r_fact, refmac_parser.final_r_free):
self.solution = True
return True
if os.path.isfile(mr_log):
if self.mr_program == "phaser":
phaser_parser = simbad.parsers.phaser_parser.PhaserParser(mr_log)
if simbad.mr._phaser_succeeded(phaser_parser.llg, phaser_parser.tfz):
self.solution = True
return True
return False
def progress_monitor(self):
total_log_files = 0
log_files = glob.glob(os.path.join(self.script_log_dir, '*.log'))
for log in log_files:
with open(log, 'r') as f:
total_log_files += sum([1 for line in f.readlines() if "EXIT STATUS: SUCCESS" in line])
total_sh_files = len(glob.glob(os.path.join(self.script_log_dir, '*.sh')))
percentage_complete = (total_log_files / total_sh_files) * 100
if percentage_complete - self.progress >= 5:
logger.info("Percentage complete: {:.1f}%".format(percentage_complete))
self.progress = percentage_complete
|
|
"""pack a list of components into as few components as possible.
adapted from phidl.geometry.
"""
import warnings
from typing import Any, Dict, List, Optional, Tuple
import numpy as np
from pydantic import validate_arguments
from gdsfactory.component import Component
from gdsfactory.name import get_name_short
from gdsfactory.types import (
Anchor,
ComponentFactory,
ComponentOrFactory,
Float2,
Number,
)
def _pack_single_bin(
rect_dict: Dict[int, Tuple[Number, Number]],
aspect_ratio: Tuple[Number, Number],
max_size: Tuple[float, float],
sort_by_area: bool,
density: float,
) -> Tuple[Dict[int, Tuple[Number, Number, Number, Number]], Dict[Any, Any]]:
"""Packs a dict of rectangles {id:(w,h)} and tries to
pack it into a bin as small as possible with aspect ratio `aspect_ratio`
Will iteratively grow the bin size until everything fits or the bin size
reaches `max_size`.
Args:
rect_dict: dict of rectangles {id: (w, h)} to pack
aspect_ratio: (width, height) aspect ratio of the bin
max_size: tuple of max X, Y size
sort_by_area: sorts components by area
density: of packing, closer to 1 packs tighter (more compute heavy)
Returns:
packed rectangles dict {id:(x,y,w,h)}
dict of remaining unpacked rectangles
"""
import rectpack
# Compute total area and use it for an initial estimate of the bin size
total_area = 0
for r in rect_dict.values():
total_area += r[0] * r[1]
aspect_ratio = np.asarray(aspect_ratio) / np.linalg.norm(aspect_ratio) # Normalize
# Setup variables
box_size = np.asarray(aspect_ratio * np.sqrt(total_area), dtype=np.float64)
box_size = np.clip(box_size, None, max_size)
if sort_by_area:
rp_sort = rectpack.SORT_AREA
else:
rp_sort = rectpack.SORT_NONE
# Repeatedly run the rectangle-packing algorithm with increasingly larger
# areas until everything fits or we've reached the maximum size
while True:
# Create the pack object
rect_packer = rectpack.newPacker(
mode=rectpack.PackingMode.Offline,
pack_algo=rectpack.MaxRectsBlsf,
sort_algo=rp_sort,
bin_algo=rectpack.PackingBin.BBF,
rotation=False,
)
# Add each rectangle to the pack, create a single bin, and pack
for rid, r in rect_dict.items():
rect_packer.add_rect(width=r[0], height=r[1], rid=rid)
rect_packer.add_bin(width=box_size[0], height=box_size[1])
rect_packer.pack()
# Adjust the box size for next time
box_size *= density # Increase area to try to fit
box_size = np.clip(box_size, None, max_size)
# Quit the loop if we've packed all the rectangles or reached the max size
if len(rect_packer.rect_list()) == len(rect_dict):
break
if all(box_size >= max_size):
break
# Separate packed from unpacked rectangles, make dicts of form {id:(x,y,w,h)}
packed_rect_dict = {r[-1]: r[:-1] for r in rect_packer[0].rect_list()}
unpacked_rect_dict = {}
for k, v in rect_dict.items():
if k not in packed_rect_dict:
unpacked_rect_dict[k] = v
return packed_rect_dict, unpacked_rect_dict
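# Minimal illustration of _pack_single_bin (coordinates shown are placeholders;
# actual positions depend on the rectpack algorithm):
#   rects = {0: (20, 10), 1: (10, 10)}
#   packed, remaining = _pack_single_bin(rects, aspect_ratio=(1, 1),
#                                        max_size=(np.inf, np.inf),
#                                        sort_by_area=True, density=1.1)
#   # packed    -> {0: (x0, y0, 20, 10), 1: (x1, y1, 10, 10)}
#   # remaining -> {}  (everything fitted within max_size)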
@validate_arguments
def pack(
component_list: List[ComponentOrFactory],
spacing: float = 10.0,
aspect_ratio: Float2 = (1.0, 1.0),
max_size: Tuple[Optional[float], Optional[float]] = (None, None),
sort_by_area: bool = True,
density: float = 1.1,
precision: float = 1e-2,
text: Optional[ComponentFactory] = None,
text_prefix: str = "",
text_offsets: Tuple[Float2, ...] = ((0, 0),),
text_anchors: Tuple[Anchor, ...] = ("cc",),
name_prefix: Optional[str] = None,
rotation: int = 0,
h_mirror: bool = False,
v_mirror: bool = False,
) -> List[Component]:
"""Pack a list of components into as few Components as possible.
Adapted from phidl.geometry
Args:
component_list: list or tuple
spacing: Minimum distance between adjacent shapes
aspect_ratio: (width, height) ratio of the rectangular bin
max_size: Limits the size into which the shapes will be packed
sort_by_area: Pre-sorts the shapes by area
density: Values closer to 1 pack tighter but require more computation
precision: Desired precision for rounding vertex coordinates.
text: Optional function to add text labels.
text_prefix: for labels. For example, 'A' will produce 'A1', 'A2', ...
text_offsets: relative to component size info anchor. Defaults to center.
text_anchors: relative to component (ce cw nc ne nw sc se sw center cc).
name_prefix: for each packed component (avoids the Unnamed cells warning).
Note that the suffix contains a uuid so the name will not be deterministic
rotation: for each component in degrees
h_mirror: horizontal mirror in y axis (x, 1) (1, 0). This is the most common.
v_mirror: vertical mirror using x axis (1, y) (0, y)
"""
if density < 1.01:
raise ValueError(
"pack() `density` argument is too small. "
"The density argument must be >= 1.01"
)
# Sanitize the max_size variable
max_size = [np.inf if v is None else v for v in max_size]
max_size = np.asarray(max_size, dtype=np.float64) # In case it's integers
max_size = max_size / precision
component_list = [
component() if callable(component) else component
for component in component_list
]
# Convert Components to rectangles
rect_dict = {}
for n, D in enumerate(component_list):
w, h = (D.size + spacing) / precision
w, h = int(w), int(h)
if (w > max_size[0]) or (h > max_size[1]):
raise ValueError(
f"pack() failed because Component {D.name!r} has x or y "
"dimension larger than `max_size` and cannot be packed.\n"
f"size = {w*precision, h*precision}, max_size = {max_size*precision}"
)
rect_dict[n] = (w, h)
packed_list = []
while len(rect_dict) > 0:
(packed_rect_dict, rect_dict) = _pack_single_bin(
rect_dict,
aspect_ratio=aspect_ratio,
max_size=max_size,
sort_by_area=sort_by_area,
density=density,
)
packed_list.append(packed_rect_dict)
components_packed_list = []
index = 0
for i, rect_dict in enumerate(packed_list):
name = get_name_short(f"{name_prefix or 'pack'}_{i}")
packed = Component(name, with_uuid=True)
packed.info["components"] = {}
for n, rect in rect_dict.items():
x, y, w, h = rect
xcenter = x + w / 2 + spacing / 2
ycenter = y + h / 2 + spacing / 2
component = component_list[n]
d = component.ref(rotation=rotation, h_mirror=h_mirror, v_mirror=v_mirror)
packed.add(d)
if hasattr(component, "settings"):
packed.info["components"][component.name] = component.settings
d.center = (xcenter * precision, ycenter * precision)
if text:
for text_offset, text_anchor in zip(text_offsets, text_anchors):
label = packed << text(f"{text_prefix}{index}")
label.move(
(np.array(text_offset) + getattr(d.size_info, text_anchor))
)
index += 1
components_packed_list.append(packed)
if len(components_packed_list) > 1:
groups = len(components_packed_list)
warnings.warn(f"unable to pack in one component, creating {groups} components")
return components_packed_list
def test_pack() -> Component:
import gdsfactory as gf
component_list = [
gf.components.ellipse(radii=tuple(np.random.rand(2) * n + 2)) for n in range(2)
]
component_list += [
gf.components.rectangle(size=tuple(np.random.rand(2) * n + 2)) for n in range(2)
]
components_packed_list = pack(
component_list, # Must be a list or tuple of Components
spacing=1.25, # Minimum distance between adjacent shapes
aspect_ratio=(2, 1), # (width, height) ratio of the rectangular bin
max_size=(None, None), # Limits the size into which the shapes will be packed
density=1.05, # Values closer to 1 pack tighter but require more computation
sort_by_area=True, # Pre-sorts the shapes by area
)
c = components_packed_list[0] # Only one bin was created, so we plot that
assert len(c.get_dependencies()) == 4
return c
def test_pack_with_settings() -> Component:
import gdsfactory as gf
component_list = [
gf.components.rectangle(size=(i, i), port_type=None) for i in range(1, 10)
]
component_list += [
gf.components.rectangle(size=(i, i), port_type=None) for i in range(1, 10)
]
components_packed_list = pack(
component_list, # Must be a list or tuple of Components
spacing=1.25, # Minimum distance between adjacent shapes
aspect_ratio=(2, 1), # (width, height) ratio of the rectangular bin
# max_size=(None, None), # Limits the size into which the shapes will be packed
max_size=(20, 20), # Limits the size into which the shapes will be packed
density=1.05, # Values closer to 1 pack tighter but require more computation
sort_by_area=True, # Pre-sorts the shapes by area
precision=1e-3,
)
c = components_packed_list[0]
# print(len(c.get_dependencies()))
return c
if __name__ == "__main__":
# test_pack()
import gdsfactory as gf
# c = test_pack_with_settings()
# c = test_pack()
# c.show()
# c.pprint()
# c.write_gds_with_metadata("mask.gds")
p = pack(
[gf.components.triangle(x=i) for i in range(1, 10)],
spacing=20.0,
max_size=(100, 100),
text=gf.partial(gf.components.text, justify="center"),
text_prefix="R",
name_prefix="demo",
text_anchor="nc",
text_offset=(-10, 0),
v_mirror=True,
)
c = p[0]
print(c.name)
c.show()
|
|
""" test the scalar Timedelta """
from datetime import timedelta
from hypothesis import (
given,
strategies as st,
)
import numpy as np
import pytest
from pandas._libs import lib
from pandas._libs.tslibs import (
NaT,
iNaT,
)
import pandas as pd
from pandas import (
Timedelta,
TimedeltaIndex,
offsets,
to_timedelta,
)
import pandas._testing as tm
class TestTimedeltaUnaryOps:
def test_invert(self):
td = Timedelta(10, unit="d")
msg = "bad operand type for unary ~"
with pytest.raises(TypeError, match=msg):
~td
# check this matches pytimedelta and timedelta64
with pytest.raises(TypeError, match=msg):
~(td.to_pytimedelta())
umsg = "ufunc 'invert' not supported for the input types"
with pytest.raises(TypeError, match=umsg):
~(td.to_timedelta64())
def test_unary_ops(self):
td = Timedelta(10, unit="d")
# __neg__, __pos__
assert -td == Timedelta(-10, unit="d")
assert -td == Timedelta("-10d")
assert +td == Timedelta(10, unit="d")
# __abs__, __abs__(__neg__)
assert abs(td) == td
assert abs(-td) == td
assert abs(-td) == Timedelta("10d")
class TestTimedeltas:
@pytest.mark.parametrize(
"unit, value, expected",
[
("us", 9.999, 9999),
("ms", 9.999999, 9999999),
("s", 9.999999999, 9999999999),
],
)
def test_rounding_on_int_unit_construction(self, unit, value, expected):
# GH 12690
result = Timedelta(value, unit=unit)
assert result.value == expected
result = Timedelta(str(value) + unit)
assert result.value == expected
def test_total_seconds_scalar(self):
# see gh-10939
rng = Timedelta("1 days, 10:11:12.100123456")
expt = 1 * 86400 + 10 * 3600 + 11 * 60 + 12 + 100123456.0 / 1e9
tm.assert_almost_equal(rng.total_seconds(), expt)
rng = Timedelta(np.nan)
assert np.isnan(rng.total_seconds())
def test_conversion(self):
for td in [Timedelta(10, unit="d"), Timedelta("1 days, 10:11:12.012345")]:
pydt = td.to_pytimedelta()
assert td == Timedelta(pydt)
assert td == pydt
assert isinstance(pydt, timedelta) and not isinstance(pydt, Timedelta)
assert td == np.timedelta64(td.value, "ns")
td64 = td.to_timedelta64()
assert td64 == np.timedelta64(td.value, "ns")
assert td == td64
assert isinstance(td64, np.timedelta64)
# this is NOT equal and cannot be roundtripped (because of the nanos)
td = Timedelta("1 days, 10:11:12.012345678")
assert td != td.to_pytimedelta()
def test_fields(self):
def check(value):
# that we are int
assert isinstance(value, int)
# compat to datetime.timedelta
rng = to_timedelta("1 days, 10:11:12")
assert rng.days == 1
assert rng.seconds == 10 * 3600 + 11 * 60 + 12
assert rng.microseconds == 0
assert rng.nanoseconds == 0
msg = "'Timedelta' object has no attribute '{}'"
with pytest.raises(AttributeError, match=msg.format("hours")):
rng.hours
with pytest.raises(AttributeError, match=msg.format("minutes")):
rng.minutes
with pytest.raises(AttributeError, match=msg.format("milliseconds")):
rng.milliseconds
# GH 10050
check(rng.days)
check(rng.seconds)
check(rng.microseconds)
check(rng.nanoseconds)
td = Timedelta("-1 days, 10:11:12")
assert abs(td) == Timedelta("13:48:48")
assert str(td) == "-1 days +10:11:12"
assert -td == Timedelta("0 days 13:48:48")
assert -Timedelta("-1 days, 10:11:12").value == 49728000000000
assert Timedelta("-1 days, 10:11:12").value == -49728000000000
rng = to_timedelta("-1 days, 10:11:12.100123456")
assert rng.days == -1
assert rng.seconds == 10 * 3600 + 11 * 60 + 12
assert rng.microseconds == 100 * 1000 + 123
assert rng.nanoseconds == 456
msg = "'Timedelta' object has no attribute '{}'"
with pytest.raises(AttributeError, match=msg.format("hours")):
rng.hours
with pytest.raises(AttributeError, match=msg.format("minutes")):
rng.minutes
with pytest.raises(AttributeError, match=msg.format("milliseconds")):
rng.milliseconds
# components
tup = to_timedelta(-1, "us").components
assert tup.days == -1
assert tup.hours == 23
assert tup.minutes == 59
assert tup.seconds == 59
assert tup.milliseconds == 999
assert tup.microseconds == 999
assert tup.nanoseconds == 0
# GH 10050
check(tup.days)
check(tup.hours)
check(tup.minutes)
check(tup.seconds)
check(tup.milliseconds)
check(tup.microseconds)
check(tup.nanoseconds)
tup = Timedelta("-1 days 1 us").components
assert tup.days == -2
assert tup.hours == 23
assert tup.minutes == 59
assert tup.seconds == 59
assert tup.milliseconds == 999
assert tup.microseconds == 999
assert tup.nanoseconds == 0
def test_iso_conversion(self):
# GH #21877
expected = Timedelta(1, unit="s")
assert to_timedelta("P0DT0H0M1S") == expected
def test_nat_converters(self):
result = to_timedelta("nat").to_numpy()
assert result.dtype.kind == "M"
assert result.astype("int64") == iNaT
result = to_timedelta("nan").to_numpy()
assert result.dtype.kind == "M"
assert result.astype("int64") == iNaT
@pytest.mark.parametrize(
"unit, np_unit",
[(value, "W") for value in ["W", "w"]]
+ [(value, "D") for value in ["D", "d", "days", "day", "Days", "Day"]]
+ [
(value, "m")
for value in [
"m",
"minute",
"min",
"minutes",
"t",
"Minute",
"Min",
"Minutes",
"T",
]
]
+ [
(value, "s")
for value in [
"s",
"seconds",
"sec",
"second",
"S",
"Seconds",
"Sec",
"Second",
]
]
+ [
(value, "ms")
for value in [
"ms",
"milliseconds",
"millisecond",
"milli",
"millis",
"l",
"MS",
"Milliseconds",
"Millisecond",
"Milli",
"Millis",
"L",
]
]
+ [
(value, "us")
for value in [
"us",
"microseconds",
"microsecond",
"micro",
"micros",
"u",
"US",
"Microseconds",
"Microsecond",
"Micro",
"Micros",
"U",
]
]
+ [
(value, "ns")
for value in [
"ns",
"nanoseconds",
"nanosecond",
"nano",
"nanos",
"n",
"NS",
"Nanoseconds",
"Nanosecond",
"Nano",
"Nanos",
"N",
]
],
)
@pytest.mark.parametrize("wrapper", [np.array, list, pd.Index])
def test_unit_parser(self, unit, np_unit, wrapper):
# validate all units, GH 6855, GH 21762
# array-likes
expected = TimedeltaIndex(
[np.timedelta64(i, np_unit) for i in np.arange(5).tolist()]
)
result = to_timedelta(wrapper(range(5)), unit=unit)
tm.assert_index_equal(result, expected)
result = TimedeltaIndex(wrapper(range(5)), unit=unit)
tm.assert_index_equal(result, expected)
str_repr = [f"{x}{unit}" for x in np.arange(5)]
result = to_timedelta(wrapper(str_repr))
tm.assert_index_equal(result, expected)
result = to_timedelta(wrapper(str_repr))
tm.assert_index_equal(result, expected)
# scalar
expected = Timedelta(np.timedelta64(2, np_unit).astype("timedelta64[ns]"))
result = to_timedelta(2, unit=unit)
assert result == expected
result = Timedelta(2, unit=unit)
assert result == expected
result = to_timedelta(f"2{unit}")
assert result == expected
result = Timedelta(f"2{unit}")
assert result == expected
@pytest.mark.parametrize("unit", ["Y", "y", "M"])
def test_unit_m_y_raises(self, unit):
msg = "Units 'M', 'Y', and 'y' are no longer supported"
with pytest.raises(ValueError, match=msg):
Timedelta(10, unit)
with pytest.raises(ValueError, match=msg):
to_timedelta(10, unit)
with pytest.raises(ValueError, match=msg):
to_timedelta([1, 2], unit)
def test_numeric_conversions(self):
assert Timedelta(0) == np.timedelta64(0, "ns")
assert Timedelta(10) == np.timedelta64(10, "ns")
assert Timedelta(10, unit="ns") == np.timedelta64(10, "ns")
assert Timedelta(10, unit="us") == np.timedelta64(10, "us")
assert Timedelta(10, unit="ms") == np.timedelta64(10, "ms")
assert Timedelta(10, unit="s") == np.timedelta64(10, "s")
assert Timedelta(10, unit="d") == np.timedelta64(10, "D")
def test_timedelta_conversions(self):
assert Timedelta(timedelta(seconds=1)) == np.timedelta64(1, "s").astype(
"m8[ns]"
)
assert Timedelta(timedelta(microseconds=1)) == np.timedelta64(1, "us").astype(
"m8[ns]"
)
assert Timedelta(timedelta(days=1)) == np.timedelta64(1, "D").astype("m8[ns]")
def test_to_numpy_alias(self):
# GH 24653: alias .to_numpy() for scalars
td = Timedelta("10m7s")
assert td.to_timedelta64() == td.to_numpy()
# GH#44460
msg = "dtype and copy arguments are ignored"
with pytest.raises(ValueError, match=msg):
td.to_numpy("m8[s]")
with pytest.raises(ValueError, match=msg):
td.to_numpy(copy=True)
@pytest.mark.parametrize(
"freq,s1,s2",
[
# This first case has s1, s2 being the same as t1,t2 below
(
"N",
Timedelta("1 days 02:34:56.789123456"),
Timedelta("-1 days 02:34:56.789123456"),
),
(
"U",
Timedelta("1 days 02:34:56.789123000"),
Timedelta("-1 days 02:34:56.789123000"),
),
(
"L",
Timedelta("1 days 02:34:56.789000000"),
Timedelta("-1 days 02:34:56.789000000"),
),
("S", Timedelta("1 days 02:34:57"), Timedelta("-1 days 02:34:57")),
("2S", Timedelta("1 days 02:34:56"), Timedelta("-1 days 02:34:56")),
("5S", Timedelta("1 days 02:34:55"), Timedelta("-1 days 02:34:55")),
("T", Timedelta("1 days 02:35:00"), Timedelta("-1 days 02:35:00")),
("12T", Timedelta("1 days 02:36:00"), Timedelta("-1 days 02:36:00")),
("H", Timedelta("1 days 03:00:00"), Timedelta("-1 days 03:00:00")),
("d", Timedelta("1 days"), Timedelta("-1 days")),
],
)
def test_round(self, freq, s1, s2):
t1 = Timedelta("1 days 02:34:56.789123456")
t2 = Timedelta("-1 days 02:34:56.789123456")
r1 = t1.round(freq)
assert r1 == s1
r2 = t2.round(freq)
assert r2 == s2
def test_round_invalid(self):
t1 = Timedelta("1 days 02:34:56.789123456")
for freq, msg in [
("Y", "<YearEnd: month=12> is a non-fixed frequency"),
("M", "<MonthEnd> is a non-fixed frequency"),
("foobar", "Invalid frequency: foobar"),
]:
with pytest.raises(ValueError, match=msg):
t1.round(freq)
def test_round_implementation_bounds(self):
# See also: analogous test for Timestamp
# GH#38964
result = Timedelta.min.ceil("s")
expected = Timedelta.min + Timedelta(seconds=1) - Timedelta(145224193)
assert result == expected
result = Timedelta.max.floor("s")
expected = Timedelta.max - Timedelta(854775807)
assert result == expected
with pytest.raises(OverflowError, match="value too large"):
Timedelta.min.floor("s")
# the second message here shows up in windows builds
msg = "|".join(
["Python int too large to convert to C long", "int too big to convert"]
)
with pytest.raises(OverflowError, match=msg):
Timedelta.max.ceil("s")
@given(val=st.integers(min_value=iNaT + 1, max_value=lib.i8max))
@pytest.mark.parametrize(
"method", [Timedelta.round, Timedelta.floor, Timedelta.ceil]
)
def test_round_sanity(self, val, method):
val = np.int64(val)
td = Timedelta(val)
assert method(td, "ns") == td
res = method(td, "us")
nanos = 1000
assert np.abs((res - td).value) < nanos
assert res.value % nanos == 0
res = method(td, "ms")
nanos = 1_000_000
assert np.abs((res - td).value) < nanos
assert res.value % nanos == 0
res = method(td, "s")
nanos = 1_000_000_000
assert np.abs((res - td).value) < nanos
assert res.value % nanos == 0
res = method(td, "min")
nanos = 60 * 1_000_000_000
assert np.abs((res - td).value) < nanos
assert res.value % nanos == 0
res = method(td, "h")
nanos = 60 * 60 * 1_000_000_000
assert np.abs((res - td).value) < nanos
assert res.value % nanos == 0
res = method(td, "D")
nanos = 24 * 60 * 60 * 1_000_000_000
assert np.abs((res - td).value) < nanos
assert res.value % nanos == 0
def test_contains(self):
# Checking for any NaT-like objects
# GH 13603
td = to_timedelta(range(5), unit="d") + offsets.Hour(1)
for v in [NaT, None, float("nan"), np.nan]:
assert not (v in td)
td = to_timedelta([NaT])
for v in [NaT, None, float("nan"), np.nan]:
assert v in td
def test_identity(self):
td = Timedelta(10, unit="d")
assert isinstance(td, Timedelta)
assert isinstance(td, timedelta)
def test_short_format_converters(self):
def conv(v):
return v.astype("m8[ns]")
assert Timedelta("10") == np.timedelta64(10, "ns")
assert Timedelta("10ns") == np.timedelta64(10, "ns")
assert Timedelta("100") == np.timedelta64(100, "ns")
assert Timedelta("100ns") == np.timedelta64(100, "ns")
assert Timedelta("1000") == np.timedelta64(1000, "ns")
assert Timedelta("1000ns") == np.timedelta64(1000, "ns")
assert Timedelta("1000NS") == np.timedelta64(1000, "ns")
assert Timedelta("10us") == np.timedelta64(10000, "ns")
assert Timedelta("100us") == np.timedelta64(100000, "ns")
assert Timedelta("1000us") == np.timedelta64(1000000, "ns")
assert Timedelta("1000Us") == np.timedelta64(1000000, "ns")
assert Timedelta("1000uS") == np.timedelta64(1000000, "ns")
assert Timedelta("1ms") == np.timedelta64(1000000, "ns")
assert Timedelta("10ms") == np.timedelta64(10000000, "ns")
assert Timedelta("100ms") == np.timedelta64(100000000, "ns")
assert Timedelta("1000ms") == np.timedelta64(1000000000, "ns")
assert Timedelta("-1s") == -np.timedelta64(1000000000, "ns")
assert Timedelta("1s") == np.timedelta64(1000000000, "ns")
assert Timedelta("10s") == np.timedelta64(10000000000, "ns")
assert Timedelta("100s") == np.timedelta64(100000000000, "ns")
assert Timedelta("1000s") == np.timedelta64(1000000000000, "ns")
assert Timedelta("1d") == conv(np.timedelta64(1, "D"))
assert Timedelta("-1d") == -conv(np.timedelta64(1, "D"))
assert Timedelta("1D") == conv(np.timedelta64(1, "D"))
assert Timedelta("10D") == conv(np.timedelta64(10, "D"))
assert Timedelta("100D") == conv(np.timedelta64(100, "D"))
assert Timedelta("1000D") == conv(np.timedelta64(1000, "D"))
assert Timedelta("10000D") == conv(np.timedelta64(10000, "D"))
# space
assert Timedelta(" 10000D ") == conv(np.timedelta64(10000, "D"))
assert Timedelta(" - 10000D ") == -conv(np.timedelta64(10000, "D"))
# invalid
msg = "invalid unit abbreviation"
with pytest.raises(ValueError, match=msg):
Timedelta("1foo")
msg = "unit abbreviation w/o a number"
with pytest.raises(ValueError, match=msg):
Timedelta("foo")
def test_full_format_converters(self):
def conv(v):
return v.astype("m8[ns]")
d1 = np.timedelta64(1, "D")
assert Timedelta("1days") == conv(d1)
assert Timedelta("1days,") == conv(d1)
assert Timedelta("- 1days,") == -conv(d1)
assert Timedelta("00:00:01") == conv(np.timedelta64(1, "s"))
assert Timedelta("06:00:01") == conv(np.timedelta64(6 * 3600 + 1, "s"))
assert Timedelta("06:00:01.0") == conv(np.timedelta64(6 * 3600 + 1, "s"))
assert Timedelta("06:00:01.01") == conv(
np.timedelta64(1000 * (6 * 3600 + 1) + 10, "ms")
)
assert Timedelta("- 1days, 00:00:01") == conv(-d1 + np.timedelta64(1, "s"))
assert Timedelta("1days, 06:00:01") == conv(
d1 + np.timedelta64(6 * 3600 + 1, "s")
)
assert Timedelta("1days, 06:00:01.01") == conv(
d1 + np.timedelta64(1000 * (6 * 3600 + 1) + 10, "ms")
)
# invalid
msg = "have leftover units"
with pytest.raises(ValueError, match=msg):
Timedelta("- 1days, 00")
def test_pickle(self):
v = Timedelta("1 days 10:11:12.0123456")
v_p = tm.round_trip_pickle(v)
assert v == v_p
def test_timedelta_hash_equality(self):
# GH 11129
v = Timedelta(1, "D")
td = timedelta(days=1)
assert hash(v) == hash(td)
d = {td: 2}
assert d[v] == 2
tds = [Timedelta(seconds=1) + Timedelta(days=n) for n in range(20)]
assert all(hash(td) == hash(td.to_pytimedelta()) for td in tds)
# python timedeltas drop ns resolution
ns_td = Timedelta(1, "ns")
assert hash(ns_td) != hash(ns_td.to_pytimedelta())
def test_implementation_limits(self):
min_td = Timedelta(Timedelta.min)
max_td = Timedelta(Timedelta.max)
# GH 12727
# timedelta limits correspond to int64 boundaries
assert min_td.value == iNaT + 1
assert max_td.value == lib.i8max
# Beyond lower limit, a NAT before the Overflow
assert (min_td - Timedelta(1, "ns")) is NaT
msg = "int too (large|big) to convert"
with pytest.raises(OverflowError, match=msg):
min_td - Timedelta(2, "ns")
with pytest.raises(OverflowError, match=msg):
max_td + Timedelta(1, "ns")
# Same tests using the internal nanosecond values
td = Timedelta(min_td.value - 1, "ns")
assert td is NaT
with pytest.raises(OverflowError, match=msg):
Timedelta(min_td.value - 2, "ns")
with pytest.raises(OverflowError, match=msg):
Timedelta(max_td.value + 1, "ns")
def test_total_seconds_precision(self):
# GH 19458
assert Timedelta("30S").total_seconds() == 30.0
assert Timedelta("0").total_seconds() == 0.0
assert Timedelta("-2S").total_seconds() == -2.0
assert Timedelta("5.324S").total_seconds() == 5.324
assert (Timedelta("30S").total_seconds() - 30.0) < 1e-20
assert (30.0 - Timedelta("30S").total_seconds()) < 1e-20
def test_resolution_string(self):
assert Timedelta(days=1).resolution_string == "D"
assert Timedelta(days=1, hours=6).resolution_string == "H"
assert Timedelta(days=1, minutes=6).resolution_string == "T"
assert Timedelta(days=1, seconds=6).resolution_string == "S"
assert Timedelta(days=1, milliseconds=6).resolution_string == "L"
assert Timedelta(days=1, microseconds=6).resolution_string == "U"
assert Timedelta(days=1, nanoseconds=6).resolution_string == "N"
def test_resolution_deprecated(self):
# GH#21344
td = Timedelta(days=4, hours=3)
result = td.resolution
assert result == Timedelta(nanoseconds=1)
# Check that the attribute is available on the class, mirroring
# the stdlib timedelta behavior
result = Timedelta.resolution
assert result == Timedelta(nanoseconds=1)
@pytest.mark.parametrize(
"value, expected",
[
(Timedelta("10S"), True),
(Timedelta("-10S"), True),
(Timedelta(10, unit="ns"), True),
(Timedelta(0, unit="ns"), False),
(Timedelta(-10, unit="ns"), True),
(Timedelta(None), True),
(NaT, True),
],
)
def test_truthiness(value, expected):
# https://github.com/pandas-dev/pandas/issues/21484
assert bool(value) is expected
def test_timedelta_attribute_precision():
# GH 31354
td = Timedelta(1552211999999999872, unit="ns")
result = td.days * 86400
result += td.seconds
result *= 1000000
result += td.microseconds
result *= 1000
result += td.nanoseconds
expected = td.value
assert result == expected
|
|
# encoding: utf-8
import tempfile
import uuid
import errno
import signal
import pwd
import os
import ssl
from slimurl import URL
from tornado.ioloop import IOLoop
from tornado.httpserver import HTTPServer
from tornado.log import gen_log as log
from tornado.options import options, define
from tornado.process import cpu_count
from tornado.concurrent import futures
from tornado.web import Application
from tornado.httpclient import AsyncHTTPClient
from tornado.ioloop import PeriodicCallback
from pypi_server import ROOT
from pypi_server.cache import HOUR, Cache
from pypi_server.handlers.pypi.proxy.client import PYPIClient
from pypi_server.db import init_db
from pypi_server.db.packages import PackageFile
from pypi_server import handlers
define('config', help="Configuration file")
define("address", help="Listen address (default 127.0.0.1) [ENV:ADDRESS]",
default=os.getenv('ADDRESS', "127.0.0.1"))
define("port", help="Listen port (default 8080) [ENV:PORT]",
type=int, default=int(os.getenv('PORT', '8080')))
define("debug", help="Use for attach a debugger",
default=bool(os.getenv("DEBUG")), type=bool)
define("gzip", help="Compress responses (default False) [ENV:GZIP]",
default=bool(os.getenv("GZIP")), type=bool)
define("proxy-mode", help="Process X-headers on requests (default True) [ENV:PROXY_MODE]",
default=bool(os.getenv('PROXY_MODE', '1')), type=bool)
define("pool-size", help="Thread pool size (default cou_count * 2) [ENV:POOL_SIZE]",
type=int, default=int(os.getenv('POOL_SIZE', cpu_count() * 2)))
define("secret", help="Cookie secret (default random) [ENV:SECRET]",
default=os.getenv("SECRET", uuid.uuid4().bytes))
define("user", help="Change UID of current process (not change by default)", default=None)
define("cert", help="Path to the Certificate, make sure you have the full chain to prevent" \
"SSL auth errors (Only valid if ssl is true", default=None)
define("key", help="Path to private key (Only valid if ssl is true", default=None)
define("ssl", help="Enadles SSL for the local pypi server", default=False)
default_storage=os.path.abspath(
os.getenv(
"STORAGE",
os.path.join(os.path.abspath(os.path.curdir), 'packages')
)
)
define(
"storage", help="Packages storage (default $CWD/packages) [ENV:STORAGE]", type=str,
default=default_storage
)
define(
"database", help="Application database (default sqlite:///{storage}/metadata.db) [ENV:DB]",
type=URL,
default=os.getenv(
"DB",
URL(
"sqlite://{0}".format("/".join(
os.path.split(os.path.join(default_storage, 'metadata.db'))
))
)
)
)
define("max_http_clients",
help="Maximum HTTP Client instances for proxy requests (default 25) [ENV:MAX_CLIENTS]",
default=int(os.getenv("MAX_CLIENTS", '25')), type=int)
define("max_body_size",
help="Maximum HTTP Client body size for proxy requests (in megabytes, default 100) [ENV:MAX_BODY_SIZE]",
default=int(os.getenv("MAX_BODY_SIZE", '100')), type=int)
define("pypi_server",
help="PYPI service url. Using for proxy. (default https://pypi.python.org/) [ENV:PYPY_SERVER]",
default=URL(os.getenv("PYPI_SERVER", 'https://pypi.python.org/')), type=URL)
default_cache_dir = os.path.join(tempfile.gettempdir(), 'pypi-server-cache')
define(
"cache_dir",
help='Directory for storing cache files (default: "{}")'.format(default_cache_dir),
default=default_cache_dir
)
define('pypi_proxy', help='Enable proxying to PyPI (default True) [ENV:PYPI_PROXY]',
type=bool, default=bool(os.getenv('PYPI_PROXY', '1')))
def create_app(debug=False, secret="", gzip=False, **kwargs):
return Application(
base_dir=ROOT,
debug=debug,
reload=debug,
cookie_secret=secret,
template_path=os.path.join(ROOT, 'templates'),
default_handler_class=handlers.DefaultHandler,
gzip=gzip,
handlers=handlers.ROUTES,
options=options,
**kwargs
)
def run():
options.parse_command_line()
if options.config:
options.parse_config_file(options.config)
options.storage = os.path.abspath(options.storage)
if os.getuid() == 0 and options.user:
pw = pwd.getpwnam(options.user)
uid, gid = pw.pw_uid, pw.pw_gid
log.info("Changind user to %s [%s:%s]", options.user, uid, gid)
os.setgid(uid)
os.setuid(uid)
try:
if not all(f(options.storage) for f in (os.path.exists, os.path.isdir)):
log.info('Creating new package storage directory: "%s"', options.storage)
os.makedirs(options.storage)
def on_interrupt(*args):
log.warning("Receiving interrupt signal. Application will be stopped.")
exit(errno.EINTR)
log.debug("Preparing signal handling")
for sig in (signal.SIGINT, signal.SIGTERM, signal.SIGQUIT):
signal.signal(sig, on_interrupt)
def handle_pdb(sig, frame):
import pdb
pdb.Pdb().set_trace(frame)
if options.debug:
signal.signal(signal.SIGUSR2, handle_pdb)
log.debug("Creating application instance")
app = create_app(
options.debug,
options.secret,
options.gzip,
)
log.debug("Creating IOLoop instance.")
io_loop = IOLoop.current()
io_loop.run_sync(lambda: init_db(options.database))
if not (os.path.exists(options.cache_dir) and os.path.isdir(options.cache_dir)):
os.makedirs(options.cache_dir)
Cache.CACHE_DIR = options.cache_dir
log.info("Init thread pool with %d threads", options.pool_size)
handlers.base.BaseHandler.THREAD_POOL = futures.ThreadPoolExecutor(options.pool_size)
AsyncHTTPClient.configure(None, max_clients=options.max_http_clients, max_body_size=options.max_body_size*1024*1024)
proxy_url = URL(os.getenv('{0}_proxy'.format(options.pypi_server.scheme)))
if proxy_url:
log.debug("Configuring for proxy: %s", proxy_url)
AsyncHTTPClient.configure(
'tornado.curl_httpclient.CurlAsyncHTTPClient',
defaults={
'proxy_host': proxy_url.host,
'proxy_port': proxy_url.port,
'proxy_username': proxy_url.user,
'proxy_password': proxy_url.password,
}
)
PYPIClient.configure(
options.pypi_server,
handlers.base.BaseHandler.THREAD_POOL
)
if options.pypi_proxy:
pypi_updater = PeriodicCallback(PYPIClient.packages, HOUR * 1000, io_loop)
io_loop.add_callback(PYPIClient.packages)
io_loop.add_callback(pypi_updater.start)
server_message = "Starting server http://{}:{}/".format(options.address, options.port)
ssl_ctx = None
if options.ssl:
ssl_ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
ssl_ctx.load_cert_chain(options.cert, options.key)
server_message = "Starting server https://{}:{}/".format(options.address, options.port)
log.info(server_message)
http_server = HTTPServer(app, ssl_options=ssl_ctx, xheaders=options.proxy_mode)
http_server.listen(options.port, address=options.address)
log.debug('Setting "%s" as storage', options.storage)
PackageFile.set_storage(options.storage)
log.debug("Starting main loop")
io_loop.start()
except Exception as e:
log.fatal("Exception on main loop:")
log.exception(e)
exit(1)
else:
exit(0)
if __name__ == '__main__':
run()
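# --- Hedged usage sketch (not part of the upstream module) -----------------
# The options defined above can be given as command-line flags or through the
# environment variables listed in their help strings. The entry-point name
# used below is an assumption; adjust it to however this package is installed.
#
#   STORAGE=/var/lib/pypi-server PORT=8080 pypi-server
#   pypi-server --address=0.0.0.0 --port=8080 --ssl \
#       --cert=/etc/ssl/pypi.pem --key=/etc/ssl/pypi.key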
|
|
# Copyright 2011-2012 James McCauley
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
An L2 learning switch.
It is derived from one written live for an SDN crash course.
It is somewhat similar to NOX's pyswitch in that it installs
exact-match rules for each flow.
"""
from pox.core import core
import pox.openflow.libopenflow_01 as of
from pox.lib.util import dpid_to_str
from pox.lib.util import str_to_bool
import time
import random
log = core.getLogger()
# We don't want to flood immediately when a switch connects.
# Can be overridden on the command line.
_flood_delay = 0
FLOW_NUMS = 80
#FLOW_ID = FLOW_NUMS + 1
class LearningSwitch (object):
"""
The learning switch "brain" associated with a single OpenFlow switch.
When we see a packet, we'd like to output it on a port which will
eventually lead to the destination. To accomplish this, we build a
table that maps addresses to ports.
We populate the table by observing traffic. When we see a packet
from some source coming from some port, we know that source is out
that port.
  When we want to forward traffic, we look up the destination in our
table. If we don't know the port, we simply send the message out
all ports except the one it came in on. (In the presence of loops,
this is bad!).
In short, our algorithm looks like this:
For each packet from the switch:
1) Use source address and switch port to update address/port table
2) Is transparent = False and either Ethertype is LLDP or the packet's
destination address is a Bridge Filtered address?
Yes:
2a) Drop packet -- don't forward link-local traffic (LLDP, 802.1x)
DONE
3) Is destination multicast?
Yes:
3a) Flood the packet
DONE
4) Port for destination address in our address/port table?
No:
4a) Flood the packet
DONE
5) Is output port the same as input port?
Yes:
5a) Drop packet and similar ones for a while
6) Install flow table entry in the switch so that this
     flow goes out the appropriate port
6a) Send the packet out appropriate port
"""
def __init__ (self, connection, transparent):
# Switch we'll be adding L2 learning switch capabilities to
self.connection = connection
self.transparent = transparent
# Our table
self.macToPort = {}
# We want to hear PacketIn messages, so we listen
# to the connection
connection.addListeners(self)
# We just use this to know when to log a helpful message
self.hold_down_expired = _flood_delay == 0
#log.debug("Initializing LearningSwitch, transparent=%s",
# str(self.transparent))
self.flow_list = []
def _handle_PacketIn (self, event):
"""
Handle packet in messages from the switch to implement above algorithm.
"""
global FLOW_NUMS
packet = event.parsed
def flood (message = None):
""" Floods the packet """
msg = of.ofp_packet_out()
if time.time() - self.connection.connect_time >= _flood_delay:
# Only flood if we've been connected for a little while...
if self.hold_down_expired is False:
# Oh yes it is!
self.hold_down_expired = True
log.info("%s: Flood hold-down expired -- flooding",
dpid_to_str(event.dpid))
if message is not None: log.debug(message)
#log.debug("%i: flood %s -> %s", event.dpid,packet.src,packet.dst)
# OFPP_FLOOD is optional; on some switches you may need to change
# this to OFPP_ALL.
msg.actions.append(of.ofp_action_output(port = of.OFPP_FLOOD))
else:
pass
#log.info("Holding down flood for %s", dpid_to_str(event.dpid))
msg.data = event.ofp
msg.in_port = event.port
self.connection.send(msg)
def drop (duration = None):
"""
Drops this packet and optionally installs a flow to continue
dropping similar ones for a while
"""
if duration is not None:
if not isinstance(duration, tuple):
duration = (duration,duration)
#msg.cookie = FLOW_ID % FLOW_NUMS
#FLOW_ID += 1
if len(self.flow_list) >= FLOW_NUMS:
msg_updt = of.ofp_flow_mod()
msg_updt.command = 4 #OFPFC_DELETE_STRICTLY
flow_rm = random.randint(0, FLOW_NUMS-1)
msg_updt.match = of.ofp_match()
#msg_updt.match.dl_type = 0x0800
#msg_updt.match.nw_proto = 6
try:
#flow_info = self.flow_list[flow_rm]
#msg_updt.match.tp_src = flow_info[0]
#msg_updt.match.tp_src = flow_info[1]
msg_updt.match = self.flow_list[flow_rm]
#log.info("Pop tp_src = %d, tp_dst = %d, size %d" \
# % (self.flow_list[flow_rm][0],\
# self.flow_list[flow_rm][0], \
# len(self.flow_list)))
self.flow_list.pop(flow_rm)
self.connection.send(msg_updt)
except IndexError:
print "flow_rm: %d"%flow_rm
print self.flow_list
msg = of.ofp_flow_mod()
msg.match = of.ofp_match.from_packet(packet)
tcpp = packet.find('tcp')
if tcpp:
msg.match.tp_src = tcpp.srcport
msg.match.tp_dst = tcpp.dstport
#self.flow_list.append((tcpp.srcport, tcpp.dstport))
self.flow_list.append(msg.match)
#msg.idle_timeout = duration[0]
msg.idle_timeout = 0
#msg.hard_timeout = duration[1]
msg.hard_timeout = 0 #PERMANENT
msg.buffer_id = event.ofp.buffer_id
self.connection.send(msg)
elif event.ofp.buffer_id is not None:
msg = of.ofp_packet_out()
msg.buffer_id = event.ofp.buffer_id
msg.in_port = event.port
self.connection.send(msg)
self.macToPort[packet.src] = event.port # 1
if not self.transparent: # 2
if packet.type == packet.LLDP_TYPE or packet.dst.isBridgeFiltered():
drop() # 2a
return
if packet.dst.is_multicast:
flood() # 3a
else:
if packet.dst not in self.macToPort: # 4
print self.macToPort
print packet.dst
flood("Port for %s unknown -- flooding" % (packet.dst,)) # 4a
else:
port = self.macToPort[packet.dst]
if port == event.port: # 5
# 5a
log.warning("Same port for packet from %s -> %s on %s.%s. Drop."
% (packet.src, packet.dst, dpid_to_str(event.dpid), port))
drop(10)
return
# 6
if len(self.flow_list) >= FLOW_NUMS:
msg_updt = of.ofp_flow_mod()
msg_updt.command = 4 #OFPFC_DELETE_STRICTLY
flow_rm = random.randint(0, FLOW_NUMS-1)
msg_updt.match = of.ofp_match()
#msg_updt.match.dl_type = 0x0800
#msg_updt.match.nw_proto = 6
try:
#flow_info = self.flow_list[flow_rm]
msg_updt.match = self.flow_list[flow_rm]
#msg_updt.match.tp_src = flow_info[0]
#msg_updt.match.dst_src = flow_info[1]
#log.info("Pop tp_src = %d, tp_dst = %d, size %d" \
# % (self.flow_list[flow_rm][0],\
# self.flow_list[flow_rm][1], \
# len(self.flow_list)))
self.connection.send(msg_updt)
self.flow_list.pop(flow_rm)
except IndexError:
print "flow_rm: %d"%flow_rm
print self.flow_list
msg = of.ofp_flow_mod()
msg.match = of.ofp_match.from_packet(packet, event.port)
tcpp = packet.find('tcp')
if tcpp:
msg.match.tp_src = tcpp.srcport
msg.match.tp_dst = tcpp.dstport
#self.flow_list.append((tcpp.srcport, tcpp.dstport))
self.flow_list.append(msg.match)
#msg.idle_timeout = 10
msg.idle_timeout = 0
msg.hard_timeout = 0 #PERMANENT
#msg.hard_timeout = 30
msg.actions.append(of.ofp_action_output(port = port))
msg.data = event.ofp # 6a
self.connection.send(msg)
class l2_learning (object):
"""
Waits for OpenFlow switches to connect and makes them learning switches.
"""
def __init__ (self, transparent):
core.openflow.addListeners(self)
self.transparent = transparent
def _handle_ConnectionUp (self, event):
log.debug("Connection %s" % (event.connection,))
LearningSwitch(event.connection, self.transparent)
def _handle_ErrorIn (self, event):
log.debug("ErrorIn %s" % (event.asString(),))
#msg = of.ofp_flow_mod()
#msg.command = OFPFC_DELETE
#self.connection.send(msg)
def launch (transparent=False, hold_down=_flood_delay):
"""
Starts an L2 learning switch.
"""
try:
global _flood_delay
_flood_delay = int(str(hold_down), 10)
assert _flood_delay >= 0
except:
raise RuntimeError("Expected hold-down to be a number")
core.registerNew(l2_learning, str_to_bool(transparent))
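# --- Hedged usage note (not part of the upstream file) ----------------------
# Following the usual POX conventions, launch() is invoked by the POX command
# line; the module path below assumes this file lives under forwarding/ in the
# POX tree.
#
#   ./pox.py forwarding.l2_learning
#   ./pox.py forwarding.l2_learning --transparent=True --hold_down=5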
|
|
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
import time
import collections
import pytz
import datetime
from typing import Optional
from session_store import CachingSessionStore
import abcs as session_abcs
from yosaipy2.core.utils.utils import get_logger
from yosaipy2.core import (
AbsoluteExpiredSessionException,
ExpiredSessionException,
IdleExpiredSessionException,
InvalidSessionException,
StoppedSessionException,
)
SessionKey = collections.namedtuple('SessionKey', 'session_id')
session_tuple = collections.namedtuple('session_tuple', ['identifiers', 'session_id'])
class SimpleSession(session_abcs.ValidatingSession):
def __init__(self, absolute_timeout, idle_timeout, host=None):
super(SimpleSession, self).__init__()
self.attributes = {}
self.internal_attributes = {
'run_as_identifiers_session_key': None,
'authenticated_session_key': None,
'identifiers_session_key': None
}
self.is_expired = None
self.stop_timestamp = None
self.start_timestamp = round(time.time() * 1000) # milliseconds
self.last_access_time = self.start_timestamp
self.absolute_timeout = absolute_timeout
self.idle_timeout = idle_timeout
self.host = host
self._logger = get_logger()
@property
def attribute_keys(self):
return self.attributes.keys()
@property
def internal_attribute_keys(self):
if self.internal_attributes is None:
return None
return set(self.internal_attributes) # a set of keys
@property
def is_stopped(self):
return bool(self.stop_timestamp)
def touch(self):
self.last_access_time = round(time.time() * 1000)
def stop(self):
self.stop_timestamp = round(time.time() * 1000)
def expire(self):
self.stop()
self.is_expired = True
@property
def is_valid(self):
return not self.is_stopped and not self.is_expired
@property
def is_absolute_timed_out(self):
current_time = round(time.time() * 1000)
abs_expir = self.start_timestamp + self.absolute_timeout
if current_time > abs_expir:
return True
return False
@property
def is_idle_timed_out(self):
current_time = round(time.time() * 1000)
idle_expir = self.last_access_time + self.idle_timeout
if current_time > idle_expir:
return True
return False
def is_timed_out(self):
"""
determines whether a Session has been inactive/idle for too long a time
OR exceeds the absolute time that a Session may exist
"""
if self.is_expired:
return True
if not self.last_access_time:
msg = ("session.last_access_time for session with id [{}] is null. This value must be "
"set at least once, preferably at least upon instantiation. Please check the {} "
"implementation and ensure self value will be set (perhaps in the constructor?)"
).format(str(self.session_id), self.__class__.__name__)
raise ValueError(msg)
try:
if self.is_absolute_timed_out:
return True
if self.is_idle_timed_out:
return True
except AttributeError:
msg = "Timeouts not set for session with id [{}]. Session is not considered expired."
msg = msg.format(str(self.session_id))
self._logger.debug(msg)
return False
def validate(self):
if self.is_stopped:
msg = ("Session with id [{}] has been explicitly stopped. No further interaction under "
"this session is allowed.").format(str(self.session_id))
raise StoppedSessionException(msg)
if self.is_timed_out():
self.expire()
# throw an exception explaining details of why it expired:
idle_timeout_min = self.idle_timeout / 1000 // 60
absolute_timeout_min = self.absolute_timeout / 1000 // 60
currenttime = datetime.datetime.now(pytz.utc).isoformat()
session_id = str(self.session_id)
msg = ("Session with id [{}] has expired. Last access time: {}. Current time: {}. "
"Session idle timeout is set to {} seconds ({} minutes) and "
"absolute timeout is set to {} seconds ({} minutes)").format(
session_id, str(self.last_access_time), currenttime,
str(self.idle_timeout / 1000), str(idle_timeout_min),
str(self.absolute_timeout / 1000), str(absolute_timeout_min)
)
self._logger.debug(msg)
if self.is_absolute_timed_out:
raise AbsoluteExpiredSessionException(msg)
raise IdleExpiredSessionException(msg)
def get_internal_attribute(self, key):
if not self.internal_attributes:
return None
return self.internal_attributes.get(key)
def set_internal_attribute(self, key, value=None):
self.internal_attributes[key] = value
def set_internal_attributes(self, key_values):
self.internal_attributes.update(key_values)
def remove_internal_attribute(self, key):
if not self.internal_attributes:
return None
else:
return self.internal_attributes.pop(key, None)
def remove_internal_attributes(self, to_remove):
return [self.remove_internal_attribute(key) for key in to_remove]
def get_attribute(self, key):
return self.attributes.get(key)
def get_attributes(self, keys):
"""
:param keys: the keys of attributes to get from the session
:type keys: list of strings
:returns: a dict containing the attributes requested, if they exist
"""
result = {}
        for k in keys:
            if k in self.attributes:
                result[k] = self.attributes[k]
return result
def set_attribute(self, key, value):
self.attributes[key] = value
# new to yosai is the bulk setting/getting/removing
def set_attributes(self, attributes):
"""
:param attributes: the attributes to add to the session
:type attributes: dict
"""
self.attributes.update(attributes)
def remove_attribute(self, key):
return self.attributes.pop(key, None)
# new to yosai
def remove_attributes(self, keys):
"""
:param keys: the keys of attributes to remove from the session
:type keys: list of strings
:returns: a list of popped attribute values
"""
return [self.attributes.pop(key, None) for key in keys]
def __eq__(self, other):
if self is other:
return True
if isinstance(other, session_abcs.ValidatingSession):
return (
self.session_id == other.session_id and
self.idle_timeout == other.idle_timeout and
self.absolute_timeout == other.absolute_timeout and
self.start_timestamp == other.start_timestamp and
self.attributes == other.attributes and
self.internal_attributes == other.internal_attributes
)
return False
def __repr__(self):
return ("{}(session_id: {}, start_timestamp: {}, stop_timestamp: {}, last_access_time: {},"
"idle_timeout: {}, absolute_timeout: {}, is_expired: {},"
"host: {}, attributes:{}, internal_attributes: {})").format(
self.__class__.__name__,
self.session_id, self.start_timestamp,
self.stop_timestamp, self.last_access_time,
self.idle_timeout, self.absolute_timeout,
self.is_expired, self.host, self.attributes,
self.internal_attributes
)
def __getstate__(self):
return {
'session_id': self.session_id,
'start_timestamp': self.start_timestamp,
'stop_timestamp': self.stop_timestamp,
'last_access_time': self.last_access_time,
'idle_timeout': self.idle_timeout,
'absolute_timeout': self.absolute_timeout,
'is_expired': self.is_expired,
'host': self.host,
'internal_attributes': self.internal_attributes,
'attributes': self.attributes
}
def __setstate__(self, state):
self.session_id = state['session_id']
self.start_timestamp = state['start_timestamp']
self.stop_timestamp = state['stop_timestamp']
self.last_access_time = state['last_access_time']
self.idle_timeout = state['idle_timeout']
self.absolute_timeout = state['absolute_timeout']
self.is_expired = state['is_expired']
self.host = state['host']
self.internal_attributes = state['internal_attributes']
self.attributes = state['attributes']
class NativeSessionHandler(session_abcs.SessionHandler):
def __init__(self,
session_store=CachingSessionStore(),
delete_invalid_sessions=True):
self.delete_invalid_sessions = delete_invalid_sessions
self.session_store = session_store
        self.event_bus = None
        self._logger = get_logger()
def create_session(self, session):
"""
:returns: a session_id string
"""
return self.session_store.create(session)
def delete(self, session):
self.session_store.delete(session)
def _retrieve_session(self, session_key):
# type: (SessionKey) -> Optional[SimpleSession]
session_id = session_key.session_id
if session_id is None:
msg = ("Unable to resolve session ID from SessionKey [{0}]."
"Returning null to indicate a session could not be "
"found.").format(session_key)
self._logger.debug(msg)
return None
session = self.session_store.read(session_id)
if session is None:
# session ID was provided, meaning one is expected to be found,
# but we couldn't find one:
msg2 = "Could not find session with ID [{0}]".format(session_id)
raise ValueError(msg2)
return session
def do_get_session(self, session_key):
# type: (SessionKey) -> SimpleSession
session_id = session_key.session_id
msg = "do_get_session: Attempting to retrieve session with key " + str(session_id)
self._logger.debug(msg)
session = self._retrieve_session(session_key)
if session is not None:
self.validate(session, session_key)
return session
def validate(self, session, session_key):
# type: (SimpleSession, SessionKey) -> None
"""
session exception hierarchy: invalid -> stopped -> expired
"""
try:
session.validate()
except AttributeError: # means it's not a validating session
msg = ("The {0} implementation only supports Validating "
"Session implementations of the {1} interface. "
"Please either implement this interface in your "
"session implementation or override the {0}"
".do_validate(Session) method to validate.")
msg = msg.format(self.__class__.__name__, 'ValidatingSession')
raise AttributeError(msg)
except ExpiredSessionException as ese:
self.on_expiration(session, ese, session_key)
raise ese
except InvalidSessionException as ise:
self.on_invalidation(session, ise, session_key)
raise ise
def on_start(self, session, session_context):
"""
placeholder for subclasses to react to a new session being created
"""
pass
def on_stop(self, session, session_key):
# session_key is used by the child class
try:
session.last_access_time = session.stop_timestamp
except AttributeError:
msg = "not working with a SimpleSession instance"
self._logger.warning(msg)
self.on_change(session)
def after_stopped(self, session):
if self.delete_invalid_sessions:
self.delete(session)
def on_expiration(self, session, expired_session_exception=None, session_key=None):
if expired_session_exception and session_key:
try:
self.on_change(session)
msg = "Session with id [{0}] has expired.".format(session.session_id)
self._logger.debug(msg)
identifiers = session.get_internal_attribute('identifiers_session_key')
mysession = session_tuple(identifiers, session_key.session_id)
self.notify_event(mysession, 'SESSION.EXPIRE')
except:
raise
finally:
self.after_expired(session)
elif not expired_session_exception and not session_key:
self.on_change(session)
else:
msg = "on_exception takes either 1 argument or 3 arguments"
raise ValueError(msg)
def after_expired(self, session):
if self.delete_invalid_sessions:
self.delete(session)
def on_invalidation(self, session, ise, session_key):
# session exception hierarchy: invalid -> stopped -> expired
if isinstance(ise, ExpiredSessionException):
self.on_expiration(session, ise, session_key)
return
msg = "Session with id [{0}] is invalid.".format(session.session_id)
self._logger.debug(msg)
try:
self.on_stop(session, session_key)
identifiers = session.get_internal_attribute('identifiers_session_key')
mysession = session_tuple(identifiers, session_key.session_id)
self.notify_event(mysession, 'SESSION.STOP')
except:
raise
# DG: this results in a redundant delete operation (from shiro):
finally:
self.after_stopped(session)
def on_change(self, session):
self.session_store.update(session)
def notify_event(self, session_info, topic):
try:
self.event_bus.send_message(topic, items=session_info)
except AttributeError:
msg = "Could not publish {} event".format(topic)
raise AttributeError(msg)
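# --- Hedged usage sketch (not part of the original module) ------------------
# Timeouts are expressed in milliseconds, matching the round(time.time()*1000)
# timestamps used by SimpleSession. session_id is normally assigned by the
# session store, so setting it by hand here is purely illustrative.
#
#   session = SimpleSession(absolute_timeout=60 * 60 * 1000,  # 1 hour
#                           idle_timeout=15 * 60 * 1000)      # 15 minutes
#   session.session_id = 'example-id'
#   session.touch()      # refresh last_access_time on each request
#   session.validate()   # raises Stopped/Expired exceptions once invalid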
|
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from conveyor import exception
from conveyor.resource.driver import base
from conveyor.resource import resource
from conveyor.resource import resource_state
from conveyor import volume
LOG = logging.getLogger(__name__)
class VolumeResource(base.Resource):
def __init__(self, context, collected_resources=None,
collected_parameters=None, collected_dependencies=None):
self.context = context
self.cinder_api = volume.API()
self._collected_resources = collected_resources or {}
self._collected_parameters = collected_parameters or {}
self._collected_dependencies = collected_dependencies or {}
def extract_volumes(self, volume_ids, parent_name=None,
parent_resources=None):
volume_dicts = []
volumeResources = []
if not volume_ids:
LOG.info('Extract resources of all volumes.')
volume_dicts = self.cinder_api.get_all(self.context)
else:
LOG.info('Extract resources of volumes: %s', volume_ids)
# remove duplicate volume
volume_ids = {}.fromkeys(volume_ids).keys()
for volume_id in volume_ids:
try:
volume = self.cinder_api.get(self.context, volume_id)
volume_dicts.append(volume)
except Exception as e:
msg = "Volume resource <%s> could not be found. %s" \
% (volume_id, unicode(e))
LOG.error(msg)
raise exception.ResourceNotFound(message=msg)
for volume in volume_dicts:
volume_id = volume['id']
vol_state = volume.get('status', None)
if vol_state not in resource_state.VOLUME_CLONE_STATE:
LOG.error("Volume %(id)s state is %(state)s not available \
or in-use", {'id': volume_id, 'state': vol_state})
raise exception.PlanCreateFailed
volume_res = self._collected_resources.get(volume_id)
if volume_res:
volumeResources.append(volume_res)
continue
properties = {
'size': volume['size'],
'name': volume['display_name'],
'availability_zone': volume['availability_zone']
}
if volume.get('display_description'):
properties['description'] = volume['display_description']
vol_metadata = volume.get('volume_metadata', None)
if vol_metadata:
vol_metadata.pop('__hc_vol_id', None)
vol_metadata.pop('__openstack_region_name', None)
properties['metadata'] = vol_metadata
resource_type = "OS::Cinder::Volume"
resource_name = 'volume_%d' % self.\
_get_resource_num(resource_type)
if parent_name and volume_id in parent_resources:
resource_name = parent_name + '.' + resource_name
volume_res = resource.Resource(resource_name, resource_type,
volume_id, properties=properties)
volume_dep = resource.ResourceDependency(volume_id,
resource_name,
volume['display_name'],
resource_type)
volume_res.add_extra_property('status', vol_state)
volume_res.add_extra_property('copy_data', True)
volume_type_name = volume['volume_type']
if volume_type_name:
volume_types = self.cinder_api.volume_type_list(self.context)
volume_type_id = None
for vtype in volume_types:
if vtype['name'] == volume_type_name:
volume_type_id = vtype['id']
break
if volume_type_id:
volume_type_res = \
self.extract_volume_types([volume_type_id],
parent_name,
parent_resources)
if volume_type_res:
t_name = volume_type_res[0].name
volume_res.add_property('volume_type',
{'get_resource': t_name})
dep_res_name = \
volume_type_res[0].properties.get('name', '')
volume_dep.add_dependency(volume_type_res[0].id,
volume_type_res[0].name,
dep_res_name,
volume_type_res[0].type)
if volume['bootable'] and volume.get('volume_image_metadata'):
image_id = volume['volume_image_metadata'].get('image_id')
if image_id:
image_para_name = self.extract_image(image_id,
parent_name,
parent_resources)
description = ("Image to use to boot server or volume")
constraints = [{'custom_constraint': "glance.image"}]
volume_res.add_parameter(image_para_name, description,
default=image_id,
constraints=constraints)
volume_res.add_property('image',
{'get_param': image_para_name})
volume_res.add_extra_property('boot_index', 0)
self._collected_resources[volume_id] = volume_res
self._collected_dependencies[volume_id] = volume_dep
volumeResources.append(volume_res)
if volume_ids and not volumeResources:
msg = "Volume resource extracted failed, \
can't find the volume with id of %s." % volume_ids
LOG.error(msg)
raise exception.ResourceNotFound(message=msg)
LOG.info('Extracting volume resources has finished')
return volumeResources
def extract_volume_types(self, volume_type_ids, parent_name=None,
parent_resources=None):
volume_type_dicts = []
volumeTypeResources = []
if not volume_type_ids:
LOG.debug('Extract resources of all volume_types.')
volume_type_dicts = self.cinder_api.volume_type_list(self.context)
else:
LOG.debug('Extract resources of volume_types: %s',
volume_type_ids)
# remove duplicate volume_type
volume_type_ids = {}.fromkeys(volume_type_ids).keys()
for volume_type_id in volume_type_ids:
try:
volume_type = \
self.cinder_api.get_volume_type(self.context,
volume_type_id)
volume_type_dicts.append(volume_type)
except Exception as e:
msg = "VolumeType resource <%s> could not be found. %s" \
% (volume_type_id, unicode(e))
LOG.error(msg)
raise exception.ResourceNotFound(message=msg)
for volume_type in volume_type_dicts:
volume_type_id = volume_type['id']
volume_type_res = self._collected_resources.get(volume_type_id)
if volume_type_res:
volumeTypeResources.append(volume_type_res)
continue
properties = {
'name': volume_type['name']
}
dependencies = []
            # 2. if the volume type has qos specs, build the qos resource
qos_id = volume_type.get('qos_specs_id', None)
if qos_id:
qos_driver = \
QosResource(
self.context,
collected_resources=self._collected_resources,
collected_parameters=self._collected_parameters,
collected_dependencies=self._collected_dependencies)
qos_res = qos_driver.extract_qos(qos_id, parent_name,
parent_resources)
properties['qos_specs_id'] = {'get_resource': qos_res.name}
dependencies.append({'id': qos_res.id, 'name': qos_res.name,
'name_in_template': '',
'type': qos_res.type})
self._collected_resources = \
qos_driver.get_collected_resources()
self._collected_dependencies = \
qos_driver.get_collected_dependencies()
if volume_type.get('extra_specs'):
properties['metadata'] = volume_type['extra_specs']
resource_type = "OS::Cinder::VolumeType"
resource_name = 'volume_type_%d' % \
self._get_resource_num(resource_type)
if parent_name and volume_type_id in parent_resources:
resource_name = parent_name + '.' + resource_name
volume_type_res = resource.Resource(resource_name, resource_type,
volume_type_id,
properties=properties)
volume_type_dep = resource.ResourceDependency(
volume_type_id,
resource_name,
volume_type['name'],
resource_type,
dependencies=dependencies)
self._collected_resources[volume_type_id] = volume_type_res
self._collected_dependencies[volume_type_id] = volume_type_dep
volumeTypeResources.append(volume_type_res)
if volume_type_ids and not volumeTypeResources:
msg = "VolumeType resource extracted failed, \
can't find the volume type with id of %s." % \
volume_type_ids
LOG.error(msg)
raise exception.ResourceNotFound(message=msg)
return volumeTypeResources
def extract_image(self, image_id, parent_name=None, parent_resources=None):
parameter_name = self._collected_parameters.get(image_id)
if not parameter_name:
parameter_name = "image_%d" % self._get_parameter_num()
if parent_name and image_id in parent_resources:
parameter_name = parent_name + '.' + parameter_name
self._collected_parameters[image_id] = parameter_name
return parameter_name
class Volume(base.Resource):
def __init__(self, context, collected_resources=None,
collected_parameters=None, collected_dependencies=None):
self.context = context
self.cinder_api = volume.API()
self._collected_resources = collected_resources or {}
self._collected_parameters = collected_parameters or {}
self._collected_dependencies = collected_dependencies or {}
def extract_volumes(self, volume_ids, parent_name=None,
parent_resources=None):
if not volume_ids:
_msg = 'No volume resource to extract.'
LOG.info(_msg)
return
try:
for volume_id in volume_ids:
self.extract_volume(volume_id, parent_name, parent_resources)
except exception.ResourceExtractFailed:
raise
except exception.ResourceNotFound:
raise
except Exception as e:
_msg = 'Create volume resource error: %s' % e
LOG.error(_msg)
raise exception.ResourceExtractFailed(_msg)
def extract_volume(self, volume_id, parent_name=None,
parent_resources=None):
LOG.debug('Create volume resource start: %s', volume_id)
# 1.query volume info
try:
volume = self.cinder_api.get(self.context, volume_id)
except Exception as e:
msg = "Volume resource <%s> could not be found. %s" \
% (volume_id, unicode(e))
LOG.error(msg)
raise exception.ResourceNotFound(message=msg)
volume_id = volume.get('id')
vol_state = volume.get('status', None)
if vol_state not in resource_state.VOLUME_CLONE_STATE:
LOG.error("Volume %(id)s state is %(state)s not available \
or in-use", {'id': volume_id, 'state': vol_state})
raise exception.PlanCreateFailed
v_res = self._collected_resources.get(volume_id)
        # check whether the volume resource already exists
if v_res:
return v_res
        # 2. build volume resource
properties = {
'size': volume['size'],
'name': volume['display_name'],
'availability_zone': volume['availability_zone']
}
if volume.get('display_description'):
properties['description'] = volume['display_description']
vol_metadata = volume.get('volume_metadata', None)
if vol_metadata:
vol_metadata.pop('__hc_vol_id', None)
vol_metadata.pop('__openstack_region_name', None)
properties['metadata'] = vol_metadata
resource_type = "OS::Cinder::Volume"
resource_name = 'volume_%d' % self._get_resource_num(resource_type)
if parent_name and volume_id in parent_resources:
resource_name = parent_name + '.' + resource_name
volume_res = resource.Resource(resource_name, resource_type,
volume_id, properties=properties)
volume_dep = resource.ResourceDependency(volume_id,
resource_name,
volume['display_name'],
resource_type)
self._collected_resources[volume_id] = volume_res
self._collected_dependencies[volume_id] = volume_dep
volume_res.add_extra_property('status', vol_state)
volume_res.add_extra_property('copy_data', True)
        # 3. if the volume has a volume type, build the volume type resource
        # and update dependencies
volume_type_name = volume.get('volume_type')
if volume_type_name:
volume_types = self.cinder_api.volume_type_list(self.context)
type_id = None
for vtype in volume_types:
if vtype['name'] == volume_type_name:
type_id = vtype['id']
break
if type_id:
type_driver = \
VolumeType(
self.context,
collected_resources=self._collected_resources,
collected_parameters=self._collected_parameters,
collected_dependencies=self._collected_dependencies)
volume_type_res = type_driver.extract_volume_type(
type_id, parent_name, parent_resources)
if volume_type_res:
t_name = volume_type_res.name
volume_res.add_property('volume_type',
{'get_resource': t_name})
dep_res_name = volume_type_res.properties.get('name', '')
volume_dep.add_dependency(volume_type_res.id,
volume_type_res.name,
dep_res_name,
volume_type_res.type)
self._collected_resources = \
type_driver.get_collected_resources()
self._collected_dependencies = \
type_driver.get_collected_dependencies()
        # 4. if the volume was created from an image, add image info to the volume resource
if volume['bootable'] == 'true' and \
volume.get('volume_image_metadata'):
image_id = volume['volume_image_metadata'].get('image_id')
if image_id:
image_para_name = self.extract_image(image_id, parent_name,
parent_resources)
description = ("Image to use to boot server or volume")
constraints = [{'custom_constraint': "glance.image"}]
volume_res.add_parameter(image_para_name, description,
default=image_id,
constraints=constraints)
volume_res.add_property('image',
{'get_param': image_para_name})
        # 5. if the volume is in a consistency group, collect the consistency group resource
cg_id = volume.get('consistencygroup_id')
if cg_id:
from conveyor.resource.driver.consistencygroup import \
ConsistencyGroup
consisgroup_driver = \
ConsistencyGroup(
self.context,
collected_resources=self._collected_resources,
collected_parameters=self._collected_parameters,
collected_dependencies=self._collected_dependencies)
cons_res = consisgroup_driver.extract_consistency_group(
cg_id,
parent_name,
parent_resources)
volume_res.add_property('consistencygroup_id',
{'get_resource': cons_res.name})
dep_res_name = cons_res.properties.get('name', '')
volume_dep.add_dependency(cons_res.id,
cons_res.name,
dep_res_name,
cons_res.type)
self._collected_resources = \
consisgroup_driver.get_collected_resources()
self._collected_dependencies = \
consisgroup_driver.get_collected_dependencies()
LOG.debug('Create volume resource end: %s', volume_id)
return volume_res
def extract_image(self, image_id, parent_name=None,
parent_resources=None):
parameter_name = self._collected_parameters.get(image_id)
if not parameter_name:
parameter_name = "image_%d" % self._get_parameter_num()
if parent_name and image_id in parent_resources:
parameter_name = parent_name + '.' + parameter_name
self._collected_parameters[image_id] = parameter_name
return parameter_name
class VolumeType(base.Resource):
def __init__(self, context, collected_resources=None,
collected_parameters=None, collected_dependencies=None):
self.context = context
self.cinder_api = volume.API()
self._collected_resources = collected_resources or {}
self._collected_parameters = collected_parameters or {}
self._collected_dependencies = collected_dependencies or {}
def extract_volume_types(self, volume_type_ids, parent_name=None,
parent_resources=None):
if not volume_type_ids:
_msg = 'Create volume type resource error: id is null.'
LOG.error(_msg)
raise exception.InvalidInput(reason=_msg)
volume_type_res = []
try:
for volume_type_id in volume_type_ids:
type_res = self.extract_volume_type(volume_type_id,
parent_name,
parent_resources)
volume_type_res.append(type_res)
except exception.ResourceExtractFailed:
raise
except exception.ResourceNotFound:
raise
except Exception as e:
_msg = 'Create volume type resource error: %s' % e
LOG.error(_msg)
raise exception.ResourceExtractFailed(_msg)
return volume_type_res
def extract_volume_type(self, volume_type_id, parent_name=None,
parent_resources=None):
LOG.debug('Create volume type resource start: %s', volume_type_id)
properties = {}
dependencies = []
# 1. query volume type info
try:
volume_type = self.cinder_api.get_volume_type(self.context,
volume_type_id)
except Exception as e:
msg = "VolumeType resource <%s> could not be found. %s" \
% (volume_type_id, unicode(e))
LOG.error(msg)
raise exception.ResourceNotFound(message=msg)
volume_type_id = volume_type['id']
        # check whether the volume type resource already exists
volume_type_res = self._collected_resources.get(volume_type_id)
if volume_type_res:
return volume_type_res
        # 2. if the volume type has qos specs, build the qos resource
qos_id = volume_type.get('qos_specs_id')
if qos_id:
qos_driver = \
QosResource(
self.context,
collected_resources=self._collected_resources,
collected_parameters=self._collected_parameters,
collected_dependencies=self._collected_dependencies)
qos_res = qos_driver.extract_qos(qos_id, parent_name,
parent_resources)
self._collected_resources = qos_driver.get_collected_resources()
self._collected_dependencies = \
qos_driver.get_collected_dependencies()
properties['qos_specs_id'] = {'get_resource': qos_res.name}
dependencies.append({'id': qos_res.id, 'name': qos_res.name,
'name_in_template': '', 'type': qos_res.type})
        # 3. build volume type resource
properties['name'] = volume_type.get('name')
if volume_type.get('extra_specs'):
properties['metadata'] = volume_type['extra_specs']
resource_type = "OS::Cinder::VolumeType"
resource_name = 'volume_type_%d' % \
self._get_resource_num(resource_type)
if parent_name and volume_type_id in parent_resources:
resource_name = parent_name + '.' + resource_name
volume_type_res = resource.Resource(resource_name, resource_type,
volume_type_id,
properties=properties)
type_dep = resource.ResourceDependency(volume_type_id,
resource_name,
volume_type['name'],
resource_type,
dependencies=dependencies)
self._collected_resources[volume_type_id] = volume_type_res
self._collected_dependencies[volume_type_id] = type_dep
LOG.debug('Create volume type resource end: %s', volume_type_id)
return volume_type_res
class QosResource(base.Resource):
def __init__(self, context, collected_resources=None,
collected_parameters=None, collected_dependencies=None):
self.context = context
self.cinder_api = volume.API()
self._collected_resources = collected_resources or {}
self._collected_parameters = collected_parameters or {}
self._collected_dependencies = collected_dependencies or {}
def extract_qos(self, qos_id, parent_name=None,
parent_resources=None):
LOG.debug('Create qos resource start: %s', qos_id)
properties = {}
        # 1. check whether the qos resource already exists
qos_res = self._collected_resources.get(qos_id, None)
if qos_res:
LOG.debug('Create qos resource exist: %s', qos_id)
return qos_res
# 2 query qos info
try:
qos_info = self.cinder_api.get_qos_specs(self.context, qos_id)
except Exception as e:
_msg = 'Create volume qos error: %s' % e
LOG.error(_msg)
raise exception.ResourceExtractFailed(reason=_msg)
properties['specs'] = qos_info.get('specs')
properties['name'] = qos_info.get('name')
qos_type = "OS::Cinder::Qos"
qos_name = 'CinderQos_%d' % self._get_resource_num(qos_type)
if parent_name and qos_id in parent_resources:
qos_name = parent_name + '.' + qos_name
qos_res = resource.Resource(qos_name, qos_type,
qos_id, properties=properties)
qos_dep = resource.ResourceDependency(qos_id, qos_name, '',
qos_type)
self._collected_resources[qos_id] = qos_res
self._collected_dependencies[qos_id] = qos_dep
LOG.debug('Create qos resource end: %s', qos_id)
return qos_res
|
|
#!/usr/bin/python -u
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from io import BytesIO
from unittest import main
from uuid import uuid4
import os
import time
import shutil
import re
from swiftclient import client
from swift.obj.diskfile import get_data_dir
from test.probe.common import ReplProbeTest
from swift.common.request_helpers import get_reserved_name
from swift.common.utils import readconf
EXCLUDE_FILES = re.compile(r'^(hashes\.(pkl|invalid)|lock(-\d+)?)$')
def collect_info(path_list):
"""
    Recursively collect the directories and files under each path in path_list.
    :param path_list: list of start directories for collecting
    :return: (files_list, dir_list) tuple with the collected files and
             directories for each path
"""
files_list = []
dir_list = []
for path in path_list:
temp_files_list = []
temp_dir_list = []
for root, dirs, files in os.walk(path):
files = [f for f in files if not EXCLUDE_FILES.match(f)]
temp_files_list += files
temp_dir_list += dirs
files_list.append(temp_files_list)
dir_list.append(temp_dir_list)
return files_list, dir_list
def find_max_occupancy_node(dir_list):
"""
Find node with maximum occupancy.
:param dir_list: list of directories for each node.
    :return: index of the node in dir_list with the most directories
"""
count = 0
number = 0
length = 0
for dirs in dir_list:
if length < len(dirs):
length = len(dirs)
number = count
count += 1
return number
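# Hedged example of how the two helpers above combine (paths are hypothetical):
#
#   files_list, dir_list = collect_info(['/srv/node/sda', '/srv/node/sdb'])
#   busiest = find_max_occupancy_node(dir_list)
#   test_node = ['/srv/node/sda', '/srv/node/sdb'][busiest]
#
# i.e. pick the device path whose tree currently holds the most directories.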
class TestReplicatorFunctions(ReplProbeTest):
"""
Class for testing replicators and replication servers.
    With the default configuration, replication servers are not used.
    To test separate replication servers, the ring files need to be changed
    using the set_info command, or new ring files with different port values
    need to be created.
"""
def put_data(self):
container = 'container-%s' % uuid4()
client.put_container(self.url, self.token, container,
headers={'X-Storage-Policy':
self.policy.name})
obj = 'object-%s' % uuid4()
client.put_object(self.url, self.token, container, obj, 'VERIFY')
def test_main(self):
# Create one account, container and object file.
# Find node with account, container and object replicas.
# Delete all directories and files from this node (device).
# Wait 60 seconds and check replication results.
# Delete directories and files in objects storage without
# deleting file "hashes.pkl".
# Check, that files not replicated.
# Delete file "hashes.pkl".
# Check, that all files were replicated.
path_list = []
data_dir = get_data_dir(self.policy)
# Figure out where the devices are
for node_id in range(1, 5):
conf = readconf(self.configs['object-server'][node_id])
device_path = conf['app:object-server']['devices']
for dev in self.object_ring.devs:
if dev['port'] == int(conf['app:object-server']['bind_port']):
device = dev['device']
path_list.append(os.path.join(device_path, device))
# Put data to storage nodes
self.put_data()
# Get all data file information
(files_list, dir_list) = collect_info(path_list)
num = find_max_occupancy_node(dir_list)
test_node = path_list[num]
test_node_files_list = []
for files in files_list[num]:
if not files.endswith('.pending'):
test_node_files_list.append(files)
test_node_dir_list = []
for d in dir_list[num]:
if not d.startswith('tmp'):
test_node_dir_list.append(d)
# Run all replicators
try:
# Delete some files
for directory in os.listdir(test_node):
shutil.rmtree(os.path.join(test_node, directory))
self.assertFalse(os.listdir(test_node))
self.replicators.start()
# We will keep trying these tests until they pass for up to 60s
begin = time.time()
while True:
(new_files_list, new_dir_list) = collect_info([test_node])
try:
# Check replicate files and dir
for files in test_node_files_list:
self.assertIn(files, new_files_list[0])
for directory in test_node_dir_list:
self.assertIn(directory, new_dir_list[0])
# We want to make sure that replication is completely
# settled; any invalidated hashes should be rehashed so
# hashes.pkl is stable
for directory in os.listdir(
os.path.join(test_node, data_dir)):
hashes_invalid_path = os.path.join(
test_node, data_dir, directory, 'hashes.invalid')
self.assertEqual(os.stat(
hashes_invalid_path).st_size, 0)
break
except Exception:
if time.time() - begin > 60:
raise
time.sleep(1)
self.replicators.stop()
# Delete directories and files in objects storage without
# deleting file "hashes.pkl".
for directory in os.listdir(os.path.join(test_node, data_dir)):
for input_dir in os.listdir(os.path.join(
test_node, data_dir, directory)):
if os.path.isdir(os.path.join(
test_node, data_dir, directory, input_dir)):
shutil.rmtree(os.path.join(
test_node, data_dir, directory, input_dir))
self.replicators.once()
# Check, that files not replicated.
for directory in os.listdir(os.path.join(
test_node, data_dir)):
for input_dir in os.listdir(os.path.join(
test_node, data_dir, directory)):
self.assertFalse(os.path.isdir(
os.path.join(test_node, data_dir,
directory, input_dir)))
self.replicators.start()
# Now, delete file "hashes.pkl".
# Check, that all files were replicated.
for directory in os.listdir(os.path.join(test_node, data_dir)):
os.remove(os.path.join(
test_node, data_dir, directory, 'hashes.pkl'))
# We will keep trying these tests until they pass for up to 60s
begin = time.time()
while True:
try:
(new_files_list, new_dir_list) = collect_info([test_node])
# Check replicate files and dirs
for files in test_node_files_list:
self.assertIn(files, new_files_list[0])
for directory in test_node_dir_list:
self.assertIn(directory, new_dir_list[0])
break
except Exception:
if time.time() - begin > 60:
raise
time.sleep(1)
finally:
self.replicators.stop()
class TestReplicatorFunctionsReservedNames(TestReplicatorFunctions):
def put_data(self):
int_client = self.make_internal_client()
int_client.create_account(self.account)
container = get_reserved_name('container', str(uuid4()))
int_client.create_container(self.account, container,
headers={'X-Storage-Policy':
self.policy.name})
obj = get_reserved_name('object', str(uuid4()))
int_client.upload_object(
BytesIO(b'VERIFY'), self.account, container, obj)
if __name__ == '__main__':
main()
|
|
"""
threadly: a simple thread pool and scheduler for Python.
"""
import threading
import logging
import Queue
from Queue import Empty as EmptyException
from threadly.Structures import SortedLockingList
from threadly.KeyedExecutor import KeyedExecutor
from threadly.Futures import ListenableFuture
from threadly.Futures import future_job
from threadly.Clock import Clock
class Scheduler(object):
"""
Main Scheduler Object.
"""
def __init__(self, poolsize):
"""
    Construct a Scheduler instance with the given thread pool size.
`poolsize` positive integer for the number of threads you want
    in this pool.
"""
self.__log = logging.getLogger("root.threadly")
self.__clock = Clock()
self.__key_lock = threading.Condition()
self.__poolsize = poolsize
self.__running = True
self.__in_shutdown = False
self.__main_queue = Queue.Queue()
self.__delayed_tasks = SortedLockingList()
self.__in_delay = False
self.__threads = list()
self.__delay_lock = threading.Condition()
self.__keys = dict()
for i in xrange(self.__poolsize):
tmp_thread = threading.Thread(target=self.__thread_pool)
tmp_thread.name = "Executor-Pool-Thread-%d" % (i)
tmp_thread.daemon = True
tmp_thread.start()
self.__threads.append(tmp_thread)
def get_poolsize(self):
"""
Returns the number of threads used in this Pool.
"""
return len(self.__threads)
def get_queue_size(self):
"""
Returns the number of items currently awaiting Execution.
"""
return self.__main_queue.qsize()
def execute(self, task, args=None, kwargs=None):
"""
Execute a given task as soon as possible.
`task` is a callable to be called on the Scheduler.
`args` are the arguments to pass to the callable when called.
`kwargs` are the keyword args to be passed to the callable when called.
"""
args = args or ()
kwargs = kwargs or {}
self.schedule(task, args=args, kwargs=kwargs)
def schedule_with_future(self, task, delay=0, key=None, args=None, kwargs=None):
"""
Returns a `ListenableFuture` for this task. Once the task is
completed the future will also be completed. This works pretty much
exactly like `schedule` except you can not make a task recurring.
`task` is a callable to be called on the Scheduler.
`delay` this is the time to wait (in milliseconds!!) before scheduler
will call the passed task.
`key` this is any python object to use as a key. All tasks using
    this key will be run in a single-threaded manner.
`args` are the arguments to pass to the callable when called.
`kwargs` are the keyword args to be passed to the callable when called.
"""
args = args or ()
kwargs = kwargs or {}
job = (task, args, kwargs)
future = ListenableFuture()
self.schedule(future_job, delay=delay, key=key, args=(future, job))
return future
def schedule(self, task, delay=0, recurring=False, key=None, args=None, kwargs=None):
"""
This schedules a task to be executed. It can be delayed, and set
to a key. It can also be marked as recurring.
`task` is a callable to be called on the Scheduler.
`delay` this is the time to wait (in milliseconds!!) before scheduler
will call the passed task.
`recurring` set this to True if this should be a recurring.
You should be careful that delay is > 0 when setting this to True.
`key` this is any python object to use as a key. All tasks using this
    key will be run in a single-threaded manner.
`args` are the arguments to pass to the callable when called.
`kwargs` are the keyword args to be passed to the callable when called.
"""
args = args or ()
kwargs = kwargs or {}
if delay > 0:
s_task = int(self.__clock.accurate_time() * 1000) + delay
send = False
if delay / 1000.0 <= self.__get_next_wait_time():
send = True
self.__delayed_tasks.add((s_task, task, delay, recurring, key, args, kwargs))
if send:
self.__main_queue.put((self.__empty, (), {}))
else:
if key is not None:
self.__key_lock.acquire()
if key not in self.__keys:
tmp = KeyedExecutor()
self.__keys[key] = tmp
self.__key_lock.release()
run_key = self.__keys[key]
run_key.add((task, args, kwargs))
run_key.lock.acquire()
if not run_key.in_queue and run_key.size() > 0:
run_key.in_queue = True
self.__main_queue.put((run_key.run_all, (), {}))
run_key.lock.release()
else:
self.__main_queue.put((task, args, kwargs))
def remove(self, task):
"""
Remove a scheduled task from the queue. This is a best effort remove,
the task could still possibly run. This is most useful to cancel
    recurring tasks. If there is more than one task with this callable
scheduled only the first one is removed.
`task` callable task to remove from the scheduled tasks list.
"""
count = 0
found = False
for tasks in self.__delayed_tasks.safeIterator():
if tasks[1] == task:
found = True
break
else:
count += 1
if found:
self.__delayed_tasks.pop(count)
return True
return False
def shutdown(self):
"""
Shuts down the threadpool. Any task currently on the queue will be
    run, but all scheduled tasks will be removed and no more tasks can be
added.
"""
self.__running = False
self.__delayed_tasks.clear()
self.execute(self.__internal_shutdown)
def shutdown_now(self):
"""
Shuts down the threadpool. Any task currently being executed will
still complete, but the queue will be emptied out.
"""
self.__running = False
self.__delayed_tasks.clear()
while not self.__main_queue.empty():
try:
self.__main_queue.get_nowait()
except:
pass
self.__internal_shutdown()
def __internal_shutdown(self):
self.__running = False
for tmp_thread in self.__threads:
if tmp_thread is not None and tmp_thread.isAlive() and threading is not None and tmp_thread != threading.current_thread():
self.__main_queue.put((self.__empty, (), {}))
self.__main_queue.put((self.__empty, (), {}))
self.__main_queue.put((self.__empty, (), {}))
self.__main_queue.put((self.__empty, (), {}))
def __empty(self):
pass
def __get_next_wait_time(self):
tmp = self.__delayed_tasks.peek()
if tmp is None or self.__delayed_tasks.size() == 0:
return 2 ** 32
else:
task = tmp[0] - int(self.__clock.accurate_time() * 1000)
return (task / 1000.0) - .0005
def __check_delay_queue(self):
dl = self.__delayed_tasks.lock()
if dl:
try:
time_out = self.__get_next_wait_time()
while time_out <= 0:
run_task = self.__delayed_tasks.pop(0)
self.schedule(run_task[1], key=run_task[4], args=run_task[5], kwargs=run_task[6])
#run_task[3] is recurring, if so we add again as a scheduled event
if run_task[3] == True and not self.__in_shutdown:
self.schedule(run_task[1], run_task[2], run_task[3], run_task[4], run_task[5], run_task[6])
time_out = self.__get_next_wait_time()
finally:
self.__delayed_tasks.unlock()
return dl
def __thread_pool(self):
while self.__running:
try:
runner = None
time_out = self.__get_next_wait_time()
if time_out <= 0 and self.__check_delay_queue():
time_out = self.__get_next_wait_time()
if time_out <= 0:
time_out = 5
if runner is None:
runner = self.__main_queue.get(True, time_out)
if runner is not None:
runner[0](*runner[1], **runner[2])
except IndexError as exp:
pass
except EmptyException as exp:
pass
except Exception as exp:
self.__log.error("Exception while Executing: %s, %s"%(runner, exp))
|
|
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import functools
import os
import multiprocessing
import shutil
import signal
import time
import tempfile
from helpers import unittest, skipOnTravisAndGithubActions
import luigi.rpc
import luigi.server
import luigi.cmdline
from luigi.configuration import get_config
from luigi.scheduler import Scheduler
from urllib.parse import (
urlencode, ParseResult, quote as urlquote
)
import tornado.ioloop
from tornado.testing import AsyncHTTPTestCase
import pytest
try:
from unittest import mock
except ImportError:
import mock
def _is_running_from_main_thread():
"""
Return true if we're the same thread as the one that created the Tornado
IOLoop. In practice, the problem is that we get annoying intermittent
failures because sometimes the KeepAliveThread jumps in and "disturbs" the
intended flow of the test case. Worse, it fails in the terrible way that
the KeepAliveThread is kept alive, bugging the execution of subsequent test
    cases.
    Oh, I so wish Tornado would explicitly say that you're accessing it from
different threads and things will just not work.
"""
return tornado.ioloop.IOLoop.current(instance=False)
class ServerTestBase(AsyncHTTPTestCase):
def get_app(self):
return luigi.server.app(Scheduler())
def setUp(self):
super(ServerTestBase, self).setUp()
self._old_fetch = luigi.rpc.RemoteScheduler._fetch
def _fetch(obj, url, body, *args, **kwargs):
if _is_running_from_main_thread():
body = urlencode(body).encode('utf-8')
response = self.fetch(url, body=body, method='POST')
if response.code >= 400:
raise luigi.rpc.RPCError(
                    'Error when connecting to remote scheduler'
)
return response.body.decode('utf-8')
luigi.rpc.RemoteScheduler._fetch = _fetch
def tearDown(self):
super(ServerTestBase, self).tearDown()
luigi.rpc.RemoteScheduler._fetch = self._old_fetch
class ServerTest(ServerTestBase):
def setUp(self):
super(ServerTest, self).setUp()
get_config().remove_section('cors')
self._default_cors = luigi.server.cors()
get_config().set('cors', 'enabled', 'true')
get_config().set('cors', 'allow_any_origin', 'true')
get_config().set('cors', 'allow_null_origin', 'true')
def tearDown(self):
super(ServerTest, self).tearDown()
get_config().remove_section('cors')
def test_visualiser(self):
page = self.fetch('/').body
self.assertTrue(page.find(b'<title>') != -1)
def _test_404(self, path):
response = self.fetch(path)
self.assertEqual(response.code, 404)
def test_404(self):
self._test_404('/foo')
def test_api_404(self):
self._test_404('/api/foo')
def test_root_redirect(self):
response = self.fetch("/", follow_redirects=False)
self.assertEqual(response.code, 302)
        self.assertEqual(response.headers['Location'], 'static/visualiser/index.html')  # assert that it doesn't begin with a leading slash
def test_api_preflight_cors_headers(self):
response = self.fetch('/api/graph', method='OPTIONS', headers={'Origin': 'foo'})
headers = dict(response.headers)
self.assertEqual(self._default_cors.allowed_headers,
headers['Access-Control-Allow-Headers'])
self.assertEqual(self._default_cors.allowed_methods,
headers['Access-Control-Allow-Methods'])
self.assertEqual('*', headers['Access-Control-Allow-Origin'])
self.assertEqual(str(self._default_cors.max_age), headers['Access-Control-Max-Age'])
self.assertIsNone(headers.get('Access-Control-Allow-Credentials'))
self.assertIsNone(headers.get('Access-Control-Expose-Headers'))
def test_api_preflight_cors_headers_all_response_headers(self):
get_config().set('cors', 'allow_credentials', 'true')
get_config().set('cors', 'exposed_headers', 'foo, bar')
response = self.fetch('/api/graph', method='OPTIONS', headers={'Origin': 'foo'})
headers = dict(response.headers)
self.assertEqual(self._default_cors.allowed_headers,
headers['Access-Control-Allow-Headers'])
self.assertEqual(self._default_cors.allowed_methods,
headers['Access-Control-Allow-Methods'])
self.assertEqual('*', headers['Access-Control-Allow-Origin'])
self.assertEqual(str(self._default_cors.max_age), headers['Access-Control-Max-Age'])
self.assertEqual('true', headers['Access-Control-Allow-Credentials'])
self.assertEqual('foo, bar', headers['Access-Control-Expose-Headers'])
def test_api_preflight_cors_headers_null_origin(self):
response = self.fetch('/api/graph', method='OPTIONS', headers={'Origin': 'null'})
headers = dict(response.headers)
self.assertEqual(self._default_cors.allowed_headers,
headers['Access-Control-Allow-Headers'])
self.assertEqual(self._default_cors.allowed_methods,
headers['Access-Control-Allow-Methods'])
self.assertEqual('null', headers['Access-Control-Allow-Origin'])
self.assertEqual(str(self._default_cors.max_age), headers['Access-Control-Max-Age'])
self.assertIsNone(headers.get('Access-Control-Allow-Credentials'))
self.assertIsNone(headers.get('Access-Control-Expose-Headers'))
def test_api_preflight_cors_headers_disallow_null(self):
get_config().set('cors', 'allow_null_origin', 'false')
response = self.fetch('/api/graph', method='OPTIONS', headers={'Origin': 'null'})
headers = dict(response.headers)
self.assertNotIn('Access-Control-Allow-Headers', headers)
self.assertNotIn('Access-Control-Allow-Methods', headers)
self.assertNotIn('Access-Control-Allow-Origin', headers)
self.assertNotIn('Access-Control-Max-Age', headers)
self.assertNotIn('Access-Control-Allow-Credentials', headers)
self.assertNotIn('Access-Control-Expose-Headers', headers)
def test_api_preflight_cors_headers_disallow_any(self):
get_config().set('cors', 'allow_any_origin', 'false')
get_config().set('cors', 'allowed_origins', '["foo", "bar"]')
response = self.fetch('/api/graph', method='OPTIONS', headers={'Origin': 'foo'})
headers = dict(response.headers)
self.assertEqual(self._default_cors.allowed_headers,
headers['Access-Control-Allow-Headers'])
self.assertEqual(self._default_cors.allowed_methods,
headers['Access-Control-Allow-Methods'])
self.assertEqual('foo', headers['Access-Control-Allow-Origin'])
self.assertEqual(str(self._default_cors.max_age), headers['Access-Control-Max-Age'])
self.assertIsNone(headers.get('Access-Control-Allow-Credentials'))
self.assertIsNone(headers.get('Access-Control-Expose-Headers'))
def test_api_preflight_cors_headers_disallow_any_no_matched_allowed_origins(self):
get_config().set('cors', 'allow_any_origin', 'false')
get_config().set('cors', 'allowed_origins', '["foo", "bar"]')
response = self.fetch('/api/graph', method='OPTIONS', headers={'Origin': 'foobar'})
headers = dict(response.headers)
self.assertNotIn('Access-Control-Allow-Headers', headers)
self.assertNotIn('Access-Control-Allow-Methods', headers)
self.assertNotIn('Access-Control-Allow-Origin', headers)
self.assertNotIn('Access-Control-Max-Age', headers)
self.assertNotIn('Access-Control-Allow-Credentials', headers)
self.assertNotIn('Access-Control-Expose-Headers', headers)
def test_api_preflight_cors_headers_disallow_any_no_allowed_origins(self):
get_config().set('cors', 'allow_any_origin', 'false')
response = self.fetch('/api/graph', method='OPTIONS', headers={'Origin': 'foo'})
headers = dict(response.headers)
self.assertNotIn('Access-Control-Allow-Headers', headers)
self.assertNotIn('Access-Control-Allow-Methods', headers)
self.assertNotIn('Access-Control-Allow-Origin', headers)
self.assertNotIn('Access-Control-Max-Age', headers)
self.assertNotIn('Access-Control-Allow-Credentials', headers)
self.assertNotIn('Access-Control-Expose-Headers', headers)
def test_api_preflight_cors_headers_disabled(self):
get_config().set('cors', 'enabled', 'false')
response = self.fetch('/api/graph', method='OPTIONS', headers={'Origin': 'foo'})
headers = dict(response.headers)
self.assertNotIn('Access-Control-Allow-Headers', headers)
self.assertNotIn('Access-Control-Allow-Methods', headers)
self.assertNotIn('Access-Control-Allow-Origin', headers)
self.assertNotIn('Access-Control-Max-Age', headers)
self.assertNotIn('Access-Control-Allow-Credentials', headers)
self.assertNotIn('Access-Control-Expose-Headers', headers)
def test_api_preflight_cors_headers_no_origin_header(self):
response = self.fetch('/api/graph', method='OPTIONS')
headers = dict(response.headers)
self.assertNotIn('Access-Control-Allow-Headers', headers)
self.assertNotIn('Access-Control-Allow-Methods', headers)
self.assertNotIn('Access-Control-Allow-Origin', headers)
self.assertNotIn('Access-Control-Max-Age', headers)
self.assertNotIn('Access-Control-Allow-Credentials', headers)
self.assertNotIn('Access-Control-Expose-Headers', headers)
def test_api_cors_headers(self):
response = self.fetch('/api/graph', headers={'Origin': 'foo'})
headers = dict(response.headers)
self.assertEqual('*', headers['Access-Control-Allow-Origin'])
def test_api_cors_headers_null_origin(self):
response = self.fetch('/api/graph', headers={'Origin': 'null'})
headers = dict(response.headers)
self.assertEqual('null', headers['Access-Control-Allow-Origin'])
def test_api_cors_headers_disallow_null(self):
get_config().set('cors', 'allow_null_origin', 'false')
response = self.fetch('/api/graph', headers={'Origin': 'null'})
headers = dict(response.headers)
self.assertIsNone(headers.get('Access-Control-Allow-Origin'))
def test_api_cors_headers_disallow_any(self):
get_config().set('cors', 'allow_any_origin', 'false')
get_config().set('cors', 'allowed_origins', '["foo", "bar"]')
response = self.fetch('/api/graph', headers={'Origin': 'foo'})
headers = dict(response.headers)
self.assertEqual('foo', headers['Access-Control-Allow-Origin'])
def test_api_cors_headers_disallow_any_no_matched_allowed_origins(self):
get_config().set('cors', 'allow_any_origin', 'false')
get_config().set('cors', 'allowed_origins', '["foo", "bar"]')
response = self.fetch('/api/graph', headers={'Origin': 'foobar'})
headers = dict(response.headers)
self.assertIsNone(headers.get('Access-Control-Allow-Origin'))
def test_api_cors_headers_disallow_any_no_allowed_origins(self):
get_config().set('cors', 'allow_any_origin', 'false')
response = self.fetch('/api/graph', headers={'Origin': 'foo'})
headers = dict(response.headers)
self.assertIsNone(headers.get('Access-Control-Allow-Origin'))
def test_api_cors_headers_disabled(self):
get_config().set('cors', 'enabled', 'false')
response = self.fetch('/api/graph', headers={'Origin': 'foo'})
headers = dict(response.headers)
self.assertIsNone(headers.get('Access-Control-Allow-Origin'))
def test_api_cors_headers_no_origin_header(self):
response = self.fetch('/api/graph')
headers = dict(response.headers)
self.assertIsNone(headers.get('Access-Control-Allow-Origin'))
def test_api_allow_head_on_root(self):
response = self.fetch('/', method='HEAD')
self.assertEqual(response.code, 204)
class _ServerTest(unittest.TestCase):
"""
Test to start and stop the server in a more "standard" way
"""
server_client_class = "To be defined by subclasses"
def start_server(self):
self._process = multiprocessing.Process(
target=self.server_client.run_server
)
self._process.start()
time.sleep(0.1) # wait for server to start
self.sch = self.server_client.scheduler()
self.sch._wait = lambda: None
def stop_server(self):
self._process.terminate()
self._process.join(timeout=1)
if self._process.is_alive():
os.kill(self._process.pid, signal.SIGKILL)
def setUp(self):
self.server_client = self.server_client_class()
state_path = tempfile.mktemp(suffix=self.id())
self.addCleanup(functools.partial(os.unlink, state_path))
luigi.configuration.get_config().set('scheduler', 'state_path', state_path)
self.start_server()
def tearDown(self):
self.stop_server()
@skipOnTravisAndGithubActions('https://travis-ci.org/spotify/luigi/jobs/78315794')
def test_ping(self):
self.sch.ping(worker='xyz')
@skipOnTravisAndGithubActions('https://travis-ci.org/spotify/luigi/jobs/78023665')
def test_raw_ping(self):
self.sch._request('/api/ping', {'worker': 'xyz'})
@skipOnTravisAndGithubActions('https://travis-ci.org/spotify/luigi/jobs/78023665')
def test_raw_ping_extended(self):
self.sch._request('/api/ping', {'worker': 'xyz', 'foo': 'bar'})
@skipOnTravisAndGithubActions('https://travis-ci.org/spotify/luigi/jobs/166833694')
def test_404(self):
with self.assertRaises(luigi.rpc.RPCError):
self.sch._request('/api/fdsfds', {'dummy': 1})
@skipOnTravisAndGithubActions('https://travis-ci.org/spotify/luigi/jobs/72953884')
def test_save_state(self):
self.sch.add_task(worker='X', task_id='B', deps=('A',))
self.sch.add_task(worker='X', task_id='A')
self.assertEqual(self.sch.get_work(worker='X')['task_id'], 'A')
self.stop_server()
self.start_server()
work = self.sch.get_work(worker='X')['running_tasks'][0]
self.assertEqual(work['task_id'], 'A')
@pytest.mark.unixsocket
class UNIXServerTest(_ServerTest):
class ServerClient:
def __init__(self):
self.tempdir = tempfile.mkdtemp()
self.unix_socket = os.path.join(self.tempdir, 'luigid.sock')
def run_server(self):
luigi.server.run(unix_socket=self.unix_socket)
def scheduler(self):
url = ParseResult(
scheme='http+unix',
netloc=urlquote(self.unix_socket, safe=''),
path='',
params='',
query='',
fragment='',
).geturl()
return luigi.rpc.RemoteScheduler(url)
server_client_class = ServerClient
def tearDown(self):
super(UNIXServerTest, self).tearDown()
shutil.rmtree(self.server_client.tempdir)
class INETServerClient:
def __init__(self):
# Just some port
self.port = 8083
def scheduler(self):
return luigi.rpc.RemoteScheduler('http://localhost:' + str(self.port))
class _INETServerTest(_ServerTest):
    # HACK: nose ignores classes whose names start with an underscore
# see: https://github.com/nose-devs/nose/blob/6f9dada1a5593b2365859bab92c7d1e468b64b7b/nose/selector.py#L72
# This hack affects derived classes of this class e.g. INETProcessServerTest, INETLuigidServerTest, INETLuigidDaemonServerTest.
__test__ = False
def test_with_cmdline(self):
"""
Test to run against the server as a normal luigi invocation does
"""
params = ['Task', '--scheduler-port', str(self.server_client.port), '--no-lock']
self.assertTrue(luigi.interface.run(params))
class INETProcessServerTest(_INETServerTest):
__test__ = True
class ServerClient(INETServerClient):
def run_server(self):
luigi.server.run(api_port=self.port, address='127.0.0.1')
server_client_class = ServerClient
class INETURLLibServerTest(INETProcessServerTest):
@mock.patch.object(luigi.rpc, 'HAS_REQUESTS', False)
def start_server(self, *args, **kwargs):
super(INETURLLibServerTest, self).start_server(*args, **kwargs)
@skipOnTravisAndGithubActions('https://travis-ci.org/spotify/luigi/jobs/81022689')
def patching_test(self):
"""
Check that HAS_REQUESTS patching is meaningful
"""
fetcher1 = luigi.rpc.RemoteScheduler()._fetcher
with mock.patch.object(luigi.rpc, 'HAS_REQUESTS', False):
fetcher2 = luigi.rpc.RemoteScheduler()._fetcher
self.assertNotEqual(fetcher1.__class__, fetcher2.__class__)
class INETLuigidServerTest(_INETServerTest):
__test__ = True
class ServerClient(INETServerClient):
def run_server(self):
            # I first tried things like "subprocess.call(['luigid', ...])",
            # but it ended up being a total mess getting the cleanup to work,
            # unfortunately.
luigi.cmdline.luigid(['--port', str(self.port)])
server_client_class = ServerClient
class INETLuigidDaemonServerTest(_INETServerTest):
__test__ = True
class ServerClient(INETServerClient):
def __init__(self):
super(INETLuigidDaemonServerTest.ServerClient, self).__init__()
self.tempdir = tempfile.mkdtemp()
@mock.patch('daemon.DaemonContext')
def run_server(self, daemon_context):
luigi.cmdline.luigid([
'--port', str(self.port),
'--background', # This makes it a daemon
'--logdir', self.tempdir,
'--pidfile', os.path.join(self.tempdir, 'luigid.pid')
])
def tearDown(self):
super(INETLuigidDaemonServerTest, self).tearDown()
shutil.rmtree(self.server_client.tempdir)
server_client_class = ServerClient
class MetricsHandlerTest(unittest.TestCase):
def setUp(self):
self.mock_scheduler = mock.MagicMock()
self.handler = luigi.server.MetricsHandler(tornado.web.Application(), mock.MagicMock(),
scheduler=self.mock_scheduler)
def test_initialize(self):
self.assertIs(self.handler._scheduler, self.mock_scheduler)
def test_get(self):
mock_metrics = mock.MagicMock()
self.mock_scheduler._state._metrics_collector.generate_latest.return_value = mock_metrics
with mock.patch.object(self.handler, 'write') as patched_write:
self.handler.get()
patched_write.assert_called_once_with(mock_metrics)
self.mock_scheduler._state._metrics_collector.configure_http_handler.assert_called_once_with(
self.handler)
def test_get_no_metrics(self):
self.mock_scheduler._state._metrics_collector.generate_latest.return_value = None
with mock.patch.object(self.handler, 'write') as patched_write:
self.handler.get()
patched_write.assert_not_called()
|
|
# Copyright 2014, Rackspace, US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""API over the nova service.
"""
from django.http import HttpResponse
from django.template.defaultfilters import slugify
from django.utils import http as utils_http
from django.views import generic
from novaclient import exceptions
from openstack_dashboard import api
from openstack_dashboard.api.rest import json_encoder
from openstack_dashboard.api.rest import urls
from openstack_dashboard.api.rest import utils as rest_utils
@urls.register
class Keypairs(generic.View):
"""API for nova keypairs.
"""
url_regex = r'nova/keypairs/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of keypairs associated with the current logged-in
account.
The listing result is an object with property "items".
"""
result = api.nova.keypair_list(request)
return {'items': [u.to_dict() for u in result]}
@rest_utils.ajax(data_required=True)
def post(self, request):
"""Create a keypair.
Create a keypair using the parameters supplied in the POST
application/json object. The parameters are:
:param name: the name to give the keypair
:param public_key: (optional) a key to import
This returns the new keypair object on success.
"""
if 'public_key' in request.DATA:
new = api.nova.keypair_import(request, request.DATA['name'],
request.DATA['public_key'])
else:
new = api.nova.keypair_create(request, request.DATA['name'])
return rest_utils.CreatedResponse(
'/api/nova/keypairs/%s' % utils_http.urlquote(new.name),
new.to_dict()
)
@urls.register
class Keypair(generic.View):
url_regex = r'nova/keypairs/(?P<keypair_name>.+)/$'
def get(self, request, keypair_name):
"""Creates a new keypair and associates it to the current project.
* Since the response for this endpoint creates a new keypair and
is not idempotent, it normally would be represented by a POST HTTP
request. However, this solution was adopted as it
would support automatic file download across browsers.
:param keypair_name: the name to associate the keypair to
:param regenerate: (optional) if set to the string 'true',
replaces the existing keypair with a new keypair
This returns the new keypair object on success.
"""
try:
regenerate = request.GET.get('regenerate') == 'true'
if regenerate:
api.nova.keypair_delete(request, keypair_name)
keypair = api.nova.keypair_create(request, keypair_name)
except exceptions.Conflict:
return HttpResponse(status=409)
except Exception:
return HttpResponse(status=500)
else:
response = HttpResponse(content_type='application/binary')
response['Content-Disposition'] = ('attachment; filename=%s.pem'
% slugify(keypair_name))
response.write(keypair.private_key)
response['Content-Length'] = str(len(response.content))
return response
@urls.register
class Services(generic.View):
"""API for nova services.
"""
url_regex = r'nova/services/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of nova services.
Will return HTTP 501 status code if the service_list extension is
not supported.
"""
if api.base.is_service_enabled(request, 'compute') \
and api.nova.extension_supported('Services', request):
result = api.nova.service_list(request)
return {'items': [u.to_dict() for u in result]}
else:
raise rest_utils.AjaxError(501, '')
@urls.register
class AvailabilityZones(generic.View):
"""API for nova availability zones.
"""
url_regex = r'nova/availzones/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of availability zones.
The following get parameters may be passed in the GET
request:
:param detailed: If this equals "true" then the result will
include more detail.
The listing result is an object with property "items".
"""
detailed = request.GET.get('detailed') == 'true'
result = api.nova.availability_zone_list(request, detailed)
return {'items': [u.to_dict() for u in result]}
@urls.register
class Limits(generic.View):
"""API for nova limits.
"""
url_regex = r'nova/limits/$'
@rest_utils.ajax(json_encoder=json_encoder.NaNJSONEncoder)
def get(self, request):
"""Get an object describing the current project limits.
Note: the Horizon API doesn't support any other project (tenant) but
the underlying client does...
The following get parameters may be passed in the GET
request:
:param reserved: This may be set to "true" but it's not
clear what the result of that is.
The result is an object with limits as properties.
"""
reserved = request.GET.get('reserved') == 'true'
result = api.nova.tenant_absolute_limits(request, reserved)
return result
@urls.register
class Servers(generic.View):
"""API over all servers.
"""
url_regex = r'nova/servers/$'
_optional_create = [
'block_device_mapping', 'block_device_mapping_v2', 'nics', 'meta',
'availability_zone', 'instance_count', 'admin_pass', 'disk_config',
'config_drive'
]
@rest_utils.ajax()
def get(self, request):
"""Get a list of servers.
The listing result is an object with property "items". Each item is
a server.
Example GET:
http://localhost/api/nova/servers
"""
servers = api.nova.server_list(request)[0]
return {'items': [s.to_dict() for s in servers]}
@rest_utils.ajax(data_required=True)
def post(self, request):
"""Create a server.
Create a server using the parameters supplied in the POST
application/json object. The required parameters as specified by
the underlying novaclient are:
:param name: The new server name.
:param source_id: The ID of the image to use.
:param flavor_id: The ID of the flavor to use.
:param key_name: (optional extension) name of previously created
keypair to inject into the instance.
        :param user_data: user data to be exposed by the metadata server;
                           this can be a file-like object or a string.
:param security_groups: An array of one or more objects with a "name"
attribute.
Other parameters are accepted as per the underlying novaclient:
"block_device_mapping", "block_device_mapping_v2", "nics", "meta",
"availability_zone", "instance_count", "admin_pass", "disk_config",
"config_drive"
This returns the new server object on success.
"""
try:
args = (
request,
request.DATA['name'],
request.DATA['source_id'],
request.DATA['flavor_id'],
request.DATA['key_name'],
request.DATA['user_data'],
request.DATA['security_groups'],
)
except KeyError as e:
raise rest_utils.AjaxError(400, 'missing required parameter '
"'%s'" % e.args[0])
kw = {}
for name in self._optional_create:
if name in request.DATA:
kw[name] = request.DATA[name]
new = api.nova.server_create(*args, **kw)
return rest_utils.CreatedResponse(
'/api/nova/servers/%s' % utils_http.urlquote(new.id),
new.to_dict()
)
@urls.register
class Server(generic.View):
"""API for retrieving a single server
"""
url_regex = r'nova/servers/(?P<server_id>[^/]+|default)$'
@rest_utils.ajax()
def get(self, request, server_id):
"""Get a specific server
http://localhost/api/nova/servers/1
"""
return api.nova.server_get(request, server_id).to_dict()
@urls.register
class ServerMetadata(generic.View):
"""API for server metadata.
"""
url_regex = r'nova/servers/(?P<server_id>[^/]+|default)/metadata$'
@rest_utils.ajax()
def get(self, request, server_id):
"""Get a specific server's metadata
http://localhost/api/nova/servers/1/metadata
"""
return api.nova.server_get(request,
server_id).to_dict().get('metadata')
@rest_utils.ajax()
def patch(self, request, server_id):
"""Update metadata items for a server
http://localhost/api/nova/servers/1/metadata
"""
updated = request.DATA['updated']
removed = request.DATA['removed']
if updated:
api.nova.server_metadata_update(request, server_id, updated)
if removed:
api.nova.server_metadata_delete(request, server_id, removed)
@urls.register
class Extensions(generic.View):
"""API for nova extensions.
"""
url_regex = r'nova/extensions/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of extensions.
The listing result is an object with property "items". Each item is
        an extension.
Example GET:
http://localhost/api/nova/extensions
"""
result = api.nova.list_extensions(request)
return {'items': [e.to_dict() for e in result]}
@urls.register
class Flavors(generic.View):
"""API for nova flavors.
"""
url_regex = r'nova/flavors/$'
@rest_utils.ajax()
def get(self, request):
"""Get a list of flavors.
The listing result is an object with property "items". Each item is
a flavor. By default this will return the flavors for the user's
current project. If the user is admin, public flavors will also be
returned.
:param is_public: For a regular user, set to True to see all public
flavors. For an admin user, set to False to not see public flavors.
:param get_extras: Also retrieve the extra specs.
Example GET:
http://localhost/api/nova/flavors?is_public=true
"""
is_public = request.GET.get('is_public')
is_public = (is_public and is_public.lower() == 'true')
get_extras = request.GET.get('get_extras')
get_extras = bool(get_extras and get_extras.lower() == 'true')
flavors = api.nova.flavor_list(request, is_public=is_public,
get_extras=get_extras)
result = {'items': []}
for flavor in flavors:
d = flavor.to_dict()
if get_extras:
d['extras'] = flavor.extras
result['items'].append(d)
return result
@rest_utils.ajax(data_required=True)
def post(self, request):
flavor_access = request.DATA.get('flavor_access', [])
flavor_id = request.DATA['id']
is_public = not flavor_access
flavor = api.nova.flavor_create(request,
name=request.DATA['name'],
memory=request.DATA['ram'],
vcpu=request.DATA['vcpus'],
disk=request.DATA['disk'],
ephemeral=request
.DATA['OS-FLV-EXT-DATA:ephemeral'],
swap=request.DATA['swap'],
flavorid=flavor_id,
is_public=is_public
)
for project in flavor_access:
api.nova.add_tenant_to_flavor(
request, flavor.id, project.get('id'))
return rest_utils.CreatedResponse(
'/api/nova/flavors/%s' % flavor.id,
flavor.to_dict()
)
@urls.register
class Flavor(generic.View):
"""API for retrieving a single flavor
"""
url_regex = r'nova/flavors/(?P<flavor_id>[^/]+)/$'
@rest_utils.ajax()
def get(self, request, flavor_id):
"""Get a specific flavor
:param get_extras: Also retrieve the extra specs.
Example GET:
http://localhost/api/nova/flavors/1
"""
get_extras = self.extract_boolean(request, 'get_extras')
get_access_list = self.extract_boolean(request, 'get_access_list')
flavor = api.nova.flavor_get(request, flavor_id, get_extras=get_extras)
result = flavor.to_dict()
# Bug: nova API stores and returns empty string when swap equals 0
# https://bugs.launchpad.net/nova/+bug/1408954
if 'swap' in result and result['swap'] == '':
result['swap'] = 0
if get_extras:
result['extras'] = flavor.extras
if get_access_list and not flavor.is_public:
access_list = [item.tenant_id for item in
api.nova.flavor_access_list(request, flavor_id)]
result['access-list'] = access_list
return result
@rest_utils.ajax()
def delete(self, request, flavor_id):
api.nova.flavor_delete(request, flavor_id)
@rest_utils.ajax(data_required=True)
def patch(self, request, flavor_id):
flavor_access = request.DATA.get('flavor_access', [])
is_public = not flavor_access
# Grab any existing extra specs, because flavor edit is currently
# implemented as a delete followed by a create.
extras_dict = api.nova.flavor_get_extras(request, flavor_id, raw=True)
# Mark the existing flavor as deleted.
api.nova.flavor_delete(request, flavor_id)
        # Then create a new flavor with the same name and ID. If the delete
        # above fails, the create below will error out, because active
        # flavors can't have the same name.
flavor = api.nova.flavor_create(request,
name=request.DATA['name'],
memory=request.DATA['ram'],
vcpu=request.DATA['vcpus'],
disk=request.DATA['disk'],
ephemeral=request
.DATA['OS-FLV-EXT-DATA:ephemeral'],
swap=request.DATA['swap'],
flavorid=flavor_id,
is_public=is_public
)
for project in flavor_access:
api.nova.add_tenant_to_flavor(
request, flavor.id, project.get('id'))
if extras_dict:
api.nova.flavor_extra_set(request, flavor.id, extras_dict)
def extract_boolean(self, request, name):
bool_string = request.GET.get(name)
return bool(bool_string and bool_string.lower() == 'true')
@urls.register
class FlavorExtraSpecs(generic.View):
"""API for managing flavor extra specs
"""
url_regex = r'nova/flavors/(?P<flavor_id>[^/]+)/extra-specs/$'
@rest_utils.ajax()
def get(self, request, flavor_id):
"""Get a specific flavor's extra specs
Example GET:
http://localhost/api/nova/flavors/1/extra-specs
"""
return api.nova.flavor_get_extras(request, flavor_id, raw=True)
@rest_utils.ajax(data_required=True)
def patch(self, request, flavor_id):
"""Update a specific flavor's extra specs.
This method returns HTTP 204 (no content) on success.
"""
if request.DATA.get('removed'):
api.nova.flavor_extra_delete(
request, flavor_id, request.DATA.get('removed')
)
api.nova.flavor_extra_set(
request, flavor_id, request.DATA['updated']
)
@urls.register
class AggregateExtraSpecs(generic.View):
"""API for managing aggregate extra specs
"""
url_regex = r'nova/aggregates/(?P<aggregate_id>[^/]+)/extra-specs/$'
@rest_utils.ajax()
def get(self, request, aggregate_id):
"""Get a specific aggregate's extra specs
Example GET:
http://localhost/api/nova/flavors/1/extra-specs
"""
return api.nova.aggregate_get(request, aggregate_id).metadata
@rest_utils.ajax(data_required=True)
def patch(self, request, aggregate_id):
"""Update a specific aggregate's extra specs.
This method returns HTTP 204 (no content) on success.
"""
updated = request.DATA['updated']
if request.DATA.get('removed'):
for name in request.DATA.get('removed'):
updated[name] = None
api.nova.aggregate_set_metadata(request, aggregate_id, updated)
|
|
import logging
import numpy as np
from scipy.optimize import brent
from math import gcd
from qcodes import Instrument
from qcodes.utils import validators as vals
from qcodes.instrument.parameter import ManualParameter
from pycqed.utilities.general import add_suffix_to_dict_keys
from pycqed.measurement import detector_functions as det
from pycqed.measurement import composite_detector_functions as cdet
from pycqed.measurement import mc_parameter_wrapper as pw
from pycqed.measurement import sweep_functions as swf
from pycqed.measurement import awg_sweep_functions as awg_swf
from pycqed.analysis import measurement_analysis as ma
from pycqed.analysis_v2 import measurement_analysis as ma2
from pycqed.analysis_v2 import alignment_analysis as aa
from pycqed.analysis import analysis_toolbox as a_tools
from pycqed.measurement.calibration_toolbox import mixer_carrier_cancellation_5014
from pycqed.measurement.calibration_toolbox import mixer_carrier_cancellation_UHFQC
from pycqed.measurement.calibration_toolbox import mixer_skewness_calibration_5014
from pycqed.measurement.optimization import nelder_mead
import pycqed.measurement.pulse_sequences.single_qubit_tek_seq_elts as sq
from pycqed.instrument_drivers.pq_parameters import InstrumentParameter
from .qubit_object import Transmon
from .CBox_driven_transmon import CBox_driven_transmon
# FIXME: It would be better to inherit from Transmon directly and put all the common
# stuff in there but for now I am inheriting from what I already have
# MAR april 2016
class Tektronix_driven_transmon(CBox_driven_transmon):
'''
Setup configuration:
Drive: Tektronix 5014 AWG
Acquisition: CBox or UHFQC
(in the future to be compatible with both CBox and ATS)
Readout pulse configuration:
Set by parameter RO_pulse_type ['MW_IQmod_pulse', 'Gated_MW_RO_pulse']
- LO modulated using AWG: 'MW_IQmod_pulse'
- LO + RF-pulsed with marker: 'Gated_MW_RO_pulse'
Depending on the RO_pulse_type some parameters are not used
'''
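    # Illustrative usage sketch (the instrument names are assumptions and
    # must refer to instruments already present in the qcodes station):
    #
    #   qubit = Tektronix_driven_transmon('QB1')
    #   qubit.AWG('AWG_5014')          # name of the Tektronix 5014 AWG
    #   qubit.LO('LO_source')          # name of the readout LO source
    #   qubit.RO_pulse_type('Gated_MW_RO_pulse')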
def __init__(self, name, **kw):
super(CBox_driven_transmon, self).__init__(name, **kw)
# Change this when inheriting directly from Transmon instead of
# from CBox driven Transmon.
# Adding instrument parameters
self.add_parameter('LO', parameter_class=InstrumentParameter)
self.add_parameter('cw_source', parameter_class=InstrumentParameter)
self.add_parameter('td_source', parameter_class=InstrumentParameter)
self.add_parameter('IVVI', parameter_class=InstrumentParameter)
self.add_parameter('FluxCtrl', parameter_class=InstrumentParameter)
self.add_parameter('AWG', parameter_class=InstrumentParameter)
self.add_parameter('heterodyne_instr',
parameter_class=InstrumentParameter)
self.add_parameter('LutMan', parameter_class=InstrumentParameter)
self.add_parameter('CBox', parameter_class=InstrumentParameter)
self.add_parameter('MC', parameter_class=InstrumentParameter)
self.add_parameter('Magnet',parameter_class=InstrumentParameter)
self.add_parameter('RF_RO_source',
parameter_class=InstrumentParameter)
self.add_parameter('mod_amp_cw', label='RO modulation ampl cw',
unit='V', initial_value=0.5,
parameter_class=ManualParameter)
self.add_parameter('RO_power_cw', label='RO power cw',
unit='dBm',
parameter_class=ManualParameter)
self.add_parameter('spec_pow', label='spectroscopy power',
unit='dBm',
parameter_class=ManualParameter)
self.add_parameter('spec_pow_pulsed',
label='pulsed spectroscopy power',
unit='dBm',
parameter_class=ManualParameter)
self.add_parameter('td_source_pow',
label='Time-domain power',
unit='dBm',
parameter_class=ManualParameter)
self.add_parameter('spec_pulse_type', label='Pulsed spec pulse type',
parameter_class=ManualParameter,
initial_value='SquarePulse',
vals=vals.Enum('SquarePulse')) # , SSB_DRAG_pulse))
# we should also implement SSB_DRAG_pulse for pulsed spec
self.add_parameter('spec_pulse_length',
label='Pulsed spec pulse duration',
unit='s',
vals=vals.Numbers(1e-9, 20e-6),
parameter_class=ManualParameter)
self.add_parameter('spec_pulse_marker_channel',
unit='s',
vals=vals.Strings(),
parameter_class=ManualParameter)
self.add_parameter('spec_pulse_depletion_time',
unit='s',
vals=vals.Numbers(1e-9, 50e-6),
parameter_class=ManualParameter)
# Rename f_RO_mod
# Time-domain parameters
self.add_parameter('pulse_I_channel', initial_value='ch1',
vals=vals.Strings(),
parameter_class=ManualParameter)
self.add_parameter('pulse_Q_channel', initial_value='ch2',
vals=vals.Strings(),
parameter_class=ManualParameter)
self.add_parameter('pulse_I_offset', initial_value=0.0,
vals=vals.Numbers(min_value=-0.12, max_value=0.12),
parameter_class=ManualParameter)
self.add_parameter('pulse_Q_offset', initial_value=0.0,
vals=vals.Numbers(min_value=-0.12, max_value=0.12),
parameter_class=ManualParameter)
# readout parameters for time domain
self.add_parameter('RO_acq_averages', initial_value=1024,
vals=vals.Numbers(min_value=0, max_value=1e6),
parameter_class=ManualParameter)
self.add_parameter('RO_acq_integration_length', initial_value=1e-6,
vals=vals.Numbers(
min_value=10e-9, max_value=1000e-6),
parameter_class=ManualParameter)
self.add_parameter('RO_acq_weight_function_I', initial_value=0,
vals=vals.Ints(0, 5),
parameter_class=ManualParameter)
self.add_parameter('RO_acq_weight_function_Q', initial_value=1,
vals=vals.Ints(0, 5),
parameter_class=ManualParameter)
# These parameters are only relevant if using MW_IQmod_pulse type
# RO
self.add_parameter('RO_I_channel', initial_value='ch3',
vals=vals.Strings(),
parameter_class=ManualParameter)
self.add_parameter('RO_Q_channel', initial_value='ch4',
vals=vals.Strings(),
parameter_class=ManualParameter)
self.add_parameter('RO_I_offset', initial_value=0.0,
vals=vals.Numbers(min_value=-0.1, max_value=0.1),
parameter_class=ManualParameter)
self.add_parameter('RO_Q_offset', initial_value=0.0,
vals=vals.Numbers(min_value=-0.1, max_value=0.1),
parameter_class=ManualParameter)
self.add_parameter('RO_pulse_type', initial_value='MW_IQmod_pulse_tek',
vals=vals.Enum('MW_IQmod_pulse_tek',
'MW_IQmod_pulse_UHFQC',
'Gated_MW_RO_pulse'),
parameter_class=ManualParameter)
# Relevant when using a marker channel to gate a MW-RO tone.
self.add_parameter('RO_pulse_marker_channel',
vals=vals.Strings(),
parameter_class=ManualParameter)
self.add_parameter('RO_pulse_power', unit='dBm',
parameter_class=ManualParameter)
self.add_parameter('f_pulse_mod',
initial_value=-100e6,
label='pulse-modulation frequency', unit='Hz',
parameter_class=ManualParameter)
self.add_parameter('f_RO_mod',
label='Readout-modulation frequency', unit='Hz',
initial_value=-2e7,
parameter_class=ManualParameter)
self.add_parameter('amp180',
label='Pi-pulse amplitude', unit='V',
initial_value=.25,
vals=vals.Numbers(min_value=-2.25, max_value=2.25),
parameter_class=ManualParameter)
self.add_parameter('amp90_scale',
label='pulse amplitude scaling factor', unit='',
initial_value=.5,
vals=vals.Numbers(min_value=0, max_value=1.0),
parameter_class=ManualParameter)
self.add_parameter('gauss_sigma', unit='s',
initial_value=10e-9,
parameter_class=ManualParameter)
self.add_parameter('motzoi', label='Motzoi parameter', unit='',
initial_value=0,
parameter_class=ManualParameter)
self.add_parameter('phi_skew', label='IQ phase skewness', unit='deg',
vals=vals.Numbers(-180, 180),
initial_value=0,
parameter_class=ManualParameter)
self.add_parameter('alpha', label='QI amplitude skewness', unit='',
vals=vals.Numbers(.1, 2),
initial_value=1,
parameter_class=ManualParameter)
# Single shot readout specific parameters
self.add_parameter('RO_threshold', unit='dac-value',
initial_value=0,
parameter_class=ManualParameter)
# CBox specific parameter
self.add_parameter('signal_line', parameter_class=ManualParameter,
vals=vals.Enum(0, 1), initial_value=0)
self.add_parameter('acquisition_instr',
set_cmd=self._do_set_acquisition_instr,
get_cmd=self._do_get_acquisition_instr,
vals=vals.Strings())
self.add_parameter('flux_pulse_buffer',
label='Flux pulse buffer', unit='s',
initial_value=0.,
vals=vals.Numbers(min_value=0., max_value=50e-6),
parameter_class=ManualParameter)
self.add_parameter('fluxing_channel', initial_value='ch1',
vals=vals.Strings(),
parameter_class=ManualParameter)
self.add_parameter('fluxing_amp',
label='SWAP resolution', unit='V',
initial_value=.5,
vals=vals.Numbers(min_value=-1., max_value=1.),
parameter_class=ManualParameter)
self.add_parameter('SWAP_amp',
label='SWAP amplitude', unit='V',
initial_value=0.02,
vals=vals.Numbers(min_value=0.02, max_value=4.5),
parameter_class=ManualParameter)
self.add_parameter('SWAP_time',
label='SWAP Time', unit='s',
initial_value=0.,
vals=vals.Numbers(min_value=0., max_value=1e-6),
parameter_class=ManualParameter)
self.add_parameter('flux_dead_time',
label='Time between flux pulse and comp.', unit='s',
initial_value=0.,
vals=vals.Numbers(min_value=0., max_value=50e-6),
parameter_class=ManualParameter)
self.add_parameter('mw_to_flux_delay',
                           label='time between mw pulse and start of flux pulse', unit='s',
initial_value=0.,
vals=vals.Numbers(min_value=0., max_value=50e-6),
parameter_class=ManualParameter)
self.add_parameter('dist_dict',
get_cmd=self.get_dist_dict,
set_cmd=self.set_dist_dict,
vals=vals.Anything())
def get_dist_dict(self):
return self._dist_dict
def set_dist_dict(self, dist_dict):
self._dist_dict = dist_dict
def prepare_for_continuous_wave(self):
# makes sure the settings of the acquisition instrument are reloaded
self.acquisition_instr(self.acquisition_instr())
self.heterodyne_instr.get_instr().acquisition_instr(self.acquisition_instr())
# Heterodyne tone configuration
if not self.f_RO():
RO_freq = self.f_res()
else:
RO_freq = self.f_RO()
self.heterodyne_instr.get_instr()._disable_auto_seq_loading = False
self.heterodyne_instr.get_instr().RF.on()
self.heterodyne_instr.get_instr().LO.on()
if hasattr(self.heterodyne_instr.get_instr(), 'mod_amp'):
self.heterodyne_instr.get_instr().set('mod_amp', self.mod_amp_cw.get())
else:
self.heterodyne_instr.get_instr().RF_power(self.RO_power_cw())
self.heterodyne_instr.get_instr().set('f_RO_mod', self.f_RO_mod.get())
self.heterodyne_instr.get_instr().frequency.set(RO_freq)
self.heterodyne_instr.get_instr().RF.power(self.RO_power_cw())
self.heterodyne_instr.get_instr().RF_power(self.RO_power_cw())
self.heterodyne_instr.get_instr().nr_averages(self.RO_acq_averages())
# Turning off TD source
if self.td_source.get_instr() is not None:
self.td_source.get_instr().off()
# Updating Spec source
if self.cw_source() is not None:
self.cw_source.get_instr().power(self.spec_pow())
self.cw_source.get_instr().frequency(self.f_qubit())
self.cw_source.get_instr().off()
if hasattr(self.cw_source.get_instr(), 'pulsemod_state'):
self.cw_source.get_instr().pulsemod_state('off')
if hasattr(self.RF_RO_source.get_instr(), 'pulsemod_state'):
self.RF_RO_source.get_instr().pulsemod_state('Off')
else:
            logging.warning('No spectroscopy source (cw_source) specified')
def prepare_for_pulsed_spec(self):
# TODO: fix prepare for pulsed spec
# TODO: make measure pulsed spec
self.prepare_for_timedomain()
if self.td_source.get_instr() != None:
self.td_source.get_instr().off()
self.cw_source.get_instr().frequency(self.f_qubit())
self.cw_source.get_instr().power(self.spec_pow_pulsed())
if hasattr(self.cw_source.get_instr(), 'pulsemod_state'):
self.cw_source.get_instr().pulsemod_state('On')
else:
            raise RuntimeError(
                'Spec source for pulsed spectroscopy does not support pulsing!')
self.cw_source.get_instr().on()
def prepare_for_timedomain(self, input_averaging=False):
# makes sure the settings of the acquisition instrument are reloaded
self.acquisition_instr(self.acquisition_instr())
if self.td_source.get_instr() != None:
self.td_source.get_instr().pulsemod_state('Off')
self.LO.get_instr().on()
if self.cw_source.get_instr() != None:
self.cw_source.get_instr().off()
if self.td_source.get_instr() != None:
self.td_source.get_instr().on()
# Ensures the self.pulse_pars and self.RO_pars get created and updated
self.get_pulse_pars()
        # Set source to fs = f - f_mod such that pulses appear at f = fs + f_mod
if self.td_source.get_instr() != None:
self.td_source.get_instr().frequency.set(self.f_qubit.get()
- self.f_pulse_mod.get())
# Use resonator freq unless explicitly specified
if self.f_RO.get() is None:
f_RO = self.f_res.get()
else:
f_RO = self.f_RO.get()
self.LO.get_instr().frequency.set(f_RO - self.f_RO_mod.get())
if self.td_source.get_instr() != None:
self.td_source.get_instr().power.set(self.td_source_pow.get())
        # # makes sure dac range is used optimally, 20% overhead for mixer skew
        # # use 60% of range, based on linear range in mathematica
self.AWG.get_instr().set('{}_amp'.format(self.pulse_I_channel()),
self.amp180()*3.0)
self.AWG.get_instr().set('{}_amp'.format(self.pulse_Q_channel()),
self.amp180()*3.0)
self.AWG.get_instr().set(self.pulse_I_channel.get()+'_offset',
self.pulse_I_offset.get())
self.AWG.get_instr().set(self.pulse_Q_channel.get()+'_offset',
self.pulse_Q_offset.get())
if self.RO_pulse_type() == 'MW_IQmod_pulse_tek':
self.AWG.get_instr().set(self.RO_I_channel.get()+'_offset',
self.RO_I_offset.get())
self.AWG.get_instr().set(self.RO_Q_channel.get()+'_offset',
self.RO_Q_offset.get())
elif self.RO_pulse_type() == 'MW_IQmod_pulse_UHFQC':
eval('self._acquisition_instr.sigouts_{}_offset({})'.format(
self.RO_I_channel(), self.RO_I_offset()))
eval('self._acquisition_instr.sigouts_{}_offset({})'.format(
self.RO_Q_channel(), self.RO_Q_offset()))
# This is commented out as doing this by default breaks multiplexed readout
            # it should instead be done using the lutman
# self._acquisition_instr.awg_sequence_acquisition_and_pulse_SSB(
# f_RO_mod=self.f_RO_mod(), RO_amp=self.RO_amp(),
# RO_pulse_length=self.RO_pulse_length(), acquisition_delay=270e-9)
elif self.RO_pulse_type.get() == 'Gated_MW_RO_pulse':
self.RF_RO_source.get_instr().pulsemod_state('On')
self.RF_RO_source.get_instr().frequency(self.f_RO.get())
self.RF_RO_source.get_instr().power(self.RO_pulse_power.get())
self.RF_RO_source.get_instr().frequency(self.f_RO())
self.RF_RO_source.get_instr().on()
if 'UHFQC' in self.acquisition_instr():
# self._acquisition_instr.awg_sequence_acquisition()
            # temporarily removed for debugging
pass
def calibrate_mixer_offsets(self, signal_hound, offs_type='pulse',
update=True):
'''
input:
signal_hound: instance of the SH instrument
offs_type: ['pulse' | 'RO'] whether to calibrate the
RO or pulse IQ offsets
update: update the values in the qubit object
        Calibrates the mixer carrier leakage and updates the I and Q offsets
        in the qubit object.
        The signal hound needs to be given as it is not part of the qubit
        object, in order to reduce dependencies.
'''
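        # Illustrative call (SH is an already-instantiated SignalHound
        # instrument; the name is an assumption):
        #
        #   qubit.calibrate_mixer_offsets(SH, offs_type='RO', update=True)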
# ensures freq is set correctly
# Still need to test this, start by doing this in notebook
self.prepare_for_timedomain()
self.AWG.get_instr().stop() # Make sure no waveforms are played
if offs_type == 'pulse':
AWG_channel1 = self.pulse_I_channel.get()
AWG_channel2 = self.pulse_Q_channel.get()
source = self.td_source.get_instr()
elif offs_type == 'RO':
AWG_channel1 = self.RO_I_channel.get()
AWG_channel2 = self.RO_Q_channel.get()
source = self.LO.get_instr()
else:
raise ValueError('offs_type "{}" not recognized'.format(offs_type))
offset_I, offset_Q = mixer_carrier_cancellation_5014(
AWG=self.AWG.get_instr(), SH=signal_hound, source=source, MC=self.MC.get_instr(),
AWG_channel1=AWG_channel1, AWG_channel2=AWG_channel2, xtol=0.0003)
if update:
if offs_type == 'pulse':
self.pulse_I_offset.set(offset_I)
self.pulse_Q_offset.set(offset_Q)
if offs_type == 'RO':
self.RO_I_offset.set(offset_I)
self.RO_Q_offset.set(offset_Q)
def calibrate_mixer_offsets_IQ_mod_RO_UHFQC(self, signal_hound,
update=True):
'''
        input:
          signal_hound: instance of the SH instrument
          update:       update the values in the qubit object
        Calibrates the RO mixer carrier leakage and updates the RO I and Q
        offsets in the qubit object.
        The signal hound needs to be given as it is not part of the qubit
        object, in order to reduce dependencies.
'''
# ensures freq is set correctly
# Still need to test this, start by doing this in notebook
self.prepare_for_timedomain()
self.AWG.get_instr().stop() # Make sure no waveforms are played
AWG_channel1 = self.RO_I_channel.get()
AWG_channel2 = self.RO_Q_channel.get()
source = self.LO.get_instr()
offset_I, offset_Q = mixer_carrier_cancellation_UHFQC(
UHFQC=self._acquisition_instr, SH=signal_hound, source=source, MC=self.MC.get_instr(
),
AWG_channel1=AWG_channel1, AWG_channel2=AWG_channel2)
if update:
self.RO_I_offset.set(offset_I)
self.RO_Q_offset.set(offset_Q)
def calibrate_mixer_skewness(self, signal_hound, station, update=True):
'''
Calibrates mixer skewness at the frequency relevant for qubit driving.
        Note: I don't like that you have to pass station here but I don't want
to introduce extra variables at this point, it should be available to
you in the notebook (MAR).
'''
self.prepare_for_timedomain()
phi, alpha = mixer_skewness_calibration_5014(
signal_hound, self.td_source.get_instr(), station,
f_mod=self.f_pulse_mod.get(),
I_ch=self.pulse_I_channel.get(), Q_ch=self.pulse_Q_channel.get(),
name='Mixer_skewness'+self.msmt_suffix)
if update:
self.phi_skew.set(phi)
self.alpha.set(alpha)
def calibrate_RO_threshold(self, method='conventional',
MC=None, close_fig=True,
verbose=False, make_fig=True):
raise NotImplementedError()
def measure_heterodyne_spectroscopy(self, freqs, MC=None,
analyze=True, close_fig=True):
self.prepare_for_continuous_wave()
# sqts.Pulsed_spec_seq(spec_pars, RO_pars)
if MC is None:
MC = self.MC.get_instr()
MC.set_sweep_function(pw.wrap_par_to_swf(
self.heterodyne_instr.get_instr().frequency, retrieve_value=True))
MC.set_sweep_points(freqs)
MC.set_detector_function(
det.Heterodyne_probe(self.heterodyne_instr.get_instr(),
trigger_separation=self.RO_acq_integration_length()+5e-6, RO_length=self.RO_acq_integration_length()))
MC.run(name='Resonator_scan'+self.msmt_suffix)
if analyze:
ma.MeasurementAnalysis(auto=True, close_fig=close_fig)
def measure_spectroscopy(self, freqs, pulsed=False, MC=None,
analyze=True, close_fig=True,
force_load=True, use_max=False, update=True):
self.prepare_for_continuous_wave()
self.cw_source.get_instr().on()
if MC is None:
MC = self.MC.get_instr()
if pulsed:
# Redirect to the pulsed spec function
return self.measure_pulsed_spectroscopy(freqs=freqs,
MC=MC,
analyze=analyze,
close_fig=close_fig,
update=update,
upload=force_load)
probe = det.Heterodyne_probe(self.heterodyne_instr.get_instr(),
trigger_separation=5e-6 + self.RO_acq_integration_length(),
RO_length=self.RO_acq_integration_length())
probe.prepare()
phase = probe.acquire_data_point()[1]
self.RF_RO_source.get_instr().phase(
(self.RF_RO_source.get_instr().phase()-phase)%360)
MC.set_sweep_function(pw.wrap_par_to_swf(
self.cw_source.get_instr().frequency, retrieve_value=True))
MC.set_sweep_points(freqs)
MC.set_detector_function(
det.Heterodyne_probe(
self.heterodyne_instr.get_instr(),
trigger_separation=5e-6 + self.RO_acq_integration_length(),
RO_length=self.RO_acq_integration_length()))
MC.run(name='spectroscopy'+self.msmt_suffix)
if analyze:
ma.MeasurementAnalysis(auto=True, close_fig=close_fig)
self.cw_source.get_instr().off()
def measure_pulsed_spectroscopy(self, freqs, MC=None, analyze=True,
return_detector=False,
close_fig=True, upload=True, update=True,
use_max=False):
"""
Measure pulsed spec with the qubit.
        Accepts a manual sequence parameter, which has to be a call to a
        pulse-generation function, allowing alternative sequences to be
        played instead of the standard one.
"""
self.prepare_for_pulsed_spec()
self.heterodyne_instr.get_instr()._disable_auto_seq_loading = True
self.cw_source.get_instr().pulsemod_state.set('On')
self.cw_source.get_instr().power.set(self.spec_pow_pulsed.get())
self.cw_source.get_instr().on()
# Bugfix: need to suppress auto-reload of the sequence, else the pulsed
# spec sequence gets overwritten
auto_seq_loading = self.heterodyne_instr.get_instr().auto_seq_loading()
self.heterodyne_instr.get_instr().auto_seq_loading(False)
if MC is None:
MC = self.MC.get_instr()
spec_pars, RO_pars = self.get_spec_pars()
# Upload the AWG sequence
sq.Pulsed_spec_seq(spec_pars = spec_pars, RO_pars = RO_pars)
self.AWG.get_instr().start()
if return_detector:
return det.Heterodyne_probe(self.heterodyne_instr.get_instr())
else:
MC.set_sweep_function(pw.wrap_par_to_swf(
self.cw_source.get_instr().frequency, retrieve_value=True))
MC.set_sweep_points(freqs)
MC.set_detector_function(
det.Heterodyne_probe(self.heterodyne_instr.get_instr()))
MC.run(name='pulsed-spec'+self.msmt_suffix)
self.heterodyne_instr.get_instr().auto_seq_loading(auto_seq_loading)
if analyze or update:
ma_obj = ma.Qubit_Spectroscopy_Analysis(
auto=True, label='pulsed', close_fig=close_fig)
if use_max:
f_qubit = ma_obj.peaks['peak']
else:
f_qubit = ma_obj.fitted_freq
if update:
self.f_qubit(f_qubit)
self.cw_source.get_instr().off()
return f_qubit
def measure_rabi(self, amps=np.linspace(-.5, .5, 31), n=1,
MC=None, analyze=True, close_fig=True,
verbose=False, upload=True):
# prepare for timedomain takes care of rescaling
self.prepare_for_timedomain()
# # Extra rescaling only happens if the amp180 was far too low for the Rabi
if max(abs(amps))*2 > self.AWG.get_instr().get('{}_amp'.format(self.pulse_I_channel())):
logging.warning('Auto rescaling AWG amplitude as amp180 {}'.format(
self.amp180()) +
' was set very low in comparison to Rabi range')
self.AWG.get_instr().set('{}_amp'.format(self.pulse_I_channel()),
np.max(abs(amps))*3.0)
self.AWG.get_instr().set('{}_amp'.format(self.pulse_Q_channel()),
np.max(abs(amps))*3.0)
if MC is None:
MC = self.MC.get_instr()
MC.set_sweep_function(awg_swf.Rabi(
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars, n=n, upload=upload))
MC.set_sweep_points(amps)
MC.set_detector_function(self.int_avg_det)
MC.run('Rabi-n{}'.format(n)+self.msmt_suffix)
if analyze:
ma.Rabi_Analysis(auto=True, close_fig=close_fig)
def measure_flipping(self, number_of_flips=2*np.arange(20), equator=True,
MC=None, analyze=True, close_fig=True, update=True,
ax='x', angle='180',upload=True):
# prepare for timedomain takes care of rescaling
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
nf = np.array(number_of_flips)
dn = nf[1] - nf[0]
nf = np.concatenate([nf,
(nf[-1]+1*dn,
nf[-1]+2*dn,
nf[-1]+3*dn,
nf[-1]+4*dn)])
MC.set_sweep_function(awg_swf.Flipping(
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars, upload=upload))
MC.set_sweep_points(nf)
MC.set_detector_function(self.int_avg_det)
MC.run('flipping_'+self.msmt_suffix)
if analyze:
ma2.FlippingAnalysis(options_dict={'scan_label':'flipping'})
def measure_rabi_amp90(self,
scales=np.linspace(-0.7, 0.7, 31), n=1,
MC=None, analyze=True, close_fig=True,
verbose=False):
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
MC.set_sweep_function(awg_swf.Rabi_amp90(
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars, n=n))
MC.set_sweep_points(scales)
MC.set_detector_function(self.int_avg_det)
MC.run('Rabi_amp90_scales_n{}'.format(n)+self.msmt_suffix)
if analyze:
ma.Rabi_Analysis(auto=True, close_fig=close_fig)
def measure_T1(self, times, MC=None,
analyze=True, upload=True, close_fig=True):
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
MC.set_sweep_function(awg_swf.T1(
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars, upload=upload))
MC.set_sweep_points(times)
MC.set_detector_function(self.int_avg_det)
MC.run('T1'+self.msmt_suffix)
if analyze:
a = ma.T1_Analysis(auto=True, close_fig=close_fig)
return a.T1
def measure_T1_qp(self, times, N_pi_pulses=2,
N_pi_pulse_delay=100e-9, MC=None, analyze=True,
upload=True, close_fig=True):
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
times = np.concatenate([times,
(times[-1]+times[1],
times[-1]+times[2],
times[-1]+times[3],
times[-1]+times[4])])
MC.set_sweep_function(awg_swf.T1_qp(
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars,
N_pi_pulses=N_pi_pulses, N_pi_pulse_delay=N_pi_pulse_delay,
upload=upload))
MC.set_sweep_points(times)
MC.set_detector_function(self.int_avg_det)
MC.run('T1_qp_N_'+str(N_pi_pulses)+'_tau_pi_'+str(N_pi_pulse_delay)+'_'+self.msmt_suffix)
if analyze:
a = ma.T1_Analysis(auto=True, close_fig=close_fig)
return a.T1
def measure_T1_2pi_qp(self, times, N_2pi_pulses=2,
N_2pi_pulse_delay=100e-9, MC=None, analyze=True,
upload=True, close_fig=True):
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
times = np.concatenate([times,
(times[-1]+times[1],
times[-1]+times[2],
times[-1]+times[3],
times[-1]+times[4])])
MC.set_sweep_function(awg_swf.T1_2pi_qp(
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars,
N_2pi_pulses=N_2pi_pulses, N_2pi_pulse_delay=N_2pi_pulse_delay,
upload=upload))
MC.set_sweep_points(times)
MC.set_detector_function(self.int_avg_det)
MC.run('T1_2pi_qp_N_'+str(N_2pi_pulses)+'_tau_2pi_'+str(N_2pi_pulse_delay)+'_'+self.msmt_suffix)
if analyze:
a = ma.T1_Analysis(auto=True, close_fig=close_fig)
return a.T1
def measure_ramsey(self, times, artificial_detuning=0,
f_qubit=None, label='',
MC=None, analyze=True, close_fig=True, verbose=True,
upload=True, analyze_double_freq = False):
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
if f_qubit is None:
f_qubit = self.f_qubit.get()
self.td_source.get_instr().set(
'frequency', f_qubit - self.f_pulse_mod.get())
Rams_swf = awg_swf.Ramsey(
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars,
artificial_detuning=artificial_detuning, upload=upload)
MC.set_sweep_function(Rams_swf)
MC.set_sweep_points(times)
MC.set_detector_function(self.int_avg_det)
MC.run('Ramsey'+label+self.msmt_suffix)
if analyze:
if analyze_double_freq == False:
a = ma.Ramsey_Analysis(auto=True, close_fig=close_fig)
if verbose:
fitted_freq = a.fit_res.params['frequency'].value
print('Artificial detuning: {:.2e}'.format(
artificial_detuning))
print('Fitted detuning: {:.2e}'.format(fitted_freq))
print('Actual detuning:{:.2e}'.format(
fitted_freq-artificial_detuning))
else:
a = ma.DoubleFrequency(auto=True, close_fig=close_fig)
                # Implement 'verbose' later, this is fine for now
if verbose:
print("Verbose output")
# fitted_freq = a.fit_res.params[]
def measure_echo(self, times, label='', MC=None,
artificial_detuning=None, upload=True,
analyze=True, close_fig=True, verbose=True):
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
Echo_swf = awg_swf.Echo(
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars,
artificial_detuning=artificial_detuning, upload=upload)
MC.set_sweep_function(Echo_swf)
MC.set_sweep_points(times)
MC.set_detector_function(self.int_avg_det)
MC.run('Echo'+label+self.msmt_suffix)
if analyze:
a = ma.Ramsey_Analysis(
auto=True, close_fig=close_fig, label='Echo')
return a
def measure_CPMG(self, times, CPMG_order=4, label='', MC=None,
artificial_detuning=None, upload=True,
analyze=True, close_fig=True, verbose=True):
'''
Runs the CPMG sequence, consisting of a starting x90, then CPMG_order
        times Y180 pulses, and an (artificially detuned) finishing x90 pulse.
        The times specify the delays between the centers of the starting and
        finishing pulses. Ensure that the minimal time does not lead
        to pulse overlap.
'''
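        # Illustrative call (the delay values are hypothetical):
        #
        #   qubit.measure_CPMG(times=np.linspace(200e-9, 20e-6, 41),
        #                      CPMG_order=4, artificial_detuning=100e3)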
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
CPMG_swf = awg_swf.CPMG(
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars,
CPMG_order=CPMG_order,
artificial_detuning=artificial_detuning, upload=upload)
MC.set_sweep_function(CPMG_swf)
MC.set_sweep_points(times)
MC.set_detector_function(self.int_avg_det)
MC.run('CPMG_'+str(CPMG_order)+label+self.msmt_suffix)
if analyze:
a = ma.Ramsey_Analysis(
auto=True, close_fig=close_fig, label='CPMG')
return a
def measure_allxy(self, double_points=True,
MC=None,
analyze=True, close_fig=True, verbose=True):
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
MC.set_sweep_function(awg_swf.AllXY(
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars,
double_points=double_points))
MC.set_detector_function(self.int_avg_det)
MC.run('AllXY'+self.msmt_suffix)
if analyze:
a = ma.AllXY_Analysis(close_main_fig=close_fig)
return a
def measure_randomized_benchmarking(self, nr_cliffords,
nr_seeds=50, T1=None,
MC=None, analyze=True, close_fig=True,
verbose=False, upload=True):
'''
Performs a randomized benchmarking measurement.
Optionally specifying T1 also shows the T1-limited fidelity.
'''
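# Hedged usage sketch ('qubit' is a placeholder instance and the argument
# values are illustrative, not defaults):
#   qubit.measure_randomized_benchmarking(
#       nr_cliffords=2 ** np.arange(11), nr_seeds=50, T1=18e-6)
# The double-curve RB analysis then reports the Clifford fidelity and,
# when T1 is given, the corresponding T1-limited fidelity for comparison.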
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
MC.set_sweep_function(awg_swf.Randomized_Benchmarking(
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars,
double_curves=True,
nr_cliffords=nr_cliffords, nr_seeds=nr_seeds, upload=upload))
MC.set_detector_function(self.int_avg_det)
MC.run('RB_{}seeds'.format(nr_seeds)+self.msmt_suffix)
ma.RB_double_curve_Analysis(
close_main_fig=close_fig, T1=T1,
pulse_delay=self.pulse_delay.get())
def measure_ssro(self, no_fits=False,
return_detector=False,
MC=None,
analyze=True,
close_fig=True,
verbose=True, optimized_weights=False, SSB=True,
one_weight_function_UHFQC=False,
multiplier=1, nr_shots=4095):
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
d = cdet.SSRO_Fidelity_Detector_Tek(
'SSRO'+self.msmt_suffix,
analyze=analyze,
raw=no_fits,
MC=MC,
AWG=self.AWG.get_instr(), acquisition_instr=self._acquisition_instr,
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars, IF=self.f_RO_mod(),
weight_function_I=self.RO_acq_weight_function_I(),
weight_function_Q=self.RO_acq_weight_function_Q(),
nr_shots=nr_shots, one_weight_function_UHFQC=one_weight_function_UHFQC,
optimized_weights=optimized_weights,
integration_length=self.RO_acq_integration_length(),
close_fig=close_fig, SSB=SSB, multiplier=multiplier,
nr_averages=self.RO_acq_averages())
if return_detector:
return d
d.prepare()
d.acquire_data_point()
# if analyze:
# return ma.SSRO_Analysis(rotate=soft_rotate, label='SSRO'+self.msmt_suffix,
# no_fits=no_fits, close_fig=close_fig)
def measure_butterfly(self, return_detector=False,
MC=None,
analyze=True,
close_fig=True,
verbose=True,
initialize=False,
post_msmt_delay=2e-6, case=True):
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
MC.set_sweep_function(awg_swf.Butterfly(
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars,
initialize=initialize, post_msmt_delay=post_msmt_delay))
MC.set_detector_function(self.int_log_det)
MC.run('Butterfly{}initialize_{}'.format(self.msmt_suffix, initialize))
# first perform SSRO analysis to extract the optimal rotation angle
# theta
a = ma.SSRO_discrimination_analysis(
label='Butterfly',
current_threshold=None,
close_fig=close_fig,
plot_2D_histograms=True)
# then, run it a second time to determine the optimum threshold along the
# rotated I axis
b = ma.SSRO_discrimination_analysis(
label='Butterfly',
current_threshold=None,
close_fig=close_fig,
plot_2D_histograms=True, theta_in=-a.theta)
c = ma.butterfly_analysis(
close_main_fig=close_fig, initialize=initialize,
theta_in=-a.theta,
threshold=b.opt_I_threshold, digitize=True, case=case)
return c.butterfly_coeffs
def measure_transients(self, return_detector=False,
MC=None,
analyze=True,
close_fig=True,
verbose=True,
nr_samples=512):
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
MC.set_sweep_function(awg_swf.OffOn(pulse_pars=self.pulse_pars,
RO_pars=self.RO_pars,
pulse_comb='OffOff',
nr_samples=nr_samples))
MC.set_sweep_points(np.arange(nr_samples))
self.input_average_detector.nr_samples = nr_samples
MC.set_detector_function(self.input_average_detector)
MC.run('Measure_transients_{}_0'.format(self.msmt_suffix))
a0 = ma.MeasurementAnalysis(auto=True, close_fig=close_fig)
MC.set_sweep_function(awg_swf.OffOn(pulse_pars=self.pulse_pars,
RO_pars=self.RO_pars,
pulse_comb='OnOn',
nr_samples=nr_samples))
# MC.set_sweep_points(np.arange(nr_samples))
self.input_average_detector.nr_samples = nr_samples
MC.set_detector_function(self.input_average_detector)
MC.run('Measure_transients_{}_1'.format(self.msmt_suffix))
a1 = ma.MeasurementAnalysis(auto=True, close_fig=close_fig)
def measure_rb_vs_amp(self, amps, nr_cliff=1,
resetless=True,
MC=None, analyze=True, close_fig=True,
verbose=False):
raise NotImplementedError()
def measure_motzoi_XY(self, motzois, MC=None, analyze=True, close_fig=True,
verbose=True, update=True):
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
MC.set_sweep_function(awg_swf.Motzoi_XY(
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars, motzois=motzois))
MC.set_detector_function(self.int_avg_det)
MC.run('Motzoi_XY'+self.msmt_suffix)
if analyze:
a = ma.Motzoi_XY_analysis(close_fig=close_fig)
if update:
self.motzoi.set(a.optimal_motzoi)
return a
def measure_freq_XY(self, f_span, n_f, MC=None, analyze=True, close_fig=True,
verbose=True, update=True):
self.prepare_for_timedomain()
if MC is None:
MC = self.MC.get_instr()
freqs = np.linspace(-f_span*0.5, f_span*0.5, n_f) + \
self.f_pulse_mod.get()
MC.set_sweep_function(awg_swf.Freq_XY(
pulse_pars=self.pulse_pars, RO_pars=self.RO_pars, freqs=freqs))
MC.set_detector_function(self.int_avg_det)
MC.run('Freq_XY'+self.msmt_suffix)
# if analyze:
# a = ma.Motzoi_XY_analysis(close_fig=close_fig)
# if update:
# self.motzoi.set(a.optimal_motzoi)
# return a
def measure_BusT1(self, times, MC=None):
if MC is None:
MC = self.MC.get_instr()
cal_points = 4
lengths_cal = times[-1] + \
np.arange(1, 1+cal_points)*(times[1]-times[0])
lengths_vec = np.concatenate((times, lengths_cal))
mw_pulse_pars, RO_pars = self.get_pulse_pars()
flux_pulse_pars, dist_dict = self.get_flux_pars()
BusT1 = awg_swf.BusT1(times,
mw_pulse_pars,
RO_pars,
flux_pulse_pars,
dist_dict=dist_dict,
AWG=self.AWG.get_instr(),
upload=False, return_seq=True)
getattr(self.AWG.get_instr(), 'ch%d_amp' % self.fluxing_channel())(2.)
seq = BusT1.pre_upload()
MC.set_sweep_function(BusT1)
MC.set_sweep_points(lengths_vec)
MC.set_detector_function(self.int_avg_det)
self.AWG.get_instr().ch4_amp(flux_pulse_pars['swap_amp'])
MC.run('Bus_T1')
ma.T1_Analysis(auto=True, label='Bus_T1')
def measure_BusT2(self, times, MC=None):
if MC is None:
MC = self.MC.get_instr()
cal_points = 4
lengths_cal = times[-1] + \
np.arange(1, 1+cal_points)*(times[1]-times[0])
lengths_vec = np.concatenate((times, lengths_cal))
mw_pulse_pars, RO_pars = self.get_pulse_pars()
flux_pulse_pars, dist_dict = self.get_flux_pars()
BusT2 = awg_swf.BusT2(times_vec=times,
mw_pulse_pars=mw_pulse_pars,
RO_pars=RO_pars,
# artificial_detuning=artificial_detuning,
flux_pulse_pars=flux_pulse_pars,
dist_dict=dist_dict,
AWG=self.AWG.get_instr(),
upload=False, return_seq=True)
getattr(self.AWG.get_instr(), 'ch%d_amp' % self.fluxing_channel())(2.)
seq = BusT2.pre_upload()
MC.set_sweep_function(BusT2)
MC.set_sweep_points(lengths_vec)
MC.set_detector_function(self.int_avg_det)
self.AWG.get_instr().ch4_amp(flux_pulse_pars['swap_amp'])
MC.run('Bus_T2')
ma.Ramsey_Analysis(auto=True, label='Bus_T2')
def measure_BusEcho(self, times, artificial_detuning, MC=None):
if MC is None:
MC = self.MC.get_instr()
cal_points = 4
lengths_cal = times[-1] + \
np.arange(1, 1+cal_points)*(times[1]-times[0])
lengths_vec = np.concatenate((times, lengths_cal))
mw_pulse_pars, RO_pars = self.get_pulse_pars()
flux_pulse_pars, dist_dict = self.get_flux_pars()
BusEcho = awg_swf.BusEcho(times_vec=times,
mw_pulse_pars=mw_pulse_pars,
RO_pars=RO_pars,
artificial_detuning=artificial_detuning,
flux_pulse_pars=flux_pulse_pars,
dist_dict=dist_dict,
AWG=self.AWG.get_instr(),
upload=False, return_seq=True)
getattr(self.AWG.get_instr(), 'ch%d_amp' % self.fluxing_channel())(2.)
seq = BusEcho.pre_upload()
MC.set_sweep_function(BusEcho)
MC.set_sweep_points(lengths_vec)
MC.set_detector_function(self.int_avg_det)
self.AWG.get_instr().ch4_amp(flux_pulse_pars['swap_amp'])
MC.run('Bus_Echo')
ma.Ramsey_Analysis(auto=True, label='Bus_Echo')
def _do_get_acquisition_instr(self):
# Specifying the int_avg det here should allow replacing it with ATS
# or potential digitizer acquisition easily
return self._acquisition_instr.name
def _do_set_acquisition_instr(self, acquisition_instr):
# Specifying the int_avg det here should allow replacing it with ATS
# or potential digitizer acquisition easily
self._acquisition_instr = self.find_instrument(acquisition_instr)
if 'CBox' in acquisition_instr:
if self.AWG() != 'None':
logging.info("setting CBox acquisition")
print('starting int avg')
self.int_avg_det = det.CBox_integrated_average_detector(
self._acquisition_instr,
self.AWG.get_instr(),
nr_averages=self.RO_acq_averages(),
integration_length=self.RO_acq_integration_length(
),
normalize=True)
print('starting int avg rot')
self.int_avg_det_rot = det.CBox_integrated_average_detector(
self._acquisition_instr,
self.AWG.get_instr(),
nr_averages=self.RO_acq_averages(),
integration_length=self.RO_acq_integration_length(
),
normalize=True)
print('starting int log det')
self.int_log_det = det.CBox_integration_logging_det(
self._acquisition_instr,
self.AWG.get_instr(),
integration_length=self.RO_acq_integration_length())
self.input_average_detector = det.CBox_input_average_detector(
self._acquisition_instr,
self.AWG.get_instr(), nr_averages=self.RO_acq_averages())
elif 'UHFQC' in acquisition_instr:
logging.info("setting UHFQC acquisition")
self.input_average_detector = det.UHFQC_input_average_detector(
UHFQC=self._acquisition_instr,
AWG=self.AWG.get_instr(), nr_averages=self.RO_acq_averages())
self.int_avg_det = det.UHFQC_integrated_average_detector(
UHFQC=self._acquisition_instr, AWG=self.AWG.get_instr(),
channels=[self.RO_acq_weight_function_I(),
self.RO_acq_weight_function_Q()],
nr_averages=self.RO_acq_averages(),
integration_length=self.RO_acq_integration_length())
self.int_log_det = det.UHFQC_integration_logging_det(
UHFQC=self._acquisition_instr, AWG=self.AWG.get_instr(),
channels=[self.RO_acq_weight_function_I(),
self.RO_acq_weight_function_Q()],
integration_length=self.RO_acq_integration_length())
elif 'DDM' in acquisition_instr:
logging.info("setting DDM acquisition")
self.input_average_detector = det.DDM_input_average_detector(
DDM=self._acquisition_instr,
AWG=self.AWG, nr_averages=self.RO_acq_averages())
self.int_avg_det = det.DDM_integrated_average_detector(
DDM=self._acquisition_instr, AWG=self.AWG,
channels=[self.RO_acq_weight_function_I(),
self.RO_acq_weight_function_Q()],
nr_averages=self.RO_acq_averages(),
integration_length=self.RO_acq_integration_length())
self.int_log_det = det.DDM_integration_logging_det(
DDM=self._acquisition_instr, AWG=self.AWG,
channels=[self.RO_acq_weight_function_I(),
self.RO_acq_weight_function_Q()],
integration_length=self.RO_acq_integration_length())
elif 'ATS' in acquisition_instr:
logging.info("setting ATS acquisition")
raise NotImplementedError()
# self.int_avg_det = det.ATS_integrated_average_continuous_detector(
# ATS=self._acquisition_instr.card,
# ATS_acq=self._acquisition_instr.controller, AWG=self.AWG.get_instr(),
# nr_averages=self.RO_acq_averages())
def get_pulse_dict(self, pulse_dict=None):
'''
Returns a dictionary containing the pulse parameters of the qubit.
This function is intended to replace the old get_pulse_pars.
Dictionary keys are formatted as '<operation> <self.name>'.
Input args:
pulse_dict (dict): Optionally specify an existing pulse dict to update
(currently only contains single qubit pulses)
'''
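# For a qubit named 'q0' the dict would, for example, contain keys such as
# 'X180 q0', 'RO q0' and 'Spec q0' (the exact pulse names come from
# sq.get_pulse_dict_from_pars and are only assumed here for illustration).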
if pulse_dict is None:
pulse_dict = {}
drive_pars, RO_pars = self.get_pulse_pars()
pulse_dict.update(add_suffix_to_dict_keys(
sq.get_pulse_dict_from_pars(drive_pars), ' ' + self.name))
pulse_dict.update({'RO {}'.format(self.name): RO_pars})
spec_pars, RO_pars = self.get_spec_pars()
pulse_dict.update({'Spec {}'.format(self.name): spec_pars})
return pulse_dict
def get_pulse_pars(self):
self.pulse_pars = {
'I_channel': self.pulse_I_channel.get(),
'Q_channel': self.pulse_Q_channel.get(),
'amplitude': self.amp180.get(),
'amp90_scale': self.amp90_scale(),
'sigma': self.gauss_sigma.get(),
'nr_sigma': 4,
'motzoi': self.motzoi.get(),
'mod_frequency': self.f_pulse_mod.get(),
'f_pulse_mod': self.f_pulse_mod.get(),
'pulse_delay': self.pulse_delay.get(),
'phi_skew': self.phi_skew.get(),
'alpha': self.alpha.get(),
'phase': 0,
'operation_type': 'MW',
'target_qubit': self.name,
'pulse_type': 'SSB_DRAG_pulse'}
self.RO_pars = {
'I_channel': self.RO_I_channel.get(),
'Q_channel': self.RO_Q_channel.get(),
'RO_pulse_marker_channel': self.RO_pulse_marker_channel.get(),
'amplitude': self.RO_amp.get(),
'length': self.RO_pulse_length.get(),
'pulse_delay': self.RO_pulse_delay.get(),
'f_RO_mod': self.f_RO_mod.get(),
'acq_marker_delay': self.RO_acq_marker_delay.get(),
'acq_marker_channel': self.RO_acq_marker_channel.get(),
'phase': 0,
'operation_type': 'RO',
'target_qubit': self.name,
'pulse_type': self.RO_pulse_type.get()}
return self.pulse_pars, self.RO_pars
def get_spec_pars(self):
# logging.warning('deprecated use get_operation_dict')
pulse_pars, RO_pars = self.get_pulse_pars()
spec_pars = {'pulse_type': 'SquarePulse',
'length': self.spec_pulse_length.get(),
'amplitude': 1,
'operation_type': 'MW',
'target_qubit': self.name,
'channel': self.spec_pulse_marker_channel.get(),
'f_pulse_mod': self.f_pulse_mod.get()}
# RO_pars['pulse_delay'] += spec_pars['length'] Why is this here??
spec_pars['pulse_delay'] = (RO_pars['length'] +
self.spec_pulse_depletion_time.get())
return spec_pars, RO_pars
def get_operation_dict(self, operation_dict=None):
if operation_dict is None:
operation_dict = {}
operation_dict = super().get_operation_dict(operation_dict)
operation_dict['SpecPulse '+self.name] = self.get_spec_pars()[0]
self.get_pulse_dict(operation_dict)
return operation_dict
def calibrate_field_alignment_to_sample_plane(self, alignment_resonator,
MC_outer_loop, aligment_freq_span = 12e6, Coil_X_field_span = 0.1e-3,
field_perp_step_size = 1.0e-5, MC=None,detector_type=None):
# /* Script to align the magnetic field to a resonator*/
alignment_resonator_name, alignment_resonator_freq = list(alignment_resonator.items())[0]
f_res_aligning = alignment_resonator_freq
minf = f_res_aligning-aligment_freq_span*0.9
maxf = f_res_aligning+aligment_freq_span*0.1+1e6
freqs = np.arange(minf,maxf,2e5)
if detector_type is None:
detector_type = 'Homodyne'
elif detector_type == 'VNA':
raise NotImplementedError()
if MC is None:
MC = self.MC.get_instr()
current_xfield = self.Magnet.get_instr().coil_x.field()
searching_points_xfield = np.arange(-Coil_X_field_span/2+current_xfield,
Coil_X_field_span/2+current_xfield+0.01e-6,
field_perp_step_size)
def measure_fres(minf,maxf,MC):
tmp_suffix = self.msmt_suffix
self.msmt_suffix = self.msmt_suffix + '_coarse'
f_res_aligning = self.find_resonator_frequency(update=False, use_min=True
,freqs = np.arange(minf,maxf,2e5))
self.msmt_suffix = tmp_suffix
fine_freq_span =7e6
minf = f_res_aligning-fine_freq_span/2
maxf = f_res_aligning+fine_freq_span/2
freq_list_res = fine_reso_freq_range(start_freq=minf,stop_freq=maxf,
target_freq=f_res_aligning,precise_range=2e6)
tmp_suffix = self.msmt_suffix
self.msmt_suffix = self.msmt_suffix + '_fine'
f_res_aligning = self.find_resonator_frequency(freqs=freq_list_res,
update=False,use_min=False)
self.msmt_suffix = tmp_suffix
timestamp = a_tools.latest_data(contains=self.name,
return_timestamp=True)[0]
params_dict = {'amp':'amp'}
numeric_params = ['amp']
data = a_tools.get_data_from_timestamp_list([timestamp],
params_dict,
numeric_params=numeric_params,
filter_no_analysis=False)
return {'f_res':f_res_aligning}
d = det.Function_Detector(get_function=measure_fres,
msmt_kw={'minf':minf,
'maxf':maxf,
'MC':MC},
value_units=['Hz'],
value_names=['f_res'],
result_keys=['f_res'])
MC_outer_loop.set_detector_function(d)
MC_outer_loop.set_sweep_function(pw.wrap_par_to_swf(
self.Magnet.get_instr().coil_x.field,
retrieve_value = True))
MC_outer_loop.set_sweep_points(searching_points_xfield)
MC_outer_loop.run('Alignment of '+self.msmt_suffix+' on resonator '+alignment_resonator_name)
timestamp = a_tools.latest_data(contains='Alignment',
return_timestamp=True)[0]
params_dict = {'f_res':'measured_values',
'xfield':'sweep_points'}
numeric_params = ['f_res','xfield']
data = a_tools.get_data_from_timestamp_list([timestamp],
params_dict,
numeric_params=numeric_params,
filter_no_analysis=False)
ind_fmax = np.argmax(data['f_res'][0,0])
xfields = data['xfield'][0]
opt_xfield = xfields[ind_fmax]
self.Magnet.get_instr().coil_x.field(opt_xfield)
aa.AlignmentAnalysis(timestamp)
return opt_xfield
def fine_reso_freq_range(start_freq,stop_freq,target_freq=None,precise_range=5e6,verbose = False):
'''
Create a finer frequency range around the resonator based on the previous resonator position.
Please use start_freq < stop_freq.
start_freq and stop_freq are both in Hertz
'''
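# Worked example (illustrative numbers): with start_freq=6.0e9,
# stop_freq=6.1e9, target_freq=6.05e9 and precise_range=5e6 the result is
#   np.arange(6.0e9, 6.0475e9, 2e5)       # coarse part, 200 kHz steps
#   np.arange(6.0475e9, 6.0525e9, 2.5e4)  # fine part, 25 kHz steps around f_res
#   np.arange(6.0525e9, 6.1e9, 2e5)       # coarse part again
# concatenated with np.hstack.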
if target_freq is None:
previous_timestamp = a_tools.latest_data(contains='Resonator_scan', return_timestamp=True)[0]
reso_dict = {'f_res_fit':'Fitted Params HM.f0.value'}
numeric_params = ['f_res_fit']
data = (a_tools.get_data_from_timestamp_list([previous_timestamp], reso_dict,
numeric_params=numeric_params, filter_no_analysis=False))
reso_freq = data['f_res_fit'][0]*1e9
else:
reso_freq = target_freq
if verbose:
print('Making a fine list around '+str(reso_freq/1e9)+' GHz')
if reso_freq is None:
freq_list_res = np.arange(start_freq,stop_freq,2e5) # Straight part fast, because reso = None
elif reso_freq < start_freq or reso_freq > stop_freq:
freq_list_res = np.arange(start_freq,stop_freq,2e5) # Straight part fast, because reso out of range
elif reso_freq <= start_freq + precise_range/2.:
freq_list_res = np.hstack([np.arange(start_freq,reso_freq+precise_range/2.,2.5e4), # Reso part precise
np.arange(reso_freq+precise_range/2.,stop_freq,2e5)]) # Straight part fast
elif reso_freq >= stop_freq - precise_range/2.:
freq_list_res = np.hstack([np.arange(start_freq,reso_freq-precise_range/2.,2e5), # Straight part fast
np.arange(reso_freq-precise_range/2.,stop_freq,2.5e4)]) # Reso part precise
else:
freq_list_res = np.hstack([np.arange(start_freq,reso_freq-precise_range/2.,2e5), # Straight part fast
np.arange(reso_freq-precise_range/2.,reso_freq+precise_range/2.,2.5e4), # Reso part precise
np.arange(reso_freq+precise_range/2.,stop_freq,2e5)]) # Straight part fast
return freq_list_res
|
|
from __future__ import with_statement
import unittest
import tempfile
import inspect
import shutil
import sys
import os
import re
from os.path import abspath, basename, dirname, exists, join, normpath
from robot.errors import DataError
from robot.utils.importer import Importer, ByPathImporter
from robot.utils.asserts import (assert_equals, assert_true, assert_raises,
assert_raises_with_msg)
CURDIR = dirname(abspath(__file__))
LIBDIR = normpath(join(CURDIR, '..', '..', 'atest', 'testresources', 'testlibs'))
TEMPDIR = tempfile.gettempdir()
TESTDIR = join(TEMPDIR, 'robot-importer-testing')
WINDOWS_PATH_IN_ERROR = re.compile(r"'\w:\\")
def assert_prefix(error, expected):
message = unicode(error)
count = 3 if WINDOWS_PATH_IN_ERROR.search(message) else 2
prefix = ':'.join(message.split(':')[:count]) + ':'
assert_equals(prefix, expected)
def create_temp_file(name, attr=42, extra_content=''):
if not exists(TESTDIR):
os.mkdir(TESTDIR)
path = join(TESTDIR, name)
with open(path, 'w') as file:
file.write('attr = %r\n' % attr)
file.write('def func():\n return attr\n')
file.write(extra_content)
return path
class LoggerStub(object):
def __init__(self, remove_extension=False):
self.messages = []
self.remove_extension = remove_extension
def info(self, msg):
if self.remove_extension:
for ext in '$py.class', '.pyc', '.py':
msg = msg.replace(ext, '')
self.messages.append(msg)
def assert_message(self, msg, index=0):
assert_equals(self.messages[index], msg)
class TestImportByPath(unittest.TestCase):
def setUp(self):
self.tearDown()
def tearDown(self):
if exists(TESTDIR):
shutil.rmtree(TESTDIR)
def test_python_file(self):
path = create_temp_file('test.py')
self._import_and_verify(path, remove='test')
self._assert_imported_message('test', path)
def test_python_directory(self):
create_temp_file('__init__.py')
module_name = basename(TESTDIR)
self._import_and_verify(TESTDIR, remove=module_name)
self._assert_imported_message(module_name, TESTDIR)
def test_import_same_file_multiple_times(self):
path = create_temp_file('test.py')
self._import_and_verify(path, remove='test')
self._assert_imported_message('test', path)
self._import_and_verify(path)
self._assert_imported_message('test', path)
self._import_and_verify(path, name='library')
self._assert_imported_message('test', path, type='library module')
def test_import_different_file_and_directory_with_same_name(self):
path1 = create_temp_file('test.py', attr=1)
self._import_and_verify(path1, attr=1, remove='test')
self._assert_imported_message('test', path1)
path2 = join(TESTDIR, 'test')
os.mkdir(path2)
create_temp_file(join(path2, '__init__.py'), attr=2)
self._import_and_verify(path2, attr=2, directory=path2)
self._assert_removed_message('test')
self._assert_imported_message('test', path2, index=1)
path3 = create_temp_file(join(path2, 'test.py'), attr=3)
self._import_and_verify(path3, attr=3, directory=path2)
self._assert_removed_message('test')
self._assert_imported_message('test', path3, index=1)
def test_import_class_from_file(self):
path = create_temp_file('test.py', extra_content='class test:\n def m(s): return 1')
klass = self._import(path, remove='test')
self._assert_imported_message('test', path, type='class')
assert_true(inspect.isclass(klass))
assert_equals(klass.__name__, 'test')
assert_equals(klass().m(), 1)
def test_invalid_python_file(self):
path = create_temp_file('test.py', extra_content='invalid content')
error = assert_raises(DataError, self._import_and_verify, path, remove='test')
assert_prefix(error, "Importing '%s' failed: SyntaxError:" % path)
if sys.platform.startswith('java'):
def test_java_class_with_java_extension(self):
path = join(CURDIR, 'ImportByPath.java')
self._import_and_verify(path, remove='ImportByPath')
self._assert_imported_message('ImportByPath', path, type='class')
def test_java_class_with_class_extension(self):
path = join(CURDIR, 'ImportByPath.class')
self._import_and_verify(path, remove='ImportByPath', name='java')
self._assert_imported_message('ImportByPath', path, type='java class')
def test_importing_java_package_fails(self):
path = join(LIBDIR, 'javapkg')
assert_raises_with_msg(DataError,
"Importing '%s' failed: Expected class or "
"module, got <javapackage>." % path,
self._import, path, remove='javapkg')
def test_removing_from_sys_modules_when_importing_multiple_times(self):
path = join(CURDIR, 'ImportByPath.java')
self._import(path, name='java', remove='ImportByPath')
self._assert_imported_message('ImportByPath', path, 'java class')
self._import(path)
self._assert_removed_message('ImportByPath')
self._assert_imported_message('ImportByPath', path, 'class', index=1)
def _import_and_verify(self, path, attr=42, directory=TESTDIR,
name=None, remove=None):
module = self._import(path, name, remove)
assert_equals(module.attr, attr)
assert_equals(module.func(), attr)
if hasattr(module, '__file__'):
assert_equals(dirname(abspath(module.__file__)), directory)
def _import(self, path, name=None, remove=None):
if remove and remove in sys.modules:
del sys.modules[remove]
self.logger = LoggerStub()
importer = Importer(name, self.logger)
sys_path_before = sys.path[:]
try:
return importer.import_class_or_module_by_path(path)
finally:
assert_equals(sys.path, sys_path_before)
def _assert_imported_message(self, name, source, type='module', index=0):
msg = "Imported %s '%s' from '%s'." % (type, name, source)
self.logger.assert_message(msg, index=index)
def _assert_removed_message(self, name, index=0):
msg = "Removed module '%s' from sys.modules to import fresh module." % name
self.logger.assert_message(msg, index=index)
class TestInvalidImportPath(unittest.TestCase):
def test_non_existing(self):
path = 'non-existing.py'
assert_raises_with_msg(DataError,
"Importing '%s' failed: File or directory does not exist." % path,
Importer().import_class_or_module_by_path, path)
path = abspath(path)
assert_raises_with_msg(DataError,
"Importing test file '%s' failed: File or directory does not exist." % path,
Importer('test file').import_class_or_module_by_path, path)
def test_non_absolute(self):
path = os.listdir('.')[0]
assert_raises_with_msg(DataError,
"Importing '%s' failed: Import path must be absolute." % path,
Importer().import_class_or_module_by_path, path)
assert_raises_with_msg(DataError,
"Importing file '%s' failed: Import path must be absolute." % path,
Importer('file').import_class_or_module_by_path, path)
def test_invalid_format(self):
path = join(CURDIR, '..', '..', 'README.rst')
assert_raises_with_msg(DataError,
"Importing '%s' failed: Not a valid file or directory to import." % path,
Importer().import_class_or_module_by_path, path)
assert_raises_with_msg(DataError,
"Importing xxx '%s' failed: Not a valid file or directory to import." % path,
Importer('xxx').import_class_or_module_by_path, path)
class TestImportClassOrModule(unittest.TestCase):
def test_import_module_file(self):
module = self._import_module('classes')
assert_equals(module.__version__, 'N/A')
def test_import_module_directory(self):
module = self._import_module('pythonmodule')
assert_equals(module.some_string, 'Hello, World!')
def test_import_non_existing(self):
error = assert_raises(DataError, self._import, 'NonExisting')
assert_prefix(error, "Importing 'NonExisting' failed: ImportError:")
def test_import_sub_module(self):
module = self._import_module('pythonmodule.library')
assert_equals(module.keyword_from_submodule('Kitty'), 'Hello, Kitty!')
module = self._import_module('pythonmodule.submodule')
assert_equals(module.attribute, 42)
module = self._import_module('pythonmodule.submodule.sublib')
assert_equals(module.keyword_from_deeper_submodule(), 'hi again')
def test_import_class_with_same_name_as_module(self):
klass = self._import_class('ExampleLibrary')
assert_equals(klass().return_string_from_library('xxx'), 'xxx')
def test_import_class_from_module(self):
klass = self._import_class('ExampleLibrary.ExampleLibrary')
assert_equals(klass().return_string_from_library('yyy'), 'yyy')
def test_import_class_from_sub_module(self):
klass = self._import_class('pythonmodule.submodule.sublib.Sub')
assert_equals(klass().keyword_from_class_in_deeper_submodule(), 'bye')
def test_import_non_existing_item_from_existing_module(self):
assert_raises_with_msg(DataError,
"Importing 'pythonmodule.NonExisting' failed: "
"Module 'pythonmodule' does not contain 'NonExisting'.",
self._import, 'pythonmodule.NonExisting')
assert_raises_with_msg(DataError,
"Importing test library 'pythonmodule.none' failed: "
"Module 'pythonmodule' does not contain 'none'.",
self._import, 'pythonmodule.none', 'test library')
def test_invalid_item_from_existing_module(self):
assert_raises_with_msg(DataError,
"Importing 'pythonmodule.some_string' failed: "
"Expected class or module, got <str>.",
self._import, 'pythonmodule.some_string')
assert_raises_with_msg(DataError,
"Importing xxx 'pythonmodule.submodule.attribute' failed: "
"Expected class or module, got <int>.",
self._import, 'pythonmodule.submodule.attribute', 'xxx')
def test_item_from_non_existing_module(self):
error = assert_raises(DataError, self._import, 'nonex.item')
assert_prefix(error, "Importing 'nonex.item' failed: ImportError:")
def test_import_file_by_path(self):
import bytelib as expected
module = self._import_module(join(LIBDIR, 'bytelib.py'))
assert_equals(module.__name__, expected.__name__)
assert_equals(dirname(normpath(module.__file__)),
dirname(normpath(expected.__file__)))
assert_equals(dir(module), dir(expected))
def test_import_class_from_file_by_path(self):
klass = self._import_class(join(LIBDIR, 'ExampleLibrary.py'))
assert_equals(klass().return_string_from_library('test'), 'test')
def test_invalid_file_by_path(self):
path = join(TEMPDIR, 'robot_import_invalid_test_file.py')
try:
with open(path, 'w') as file:
file.write('invalid content')
error = assert_raises(DataError, self._import, path)
assert_prefix(error, "Importing '%s' failed: SyntaxError:" % path)
finally:
os.remove(path)
def test_logging_when_importing_module(self):
logger = LoggerStub(remove_extension=True)
self._import_module('classes', 'test library', logger)
logger.assert_message("Imported test library module 'classes' from '%s'."
% join(LIBDIR, 'classes'))
def test_logging_when_importing_python_class(self):
logger = LoggerStub(remove_extension=True)
self._import_class('ExampleLibrary', logger=logger)
logger.assert_message("Imported class 'ExampleLibrary' from '%s'."
% join(LIBDIR, 'ExampleLibrary'))
if sys.platform.startswith('java'):
def test_import_java_class(self):
klass = self._import_class('ExampleJavaLibrary')
assert_equals(klass().getCount(), 1)
def test_import_java_class_in_package(self):
klass = self._import_class('javapkg.JavaPackageExample')
assert_equals(klass().returnValue('xmas'), 'xmas')
def test_import_java_file_by_path(self):
import ExampleJavaLibrary as expected
klass = self._import_class(join(LIBDIR, 'ExampleJavaLibrary.java'))
assert_equals(klass().getCount(), 1)
assert_equals(klass.__name__, expected.__name__)
assert_equals(dir(klass), dir(expected))
def test_importing_java_package_fails(self):
assert_raises_with_msg(DataError,
"Importing test library 'javapkg' failed: "
"Expected class or module, got <javapackage>.",
self._import, 'javapkg', 'test library')
def test_logging_when_importing_java_class(self):
logger = LoggerStub()
self._import_class('ExampleJavaLibrary', 'java', logger)
logger.assert_message("Imported java class 'ExampleJavaLibrary' "
"from unknown location.")
def _import_module(self, name, type=None, logger=None):
module = self._import(name, type, logger)
assert_true(inspect.ismodule(module))
return module
def _import_class(self, name, type=None, logger=None):
klass = self._import(name, type, logger)
assert_true(inspect.isclass(klass))
return klass
def _import(self, name, type=None, logger=None):
return Importer(type, logger or LoggerStub()).import_class_or_module(name)
class TestErrorDetails(unittest.TestCase):
def test_no_traceback(self):
error = self._failing_import('NoneExisting')
assert_equals(self._get_traceback(error),
'Traceback (most recent call last):\n None')
def test_traceback(self):
path = create_temp_file('tb.py', extra_content='import nonex')
try:
error = self._failing_import(path)
finally:
shutil.rmtree(TESTDIR)
assert_equals(self._get_traceback(error),
'Traceback (most recent call last):\n'
' File "%s", line 4, in <module>\n'
' import nonex' % path)
def test_pythonpath(self):
error = self._failing_import('NoneExisting')
lines = self._get_pythonpath(error).splitlines()
assert_equals(lines[0], 'PYTHONPATH:')
for line in lines[1:]:
assert_true(line.startswith(' '))
def test_non_ascii_bytes_in_pythonpath(self):
sys.path.append('hyv\xe4')
try:
error = self._failing_import('NoneExisting')
finally:
sys.path.pop()
last_line = self._get_pythonpath(error).splitlines()[-1].strip()
assert_true(last_line.startswith('hyv'))
if sys.platform.startswith('java'):
def test_classpath(self):
error = self._failing_import('NoneExisting')
lines = self._get_classpath(error).splitlines()
assert_equals(lines[0], 'CLASSPATH:')
for line in lines[1:]:
assert_true(line.startswith(' '))
def test_structure(self):
error = self._failing_import('NoneExisting')
message = "Importing 'NoneExisting' failed: ImportError: No module named NoneExisting"
expected = (message, self._get_traceback(error),
self._get_pythonpath(error), self._get_classpath(error))
assert_equals(unicode(error), '\n'.join(expected).strip())
def _failing_import(self, name):
importer = Importer().import_class_or_module
return assert_raises(DataError, importer, name)
def _get_traceback(self, error):
return '\n'.join(self._block(error, 'Traceback (most recent call last):',
'PYTHONPATH:'))
def _get_pythonpath(self, error):
return '\n'.join(self._block(error, 'PYTHONPATH:', 'CLASSPATH:'))
def _get_classpath(self, error):
return '\n'.join(self._block(error, 'CLASSPATH:'))
def _block(self, error, start, end=None):
include = False
for line in unicode(error).splitlines():
if line == end:
return
if line == start:
include = True
if include:
yield line
class TestSplitPathToModule(unittest.TestCase):
def _verify(self, file_name, expected_name):
path = abspath(file_name)
actual = ByPathImporter(None)._split_path_to_module(path)
assert_equals(actual, (dirname(path), expected_name))
def test_normal_file(self):
self._verify('hello.py', 'hello')
self._verify('hello.class', 'hello')
self._verify('hello.world.java', 'hello.world')
def test_jython_class_file(self):
self._verify('hello$py.class', 'hello')
self._verify('__init__$py.class', '__init__')
def test_directory(self):
self._verify('hello', 'hello')
self._verify('hello'+os.sep, 'hello')
class TestInstantiation(unittest.TestCase):
def setUp(self):
self.tearDown()
def tearDown(self):
if exists(TESTDIR):
shutil.rmtree(TESTDIR)
def test_when_importing_by_name(self):
from ExampleLibrary import ExampleLibrary
lib = Importer().import_class_or_module('ExampleLibrary',
instantiate_with_args=())
assert_true(not inspect.isclass(lib))
assert_true(isinstance(lib, ExampleLibrary))
def test_with_arguments(self):
lib = Importer().import_class_or_module('libswithargs.Mixed', range(5))
assert_equals(lib.get_args(), (0, 1, '2 3 4'))
def test_when_importing_by_path(self):
path = create_temp_file('args.py', extra_content='class args: a=1')
lib = Importer().import_class_or_module_by_path(path, ())
assert_true(not inspect.isclass(lib))
assert_equals(lib.__class__.__name__, 'args')
assert_equals(lib.a, 1)
def test_instantiate_failure(self):
err = assert_raises(DataError, Importer().import_class_or_module,
'ExampleLibrary', ['accepts', 'no', 'args'])
assert_true(unicode(err).startswith("Importing 'ExampleLibrary' failed: "
"Creating instance failed: TypeError:"))
def test_modules_do_not_take_arguments(self):
path = create_temp_file('no_args_allowed.py')
assert_raises_with_msg(DataError,
"Importing '%s' failed: Modules do not take arguments." % path,
Importer().import_class_or_module_by_path,
path, ['invalid'])
if __name__ == '__main__':
unittest.main()
|
|
from datetime import datetime, timedelta
from tzlocal import get_localzone
import six
from apscheduler.triggers.base import BaseTrigger
from apscheduler.triggers.cron.fields import (
BaseField, WeekField, DayOfMonthField, DayOfWeekField, DEFAULT_VALUES)
from apscheduler.util import datetime_ceil, convert_to_datetime, datetime_repr, astimezone
class CronTrigger(BaseTrigger):
"""
Triggers when current time matches all specified time constraints,
similarly to how the UNIX cron scheduler works.
:param int|str year: 4-digit year
:param int|str month: month (1-12)
:param int|str day: day of the month (1-31)
:param int|str week: ISO week (1-53)
:param int|str day_of_week: number or name of weekday (0-6 or mon,tue,wed,thu,fri,sat,sun)
:param int|str hour: hour (0-23)
:param int|str minute: minute (0-59)
:param int|str second: second (0-59)
:param datetime|str start_date: earliest possible date/time to trigger on (inclusive)
:param datetime|str end_date: latest possible date/time to trigger on (inclusive)
:param datetime.tzinfo|str timezone: time zone to use for the date/time calculations (defaults
to scheduler timezone)
.. note:: The first weekday is always **monday**.
"""
FIELD_NAMES = ('year', 'month', 'day', 'week', 'day_of_week', 'hour', 'minute', 'second')
FIELDS_MAP = {
'year': BaseField,
'month': BaseField,
'week': WeekField,
'day': DayOfMonthField,
'day_of_week': DayOfWeekField,
'hour': BaseField,
'minute': BaseField,
'second': BaseField
}
__slots__ = 'timezone', 'start_date', 'end_date', 'fields'
def __init__(self, year=None, month=None, day=None, week=None, day_of_week=None, hour=None,
minute=None, second=None, start_date=None, end_date=None, timezone=None):
if timezone:
self.timezone = astimezone(timezone)
elif isinstance(start_date, datetime) and start_date.tzinfo:
self.timezone = start_date.tzinfo
elif isinstance(end_date, datetime) and end_date.tzinfo:
self.timezone = end_date.tzinfo
else:
self.timezone = get_localzone()
self.start_date = convert_to_datetime(start_date, self.timezone, 'start_date')
self.end_date = convert_to_datetime(end_date, self.timezone, 'end_date')
values = dict((key, value) for (key, value) in six.iteritems(locals())
if key in self.FIELD_NAMES and value is not None)
self.fields = []
assign_defaults = False
for field_name in self.FIELD_NAMES:
if field_name in values:
exprs = values.pop(field_name)
is_default = False
assign_defaults = not values
elif assign_defaults:
exprs = DEFAULT_VALUES[field_name]
is_default = True
else:
exprs = '*'
is_default = True
field_class = self.FIELDS_MAP[field_name]
field = field_class(field_name, exprs, is_default)
self.fields.append(field)
def _increment_field_value(self, dateval, fieldnum):
"""
Increments the designated field and resets all less significant fields to their minimum
values.
:type dateval: datetime
:type fieldnum: int
:return: a tuple containing the new date, and the number of the field that was actually
incremented
:rtype: tuple
"""
values = {}
i = 0
while i < len(self.fields):
field = self.fields[i]
if not field.REAL:
if i == fieldnum:
fieldnum -= 1
i -= 1
else:
i += 1
continue
if i < fieldnum:
values[field.name] = field.get_value(dateval)
i += 1
elif i > fieldnum:
values[field.name] = field.get_min(dateval)
i += 1
else:
value = field.get_value(dateval)
maxval = field.get_max(dateval)
if value == maxval:
fieldnum -= 1
i -= 1
else:
values[field.name] = value + 1
i += 1
difference = datetime(**values) - dateval.replace(tzinfo=None)
return self.timezone.normalize(dateval + difference), fieldnum
def _set_field_value(self, dateval, fieldnum, new_value):
values = {}
for i, field in enumerate(self.fields):
if field.REAL:
if i < fieldnum:
values[field.name] = field.get_value(dateval)
elif i > fieldnum:
values[field.name] = field.get_min(dateval)
else:
values[field.name] = new_value
return self.timezone.localize(datetime(**values))
def get_next_fire_time(self, previous_fire_time, now):
if previous_fire_time:
start_date = min(now, previous_fire_time + timedelta(microseconds=1))
if start_date == previous_fire_time:
start_date += timedelta(microseconds=1)
else:
start_date = max(now, self.start_date) if self.start_date else now
fieldnum = 0
next_date = datetime_ceil(start_date).astimezone(self.timezone)
while 0 <= fieldnum < len(self.fields):
field = self.fields[fieldnum]
curr_value = field.get_value(next_date)
next_value = field.get_next_value(next_date)
if next_value is None:
# No valid value was found
next_date, fieldnum = self._increment_field_value(next_date, fieldnum - 1)
elif next_value > curr_value:
# A valid value was found, but it is higher than the starting value
if field.REAL:
next_date = self._set_field_value(next_date, fieldnum, next_value)
fieldnum += 1
else:
next_date, fieldnum = self._increment_field_value(next_date, fieldnum)
else:
# A valid value was found, no changes necessary
fieldnum += 1
# Return if the date has rolled past the end date
if self.end_date and next_date > self.end_date:
return None
if fieldnum >= 0:
return next_date
def __getstate__(self):
return {
'version': 1,
'timezone': self.timezone,
'start_date': self.start_date,
'end_date': self.end_date,
'fields': self.fields
}
def __setstate__(self, state):
# This is for compatibility with APScheduler 3.0.x
if isinstance(state, tuple):
state = state[1]
if state.get('version', 1) > 1:
raise ValueError(
'Got serialized data for version %s of %s, but only version 1 can be handled' %
(state['version'], self.__class__.__name__))
self.timezone = state['timezone']
self.start_date = state['start_date']
self.end_date = state['end_date']
self.fields = state['fields']
def __str__(self):
options = ["%s='%s'" % (f.name, f) for f in self.fields if not f.is_default]
return 'cron[%s]' % (', '.join(options))
def __repr__(self):
options = ["%s='%s'" % (f.name, f) for f in self.fields if not f.is_default]
if self.start_date:
options.append("start_date='%s'" % datetime_repr(self.start_date))
return "<%s (%s, timezone='%s')>" % (
self.__class__.__name__, ', '.join(options), self.timezone)
|
|
from .. utils import TranspileTestCase, UnaryOperationTestCase, BinaryOperationTestCase, InplaceOperationTestCase
class ListTests(TranspileTestCase):
def test_setattr(self):
self.assertCodeExecution("""
x = [1, 2, 3]
try:
x.attr = 42
except AttributeError as err:
print(err)
""")
def test_getattr(self):
self.assertCodeExecution("""
x = [1, 2, 3]
try:
print(x.attr)
except AttributeError as err:
print(err)
""")
def test_creation(self):
# Empty list
self.assertCodeExecution("""
x = []
print(x)
""")
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print(x)
""")
def test_getitem(self):
# Simple positive index
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print(x[2])
""")
# Simple negative index
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print(x[-2])
""")
# Positive index out of range
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
try:
print(x[10])
except IndexError as err:
print(err)
""")
# Negative index out of range
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
try:
print(x[-10])
except IndexError as err:
print(err)
""")
def test_setitem(self):
self.assertCodeExecution("""
x = [1]
x[0] = 5
print(x[0])
""")
self.assertCodeExecution("""
x = [1, 2, 3]
x[1] = "hello"
x[2] = "there"
print(x)
""")
# Out of bounds
self.assertCodeExecution("""
x = []
try:
x[0] = 5
except IndexError as err:
print(err)
""")
# Out of bounds (negative)
self.assertCodeExecution("""
x = [1]
try:
x[-2] = 5
except IndexError as err:
print(err)
""")
def test_append(self):
# New list
self.assertCodeExecution("""
x = []
x.append("hello")
x.append(5)
print(x[0], x[1])
""")
# Existing list
self.assertCodeExecution("""
x = [1, 2, 3, 4]
x.append(5)
x.append("hello")
print(x[4], x[5])
""")
def test_remove(self):
# Remove integer
self.assertCodeExecution("""
x = [1, 2, 3]
x.remove(1)
print(x)
""")
# Remove only first duplicate
self.assertCodeExecution("""
x = [1, 2, 2, 3, 2]
x.remove(2)
print(x)
""")
# Remove boolean
self.assertCodeExecution("""
x = [True, False, True, False]
x.remove(1)
print(x)
""")
# Not in list
self.assertCodeExecution("""
x = [1, 2]
try:
x.remove(3)
except ValueError as err:
print(err)
print(x)
""")
def test_slice(self):
# Full slice
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print(x[:])
""")
# Left bound slice
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print(x[1:])
""")
# Right bound slice
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print(x[:4])
""")
# Slice bound in both directions
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print(x[1:4])
""")
# Slice bound in both directions with end out of bounds
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print(x[1:6])
""")
# Slice bound in both directions with start out of bounds
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print(x[6:7])
""")
# when step is 0
def test_slice_with_zero_step(self):
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
try:
print(x[1:3:0])
except ValueError as err:
print(err)
""")
def test_slice_in_reverse(self):
# Full slice with a negative step
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print (x[::-1])
""")
# left bound slice with a negative step
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print (x[4::-2])
""")
# Right bound slice with a negative step
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print (x[:4:-1])
""")
# Right bound and left bound slice with a negative step
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print (x[1:4:-2])
""")
def test_count(self):
# Normal Count
self.assertCodeExecution("""
x = [1, 1, 1, 4, 5]
print(x.count(1))
""")
# Bool Count
self.assertCodeExecution("""
x = [1, 1, False, 1, 4, True, 5, True]
print(x.count(1))
""")
# Element doesn't exist count
self.assertCodeExecution("""
x = [1, False, 1, 1, True, 4, 5, True]
print(x.count(2))
""")
self.assertCodeExecution("""
x = [1, 1, 1, 4, 5, True]
print(x.count(1))
""")
def test_contains(self):
# Normal Contains
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print(1 in x)
""")
# Element doesn't exist
self.assertCodeExecution("""
x = [1, 2, 3, 4, 5]
print(0 in x)
""")
# Checking for boolean
self.assertCodeExecution("""
x = [True, False]
print(x.count(1))
""")
def test_sort(self):
self.assertCodeExecution("""
fixtures = [
[9, 4, 7],
['beta', 'theta', 'alpha'],
]
for x in fixtures:
x.sort()
print(x)
""")
self.assertCodeExecution("""
fixtures = [
[9, 4, 7],
['beta', 'theta', 'alpha'],
]
for x in fixtures:
x.sort(reverse=True)
print(x)
""")
self.assertCodeExecution("""
def second(s):
return s[1]
x = ['abc', 'bza', 'cda', 'daa']
x.sort(key=second)
print(x)
""")
self.assertCodeExecution("""
def second(s):
return s[1]
x = ['abc', 'bza', 'cda', 'daa']
x.sort(key=second, reverse=True)
print(x)
""")
def test_pop(self):
self.assertCodeExecution("""
x = [1, 2, 3]
print(x.pop())
print(x)
""")
self.assertCodeExecution("""
x = [1, 2, 3]
print(x.pop(0))
print(x)
""")
self.assertCodeExecution("""
x = [1, 2, 3]
print(x.pop(-2))
print(x)
""")
def test_pop_exceptions(self):
self.assertCodeExecution("""
x = []
try:
print(x.pop())
except IndexError as err:
print(err)
print(x)
""")
self.assertCodeExecution("""
x = [1, 2, 3]
try:
print(x.pop(3))
except IndexError as err:
print(err)
print(x)
""")
self.assertCodeExecution("""
x = [1, 2, 3]
try:
print(x.pop(-4))
except IndexError as err:
print(err)
print(x)
""")
def test_copy(self):
self.assertCodeExecution("""
x = [1, 2, 3]
y = x.copy()
print(y)
""")
self.assertCodeExecution("""
x = [1, 2, 3]
y = x.copy()
print(x == y)
""")
self.assertCodeExecution("""
x = [1, 2, 3]
y = x.copy()
print(x is not y)
""")
self.assertCodeExecution("""
x = [1, 2, 3]
y = x.copy()
y.append(4)
print(x == y)
""")
self.assertCodeExecution("""
x = [[1], 2, 3]
y = x.copy()
print(x[0] is y[0])
""")
def test_index(self):
self.assertCodeExecution("""
x = [1, 2, 3]
print(x.index(1))
""")
self.assertCodeExecution("""
x = [1, 2, 1]
print(x.index(1, 1))
""")
self.assertCodeExecution("""
x = [1, 2, 3, 4]
print(x.index(4, 0, len(x)))
""")
self.assertCodeExecution("""
x = [1, 2, 3, 4]
print(x.index(2, 1, 2))
""")
self.assertCodeExecution("""
x = [1, 2, 3, 4]
print(x.index(2, 0, 10))
""")
self.assertCodeExecution("""
x = [1, 2, 1]
print(x.index(1, 0, -2))
""")
self.assertCodeExecution("""
x = [1, 2, 1]
print(x.index(1, -3, -2))
""")
# cases for 'ValueError: not in list'
self.assertCodeExecution("""
x = [1, 2, 3]
try:
print(x.index(4))
except ValueError as err:
print(err)
x = [1, 2, 1]
try:
print(x.index(2, 0, 1))
except ValueError as err:
print(err)
x = [1, 2, 3, 4]
try:
print(x.index(4, 0, 3))
except ValueError as err:
print(err)
x = [1, 2, 1]
try:
print(x.index(3, 0, 10))
except ValueError as err:
print(err)
x = [1, 2, 3, 4]
try:
print(x.index(2, 10, 20))
except ValueError as err:
print(err)
x = [1, 2, 3, 4]
try:
print(x.index(2, 10, 0))
except ValueError as err:
print(err)
x = []
try:
print(x.index(1, 0, 10))
except ValueError as err:
print(err)
""")
def test_lt_reflected(self):
self.assertCodeExecution("""
class A:
def __gt__(self, other):
return True
x = A()
y = A()
# verify that A doesn't have __lt__()
print(x.__lt__(x))
# ensure rich comparison logic is used
print([x] < [x]) # False, x is x and same size
print([x] < [y]) # True, x is not y, reflected
# when elements are non-identical, return that comparison, even if the sizes are not equal
print([x, y] < [y]) # True, x is not y, reflected
# ensure tie breaker by size is still used when identical elements
print([x, y] < [x]) # False, larger size
print([x] < [x, y]) # True, smaller size
""")
def test_le_reflected(self):
self.assertCodeExecution("""
class A:
def __ge__(self, other):
return True
x = A()
y = A()
# verify that A doesn't have __le__()
print(x.__le__(x))
# ensure rich comparison logic is used
print([x] <= [x]) # False, x is x and same size
print([x] <= [y]) # True, x is not y, reflected
# when elements are non-identical, return that comparison, even if the sizes are not equal
print([x, y] <= [y]) # True, x is not y, reflected
# ensure tie breaker by size is still used when identical elements
print([x, y] <= [x]) # False, larger size
print([x] <= [x, y]) # True, smaller size
""")
def test_gt_reflected(self):
self.assertCodeExecution("""
class A:
def __lt__(self, other):
return True
x = A()
y = A()
# verify that A doesn't have __gt__()
print(x.__gt__(x))
# ensure rich comparison logic is used
print([x] > [x]) # False, x is x and same size
print([x] > [y]) # True, x is not y, reflected
# when elements are non-identical, return that comparison, even if the sizes are not equal
print([x, y] > [y]) # True, x is not y, reflected
# ensure tie breaker by size is still used when identical elements
print([x, y] > [x]) # False, larger size
print([x] > [x, y]) # True, smaller size
""")
def test_ge_reflected(self):
self.assertCodeExecution("""
class A:
def __le__(self, other):
return True
x = A()
y = A()
# verify that A doesn't have __ge__()
print(x.__ge__(x))
# ensure rich comparison logic is used
print([x] >= [x]) # False, x is x and same size
print([x] >= [y]) # True, x is not y, reflected
# when elements are non-identical, return that comparison, even if the sizes are not equal
print([x, y] >= [y]) # True, x is not y, reflected
# ensure tie breaker by size is still used when identical elements
print([x, y] >= [x]) # False, larger size
print([x] >= [x, y]) # True, smaller size
""")
def test_eq_reflected(self):
self.assertCodeExecution("""
class A:
def __eq__(self, other):
return True
class B:
def __eq__(self, other):
return False
x = A()
y = B()
print([x] == [x]) # True, identity implies equality
print([x, x] == [x]) # False, size not equal
print([x] == [y]) # True, x is not y, x.__eq__(y)
print([y] == [x]) # False, y is not x, y.__eq__(x)
""")
class UnaryListOperationTests(UnaryOperationTestCase, TranspileTestCase):
data_type = 'list'
class BinaryListOperationTests(BinaryOperationTestCase, TranspileTestCase):
data_type = 'list'
not_implemented = [
'test_add_class',
'test_add_frozenset',
'test_and_class',
'test_and_frozenset',
'test_direct_eq_bytes',
'test_direct_ge_bytes',
'test_direct_gt_bytes',
'test_direct_le_bytes',
'test_direct_lt_bytes',
'test_direct_ne_bytes',
'test_direct_ge_list',
'test_direct_gt_list',
'test_direct_le_list',
'test_direct_lt_list',
'test_direct_eq_frozenset',
'test_direct_ge_frozenset',
'test_direct_gt_frozenset',
'test_direct_le_frozenset',
'test_direct_lt_frozenset',
'test_direct_ne_frozenset',
'test_eq_class',
'test_eq_frozenset',
'test_floor_divide_class',
'test_floor_divide_complex',
'test_floor_divide_frozenset',
'test_ge_class',
'test_ge_frozenset',
'test_ge_list',
'test_gt_class',
'test_gt_frozenset',
'test_gt_list',
'test_le_class',
'test_le_frozenset',
'test_le_list',
'test_lshift_class',
'test_lshift_frozenset',
'test_lt_class',
'test_lt_frozenset',
'test_lt_list',
'test_modulo_class',
'test_modulo_complex',
'test_modulo_frozenset',
'test_multiply_class',
'test_multiply_frozenset',
'test_ne_class',
'test_ne_frozenset',
'test_or_class',
'test_or_frozenset',
'test_power_class',
'test_power_frozenset',
'test_rshift_class',
'test_rshift_frozenset',
'test_subscr_bool',
'test_subscr_class',
'test_subscr_frozenset',
'test_subscr_slice',
'test_subtract_class',
'test_subtract_frozenset',
'test_true_divide_class',
'test_true_divide_frozenset',
'test_xor_class',
'test_xor_frozenset',
]
class InplaceListOperationTests(InplaceOperationTestCase, TranspileTestCase):
data_type = 'list'
not_implemented = [
'test_add_bytearray',
'test_add_bytes',
'test_add_class',
'test_add_dict',
'test_add_frozenset',
'test_add_range',
'test_add_set',
'test_add_str',
'test_and_class',
'test_and_frozenset',
'test_floor_divide_class',
'test_floor_divide_complex',
'test_floor_divide_frozenset',
'test_lshift_class',
'test_lshift_frozenset',
'test_modulo_class',
'test_modulo_complex',
'test_modulo_frozenset',
'test_multiply_class',
'test_multiply_frozenset',
'test_or_class',
'test_or_frozenset',
'test_power_class',
'test_power_frozenset',
'test_rshift_class',
'test_rshift_frozenset',
'test_subtract_class',
'test_subtract_frozenset',
'test_true_divide_class',
'test_true_divide_frozenset',
'test_xor_class',
'test_xor_frozenset',
]
|
|
"""
Dynamically generated set of TestCases based on a set of yaml files describing
some integration tests. These files are shared among all official Elasticsearch
clients.
"""
import re
from os import walk, environ
from os.path import exists, join, dirname, pardir
import yaml
from elasticsearch import TransportError
from elasticsearch.compat import string_types
from elasticsearch.helpers.test import _get_version
from ..test_cases import SkipTest
from . import ElasticsearchTestCase
# some params had to be changed in python, keep track of them so we can rename
# those in the tests accordingly
PARAMS_RENAMES = {
'type': 'doc_type',
'from': 'from_',
}
# mapping from catch values to http status codes
CATCH_CODES = {
'missing': 404,
'conflict': 409,
}
# test features we have implemented
IMPLEMENTED_FEATURES = ('gtelte', 'stash_in_path')
# broken YAML tests on some releases
SKIP_TESTS = {
(1, 1, 2): set(('TestCatRecovery10Basic', )),
'*': set(('TestSearchExists20QueryString', 'TestSearchExists10Basic'))
}
class InvalidActionType(Exception):
pass
class YamlTestCase(ElasticsearchTestCase):
def setUp(self):
super(YamlTestCase, self).setUp()
if hasattr(self, '_setup_code'):
self.run_code(self._setup_code)
self.last_response = None
self._state = {}
def _resolve(self, value):
# resolve variables
if isinstance(value, string_types) and value.startswith('$'):
value = value[1:]
self.assertIn(value, self._state)
value = self._state[value]
if isinstance(value, string_types):
value = value.strip()
elif isinstance(value, dict):
value = dict((k, self._resolve(v)) for (k, v) in value.items())
elif isinstance(value, list):
value = list(map(self._resolve, value))
return value
def _lookup(self, path):
# fetch the possibly nested value from last_response
value = self.last_response
if path == '$body':
return value
path = path.replace(r'\.', '\1')
for step in path.split('.'):
if not step:
continue
step = step.replace('\1', '.')
step = self._resolve(step)
if step.isdigit() and step not in value:
step = int(step)
self.assertIsInstance(value, list)
self.assertGreater(len(value), step)
else:
self.assertIn(step, value)
value = value[step]
return value
def run_code(self, test):
""" Execute an instruction based on it's type. """
for action in test:
self.assertEquals(1, len(action))
action_type, action = list(action.items())[0]
if hasattr(self, 'run_' + action_type):
getattr(self, 'run_' + action_type)(action)
else:
raise InvalidActionType(action_type)
def run_do(self, action):
""" Perform an api call with given parameters. """
catch = action.pop('catch', None)
self.assertEquals(1, len(action))
method, args = list(action.items())[0]
# locate api endpoint
api = self.client
for m in method.split('.'):
self.assertTrue(hasattr(api, m))
api = getattr(api, m)
# some parameters had to be renamed to not clash with python builtins,
# compensate
for k in PARAMS_RENAMES:
if k in args:
args[PARAMS_RENAMES[k]] = args.pop(k)
# resolve vars
for k in args:
args[k] = self._resolve(args[k])
try:
self.last_response = api(**args)
except Exception as e:
if not catch:
raise
self.run_catch(catch, e)
else:
if catch:
raise AssertionError('Failed to catch %r in %r.' % (catch, self.last_response))
def _get_nodes(self):
if not hasattr(self, '_node_info'):
self._node_info = list(self.client.nodes.info(node_id='_all', metric='clear')['nodes'].values())
return self._node_info
def _get_data_nodes(self):
return len([info for info in self._get_nodes() if info.get('attributes', {}).get('data', 'true') == 'true'])
def _get_benchmark_nodes(self):
return len([info for info in self._get_nodes() if info.get('attributes', {}).get('bench', 'false') == 'true'])
def run_skip(self, skip):
if 'features' in skip:
if skip['features'] in IMPLEMENTED_FEATURES:
return
elif skip['features'] == 'requires_replica':
if self._get_data_nodes() > 1:
return
elif skip['features'] == 'benchmark':
if self._get_benchmark_nodes():
return
raise SkipTest(skip.get('reason', 'Feature %s is not supported' % skip['features']))
if 'version' in skip:
version, reason = skip['version'], skip['reason']
if version == 'all':
raise SkipTest(reason)
min_version, max_version = version.split('-')
min_version = _get_version(min_version) or (0, )
max_version = _get_version(max_version) or (999, )
if min_version <= self.es_version <= max_version:
raise SkipTest(reason)
def run_catch(self, catch, exception):
if catch == 'param':
self.assertIsInstance(exception, TypeError)
return
self.assertIsInstance(exception, TransportError)
if catch in CATCH_CODES:
self.assertEquals(CATCH_CODES[catch], exception.status_code)
elif catch[0] == '/' and catch[-1] == '/':
self.assertTrue(re.search(catch[1:-1], exception.error + ' ' + repr(exception.info)), '%s not in %r' % (catch, exception.info))
self.last_response = exception.info
def run_gt(self, action):
for key, value in action.items():
self.assertGreater(self._lookup(key), value)
def run_gte(self, action):
for key, value in action.items():
self.assertGreaterEqual(self._lookup(key), value)
def run_lt(self, action):
for key, value in action.items():
self.assertLess(self._lookup(key), value)
def run_lte(self, action):
for key, value in action.items():
self.assertLessEqual(self._lookup(key), value)
def run_set(self, action):
for key, value in action.items():
self._state[value] = self._lookup(key)
def run_is_false(self, action):
try:
value = self._lookup(action)
except AssertionError:
pass
else:
self.assertIn(value, ('', None, False, 0))
def run_is_true(self, action):
value = self._lookup(action)
self.assertNotIn(value, ('', None, False, 0))
def run_length(self, action):
for path, expected in action.items():
value = self._lookup(path)
expected = self._resolve(expected)
self.assertEquals(expected, len(value))
def run_match(self, action):
for path, expected in action.items():
value = self._lookup(path)
expected = self._resolve(expected)
if isinstance(expected, string_types) and \
expected.startswith('/') and expected.endswith('/'):
expected = re.compile(expected[1:-1], re.VERBOSE)
self.assertTrue(expected.search(value))
else:
self.assertEquals(expected, value)
def construct_case(filename, name):
"""
Parse a definition of a test case from a yaml file and construct the
TestCase subclass dynamically.
"""
def make_test(test_name, definition, i):
def m(self):
if name in SKIP_TESTS.get(self.es_version, ()) or name in SKIP_TESTS.get('*', ()):
raise SkipTest()
self.run_code(definition)
m.__doc__ = '%s:%s.test_from_yaml_%d (%s): %s' % (
__name__, name, i, '/'.join(filename.split('/')[-2:]), test_name)
m.__name__ = 'test_from_yaml_%d' % i
return m
with open(filename) as f:
tests = list(yaml.load_all(f))
attrs = {
'_yaml_file': filename
}
i = 0
for test in tests:
for test_name, definition in test.items():
if test_name == 'setup':
attrs['_setup_code'] = definition
continue
attrs['test_from_yaml_%d' % i] = make_test(test_name, definition, i)
i += 1
return type(name, (YamlTestCase, ), attrs)
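# For illustration only (hand-written equivalent, file name hypothetical): a yaml
# file 'search/10_basic.yaml' containing two test sections yields a class shaped
# roughly like
#
#   class TestSearch10Basic(YamlTestCase):
#       _yaml_file = 'search/10_basic.yaml'
#       def test_from_yaml_0(self): ...  # runs the first section via run_code()
#       def test_from_yaml_1(self): ...  # runs the second section
#
# so normal test discovery picks the yaml-driven cases up like any hand-written
# TestCase.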
YAML_DIR = environ.get(
'TEST_ES_YAML_DIR',
join(
dirname(__file__), pardir, pardir, pardir,
'elasticsearch', 'rest-api-spec', 'src', 'main', 'resources', 'rest-api-spec', 'test'
)
)
if exists(YAML_DIR):
# find all the test definitions in yaml files ...
for (path, dirs, files) in walk(YAML_DIR):
for filename in files:
if not filename.endswith(('.yaml', '.yml')):
continue
# ... parse them
name = ('Test' + ''.join(s.title() for s in path[len(YAML_DIR) + 1:].split('/')) + filename.rsplit('.', 1)[0].title()).replace('_', '').replace('.', '')
# and insert them into locals for test runner to find them
locals()[name] = construct_case(join(path, filename), name)
|
|
#
# Copyright (c) 2010-2014, MIT Probabilistic Computing Project
#
# Lead Developers: Dan Lovell and Jay Baxter
# Authors: Dan Lovell, Baxter Eaves, Jay Baxter, Vikash Mansinghka
# Research Leads: Vikash Mansinghka, Patrick Shafto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
#
import numpy
import pylab
pylab.ion()
from scipy.spatial.distance import pdist
from scipy.cluster.hierarchy import linkage, dendrogram
#
import crosscat.utils.general_utils as gu
import crosscat.utils.file_utils as fu
def save_current_figure(filename, dir='./', close=True, format=None):
if filename is not None:
fu.ensure_dir(dir)
full_filename = os.path.join(dir, filename)
pylab.savefig(full_filename, format=format)
if close:
pylab.close()
def get_aspect_ratio(T_array):
num_rows = len(T_array)
num_cols = len(T_array[0])
aspect_ratio = float(num_cols)/num_rows
return aspect_ratio
def plot_T(T_array, M_c, filename=None, dir='./', close=True):
num_cols = len(T_array[0])
column_names = [M_c['idx_to_name'][str(idx)] for idx in range(num_cols)]
column_names = numpy.array(column_names)
aspect_ratio = get_aspect_ratio(T_array)
pylab.figure()
pylab.imshow(T_array, aspect=aspect_ratio, interpolation='none',
cmap=pylab.matplotlib.cm.Greens)
pylab.gca().set_xticks(range(num_cols))
pylab.gca().set_xticklabels(column_names, rotation=90, size='x-small')
pylab.show()
save_current_figure(filename, dir, close)
def plot_views(T_array, X_D, X_L, M_c, filename=None, dir='./', close=True,
format=None, do_colorbar=False):
view_assignments = X_L['column_partition']['assignments']
view_assignments = numpy.array(view_assignments)
num_features = len(view_assignments)
column_names = [M_c['idx_to_name'][str(idx)] for idx in range(num_features)]
column_names = numpy.array(column_names)
num_views = len(set(view_assignments)) + do_colorbar
disLeft = 0.1
disRight = 0.1
viewSpacing = 0.1 / (max(2, num_views) - 1)
nxtAxDisLeft = disLeft
axpos2 = 0.2
axpos4 = 0.75
view_spacing_2 = (1-viewSpacing*(num_views-1.)-disLeft-disRight) / num_features
fig = pylab.figure()
for view_idx, X_D_i in enumerate(X_D):
# figure out some sizing
is_this_view = view_assignments==view_idx
num_cols_i = sum(is_this_view)
nxtAxWidth = float(num_cols_i) * view_spacing_2
axes_pos = nxtAxDisLeft, axpos2, nxtAxWidth, axpos4
nxtAxDisLeft = nxtAxDisLeft+nxtAxWidth+viewSpacing
# define some helpers
        def norm_T(T_array_sub):
mincols = T_array_sub.min(axis=0)
maxcols = T_array_sub.max(axis=0)
T_range = maxcols[numpy.newaxis,:] - mincols[numpy.newaxis,:]
return (T_array_sub-mincols[numpy.newaxis,:]) / T_range
def plot_cluster_lines(X_D_i, num_cols_i):
old_tmp = 0
for cluster_i in range(max(X_D_i)):
cluster_num_rows = numpy.sum(numpy.array(X_D_i) == cluster_i)
if cluster_num_rows > 5:
xs = numpy.arange(num_cols_i + 1) - 0.5
ys = [old_tmp + cluster_num_rows] * (num_cols_i + 1)
pylab.plot(xs, ys, color='red', linewidth=2, hold='true')
pass
old_tmp = old_tmp + cluster_num_rows
pass
return
# plot
argsorted = numpy.argsort(X_D_i)
T_array_sub = T_array[:,is_this_view][argsorted]
normed_T = norm_T(T_array_sub)
currax = fig.add_axes(axes_pos)
pylab.imshow(normed_T, aspect = 'auto',
interpolation='none', cmap=pylab.matplotlib.cm.Greens)
plot_cluster_lines(X_D_i, num_cols_i)
# munge plots
pylab.gca().set_xticks(range(num_cols_i))
pylab.gca().set_xticklabels(column_names[is_this_view], rotation=90, size='x-small')
pylab.gca().set_yticklabels([])
pylab.xlim([-0.5, num_cols_i-0.5])
pylab.ylim([0, len(T_array_sub)])
if view_idx!=0: pylab.gca().set_yticklabels([])
if do_colorbar:
nxtAxWidth = float(1.) * view_spacing_2
axes_pos = nxtAxDisLeft, axpos2, nxtAxWidth, axpos4
cax = fig.add_axes(axes_pos)
cb = pylab.colorbar(cax=cax, ax=currax)
save_current_figure(filename, dir, close, format=format)
def plot_predicted_value(value, samples, modelType, filename='imputed_value_hist.png', plotcolor='red', truth=None, x_axis_lim=None):
fig = pylab.figure()
# Find 50% bounds
curr_std = numpy.std(samples)
    curr_delta = 2*curr_std/100
ndraws = len(samples)
for thresh in numpy.arange(curr_delta, 2*curr_std, curr_delta):
withinbounds = len([i for i in range(len(samples)) if samples[i] < (value+thresh) and samples[i] > (value-thresh)])
if float(withinbounds)/ndraws > 0.5:
break
bounds = [value-thresh, value+thresh]
# Plot histogram
# 'normal_inverse_gamma': continuous_imputation,
# 'symmetric_dirichlet_discrete': multinomial_imputation,
if modelType == 'normal_inverse_gamma':
nx, xbins, rectangles = pylab.hist(samples,bins=40,normed=0,color=plotcolor)
elif modelType == 'symmetric_dirichlet_discrete':
bin_edges = numpy.arange(numpy.min(samples)-0.5, numpy.max(samples)-0.5, 1)
nx, xbins, rectangles = pylab.hist(samples,bin_edges,normed=0,color=plotcolor)
else:
print 'Unsupported model type'
pylab.clf()
nx_frac = nx/float(sum(nx))
x_width = [(xbins[i+1]-xbins[i]) for i in range(len(xbins)-1)]
pylab.bar(xbins[0:len(xbins)-1],nx_frac,x_width,color=plotcolor)
pylab.plot([value, value],[0,1], color=plotcolor, hold=True,linewidth=2)
pylab.plot([bounds[0], bounds[0]],[0,1], color=plotcolor, hold=True, linestyle='--',linewidth=2)
pylab.plot([bounds[1], bounds[1]],[0,1], color=plotcolor, hold=True, linestyle='--',linewidth=2)
    if truth is not None:
pylab.plot([truth, truth],[0,1], color='green', hold=True, linestyle='--',linewidth=2)
pylab.show()
    if x_axis_lim is not None:
pylab.xlim(x_axis_lim)
save_current_figure(filename, './', False)
return pylab.gca().get_xlim()
def do_gen_feature_z(X_L_list, X_D_list, M_c, filename, tablename=''):
num_cols = len(X_L_list[0]['column_partition']['assignments'])
column_names = [M_c['idx_to_name'][str(idx)] for idx in range(num_cols)]
column_names = numpy.array(column_names)
# extract unordered z_matrix
num_latent_states = len(X_L_list)
z_matrix = numpy.zeros((num_cols, num_cols))
for X_L in X_L_list:
assignments = X_L['column_partition']['assignments']
for i in range(num_cols):
for j in range(num_cols):
if assignments[i] == assignments[j]:
z_matrix[i, j] += 1
z_matrix /= float(num_latent_states)
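    # At this point z_matrix[i, j] is the fraction of sampled latent states in which
    # columns i and j share a view; e.g. with 4 states of which 2 co-assign columns
    # 0 and 2, z_matrix[0, 2] == 0.5. The dendrogram below is only used to obtain a
    # column ordering that groups strongly co-assigned columns together.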
# hierachically cluster z_matrix
Y = pdist(z_matrix)
Z = linkage(Y)
pylab.figure()
dendrogram(Z)
intify = lambda x: int(x.get_text())
reorder_indices = map(intify, pylab.gca().get_xticklabels())
pylab.close()
# REORDER!
z_matrix_reordered = z_matrix[:, reorder_indices][reorder_indices, :]
column_names_reordered = column_names[reorder_indices]
# actually create figure
fig = pylab.figure()
fig.set_size_inches(16, 12)
pylab.imshow(z_matrix_reordered, interpolation='none',
cmap=pylab.matplotlib.cm.Greens)
pylab.colorbar()
if num_cols < 14:
pylab.gca().set_yticks(range(num_cols))
pylab.gca().set_yticklabels(column_names_reordered, size='x-small')
pylab.gca().set_xticks(range(num_cols))
pylab.gca().set_xticklabels(column_names_reordered, rotation=90, size='x-small')
else:
pylab.gca().set_yticks(range(num_cols)[::2])
pylab.gca().set_yticklabels(column_names_reordered[::2], size='x-small')
pylab.gca().set_xticks(range(num_cols)[1::2])
pylab.gca().set_xticklabels(column_names_reordered[1::2],
rotation=90, size='small')
pylab.title('column dependencies for: %s' % tablename)
pylab.savefig(filename)
pylab.close()
def legend_outside(ax=None, bbox_to_anchor=(0.5, -.25), loc='upper center',
ncol=None, label_cmp=None):
# labels must be set in original plot call: plot(..., label=label)
if ax is None:
ax = pylab.gca()
handles, labels = ax.get_legend_handles_labels()
label_to_handle = dict(zip(labels, handles))
labels = label_to_handle.keys()
if label_cmp is not None:
labels = sorted(labels, cmp=label_cmp)
handles = [label_to_handle[label] for label in labels]
if ncol is None:
ncol = min(len(labels), 3)
lgd = ax.legend(handles, labels, loc=loc, ncol=ncol,
bbox_to_anchor=bbox_to_anchor, prop={"size":14})
return
int_cmp = lambda x, y: cmp(int(x), int(y))
def legend_outside_from_dicts(marker_dict, color_dict,
marker_label_prepend='', color_label_prepend='',
ax=None, bbox_to_anchor=(0.5, -.07), loc='upper center',
ncol=None, label_cmp=None,
marker_color='k'):
marker_handles = []
marker_labels = []
for label in sorted(marker_dict.keys(), cmp=int_cmp):
marker = marker_dict[label]
handle = pylab.Line2D([],[], color=marker_color, marker=marker, linewidth=0)
marker_handles.append(handle)
marker_labels.append(marker_label_prepend+label)
color_handles = []
color_labels = []
for label in sorted(color_dict.keys(), cmp=int_cmp):
color = color_dict[label]
handle = pylab.Line2D([],[], color=color, linewidth=3)
color_handles.append(handle)
color_labels.append(color_label_prepend+label)
num_marker_handles = len(marker_handles)
num_color_handles = len(color_handles)
num_to_add = abs(num_marker_handles - num_color_handles)
if num_marker_handles < num_color_handles:
add_to_handles = marker_handles
add_to_labels = marker_labels
else:
add_to_handles = color_handles
add_to_labels = color_labels
for add_idx in range(num_to_add):
add_to_handles.append(pylab.Line2D([],[], color=None, linewidth=0))
add_to_labels.append('')
handles = gu.roundrobin(marker_handles, color_handles)
labels = gu.roundrobin(marker_labels, color_labels)
if ax is None:
ax = pylab.gca()
if ncol is None:
ncol = max(num_marker_handles, num_color_handles)
lgd = ax.legend(handles, labels, loc=loc, ncol=ncol,
bbox_to_anchor=bbox_to_anchor, prop={"size":14})
return
def savefig_legend_outside(filename, ax=None, bbox_inches='tight', dir='./'):
if ax is None:
ax = pylab.gca()
lgd = ax.get_legend()
fu.ensure_dir(dir)
full_filename = os.path.join(dir, filename)
pylab.savefig(full_filename,
bbox_extra_artists=(lgd,),
bbox_inches=bbox_inches,
)
return
def _plot_diagnostic_with_mean(data_arr, hline=None):
data_mean = data_arr.mean(axis=1)
#
fh = pylab.figure()
pylab.plot(data_arr, color='k')
pylab.plot(data_mean, linewidth=3, color='r')
if hline is not None:
pylab.axhline(hline)
return fh
def plot_diagnostics(diagnostics_dict, hline_lookup=None, which_diagnostics=None):
if which_diagnostics is None:
which_diagnostics = diagnostics_dict.keys()
if hline_lookup is None:
hline_lookup = dict()
for which_diagnostic in which_diagnostics:
data_arr = diagnostics_dict[which_diagnostic]
hline = hline_lookup.get(which_diagnostic)
fh = _plot_diagnostic_with_mean(data_arr, hline=hline)
pylab.xlabel('iter')
pylab.ylabel(which_diagnostic)
return fh
def show_parameters(parameters):
if len(parameters) == 0: return
ax = pylab.gca()
text = gu.get_dict_as_text(parameters)
pylab.text(0, 1, text, transform=ax.transAxes,
va='top', size='small', linespacing=1.0)
return
|
|
from presenter import *
import wiki
import sys
from outbuffer import *
from visitor import *
from entities import SpecificEnabler, DeprecatedSpecificEnabler, Application, PrettyPrinter
import logging
from fidoc import FIdoc
def generate_page(dw, outpage, meta):
# out = FileBuffer(outfile)
out = PageBuffer(dw, outpage)
out << dw.heading(1, "Generated output from FIcontent's Meta-Structure")
generated_content = []
pp = PrettyPrinter()
# Overall timeline of experiments
#######################################
generated_content += [
("Timeline of Experiments", ExperimentTimelinePresenter()),
]
# Experiments per site
#######################################
sites = ["Zurich", "Brittany", "Lancaster", "Cologne", "Berlin", "Barcelona"]
generated_content += [
("Experiments in %s" % s, ExperimentTimelinePresenter(s)) for s in sites
]
# All tested scenarios
#######################################
generated_content += [
("All Tested Scenarios", ListPresenter(TestedScenariosVisitor(), pp.print_Scenario)),
]
# All SEs and their relations
#######################################
generated_content += [(
"Relations of %s SE" % se.get_name(),
SEGraphPresenter(se, pp.dispatch)
) for se in meta.get_specific_enablers()
]
# All SEs and their descriptions
#######################################
generated_content += [(
"Description of %s SE" % se.get_name(),
PropertyPresenter(se, '/spec/documentation/what-it-does')
) for se in meta.get_specific_enablers()
]
# All SEs and their resources
#######################################
generated_content += [(
"Resources of %s SE" % se.get_name(),
ResourcesPresenter(dw, se, pp.dispatch)
) for se in meta.get_specific_enablers()
]
# All SEs and their release cycle
#######################################
generated_content += [(
"Release cycle of %s SE" % se.get_name(),
ReleaseCyclePresenter(dw, se, pp.dispatch)
) for se in meta.get_specific_enablers()
]
# Dependencies per scenario
#######################################
# v = ExperimentsVisitor()
# v.visit(meta_structure)
# experiments = list(set([(e.scenario, e.site) for e in v.result]))
# Dependencies per scenario (only actual usage)
# generated_content += [
# ('Scenario "%s" on Site %s - USES' % e, DependencyPresenter(e[0], e[1], ['USES'])) for e in experiments
# ]
# Dependencies per scenario (actual and planned usage)
# relations = ['USES', 'WILL USE', 'MAY USE']
# generated_content += [
# ('Scenario "%s" on Site %s - ALL' % e, DependencyPresenter(e[0], e[1], relations)) for e in experiments
# ]
# Enablers used in experiments
# niceenabler = lambda e: e.identifier + ' ' + e.entity
# experiments = v.result # [e for e in v.result if (e.site == "Barcelona") and (e.application.identifier == "Smart City Guide (Android App)")]
# generated_content += [(
# 'Enablers tested in Scenario "%s" on Site %s at %s' % (e.scenario, e.site, e.date),
# ListPresenter(
# EnablersTestedVisitor(e.application, ts = e.date),
# niceenabler
# )
# ) for e in experiments
# ]
# GE Utilization
#######################################
generated_content += [(
"Utilization of %s GE" % ge.get_name(),
ListPresenter(UsedByVisitor(
ge,
follow_relations = ['USES'],
collect_entities = [SpecificEnabler, DeprecatedSpecificEnabler, Application]
), pp.dispatch)
) for ge in meta.get_generic_enablers()
]
# Overall Uptake of Generic Enablers
#######################################
generated_content += [
("Overall Uptake of Generic Enablers", UptakePresenter(pp.dispatch, hideunused=True))
]
# FI-PPP SEis Usage and General Information
#######################################
col_fippp = ['name', 'owner', 'product', 'open-source', 'mode', 'last-update', 'next-update', 'assets', 'catalog']
col_overview = ['name', 'owner', 'final-release']
generated_content += [
("FI-PPP SEis Usage and General Information", CockpitPresenter(col_fippp, pp.dispatch)),
("Overview of FIcontent SEs", CockpitPresenter(col_overview, pp.dispatch, sort = ['name']))
]
# SE Discovery Summary
#######################################
generated_content += [
("SE Discovery Summary", SummaryPresenter())
]
# Incomplete/invalid SEis
#######################################
generated_content += [
("Incomplete and/or invalid SEs", ListPresenter(InvalidEntitiesVisitor('SE'), pp.dispatch))
]
# GE Validation Survey
#######################################
# generated_content += [
# ("GE Validation Survey", GESurveyPresenter())
# ]
# Roadmap Releases
#######################################
# releases = set([rel.get_name() for rel in meta.get_releases()])
roadmaps = ['socialtv', 'smartcity', 'gaming', 'common']
for rel in meta.get_releases():
generated_content += [(
"Roadmap %s - %s" % (road, rel.get_name()),
RoadmapPresenter(dw, road, rel)
) for road in roadmaps
]
#######################################
# main generation loop
#######################################
for h, p in generated_content:
logging.info('Generating -> %s ...' % h)
p.present(meta)
out << dw.heading(2, h)
p.dump(out)
out << ''
logging.info("Flushing generated content ...")
out.flush()
def generate_meta_information(fidoc, generatedpage):
dw = fidoc.get_wiki()
meta = fidoc.get_meta_structure()
# pub = fidoc.get_publisher()
    if meta is None:
        logging.fatal("Invalid meta structure.")
        return
    generate_page(dw, generatedpage, meta)
if __name__ == "__main__":
import wikiconfig
metapage = ":FIcontent:private:meta:"
if len(sys.argv) > 1:
metapage = sys.argv[1]
generatedpage = ":FIcontent:private:meta:generated"
if len(sys.argv) > 2:
generatedpage = sys.argv[2]
try:
logging.info("Connecting to remote DokuWiki at %s" % wikiconfig.url)
# dw = wiki.DokuWikiLocal(url, 'pages', 'media')
dw = wiki.DokuWikiRemote(wikiconfig.url, wikiconfig.user, wikiconfig.passwd)
skipchecks = [
# tv
# 'Content Similarity', 'Audio Fingerprinting',
# city
# 'Local Information', 'Recommendation Services',
# gaming
# 'Visual Agent Design', 'Augmented Reality - Marker Tracking', 'Networked Virtual Character',
# common
# 'POI Storage', 'Content Sharing'
]
logging.info("Loading FIdoc object ...")
fidoc = FIdoc(dw, skipchecks)
generate_meta_information(fidoc, generatedpage)
logging.info("Finished")
    except Exception:
        logging.exception("Generation of meta information failed")
|
|
"""Implementation of the cache provider."""
# This plugin was not named "cache" to avoid conflicts with the external
# pytest-cache version.
import json
import os
from pathlib import Path
from typing import Dict
from typing import Generator
from typing import Iterable
from typing import List
from typing import Optional
from typing import Set
from typing import Union
import attr
import py
from .pathlib import resolve_from_str
from .pathlib import rm_rf
from .reports import CollectReport
from _pytest import nodes
from _pytest._io import TerminalWriter
from _pytest.compat import final
from _pytest.config import Config
from _pytest.config import ExitCode
from _pytest.config import hookimpl
from _pytest.config.argparsing import Parser
from _pytest.deprecated import check_ispytest
from _pytest.fixtures import fixture
from _pytest.fixtures import FixtureRequest
from _pytest.main import Session
from _pytest.python import Module
from _pytest.python import Package
from _pytest.reports import TestReport
README_CONTENT = """\
# pytest cache directory #
This directory contains data from the pytest's cache plugin,
which provides the `--lf` and `--ff` options, as well as the `cache` fixture.
**Do not** commit this to version control.
See [the docs](https://docs.pytest.org/en/stable/cache.html) for more information.
"""
CACHEDIR_TAG_CONTENT = b"""\
Signature: 8a477f597d28d172789f06886806bc55
# This file is a cache directory tag created by pytest.
# For information about cache directory tags, see:
# http://www.bford.info/cachedir/spec.html
"""
@final
@attr.s(init=False)
class Cache:
_cachedir = attr.ib(type=Path, repr=False)
_config = attr.ib(type=Config, repr=False)
# sub-directory under cache-dir for directories created by "makedir"
_CACHE_PREFIX_DIRS = "d"
# sub-directory under cache-dir for values created by "set"
_CACHE_PREFIX_VALUES = "v"
def __init__(
self, cachedir: Path, config: Config, *, _ispytest: bool = False
) -> None:
check_ispytest(_ispytest)
self._cachedir = cachedir
self._config = config
@classmethod
def for_config(cls, config: Config, *, _ispytest: bool = False) -> "Cache":
"""Create the Cache instance for a Config.
:meta private:
"""
check_ispytest(_ispytest)
cachedir = cls.cache_dir_from_config(config, _ispytest=True)
if config.getoption("cacheclear") and cachedir.is_dir():
cls.clear_cache(cachedir, _ispytest=True)
return cls(cachedir, config, _ispytest=True)
@classmethod
def clear_cache(cls, cachedir: Path, _ispytest: bool = False) -> None:
"""Clear the sub-directories used to hold cached directories and values.
:meta private:
"""
check_ispytest(_ispytest)
for prefix in (cls._CACHE_PREFIX_DIRS, cls._CACHE_PREFIX_VALUES):
d = cachedir / prefix
if d.is_dir():
rm_rf(d)
@staticmethod
def cache_dir_from_config(config: Config, *, _ispytest: bool = False) -> Path:
"""Get the path to the cache directory for a Config.
:meta private:
"""
check_ispytest(_ispytest)
return resolve_from_str(config.getini("cache_dir"), config.rootpath)
def warn(self, fmt: str, *, _ispytest: bool = False, **args: object) -> None:
"""Issue a cache warning.
:meta private:
"""
check_ispytest(_ispytest)
import warnings
from _pytest.warning_types import PytestCacheWarning
warnings.warn(
PytestCacheWarning(fmt.format(**args) if args else fmt),
self._config.hook,
stacklevel=3,
)
def makedir(self, name: str) -> py.path.local:
"""Return a directory path object with the given name.
If the directory does not yet exist, it will be created. You can use
it to manage files to e.g. store/retrieve database dumps across test
sessions.
:param name:
Must be a string not containing a ``/`` separator.
Make sure the name contains your plugin or application
identifiers to prevent clashes with other cache users.
"""
path = Path(name)
if len(path.parts) > 1:
raise ValueError("name is not allowed to contain path separators")
res = self._cachedir.joinpath(self._CACHE_PREFIX_DIRS, path)
res.mkdir(exist_ok=True, parents=True)
return py.path.local(res)
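    # A minimal usage sketch (plugin/test code, not part of this module; the
    # directory name is made up):
    #
    #   dump_dir = config.cache.makedir("myplugin_db_dumps")
    #   dump_dir.join("dump.sql").write(data)
    #
    # The same directory comes back on the next test session, so files placed
    # there persist across runs.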
def _getvaluepath(self, key: str) -> Path:
return self._cachedir.joinpath(self._CACHE_PREFIX_VALUES, Path(key))
def get(self, key: str, default):
"""Return the cached value for the given key.
If no value was yet cached or the value cannot be read, the specified
default is returned.
:param key:
Must be a ``/`` separated value. Usually the first
name is the name of your plugin or your application.
:param default:
The value to return in case of a cache-miss or invalid cache value.
"""
path = self._getvaluepath(key)
try:
with path.open("r") as f:
return json.load(f)
except (ValueError, OSError):
return default
def set(self, key: str, value: object) -> None:
"""Save value for the given key.
:param key:
Must be a ``/`` separated value. Usually the first
name is the name of your plugin or your application.
:param value:
Must be of any combination of basic python types,
including nested types like lists of dictionaries.
"""
path = self._getvaluepath(key)
try:
if path.parent.is_dir():
cache_dir_exists_already = True
else:
cache_dir_exists_already = self._cachedir.exists()
path.parent.mkdir(exist_ok=True, parents=True)
except OSError:
self.warn("could not create cache path {path}", path=path, _ispytest=True)
return
if not cache_dir_exists_already:
self._ensure_supporting_files()
data = json.dumps(value, indent=2, sort_keys=True)
try:
f = path.open("w")
except OSError:
self.warn("cache could not write path {path}", path=path, _ispytest=True)
else:
with f:
f.write(data)
def _ensure_supporting_files(self) -> None:
"""Create supporting files in the cache dir that are not really part of the cache."""
readme_path = self._cachedir / "README.md"
readme_path.write_text(README_CONTENT)
gitignore_path = self._cachedir.joinpath(".gitignore")
msg = "# Created by pytest automatically.\n*\n"
gitignore_path.write_text(msg, encoding="UTF-8")
cachedir_tag_path = self._cachedir.joinpath("CACHEDIR.TAG")
cachedir_tag_path.write_bytes(CACHEDIR_TAG_CONTENT)
class LFPluginCollWrapper:
def __init__(self, lfplugin: "LFPlugin") -> None:
self.lfplugin = lfplugin
self._collected_at_least_one_failure = False
@hookimpl(hookwrapper=True)
def pytest_make_collect_report(self, collector: nodes.Collector):
if isinstance(collector, Session):
out = yield
res: CollectReport = out.get_result()
# Sort any lf-paths to the beginning.
lf_paths = self.lfplugin._last_failed_paths
res.result = sorted(
res.result,
key=lambda x: 0 if Path(str(x.fspath)) in lf_paths else 1,
)
return
elif isinstance(collector, Module):
if Path(str(collector.fspath)) in self.lfplugin._last_failed_paths:
out = yield
res = out.get_result()
result = res.result
lastfailed = self.lfplugin.lastfailed
# Only filter with known failures.
if not self._collected_at_least_one_failure:
if not any(x.nodeid in lastfailed for x in result):
return
self.lfplugin.config.pluginmanager.register(
LFPluginCollSkipfiles(self.lfplugin), "lfplugin-collskip"
)
self._collected_at_least_one_failure = True
session = collector.session
result[:] = [
x
for x in result
if x.nodeid in lastfailed
# Include any passed arguments (not trivial to filter).
or session.isinitpath(x.fspath)
# Keep all sub-collectors.
or isinstance(x, nodes.Collector)
]
return
yield
class LFPluginCollSkipfiles:
def __init__(self, lfplugin: "LFPlugin") -> None:
self.lfplugin = lfplugin
@hookimpl
def pytest_make_collect_report(
self, collector: nodes.Collector
) -> Optional[CollectReport]:
# Packages are Modules, but _last_failed_paths only contains
# test-bearing paths and doesn't try to include the paths of their
# packages, so don't filter them.
if isinstance(collector, Module) and not isinstance(collector, Package):
if Path(str(collector.fspath)) not in self.lfplugin._last_failed_paths:
self.lfplugin._skipped_files += 1
return CollectReport(
collector.nodeid, "passed", longrepr=None, result=[]
)
return None
class LFPlugin:
"""Plugin which implements the --lf (run last-failing) option."""
def __init__(self, config: Config) -> None:
self.config = config
active_keys = "lf", "failedfirst"
self.active = any(config.getoption(key) for key in active_keys)
assert config.cache
self.lastfailed: Dict[str, bool] = config.cache.get("cache/lastfailed", {})
self._previously_failed_count: Optional[int] = None
self._report_status: Optional[str] = None
self._skipped_files = 0 # count skipped files during collection due to --lf
if config.getoption("lf"):
self._last_failed_paths = self.get_last_failed_paths()
config.pluginmanager.register(
LFPluginCollWrapper(self), "lfplugin-collwrapper"
)
def get_last_failed_paths(self) -> Set[Path]:
"""Return a set with all Paths()s of the previously failed nodeids."""
rootpath = self.config.rootpath
result = {rootpath / nodeid.split("::")[0] for nodeid in self.lastfailed}
return {x for x in result if x.exists()}
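    # For example, a recorded failure with nodeid "tests/test_foo.py::test_bar"
    # maps to rootpath / "tests/test_foo.py"; paths that no longer exist are
    # dropped so stale entries cannot force missing files through collection.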
def pytest_report_collectionfinish(self) -> Optional[str]:
if self.active and self.config.getoption("verbose") >= 0:
return "run-last-failure: %s" % self._report_status
return None
def pytest_runtest_logreport(self, report: TestReport) -> None:
if (report.when == "call" and report.passed) or report.skipped:
self.lastfailed.pop(report.nodeid, None)
elif report.failed:
self.lastfailed[report.nodeid] = True
def pytest_collectreport(self, report: CollectReport) -> None:
passed = report.outcome in ("passed", "skipped")
if passed:
if report.nodeid in self.lastfailed:
self.lastfailed.pop(report.nodeid)
self.lastfailed.update((item.nodeid, True) for item in report.result)
else:
self.lastfailed[report.nodeid] = True
@hookimpl(hookwrapper=True, tryfirst=True)
def pytest_collection_modifyitems(
self, config: Config, items: List[nodes.Item]
) -> Generator[None, None, None]:
yield
if not self.active:
return
if self.lastfailed:
previously_failed = []
previously_passed = []
for item in items:
if item.nodeid in self.lastfailed:
previously_failed.append(item)
else:
previously_passed.append(item)
self._previously_failed_count = len(previously_failed)
if not previously_failed:
# Running a subset of all tests with recorded failures
# only outside of it.
self._report_status = "%d known failures not in selected tests" % (
len(self.lastfailed),
)
else:
if self.config.getoption("lf"):
items[:] = previously_failed
config.hook.pytest_deselected(items=previously_passed)
else: # --failedfirst
items[:] = previously_failed + previously_passed
noun = "failure" if self._previously_failed_count == 1 else "failures"
suffix = " first" if self.config.getoption("failedfirst") else ""
self._report_status = "rerun previous {count} {noun}{suffix}".format(
count=self._previously_failed_count, suffix=suffix, noun=noun
)
if self._skipped_files > 0:
files_noun = "file" if self._skipped_files == 1 else "files"
self._report_status += " (skipped {files} {files_noun})".format(
files=self._skipped_files, files_noun=files_noun
)
else:
self._report_status = "no previously failed tests, "
if self.config.getoption("last_failed_no_failures") == "none":
self._report_status += "deselecting all items."
config.hook.pytest_deselected(items=items[:])
items[:] = []
else:
self._report_status += "not deselecting items."
def pytest_sessionfinish(self, session: Session) -> None:
config = self.config
if config.getoption("cacheshow") or hasattr(config, "workerinput"):
return
assert config.cache is not None
saved_lastfailed = config.cache.get("cache/lastfailed", {})
if saved_lastfailed != self.lastfailed:
config.cache.set("cache/lastfailed", self.lastfailed)
class NFPlugin:
"""Plugin which implements the --nf (run new-first) option."""
def __init__(self, config: Config) -> None:
self.config = config
self.active = config.option.newfirst
assert config.cache is not None
self.cached_nodeids = set(config.cache.get("cache/nodeids", []))
@hookimpl(hookwrapper=True, tryfirst=True)
def pytest_collection_modifyitems(
self, items: List[nodes.Item]
) -> Generator[None, None, None]:
yield
if self.active:
new_items: Dict[str, nodes.Item] = {}
other_items: Dict[str, nodes.Item] = {}
for item in items:
if item.nodeid not in self.cached_nodeids:
new_items[item.nodeid] = item
else:
other_items[item.nodeid] = item
items[:] = self._get_increasing_order(
new_items.values()
) + self._get_increasing_order(other_items.values())
self.cached_nodeids.update(new_items)
else:
self.cached_nodeids.update(item.nodeid for item in items)
def _get_increasing_order(self, items: Iterable[nodes.Item]) -> List[nodes.Item]:
return sorted(items, key=lambda item: item.fspath.mtime(), reverse=True)
def pytest_sessionfinish(self) -> None:
config = self.config
if config.getoption("cacheshow") or hasattr(config, "workerinput"):
return
if config.getoption("collectonly"):
return
assert config.cache is not None
config.cache.set("cache/nodeids", sorted(self.cached_nodeids))
def pytest_addoption(parser: Parser) -> None:
group = parser.getgroup("general")
group.addoption(
"--lf",
"--last-failed",
action="store_true",
dest="lf",
help="rerun only the tests that failed "
"at the last run (or all if none failed)",
)
group.addoption(
"--ff",
"--failed-first",
action="store_true",
dest="failedfirst",
help="run all tests, but run the last failures first.\n"
"This may re-order tests and thus lead to "
"repeated fixture setup/teardown.",
)
group.addoption(
"--nf",
"--new-first",
action="store_true",
dest="newfirst",
help="run tests from new files first, then the rest of the tests "
"sorted by file mtime",
)
group.addoption(
"--cache-show",
action="append",
nargs="?",
dest="cacheshow",
help=(
"show cache contents, don't perform collection or tests. "
"Optional argument: glob (default: '*')."
),
)
group.addoption(
"--cache-clear",
action="store_true",
dest="cacheclear",
help="remove all cache contents at start of test run.",
)
cache_dir_default = ".pytest_cache"
if "TOX_ENV_DIR" in os.environ:
cache_dir_default = os.path.join(os.environ["TOX_ENV_DIR"], cache_dir_default)
parser.addini("cache_dir", default=cache_dir_default, help="cache directory path.")
group.addoption(
"--lfnf",
"--last-failed-no-failures",
action="store",
dest="last_failed_no_failures",
choices=("all", "none"),
default="all",
help="which tests to run with no previously (known) failures.",
)
def pytest_cmdline_main(config: Config) -> Optional[Union[int, ExitCode]]:
if config.option.cacheshow:
from _pytest.main import wrap_session
return wrap_session(config, cacheshow)
return None
@hookimpl(tryfirst=True)
def pytest_configure(config: Config) -> None:
config.cache = Cache.for_config(config, _ispytest=True)
config.pluginmanager.register(LFPlugin(config), "lfplugin")
config.pluginmanager.register(NFPlugin(config), "nfplugin")
@fixture
def cache(request: FixtureRequest) -> Cache:
"""Return a cache object that can persist state between testing sessions.
cache.get(key, default)
cache.set(key, value)
Keys must be ``/`` separated strings, where the first part is usually the
name of your plugin or application to avoid clashes with other cache users.
Values can be any object handled by the json stdlib module.
"""
assert request.config.cache is not None
return request.config.cache
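# A minimal usage sketch (test code, not part of this module; the key and the
# helper compute_expensive_value() are hypothetical):
#
#   def test_expensive(cache):
#       val = cache.get("myplugin/result", None)
#       if val is None:
#           val = compute_expensive_value()
#           cache.set("myplugin/result", val)
#       assert val is not None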
def pytest_report_header(config: Config) -> Optional[str]:
"""Display cachedir with --cache-show and if non-default."""
if config.option.verbose > 0 or config.getini("cache_dir") != ".pytest_cache":
assert config.cache is not None
cachedir = config.cache._cachedir
# TODO: evaluate generating upward relative paths
# starting with .., ../.. if sensible
try:
displaypath = cachedir.relative_to(config.rootpath)
except ValueError:
displaypath = cachedir
return f"cachedir: {displaypath}"
return None
def cacheshow(config: Config, session: Session) -> int:
from pprint import pformat
assert config.cache is not None
tw = TerminalWriter()
tw.line("cachedir: " + str(config.cache._cachedir))
if not config.cache._cachedir.is_dir():
tw.line("cache is empty")
return 0
glob = config.option.cacheshow[0]
if glob is None:
glob = "*"
dummy = object()
basedir = config.cache._cachedir
vdir = basedir / Cache._CACHE_PREFIX_VALUES
tw.sep("-", "cache values for %r" % glob)
for valpath in sorted(x for x in vdir.rglob(glob) if x.is_file()):
key = str(valpath.relative_to(vdir))
val = config.cache.get(key, dummy)
if val is dummy:
tw.line("%s contains unreadable content, will be ignored" % key)
else:
tw.line("%s contains:" % key)
for line in pformat(val).splitlines():
tw.line(" " + line)
ddir = basedir / Cache._CACHE_PREFIX_DIRS
if ddir.is_dir():
contents = sorted(ddir.rglob(glob))
tw.sep("-", "cache directories for %r" % glob)
for p in contents:
# if p.check(dir=1):
# print("%s/" % p.relto(basedir))
if p.is_file():
key = str(p.relative_to(basedir))
tw.line(f"{key} is a file of length {p.stat().st_size:d}")
return 0
|
|
r"""
Greb is a command line tool to find meaning of words.
Usage: greb (<WORD> [-leyn] [-h | --help] | -d | -t | -w)
Options:
-l --all Lists everything
-e --sen Lists sentence
-y --syn Lists synonyms
-n --ant Lists antonyms
-d --rdm Displays a random from searched history
-t --trn Displays trending words from Merriam Webster
-w --wrd Displays the word of the day from Merriam Webster
--version Lists version
-h --help Lists help
"""
from __future__ import absolute_import
import json
import os
import requests
from bs4 import BeautifulSoup
from collections import OrderedDict
from colorama import Fore
from docopt import docopt
from os.path import expanduser
from random import SystemRandom
from . import opts
__version__ = '0.0.8'
HOME_PAGE_URL = 'http://www.merriam-webster.com'
BASE_URL = 'http://www.merriam-webster.com/dictionary/{word}'
HOME = expanduser('~')
MEANINGS_FILE_NAME = 'meanings.json'
FILE_PATH = os.path.join(HOME, MEANINGS_FILE_NAME)
SUGGESTION_CHECK_STRING = 'spelling suggestion below'
requests.packages.urllib3.disable_warnings()
def print_word(word):
print('\n'+'#'*26)
print('#{:^24}#'.format(word.upper()))
print('#'*26)
def print_heading(heading, color=None):
'''prints the heading for a section of output'''
heading = heading.upper()
color = color or eval(opts.COLOR.get(heading, 'Fore.WHITE'))
print('')
print(color + heading + Fore.RESET)
print('')
def print_error_messages(msg):
'''prints the error messgaes in red'''
print(Fore.RED + msg + Fore.RESET)
print('')
def print_result(result):
for key, value in result.items():
if value:
if key in ('info_msg',):
print(value)
elif key not in ('word',):
print_heading(key)
for each in value:
print(each)
print('')
else:
print_word(value)
else:
print_error_messages('Ohh! There is no value for {}.'.format(key))
def write_meaning_to_file(meaning_as_json, file_path=None):
'''saves the meaning json to the file `meanings.json` under home directory'''
meanings_file_path = FILE_PATH
if file_path:
meanings_file_path = file_path
if not os.path.isfile(meanings_file_path):
with open(meanings_file_path, 'w') as f:
json.dump([], f)
# first read the contents of file
with open(meanings_file_path, 'r') as f:
existing_meanings = json.load(f)
# before appending check if the word exists or not
existing_words = [each['word'] for each in existing_meanings]
# append the current meaning only if it is not already there
if meaning_as_json['word'] not in existing_words:
existing_meanings.append(meaning_as_json)
# write this to the same file
with open(meanings_file_path, 'w') as f:
json.dump(existing_meanings, f, indent=2)
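# For illustration, after a couple of lookups meanings.json holds a list of entries
# of the form below (the word/meaning values are made-up examples, not real output):
#
#   [
#     {"word": "serendipity", "meaning": ["finding valuable things not sought for"]},
#     {"word": "laconic", "meaning": ["using few words"]}
#   ]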
def find_meaning_from_history(file_path=None):
'''displays a random meaning from searched history.
searched history is saved in a file `meanings.json` under home directory'''
searched_meaning = OrderedDict()
random_instance = SystemRandom()
meanings_file_path = FILE_PATH
if file_path:
meanings_file_path = file_path
if os.path.isfile(meanings_file_path):
with open(meanings_file_path, 'r') as f:
all_meanings_searched = json.load(f)
r_int = random_instance.randrange(len(all_meanings_searched))
        # to not break the existing meanings file, we need to create an OrderedDict here
# so that word comes before meaning key
searched_meaning['word'] = all_meanings_searched[r_int]['word']
searched_meaning['meaning'] = all_meanings_searched[r_int]['meaning']
return searched_meaning
def find_sentences(tree, word):
sentences = []
try:
sentence_html = tree.find('div', {'class': 'card-primary-content def-text'}).find_all('li')
except (AttributeError, Exception) as e: # noqa
sentence_html = []
if sentence_html:
for i, each in enumerate(sentence_html, 1):
each = (Fore.CYAN + str(i) + '. ' + Fore.RESET +
each.get_text().replace(word, Fore.CYAN + word + Fore.RESET))
sentences.append(each)
return sentences
def find_synonyms(tree):
'''prints the synonyms for a given word'''
synonyms = []
synonyms_html = tree.find('div', {'class': 'card-box small-box related-box end'})
if synonyms_html:
synonyms_html = synonyms_html.find('div', {'class': 'definition-block'})
synonyms_str = synonyms_html.get_text()
synonyms_str = synonyms_str[synonyms_str.find('Synonyms') + len('Synonyms '): synonyms_str.find('Antonyms')]
synonyms.append(synonyms_str)
return synonyms
def find_antonyms(tree):
'''prints the antonyms for a given word'''
antonyms = []
antonyms_html = tree.find('div', {'class': 'card-box small-box related-box end'})
if antonyms_html:
antonyms_html = antonyms_html.find('div', {'class': 'definition-block'})
antonyms_str = antonyms_html.get_text()
antonyms_str = antonyms_str[antonyms_str.find('Antonyms') + len('Antonyms '):
antonyms_str.find('Related Words')]
antonyms.append(antonyms_str)
return antonyms
def find_trending_words(tree):
'''prints the trending words on Merriam Webster'''
trending_words = []
try:
trending_words_html = tree.find('div', {'class': 'wgt-wap-home-trending-items'}).find_all('li')
except (AttributeError, Exception) as e: # noqa
trending_words_html = []
if trending_words_html:
for i, each in enumerate(trending_words_html, 1):
word = each.find('p', {'class': 'title'}).get_text().strip()
desc = each.find('p', {'class': 'blurb'}).get_text().strip()
each = (Fore.RED + str(i) + ' ' + word + Fore.RESET +
' --> ' + Fore.YELLOW + desc + Fore.RESET)
trending_words.append(each)
return trending_words
def find_word_of_the_day(tree):
'''prints the word of the day from Merriam Webster'''
word_of_day = []
word_of_day_html = tree.find('div', {'class': 'wgt-wod-home'})
if word_of_day_html:
word = word_of_day_html.find('h4', {'class': 'wh-word'}).get_text().strip()
meaning = word_of_day_html.find('p', {'class': 'wh-def-text'}).get_text().strip()
word_of_day_str = (Fore.GREEN + word.upper() + Fore.RESET + ' : ' + Fore.YELLOW + meaning + Fore.RESET)
word_of_day.append(word_of_day_str)
return word_of_day
def find_suggestions(tree):
'''lists the suggestions for a word in case of 404'''
result = OrderedDict()
if SUGGESTION_CHECK_STRING in tree.get_text():
suggestion_html = tree.find_all('p', {'class': 'definition-inner-item with-sense'})
if suggestion_html:
info_msg = ('\n' + Fore.BLUE + 'It seems that you have not entered a valid word. '
'I know' + Fore.RESET + Fore.GREEN + ' To err is human.' +
Fore.RESET + Fore.BLUE + ' Hence the suggestions.' + Fore.RESET)
result['info_msg'] = info_msg
suggestion_str = ', '.join([each.get_text() for each in suggestion_html[0].find_all('a')])
result['suggestion'] = [suggestion_str]
else:
result['info_msg'] = ("The word you've entered was not found. However I tried finding suggestions "
"thinking that you may have misspelled the word. But I failed miserably :(")
return result
def read_page(url, timeout=5):
try:
response = requests.get(url, timeout=timeout, headers={'User-Agent':
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:11.0) Gecko/20100101 Firefox/11.0'})
except requests.exceptions.ConnectionError as e: # noqa
return(None, False)
return(response, response.status_code)
def make_parse_tree(url):
response, status_code = read_page(url)
    if status_code in [200, 404]:
        response = BeautifulSoup(response.text, 'html.parser')
    # return the pair unconditionally so callers can handle other status codes too
    return (response, status_code)
def find_meaning(tree):
meaning_div = (tree.select('ul > li > p.definition-inner-item') or
tree.select('div.card-primary-content')[0].find_all('p') or
tree.find_all('p', {'class': 'definition-inner-item with-sense'}))
if meaning_div:
meanings = []
for each in meaning_div:
each = each.get_text().strip().encode('ascii', 'ignore')
meanings.append(each.decode('utf-8'))
else:
meanings = None
return meanings
def greb(**kwargs):
terminal_display = kwargs.get('display_terminal', False)
if terminal_display:
result = find_meaning_from_history()
print_result(result)
else:
word = kwargs.get('word', None)
if word:
url = BASE_URL.format(word=word)
else:
url = HOME_PAGE_URL
tree, status_code = make_parse_tree(url)
result = OrderedDict()
if status_code == requests.codes.ok:
if kwargs.get('meaning', False):
meanings = find_meaning(tree)
result['word'] = word
result['meaning'] = meanings
file_path = kwargs.get('file_path', None)
if meanings:
write_meaning_to_file(result, file_path=file_path)
if kwargs.get('sentence', False):
sentences = find_sentences(tree, word)
result['sentence'] = sentences
if kwargs.get('synonym', False):
synonyms = find_synonyms(tree)
result['synonym'] = synonyms
if kwargs.get('antonym', False):
antonyms = find_antonyms(tree)
result['antonym'] = antonyms
if kwargs.get('trending_words', False):
trending_words = find_trending_words(tree)
result['trending words'] = trending_words
if kwargs.get('word_of_day', False):
word_of_day = find_word_of_the_day(tree)
result['word of the day'] = word_of_day
elif status_code == 404:
result = find_suggestions(tree)
else:
result['info_msg'] = 'Can you please check whether your Net Connection is working properly'
print_result(result)
def main():
'''greb is a command line tool to find meanings'''
arguments = docopt(__doc__, version=__version__)
options = {}
if not arguments:
print(__doc__)
else:
if arguments.get('-d') or arguments.get('--rdm'):
options.update({
'display_terminal': True
})
elif arguments.get('-t') or arguments.get('--trn'):
options.update({
'trending_words': True
})
elif arguments.get('-w') or arguments.get('--wrd'):
options.update({
'word_of_day': True
})
elif arguments['<WORD>']:
options.update({
'word': arguments['<WORD>'].lower().strip(),
'meaning': True
})
if (arguments.get('-l') or arguments.get('--all')):
flag_sentence, flag_synonym, flag_antonym = [True]*3
else:
flag_sentence = (arguments.get('-e') or arguments.get('--sen')) or False
flag_synonym = (arguments.get('-y') or arguments.get('--syn')) or False
flag_antonym = (arguments.get('-n') or arguments.get('--ant')) or False
options.update({
'sentence': flag_sentence,
'synonym': flag_synonym,
'antonym': flag_antonym,
})
greb(**options)
if __name__ == '__main__':
main()
|
|
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test REST interface
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from struct import *
from io import BytesIO
from codecs import encode
import binascii
try:
import http.client as httplib
except ImportError:
import httplib
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
def deser_uint256(f):
r = 0
for i in range(8):
t = unpack(b"<I", f.read(4))[0]
r += t << (i * 32)
return r
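# deser_uint256 reads eight little-endian 32-bit words and stitches them into one
# 256-bit integer; as a small check, the bytes 01 00 00 00 followed by 28 zero
# bytes deserialize to the integer 1.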
#allows simple http get calls
def http_get_call(host, port, path, response_object = 0):
conn = httplib.HTTPConnection(host, port)
conn.request('GET', path)
if response_object:
return conn.getresponse()
return conn.getresponse().read().decode('utf-8')
#allows simple http post calls with a request body
def http_post_call(host, port, path, requestdata = '', response_object = 0):
conn = httplib.HTTPConnection(host, port)
conn.request('POST', path, requestdata)
if response_object:
return conn.getresponse()
return conn.getresponse().read()
class RESTTest (BitcoinTestFramework):
FORMAT_SEPARATOR = "."
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 3)
def setup_network(self, split=False):
self.nodes = start_nodes(3, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
self.is_network_split=False
self.sync_all()
def run_test(self):
url = urlparse.urlparse(self.nodes[0].url)
print "Mining blocks..."
self.nodes[0].generate(1)
self.sync_all()
self.nodes[2].generate(100)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), 50)
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
self.sync_all()
self.nodes[2].generate(1)
self.sync_all()
bb_hash = self.nodes[0].getbestblockhash()
assert_equal(self.nodes[1].getbalance(), Decimal("0.1")) #balance now should be 0.1 on node 1
# load the latest 0.1 tx over the REST API
json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+txid+self.FORMAT_SEPARATOR+"json")
json_obj = json.loads(json_string)
vintx = json_obj['vin'][0]['txid'] # get the vin to later check for utxo (should be spent by then)
# get n of 0.1 outpoint
n = 0
for vout in json_obj['vout']:
if vout['value'] == 0.1:
n = vout['n']
######################################
# GETUTXOS: query a unspent outpoint #
######################################
json_request = '/checkmempool/'+txid+'-'+str(n)
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
#check chainTip response
assert_equal(json_obj['chaintipHash'], bb_hash)
#make sure there is one utxo
assert_equal(len(json_obj['utxos']), 1)
assert_equal(json_obj['utxos'][0]['value'], 0.1)
################################################
# GETUTXOS: now query a already spent outpoint #
################################################
json_request = '/checkmempool/'+vintx+'-0'
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
#check chainTip response
assert_equal(json_obj['chaintipHash'], bb_hash)
        #make sure there is no utxo in the response because this outpoint has been spent
assert_equal(len(json_obj['utxos']), 0)
#check bitmap
assert_equal(json_obj['bitmap'], "0")
##################################################
# GETUTXOS: now check both with the same request #
##################################################
json_request = '/checkmempool/'+txid+'-'+str(n)+'/'+vintx+'-0'
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
assert_equal(len(json_obj['utxos']), 1)
assert_equal(json_obj['bitmap'], "10")
#test binary response
bb_hash = self.nodes[0].getbestblockhash()
binaryRequest = b'\x01\x02'
binaryRequest += hex_str_to_bytes(txid)
binaryRequest += pack("i", n)
binaryRequest += hex_str_to_bytes(vintx)
binaryRequest += pack("i", 0)
bin_response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', binaryRequest)
output = BytesIO()
output.write(bin_response)
output.seek(0)
chainHeight = unpack("i", output.read(4))[0]
hashFromBinResponse = hex(deser_uint256(output))[2:].zfill(65).rstrip("L")
assert_equal(bb_hash, hashFromBinResponse) #check if getutxo's chaintip during calculation was fine
assert_equal(chainHeight, 102) #chain height must be 102
############################
# GETUTXOS: mempool checks #
############################
# do a tx and don't sync
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+txid+self.FORMAT_SEPARATOR+"json")
json_obj = json.loads(json_string)
vintx = json_obj['vin'][0]['txid'] # get the vin to later check for utxo (should be spent by then)
# get n of 0.1 outpoint
n = 0
for vout in json_obj['vout']:
if vout['value'] == 0.1:
n = vout['n']
json_request = '/'+txid+'-'+str(n)
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
        assert_equal(len(json_obj['utxos']), 0) #there should be no outpoint because the tx is unconfirmed and /checkmempool/ was not used
json_request = '/checkmempool/'+txid+'-'+str(n)
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
        assert_equal(len(json_obj['utxos']), 1) #there should be an outpoint because the tx is in the mempool and /checkmempool/ was used
#do some invalid requests
json_request = '{"checkmempool'
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'json', json_request, True)
        assert_equal(response.status, 500) #must be a 500 because we send an invalid json request
json_request = '{"checkmempool'
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', json_request, True)
        assert_equal(response.status, 500) #must be a 500 because we send an invalid bin request
response = http_post_call(url.hostname, url.port, '/rest/getutxos/checkmempool'+self.FORMAT_SEPARATOR+'bin', '', True)
        assert_equal(response.status, 500) #must be a 500 because we send an invalid bin request
#test limits
json_request = '/checkmempool/'
for x in range(0, 20):
json_request += txid+'-'+str(n)+'/'
json_request = json_request.rstrip("/")
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
        assert_equal(response.status, 500) #must be a 500 because we are exceeding the limits
json_request = '/checkmempool/'
for x in range(0, 15):
json_request += txid+'-'+str(n)+'/'
json_request = json_request.rstrip("/")
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
        assert_equal(response.status, 200) #must be a 200 because we stay within the limits
self.nodes[0].generate(1) #generate block to not affect upcoming tests
self.sync_all()
################
# /rest/block/ #
################
# check binary format
response = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+"bin", True)
assert_equal(response.status, 200)
assert_greater_than(int(response.getheader('content-length')), 80)
response_str = response.read()
# compare with block header
response_header = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"bin", True)
assert_equal(response_header.status, 200)
assert_equal(int(response_header.getheader('content-length')), 80)
response_header_str = response_header.read()
assert_equal(response_str[0:80], response_header_str)
# check block hex format
response_hex = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+"hex", True)
assert_equal(response_hex.status, 200)
assert_greater_than(int(response_hex.getheader('content-length')), 160)
response_hex_str = response_hex.read()
assert_equal(encode(response_str, "hex_codec")[0:160], response_hex_str[0:160])
# compare with hex block header
response_header_hex = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"hex", True)
assert_equal(response_header_hex.status, 200)
assert_greater_than(int(response_header_hex.getheader('content-length')), 160)
response_header_hex_str = response_header_hex.read()
assert_equal(response_hex_str[0:160], response_header_hex_str[0:160])
assert_equal(encode(response_header_str, "hex_codec")[0:160], response_header_hex_str[0:160])
# check json format
block_json_string = http_get_call(url.hostname, url.port, '/rest/block/'+bb_hash+self.FORMAT_SEPARATOR+'json')
block_json_obj = json.loads(block_json_string)
assert_equal(block_json_obj['hash'], bb_hash)
# compare with json block header
response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/1/'+bb_hash+self.FORMAT_SEPARATOR+"json", True)
assert_equal(response_header_json.status, 200)
response_header_json_str = response_header_json.read().decode('utf-8')
json_obj = json.loads(response_header_json_str, parse_float=Decimal)
assert_equal(len(json_obj), 1) #ensure that there is one header in the json response
assert_equal(json_obj[0]['hash'], bb_hash) #request/response hash should be the same
#compare with normal RPC block response
rpc_block_json = self.nodes[0].getblock(bb_hash)
assert_equal(json_obj[0]['hash'], rpc_block_json['hash'])
assert_equal(json_obj[0]['confirmations'], rpc_block_json['confirmations'])
assert_equal(json_obj[0]['height'], rpc_block_json['height'])
assert_equal(json_obj[0]['version'], rpc_block_json['version'])
assert_equal(json_obj[0]['merkleroot'], rpc_block_json['merkleroot'])
assert_equal(json_obj[0]['time'], rpc_block_json['time'])
assert_equal(json_obj[0]['nonce'], rpc_block_json['nonce'])
assert_equal(json_obj[0]['bits'], rpc_block_json['bits'])
assert_equal(json_obj[0]['difficulty'], rpc_block_json['difficulty'])
assert_equal(json_obj[0]['chainwork'], rpc_block_json['chainwork'])
assert_equal(json_obj[0]['previousblockhash'], rpc_block_json['previousblockhash'])
#see if we can get 5 headers in one response
self.nodes[1].generate(5)
self.sync_all()
response_header_json = http_get_call(url.hostname, url.port, '/rest/headers/5/'+bb_hash+self.FORMAT_SEPARATOR+"json", True)
assert_equal(response_header_json.status, 200)
response_header_json_str = response_header_json.read().decode('utf-8')
json_obj = json.loads(response_header_json_str)
assert_equal(len(json_obj), 5) #now we should have 5 header objects
# do tx test
tx_hash = block_json_obj['tx'][0]['txid']
json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+tx_hash+self.FORMAT_SEPARATOR+"json")
json_obj = json.loads(json_string)
assert_equal(json_obj['txid'], tx_hash)
# check hex format response
hex_string = http_get_call(url.hostname, url.port, '/rest/tx/'+tx_hash+self.FORMAT_SEPARATOR+"hex", True)
assert_equal(hex_string.status, 200)
assert_greater_than(int(hex_string.getheader('content-length')), 10)
# check block tx details
# let's make 3 tx and mine them on node 1
txs = []
txs.append(self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11))
txs.append(self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11))
txs.append(self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11))
self.sync_all()
# check that there are exactly 3 transactions in the TX memory pool before generating the block
json_string = http_get_call(url.hostname, url.port, '/rest/mempool/info'+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
assert_equal(json_obj['size'], 3)
# the size of the memory pool should be greater than 3x ~100 bytes
assert_greater_than(json_obj['bytes'], 300)
# check that there are our submitted transactions in the TX memory pool
json_string = http_get_call(url.hostname, url.port, '/rest/mempool/contents'+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
for tx in txs:
assert_equal(tx in json_obj, True)
# now mine the transactions
newblockhash = self.nodes[1].generate(1)
self.sync_all()
#check if the 3 tx show up in the new block
json_string = http_get_call(url.hostname, url.port, '/rest/block/'+newblockhash[0]+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
for tx in json_obj['tx']:
if not 'coinbase' in tx['vin'][0]: #exclude coinbase
assert_equal(tx['txid'] in txs, True)
#check the same but without tx details
json_string = http_get_call(url.hostname, url.port, '/rest/block/notxdetails/'+newblockhash[0]+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
for tx in txs:
assert_equal(tx in json_obj['tx'], True)
# check that /rest/chaininfo.json reports the current best block hash
bb_hash = self.nodes[0].getbestblockhash()
json_string = http_get_call(url.hostname, url.port, '/rest/chaininfo.json')
json_obj = json.loads(json_string)
assert_equal(json_obj['bestblockhash'], bb_hash)
if __name__ == '__main__':
RESTTest().main()
|
|
## This file is part of Scapy
## See http://www.secdev.org/projects/scapy for more information
## Copyright (C) Philippe Biondi <[email protected]>
## This program is published under a GPLv2 license
"""
IPv4 (Internet Protocol v4).
"""
import os,time,struct,re,socket,new
from select import select
from collections import defaultdict
from scapy.utils import checksum
from scapy.layers.l2 import *
from scapy.config import conf
from scapy.fields import *
from scapy.packet import *
from scapy.volatile import *
from scapy.sendrecv import sr,sr1,srp1
from scapy.plist import PacketList,SndRcvList
from scapy.automaton import Automaton,ATMT
import scapy.as_resolvers
####################
## IP Tools class ##
####################
class IPTools:
"""Add more powers to a class that have a "src" attribute."""
def whois(self):
os.system("whois %s" % self.src)
def ottl(self):
t = [32,64,128,255]+[self.ttl]
t.sort()
return t[t.index(self.ttl)+1]
def hops(self):
return self.ottl()-self.ttl-1
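    # Note: ottl() guesses the sender's initial TTL by picking the smallest of
    # the common defaults (32, 64, 128, 255) that is not below the observed ttl,
    # and hops() derives an estimated hop count from that guess.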
_ip_options_names = { 0: "end_of_list",
1: "nop",
2: "security",
3: "loose_source_route",
4: "timestamp",
5: "extended_security",
6: "commercial_security",
7: "record_route",
8: "stream_id",
9: "strict_source_route",
10: "experimental_measurement",
11: "mtu_probe",
12: "mtu_reply",
13: "flow_control",
14: "access_control",
15: "encode",
16: "imi_traffic_descriptor",
17: "extended_IP",
18: "traceroute",
19: "address_extension",
20: "router_alert",
21: "selective_directed_broadcast_mode",
23: "dynamic_packet_state",
24: "upstream_multicast_packet",
25: "quick_start",
30: "rfc4727_experiment",
}
class _IPOption_HDR(Packet):
fields_desc = [ BitField("copy_flag",0, 1),
BitEnumField("optclass",0,2,{0:"control",2:"debug"}),
BitEnumField("option",0,5, _ip_options_names) ]
class IPOption(Packet):
name = "IP Option"
fields_desc = [ _IPOption_HDR,
FieldLenField("length", None, fmt="B", # Only option 0 and 1 have no length and value
length_of="value", adjust=lambda pkt,l:l+2),
StrLenField("value", "",length_from=lambda pkt:pkt.length-2) ]
def extract_padding(self, p):
return "",p
registered_ip_options = {}
@classmethod
def register_variant(cls):
cls.registered_ip_options[cls.option.default] = cls
@classmethod
def dispatch_hook(cls, pkt=None, *args, **kargs):
if pkt:
opt = ord(pkt[0])&0x1f
if opt in cls.registered_ip_options:
return cls.registered_ip_options[opt]
return cls
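# A short note on the variant dispatch above: each IPOption subclass below is
# meant to be registered by its option number (via register_variant), and
# dispatch_hook() looks at the low 5 bits of the first byte (the option number)
# to pick the matching subclass, falling back to the generic IPOption when the
# number is unknown.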
class IPOption_EOL(IPOption):
name = "IP Option End of Options List"
option = 0
fields_desc = [ _IPOption_HDR ]
class IPOption_NOP(IPOption):
name = "IP Option No Operation"
option=1
fields_desc = [ _IPOption_HDR ]
class IPOption_Security(IPOption):
name = "IP Option Security"
copy_flag = 1
option = 2
fields_desc = [ _IPOption_HDR,
ByteField("length", 11),
ShortField("security",0),
ShortField("compartment",0),
ShortField("handling_restrictions",0),
StrFixedLenField("transmission_control_code","xxx",3),
]
class IPOption_LSRR(IPOption):
name = "IP Option Loose Source and Record Route"
copy_flag = 1
option = 3
fields_desc = [ _IPOption_HDR,
FieldLenField("length", None, fmt="B",
length_of="routers", adjust=lambda pkt,l:l+3),
ByteField("pointer",4), # 4 is first IP
FieldListField("routers",[],IPField("","0.0.0.0"),
length_from=lambda pkt:pkt.length-3)
]
def get_current_router(self):
return self.routers[self.pointer/4-1]
class IPOption_RR(IPOption_LSRR):
name = "IP Option Record Route"
option = 7
class IPOption_SSRR(IPOption_LSRR):
name = "IP Option Strict Source and Record Route"
option = 9
class IPOption_Stream_Id(IPOption):
name = "IP Option Stream ID"
option = 8
fields_desc = [ _IPOption_HDR,
ByteField("length", 4),
ShortField("security",0), ]
class IPOption_MTU_Probe(IPOption):
name = "IP Option MTU Probe"
option = 11
fields_desc = [ _IPOption_HDR,
ByteField("length", 4),
ShortField("mtu",0), ]
class IPOption_MTU_Reply(IPOption_MTU_Probe):
name = "IP Option MTU Reply"
option = 12
class IPOption_Traceroute(IPOption):
name = "IP Option Traceroute"
copy_flag = 1
option = 18
fields_desc = [ _IPOption_HDR,
ByteField("length", 12),
ShortField("id",0),
ShortField("outbound_hops",0),
ShortField("return_hops",0),
IPField("originator_ip","0.0.0.0") ]
class IPOption_Address_Extension(IPOption):
name = "IP Option Address Extension"
copy_flag = 1
option = 19
fields_desc = [ _IPOption_HDR,
ByteField("length", 10),
IPField("src_ext","0.0.0.0"),
IPField("dst_ext","0.0.0.0") ]
class IPOption_Router_Alert(IPOption):
name = "IP Option Router Alert"
copy_flag = 1
option = 20
fields_desc = [ _IPOption_HDR,
ByteField("length", 4),
ShortEnumField("alert",0, {0:"router_shall_examine_packet"}), ]
class IPOption_SDBM(IPOption):
name = "IP Option Selective Directed Broadcast Mode"
copy_flag = 1
option = 21
fields_desc = [ _IPOption_HDR,
FieldLenField("length", None, fmt="B",
length_of="addresses", adjust=lambda pkt,l:l+2),
FieldListField("addresses",[],IPField("","0.0.0.0"),
length_from=lambda pkt:pkt.length-2)
]
TCPOptions = (
{ 0 : ("EOL",None),
1 : ("NOP",None),
2 : ("MSS","!H"),
3 : ("WScale","!B"),
4 : ("SAckOK",None),
5 : ("SAck","!"),
8 : ("Timestamp","!II"),
14 : ("AltChkSum","!BH"),
15 : ("AltChkSumOpt",None),
25 : ("Mood","!p")
},
{ "EOL":0,
"NOP":1,
"MSS":2,
"WScale":3,
"SAckOK":4,
"SAck":5,
"Timestamp":8,
"AltChkSum":14,
"AltChkSumOpt":15,
"Mood":25
} )
class TCPOptionsField(StrField):
islist=1
def getfield(self, pkt, s):
opsz = (pkt.dataofs-5)*4
if opsz < 0:
warning("bad dataofs (%i). Assuming dataofs=5"%pkt.dataofs)
opsz = 0
return s[opsz:],self.m2i(pkt,s[:opsz])
def m2i(self, pkt, x):
opt = []
while x:
onum = ord(x[0])
if onum == 0:
opt.append(("EOL",None))
x=x[1:]
break
if onum == 1:
opt.append(("NOP",None))
x=x[1:]
continue
olen = ord(x[1])
if olen < 2:
warning("Malformed TCP option (announced length is %i)" % olen)
olen = 2
oval = x[2:olen]
if TCPOptions[0].has_key(onum):
oname, ofmt = TCPOptions[0][onum]
if onum == 5: #SAck
ofmt += "%iI" % (len(oval)/4)
if ofmt and struct.calcsize(ofmt) == len(oval):
oval = struct.unpack(ofmt, oval)
if len(oval) == 1:
oval = oval[0]
opt.append((oname, oval))
else:
opt.append((onum, oval))
x = x[olen:]
return opt
def i2m(self, pkt, x):
opt = ""
for oname,oval in x:
if type(oname) is str:
if oname == "NOP":
opt += "\x01"
continue
elif oname == "EOL":
opt += "\x00"
continue
elif TCPOptions[1].has_key(oname):
onum = TCPOptions[1][oname]
ofmt = TCPOptions[0][onum][1]
if onum == 5: #SAck
ofmt += "%iI" % len(oval)
if ofmt is not None and (type(oval) is not str or "s" in ofmt):
if type(oval) is not tuple:
oval = (oval,)
oval = struct.pack(ofmt, *oval)
else:
warning("option [%s] unknown. Skipped."%oname)
continue
else:
onum = oname
if type(oval) is not str:
warning("option [%i] is not string."%onum)
continue
opt += chr(onum)+chr(2+len(oval))+oval
return opt+"\x00"*(3-((len(opt)+3)%4))
def randval(self):
return [] # XXX
class ICMPTimeStampField(IntField):
re_hmsm = re.compile("([0-2]?[0-9])[Hh:](([0-5]?[0-9])([Mm:]([0-5]?[0-9])([sS:.]([0-9]{0,3}))?)?)?$")
def i2repr(self, pkt, val):
if val is None:
return "--"
else:
sec, milli = divmod(val, 1000)
min, sec = divmod(sec, 60)
hour, min = divmod(min, 60)
return "%d:%d:%d.%d" %(hour, min, sec, int(milli))
def any2i(self, pkt, val):
if type(val) is str:
hmsms = self.re_hmsm.match(val)
if hmsms:
h,_,m,_,s,_,ms = hmsms = hmsms.groups()
ms = int(((ms or "")+"000")[:3])
val = ((int(h)*60+int(m or 0))*60+int(s or 0))*1000+ms
else:
val = 0
elif val is None:
val = int((time.time()%(24*60*60))*1000)
return val
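# ICMPTimeStampField note: values are milliseconds since midnight (UTC, per the
# ICMP timestamp convention); any2i() also accepts "H:M:S.ms"-style strings and
# converts them, and None is replaced by the current time of day.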
class IP(Packet, IPTools):
name = "IP"
fields_desc = [ BitField("version" , 4 , 4),
BitField("ihl", None, 4),
XByteField("tos", 0),
ShortField("len", None),
ShortField("id", 1),
FlagsField("flags", 0, 3, ["MF","DF","evil"]),
BitField("frag", 0, 13),
ByteField("ttl", 64),
ByteEnumField("proto", 0, IP_PROTOS),
XShortField("chksum", None),
#IPField("src", "127.0.0.1"),
#Emph(SourceIPField("src","dst")),
Emph(IPField("src", "16.0.0.1")),
Emph(IPField("dst", "48.0.0.1")),
PacketListField("options", [], IPOption, length_from=lambda p:p.ihl*4-20) ]
def post_build(self, p, pay):
ihl = self.ihl
p += "\0"*((-len(p))%4) # pad IP options if needed
if ihl is None:
ihl = len(p)/4
p = chr(((self.version&0xf)<<4) | ihl&0x0f)+p[1:]
if self.len is None:
l = len(p)+len(pay)
p = p[:2]+struct.pack("!H", l)+p[4:]
if self.chksum is None:
ck = checksum(p)
p = p[:10]+chr(ck>>8)+chr(ck&0xff)+p[12:]
return p+pay
def extract_padding(self, s):
l = self.len - (self.ihl << 2)
return s[:l],s[l:]
def send(self, s, slp=0):
for p in self:
try:
s.sendto(str(p), (p.dst,0))
except socket.error, msg:
log_runtime.error(msg)
if slp:
time.sleep(slp)
def route(self):
dst = self.dst
if isinstance(dst,Gen):
dst = iter(dst).next()
return conf.route.route(dst)
def hashret(self):
if ( (self.proto == socket.IPPROTO_ICMP)
and (isinstance(self.payload, ICMP))
and (self.payload.type in [3,4,5,11,12]) ):
return self.payload.payload.hashret()
else:
if conf.checkIPsrc and conf.checkIPaddr:
return strxor(inet_aton(self.src),inet_aton(self.dst))+struct.pack("B",self.proto)+self.payload.hashret()
else:
return struct.pack("B", self.proto)+self.payload.hashret()
def answers(self, other):
if not isinstance(other,IP):
return 0
if conf.checkIPaddr and (self.dst != other.src):
return 0
if ( (self.proto == socket.IPPROTO_ICMP) and
(isinstance(self.payload, ICMP)) and
(self.payload.type in [3,4,5,11,12]) ):
# ICMP error message
return self.payload.payload.answers(other)
else:
if ( (conf.checkIPaddr and (self.src != other.dst)) or
(self.proto != other.proto) ):
return 0
return self.payload.answers(other.payload)
def mysummary(self):
s = self.sprintf("%IP.src% > %IP.dst% %IP.proto%")
if self.frag:
s += " frag:%i" % self.frag
return s
def fragment(self, fragsize=1480):
"""Fragment IP datagrams"""
fragsize = (fragsize+7)/8*8
lst = []
fnb = 0
fl = self
while fl.underlayer is not None:
fnb += 1
fl = fl.underlayer
for p in fl:
s = str(p[fnb].payload)
nb = (len(s)+fragsize-1)/fragsize
for i in range(nb):
q = p.copy()
del(q[fnb].payload)
del(q[fnb].chksum)
del(q[fnb].len)
if i == nb-1:
q[IP].flags &= ~1
else:
q[IP].flags |= 1
q[IP].frag = i*fragsize/8
r = conf.raw_layer(load=s[i*fragsize:(i+1)*fragsize])
r.overload_fields = p[IP].payload.overload_fields.copy()
q.add_payload(r)
lst.append(q)
return lst
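# Note on IP.fragment(): it mirrors the module-level fragment() helper defined
# further down; both round fragsize up to a multiple of 8 bytes, since the IP
# fragment offset field is expressed in 8-byte units.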
class TCP(Packet):
name = "TCP"
fields_desc = [ ShortEnumField("sport", 20, TCP_SERVICES),
ShortEnumField("dport", 80, TCP_SERVICES),
IntField("seq", 0),
IntField("ack", 0),
BitField("dataofs", None, 4),
BitField("reserved", 0, 4),
FlagsField("flags", 0x2, 8, "FSRPAUEC"),
ShortField("window", 8192),
XShortField("chksum", None),
ShortField("urgptr", 0),
TCPOptionsField("options", {}) ]
def post_build(self, p, pay):
p += pay
dataofs = self.dataofs
if dataofs is None:
dataofs = 5+((len(self.get_field("options").i2m(self,self.options))+3)/4)
p = p[:12]+chr((dataofs << 4) | ord(p[12])&0x0f)+p[13:]
if self.chksum is None:
if isinstance(self.underlayer, IP):
if self.underlayer.len is not None:
ln = self.underlayer.len-20
else:
ln = len(p)
psdhdr = struct.pack("!4s4sHH",
inet_aton(self.underlayer.src),
inet_aton(self.underlayer.dst),
self.underlayer.proto,
ln)
ck=checksum(psdhdr+p)
p = p[:16]+struct.pack("!H", ck)+p[18:]
elif conf.ipv6_enabled and isinstance(self.underlayer, scapy.layers.inet6.IPv6) or isinstance(self.underlayer, scapy.layers.inet6._IPv6ExtHdr):
ck = scapy.layers.inet6.in6_chksum(socket.IPPROTO_TCP, self.underlayer, p)
p = p[:16]+struct.pack("!H", ck)+p[18:]
else:
warning("No IP underlayer to compute checksum. Leaving null.")
return p
def hashret(self):
if conf.checkIPsrc:
return struct.pack("H",self.sport ^ self.dport)+self.payload.hashret()
else:
return self.payload.hashret()
def answers(self, other):
if not isinstance(other, TCP):
return 0
if conf.checkIPsrc:
if not ((self.sport == other.dport) and
(self.dport == other.sport)):
return 0
if (abs(other.seq-self.ack) > 2+len(other.payload)):
return 0
return 1
def mysummary(self):
if isinstance(self.underlayer, IP):
return self.underlayer.sprintf("TCP %IP.src%:%TCP.sport% > %IP.dst%:%TCP.dport% %TCP.flags%")
elif conf.ipv6_enabled and isinstance(self.underlayer, scapy.layers.inet6.IPv6):
return self.underlayer.sprintf("TCP %IPv6.src%:%TCP.sport% > %IPv6.dst%:%TCP.dport% %TCP.flags%")
else:
return self.sprintf("TCP %TCP.sport% > %TCP.dport% %TCP.flags%")
class UDP(Packet):
name = "UDP"
fields_desc = [ ShortEnumField("sport", 53, UDP_SERVICES),
ShortEnumField("dport", 53, UDP_SERVICES),
ShortField("len", None),
XShortField("chksum", None), ]
def post_build(self, p, pay):
p += pay
l = self.len
if l is None:
l = len(p)
p = p[:4]+struct.pack("!H",l)+p[6:]
if self.chksum is None:
if isinstance(self.underlayer, IP):
if self.underlayer.len is not None:
ln = self.underlayer.len-20
else:
ln = len(p)
psdhdr = struct.pack("!4s4sHH",
inet_aton(self.underlayer.src),
inet_aton(self.underlayer.dst),
self.underlayer.proto,
ln)
ck=checksum(psdhdr+p)
p = p[:6]+struct.pack("!H", ck)+p[8:]
elif isinstance(self.underlayer, scapy.layers.inet6.IPv6) or isinstance(self.underlayer, scapy.layers.inet6._IPv6ExtHdr):
ck = scapy.layers.inet6.in6_chksum(socket.IPPROTO_UDP, self.underlayer, p)
p = p[:6]+struct.pack("!H", ck)+p[8:]
else:
warning("No IP underlayer to compute checksum. Leaving null.")
return p
def extract_padding(self, s):
l = self.len - 8
return s[:l],s[l:]
def hashret(self):
return self.payload.hashret()
def answers(self, other):
if not isinstance(other, UDP):
return 0
if conf.checkIPsrc:
if self.dport != other.sport:
return 0
return self.payload.answers(other.payload)
def mysummary(self):
if isinstance(self.underlayer, IP):
return self.underlayer.sprintf("UDP %IP.src%:%UDP.sport% > %IP.dst%:%UDP.dport%")
elif isinstance(self.underlayer, scapy.layers.inet6.IPv6):
return self.underlayer.sprintf("UDP %IPv6.src%:%UDP.sport% > %IPv6.dst%:%UDP.dport%")
else:
return self.sprintf("UDP %UDP.sport% > %UDP.dport%")
icmptypes = { 0 : "echo-reply",
3 : "dest-unreach",
4 : "source-quench",
5 : "redirect",
8 : "echo-request",
9 : "router-advertisement",
10 : "router-solicitation",
11 : "time-exceeded",
12 : "parameter-problem",
13 : "timestamp-request",
14 : "timestamp-reply",
15 : "information-request",
16 : "information-response",
17 : "address-mask-request",
18 : "address-mask-reply" }
icmpcodes = { 3 : { 0 : "network-unreachable",
1 : "host-unreachable",
2 : "protocol-unreachable",
3 : "port-unreachable",
4 : "fragmentation-needed",
5 : "source-route-failed",
6 : "network-unknown",
7 : "host-unknown",
9 : "network-prohibited",
10 : "host-prohibited",
11 : "TOS-network-unreachable",
12 : "TOS-host-unreachable",
13 : "communication-prohibited",
14 : "host-precedence-violation",
15 : "precedence-cutoff", },
5 : { 0 : "network-redirect",
1 : "host-redirect",
2 : "TOS-network-redirect",
3 : "TOS-host-redirect", },
11 : { 0 : "ttl-zero-during-transit",
1 : "ttl-zero-during-reassembly", },
12 : { 0 : "ip-header-bad",
1 : "required-option-missing", }, }
class ICMP(Packet):
name = "ICMP"
fields_desc = [ ByteEnumField("type",8, icmptypes),
MultiEnumField("code",0, icmpcodes, depends_on=lambda pkt:pkt.type,fmt="B"),
XShortField("chksum", None),
ConditionalField(XShortField("id",0), lambda pkt:pkt.type in [0,8,13,14,15,16,17,18]),
ConditionalField(XShortField("seq",0), lambda pkt:pkt.type in [0,8,13,14,15,16,17,18]),
ConditionalField(ICMPTimeStampField("ts_ori", None), lambda pkt:pkt.type in [13,14]),
ConditionalField(ICMPTimeStampField("ts_rx", None), lambda pkt:pkt.type in [13,14]),
ConditionalField(ICMPTimeStampField("ts_tx", None), lambda pkt:pkt.type in [13,14]),
ConditionalField(IPField("gw","0.0.0.0"), lambda pkt:pkt.type==5),
ConditionalField(ByteField("ptr",0), lambda pkt:pkt.type==12),
ConditionalField(X3BytesField("reserved",0), lambda pkt:pkt.type==12),
ConditionalField(IPField("addr_mask","0.0.0.0"), lambda pkt:pkt.type in [17,18]),
ConditionalField(IntField("unused",0), lambda pkt:pkt.type not in [0,5,8,12,13,14,15,16,17,18]),
]
def post_build(self, p, pay):
p += pay
if self.chksum is None:
ck = checksum(p)
p = p[:2]+chr(ck>>8)+chr(ck&0xff)+p[4:]
return p
def hashret(self):
if self.type in [0,8,13,14,15,16,17,18]:
return struct.pack("HH",self.id,self.seq)+self.payload.hashret()
return self.payload.hashret()
def answers(self, other):
if not isinstance(other,ICMP):
return 0
if ( (other.type,self.type) in [(8,0),(13,14),(15,16),(17,18)] and
self.id == other.id and
self.seq == other.seq ):
return 1
return 0
def guess_payload_class(self, payload):
if self.type in [3,4,5,11,12]:
return IPerror
else:
return None
def mysummary(self):
if isinstance(self.underlayer, IP):
return self.underlayer.sprintf("ICMP %IP.src% > %IP.dst% %ICMP.type% %ICMP.code%")
else:
return self.sprintf("ICMP %ICMP.type% %ICMP.code%")
class IPerror(IP):
name = "IP in ICMP"
def answers(self, other):
if not isinstance(other, IP):
return 0
if not ( ((conf.checkIPsrc == 0) or (self.dst == other.dst)) and
(self.src == other.src) and
( ((conf.checkIPID == 0)
or (self.id == other.id)
or (conf.checkIPID == 1 and self.id == socket.htons(other.id)))) and
(self.proto == other.proto) ):
return 0
return self.payload.answers(other.payload)
def mysummary(self):
return Packet.mysummary(self)
class TCPerror(TCP):
name = "TCP in ICMP"
def answers(self, other):
if not isinstance(other, TCP):
return 0
if conf.checkIPsrc:
if not ((self.sport == other.sport) and
(self.dport == other.dport)):
return 0
if conf.check_TCPerror_seqack:
if self.seq is not None:
if self.seq != other.seq:
return 0
if self.ack is not None:
if self.ack != other.ack:
return 0
return 1
def mysummary(self):
return Packet.mysummary(self)
class UDPerror(UDP):
name = "UDP in ICMP"
def answers(self, other):
if not isinstance(other, UDP):
return 0
if conf.checkIPsrc:
if not ((self.sport == other.sport) and
(self.dport == other.dport)):
return 0
return 1
def mysummary(self):
return Packet.mysummary(self)
class ICMPerror(ICMP):
name = "ICMP in ICMP"
def answers(self, other):
if not isinstance(other,ICMP):
return 0
if not ((self.type == other.type) and
(self.code == other.code)):
return 0
if self.code in [0,8,13,14,17,18]:
if (self.id == other.id and
self.seq == other.seq):
return 1
else:
return 0
else:
return 1
def mysummary(self):
return Packet.mysummary(self)
bind_layers( Ether, IP, type=2048)
bind_layers( CookedLinux, IP, proto=2048)
bind_layers( GRE, IP, proto=2048)
bind_layers( SNAP, IP, code=2048)
bind_layers( IPerror, IPerror, frag=0, proto=4)
bind_layers( IPerror, ICMPerror, frag=0, proto=1)
bind_layers( IPerror, TCPerror, frag=0, proto=6)
bind_layers( IPerror, UDPerror, frag=0, proto=17)
bind_layers( IP, IP, frag=0, proto=4)
bind_layers( IP, ICMP, frag=0, proto=1)
bind_layers( IP, TCP, frag=0, proto=6)
bind_layers( IP, UDP, frag=0, proto=17)
bind_layers( IP, GRE, frag=0, proto=47)
conf.l2types.register(101, IP)
conf.l2types.register_num2layer(12, IP)
conf.l3types.register(ETH_P_IP, IP)
conf.l3types.register_num2layer(ETH_P_ALL, IP)
conf.neighbor.register_l3(Ether, IP, lambda l2,l3: getmacbyip(l3.dst))
conf.neighbor.register_l3(Dot3, IP, lambda l2,l3: getmacbyip(l3.dst))
###################
## Fragmentation ##
###################
@conf.commands.register
def fragment(pkt, fragsize=1480):
"""Fragment a big IP datagram"""
fragsize = (fragsize+7)/8*8
lst = []
for p in pkt:
s = str(p[IP].payload)
nb = (len(s)+fragsize-1)/fragsize
for i in range(nb):
q = p.copy()
del(q[IP].payload)
del(q[IP].chksum)
del(q[IP].len)
if i == nb-1:
q[IP].flags &= ~1
else:
q[IP].flags |= 1
q[IP].frag = i*fragsize/8
r = conf.raw_layer(load=s[i*fragsize:(i+1)*fragsize])
r.overload_fields = p[IP].payload.overload_fields.copy()
q.add_payload(r)
lst.append(q)
return lst
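# Illustrative usage (addresses and sizes are placeholders, not taken from this
# file):
#   frags = fragment(IP(dst="10.0.0.1")/ICMP()/("X"*3000), fragsize=1480)
#   send(frags)   # send() is provided by scapy.sendrecv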
def overlap_frag(p, overlap, fragsize=8, overlap_fragsize=None):
if overlap_fragsize is None:
overlap_fragsize = fragsize
q = p.copy()
del(q[IP].payload)
q[IP].add_payload(overlap)
qfrag = fragment(q, overlap_fragsize)
qfrag[-1][IP].flags |= 1
return qfrag+fragment(p, fragsize)
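# Sketch of what overlap_frag() does: it builds two fragment trains for the same
# datagram, one carrying the "overlap" payload and one carrying the original
# payload, and forces the MF flag on the last overlap fragment so the two trains
# form a single overlapping series on reassembly.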
@conf.commands.register
def defrag(plist):
"""defrag(plist) -> ([not fragmented], [defragmented],
[ [bad fragments], [bad fragments], ... ])"""
frags = defaultdict(PacketList)
nofrag = PacketList()
for p in plist:
        if IP not in p:
            nofrag.append(p)
            continue
        ip = p[IP]
if ip.frag == 0 and ip.flags & 1 == 0:
nofrag.append(p)
continue
uniq = (ip.id,ip.src,ip.dst,ip.proto)
frags[uniq].append(p)
defrag = []
missfrag = []
for lst in frags.itervalues():
lst.sort(key=lambda x: x.frag)
p = lst[0]
lastp = lst[-1]
if p.frag > 0 or lastp.flags & 1 != 0: # first or last fragment missing
missfrag.append(lst)
continue
p = p.copy()
if conf.padding_layer in p:
del(p[conf.padding_layer].underlayer.payload)
ip = p[IP]
if ip.len is None or ip.ihl is None:
clen = len(ip.payload)
else:
clen = ip.len - (ip.ihl<<2)
txt = conf.raw_layer()
for q in lst[1:]:
if clen != q.frag<<3: # Wrong fragmentation offset
if clen > q.frag<<3:
warning("Fragment overlap (%i > %i) %r || %r || %r" % (clen, q.frag<<3, p,txt,q))
missfrag.append(lst)
break
if q[IP].len is None or q[IP].ihl is None:
clen += len(q[IP].payload)
else:
clen += q[IP].len - (q[IP].ihl<<2)
if conf.padding_layer in q:
del(q[conf.padding_layer].underlayer.payload)
txt.add_payload(q[IP].payload.copy())
else:
ip.flags &= ~1 # !MF
del(ip.chksum)
del(ip.len)
p = p/txt
defrag.append(p)
defrag2=PacketList()
for p in defrag:
defrag2.append(p.__class__(str(p)))
return nofrag,defrag2,missfrag
@conf.commands.register
def defragment(plist):
"""defrag(plist) -> plist defragmented as much as possible """
frags = defaultdict(lambda:[])
final = []
pos = 0
for p in plist:
p._defrag_pos = pos
pos += 1
if IP in p:
ip = p[IP]
if ip.frag != 0 or ip.flags & 1:
ip = p[IP]
uniq = (ip.id,ip.src,ip.dst,ip.proto)
frags[uniq].append(p)
continue
final.append(p)
defrag = []
missfrag = []
for lst in frags.itervalues():
lst.sort(key=lambda x: x.frag)
p = lst[0]
lastp = lst[-1]
if p.frag > 0 or lastp.flags & 1 != 0: # first or last fragment missing
missfrag += lst
continue
p = p.copy()
if conf.padding_layer in p:
del(p[conf.padding_layer].underlayer.payload)
ip = p[IP]
if ip.len is None or ip.ihl is None:
clen = len(ip.payload)
else:
clen = ip.len - (ip.ihl<<2)
txt = conf.raw_layer()
for q in lst[1:]:
if clen != q.frag<<3: # Wrong fragmentation offset
if clen > q.frag<<3:
warning("Fragment overlap (%i > %i) %r || %r || %r" % (clen, q.frag<<3, p,txt,q))
missfrag += lst
break
if q[IP].len is None or q[IP].ihl is None:
clen += len(q[IP].payload)
else:
clen += q[IP].len - (q[IP].ihl<<2)
if conf.padding_layer in q:
del(q[conf.padding_layer].underlayer.payload)
txt.add_payload(q[IP].payload.copy())
else:
ip.flags &= ~1 # !MF
del(ip.chksum)
del(ip.len)
p = p/txt
p._defrag_pos = max(x._defrag_pos for x in lst)
defrag.append(p)
defrag2=[]
for p in defrag:
q = p.__class__(str(p))
q._defrag_pos = p._defrag_pos
defrag2.append(q)
final += defrag2
final += missfrag
final.sort(key=lambda x: x._defrag_pos)
for p in final:
del(p._defrag_pos)
if hasattr(plist, "listname"):
name = "Defragmented %s" % plist.listname
else:
name = "Defragmented"
return PacketList(final, name=name)
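# Note: defragment() differs from defrag() above in that it returns a single
# PacketList; the original packet order is preserved by tagging each packet with
# a temporary _defrag_pos attribute and sorting on it before returning.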
### Add timeskew_graph() method to PacketList
def _packetlist_timeskew_graph(self, ip, **kargs):
"""Tries to graph the timeskew between the timestamps and real time for a given ip"""
res = map(lambda x: self._elt2pkt(x), self.res)
b = filter(lambda x:x.haslayer(IP) and x.getlayer(IP).src == ip and x.haslayer(TCP), res)
c = []
for p in b:
opts = p.getlayer(TCP).options
for o in opts:
if o[0] == "Timestamp":
c.append((p.time,o[1][0]))
if not c:
warning("No timestamps found in packet list")
return
d = map(lambda (x,y): (x%2000,((x-c[0][0])-((y-c[0][1])/1000.0))),c)
g = Gnuplot.Gnuplot()
g.plot(Gnuplot.Data(d,**kargs))
return g
PacketList.timeskew_graph = new.instancemethod(_packetlist_timeskew_graph, None, PacketList)
### Create a new packet list
class TracerouteResult(SndRcvList):
def __init__(self, res=None, name="Traceroute", stats=None):
PacketList.__init__(self, res, name, stats)
self.graphdef = None
self.graphASres = 0
self.padding = 0
self.hloc = None
self.nloc = None
def show(self):
return self.make_table(lambda (s,r): (s.sprintf("%IP.dst%:{TCP:tcp%ir,TCP.dport%}{UDP:udp%ir,UDP.dport%}{ICMP:ICMP}"),
s.ttl,
r.sprintf("%-15s,IP.src% {TCP:%TCP.flags%}{ICMP:%ir,ICMP.type%}")))
def get_trace(self):
trace = {}
for s,r in self.res:
if IP not in s:
continue
d = s[IP].dst
if d not in trace:
trace[d] = {}
trace[d][s[IP].ttl] = r[IP].src, ICMP not in r
for k in trace.values():
m = filter(lambda x:k[x][1], k.keys())
if not m:
continue
m = min(m)
for l in k.keys():
if l > m:
del(k[l])
return trace
def trace3D(self):
"""Give a 3D representation of the traceroute.
right button: rotate the scene
middle button: zoom
left button: move the scene
left button on a ball: toggle IP displaying
ctrl-left button on a ball: scan ports 21,22,23,25,80 and 443 and display the result"""
trace = self.get_trace()
import visual
class IPsphere(visual.sphere):
def __init__(self, ip, **kargs):
visual.sphere.__init__(self, **kargs)
self.ip=ip
self.label=None
self.setlabel(self.ip)
def setlabel(self, txt,visible=None):
if self.label is not None:
if visible is None:
visible = self.label.visible
self.label.visible = 0
elif visible is None:
visible=0
self.label=visual.label(text=txt, pos=self.pos, space=self.radius, xoffset=10, yoffset=20, visible=visible)
def action(self):
self.label.visible ^= 1
visual.scene = visual.display()
visual.scene.exit = True
start = visual.box()
rings={}
tr3d = {}
for i in trace:
tr = trace[i]
tr3d[i] = []
ttl = tr.keys()
for t in range(1,max(ttl)+1):
if t not in rings:
rings[t] = []
if t in tr:
if tr[t] not in rings[t]:
rings[t].append(tr[t])
tr3d[i].append(rings[t].index(tr[t]))
else:
rings[t].append(("unk",-1))
tr3d[i].append(len(rings[t])-1)
for t in rings:
r = rings[t]
l = len(r)
for i in range(l):
if r[i][1] == -1:
col = (0.75,0.75,0.75)
elif r[i][1]:
col = visual.color.green
else:
col = visual.color.blue
s = IPsphere(pos=((l-1)*visual.cos(2*i*visual.pi/l),(l-1)*visual.sin(2*i*visual.pi/l),2*t),
ip = r[i][0],
color = col)
for trlst in tr3d.values():
if t <= len(trlst):
if trlst[t-1] == i:
trlst[t-1] = s
forecol = colgen(0.625, 0.4375, 0.25, 0.125)
for trlst in tr3d.values():
col = forecol.next()
start = (0,0,0)
for ip in trlst:
visual.cylinder(pos=start,axis=ip.pos-start,color=col,radius=0.2)
start = ip.pos
movcenter=None
while 1:
visual.rate(50)
if visual.scene.kb.keys:
k = visual.scene.kb.getkey()
if k == "esc" or k == "q":
break
if visual.scene.mouse.events:
ev = visual.scene.mouse.getevent()
if ev.press == "left":
o = ev.pick
if o:
if ev.ctrl:
if o.ip == "unk":
continue
savcolor = o.color
o.color = (1,0,0)
a,b=sr(IP(dst=o.ip)/TCP(dport=[21,22,23,25,80,443]),timeout=2)
o.color = savcolor
if len(a) == 0:
txt = "%s:\nno results" % o.ip
else:
txt = "%s:\n" % o.ip
for s,r in a:
txt += r.sprintf("{TCP:%IP.src%:%TCP.sport% %TCP.flags%}{TCPerror:%IPerror.dst%:%TCPerror.dport% %IP.src% %ir,ICMP.type%}\n")
o.setlabel(txt, visible=1)
else:
if hasattr(o, "action"):
o.action()
elif ev.drag == "left":
movcenter = ev.pos
elif ev.drop == "left":
movcenter = None
if movcenter:
visual.scene.center -= visual.scene.mouse.pos-movcenter
movcenter = visual.scene.mouse.pos
def world_trace(self):
from modules.geo import locate_ip
ips = {}
rt = {}
ports_done = {}
for s,r in self.res:
ips[r.src] = None
if s.haslayer(TCP) or s.haslayer(UDP):
trace_id = (s.src,s.dst,s.proto,s.dport)
elif s.haslayer(ICMP):
trace_id = (s.src,s.dst,s.proto,s.type)
else:
trace_id = (s.src,s.dst,s.proto,0)
trace = rt.get(trace_id,{})
if not r.haslayer(ICMP) or r.type != 11:
if ports_done.has_key(trace_id):
continue
ports_done[trace_id] = None
trace[s.ttl] = r.src
rt[trace_id] = trace
trt = {}
for trace_id in rt:
trace = rt[trace_id]
loctrace = []
for i in range(max(trace.keys())):
ip = trace.get(i,None)
if ip is None:
continue
loc = locate_ip(ip)
if loc is None:
continue
# loctrace.append((ip,loc)) # no labels yet
loctrace.append(loc)
if loctrace:
trt[trace_id] = loctrace
tr = map(lambda x: Gnuplot.Data(x,with_="lines"), trt.values())
g = Gnuplot.Gnuplot()
world = Gnuplot.File(conf.gnuplot_world,with_="lines")
g.plot(world,*tr)
return g
def make_graph(self,ASres=None,padding=0):
if ASres is None:
ASres = conf.AS_resolver
self.graphASres = ASres
self.graphpadding = padding
ips = {}
rt = {}
ports = {}
ports_done = {}
for s,r in self.res:
r = r.getlayer(IP) or (conf.ipv6_enabled and r[scapy.layers.inet6.IPv6]) or r
s = s.getlayer(IP) or (conf.ipv6_enabled and s[scapy.layers.inet6.IPv6]) or s
ips[r.src] = None
if TCP in s:
trace_id = (s.src,s.dst,6,s.dport)
elif UDP in s:
trace_id = (s.src,s.dst,17,s.dport)
elif ICMP in s:
trace_id = (s.src,s.dst,1,s.type)
else:
trace_id = (s.src,s.dst,s.proto,0)
trace = rt.get(trace_id,{})
ttl = conf.ipv6_enabled and scapy.layers.inet6.IPv6 in s and s.hlim or s.ttl
if not (ICMP in r and r[ICMP].type == 11) and not (conf.ipv6_enabled and scapy.layers.inet6.IPv6 in r and scapy.layers.inet6.ICMPv6TimeExceeded in r):
if trace_id in ports_done:
continue
ports_done[trace_id] = None
p = ports.get(r.src,[])
if TCP in r:
p.append(r.sprintf("<T%ir,TCP.sport%> %TCP.sport% %TCP.flags%"))
trace[ttl] = r.sprintf('"%r,src%":T%ir,TCP.sport%')
elif UDP in r:
p.append(r.sprintf("<U%ir,UDP.sport%> %UDP.sport%"))
trace[ttl] = r.sprintf('"%r,src%":U%ir,UDP.sport%')
elif ICMP in r:
p.append(r.sprintf("<I%ir,ICMP.type%> ICMP %ICMP.type%"))
trace[ttl] = r.sprintf('"%r,src%":I%ir,ICMP.type%')
else:
p.append(r.sprintf("{IP:<P%ir,proto%> IP %proto%}{IPv6:<P%ir,nh%> IPv6 %nh%}"))
trace[ttl] = r.sprintf('"%r,src%":{IP:P%ir,proto%}{IPv6:P%ir,nh%}')
ports[r.src] = p
else:
trace[ttl] = r.sprintf('"%r,src%"')
rt[trace_id] = trace
# Fill holes with unk%i nodes
unknown_label = incremental_label("unk%i")
blackholes = []
bhip = {}
for rtk in rt:
trace = rt[rtk]
k = trace.keys()
for n in range(min(k), max(k)):
if not trace.has_key(n):
trace[n] = unknown_label.next()
if not ports_done.has_key(rtk):
if rtk[2] == 1: #ICMP
bh = "%s %i/icmp" % (rtk[1],rtk[3])
elif rtk[2] == 6: #TCP
bh = "%s %i/tcp" % (rtk[1],rtk[3])
elif rtk[2] == 17: #UDP
bh = '%s %i/udp' % (rtk[1],rtk[3])
else:
bh = '%s %i/proto' % (rtk[1],rtk[2])
ips[bh] = None
bhip[rtk[1]] = bh
bh = '"%s"' % bh
trace[max(k)+1] = bh
blackholes.append(bh)
# Find AS numbers
ASN_query_list = dict.fromkeys(map(lambda x:x.rsplit(" ",1)[0],ips)).keys()
if ASres is None:
ASNlist = []
else:
ASNlist = ASres.resolve(*ASN_query_list)
ASNs = {}
ASDs = {}
for ip,asn,desc, in ASNlist:
if asn is None:
continue
iplist = ASNs.get(asn,[])
if ip in bhip:
if ip in ports:
iplist.append(ip)
iplist.append(bhip[ip])
else:
iplist.append(ip)
ASNs[asn] = iplist
ASDs[asn] = desc
backcolorlist=colgen("60","86","ba","ff")
forecolorlist=colgen("a0","70","40","20")
s = "digraph trace {\n"
s += "\n\tnode [shape=ellipse,color=black,style=solid];\n\n"
s += "\n#ASN clustering\n"
for asn in ASNs:
s += '\tsubgraph cluster_%s {\n' % asn
col = backcolorlist.next()
s += '\t\tcolor="#%s%s%s";' % col
s += '\t\tnode [fillcolor="#%s%s%s",style=filled];' % col
s += '\t\tfontsize = 10;'
s += '\t\tlabel = "%s\\n[%s]"\n' % (asn,ASDs[asn])
for ip in ASNs[asn]:
s += '\t\t"%s";\n'%ip
s += "\t}\n"
s += "#endpoints\n"
for p in ports:
s += '\t"%s" [shape=record,color=black,fillcolor=green,style=filled,label="%s|%s"];\n' % (p,p,"|".join(ports[p]))
s += "\n#Blackholes\n"
for bh in blackholes:
s += '\t%s [shape=octagon,color=black,fillcolor=red,style=filled];\n' % bh
if padding:
s += "\n#Padding\n"
pad={}
for snd,rcv in self.res:
if rcv.src not in ports and rcv.haslayer(conf.padding_layer):
p = rcv.getlayer(conf.padding_layer).load
if p != "\x00"*len(p):
pad[rcv.src]=None
for rcv in pad:
s += '\t"%s" [shape=triangle,color=black,fillcolor=red,style=filled];\n' % rcv
s += "\n\tnode [shape=ellipse,color=black,style=solid];\n\n"
for rtk in rt:
s += "#---[%s\n" % `rtk`
s += '\t\tedge [color="#%s%s%s"];\n' % forecolorlist.next()
trace = rt[rtk]
k = trace.keys()
for n in range(min(k), max(k)):
s += '\t%s ->\n' % trace[n]
s += '\t%s;\n' % trace[max(k)]
s += "}\n";
self.graphdef = s
def graph(self, ASres=None, padding=0, **kargs):
"""x.graph(ASres=conf.AS_resolver, other args):
ASres=None : no AS resolver => no clustering
ASres=AS_resolver() : default whois AS resolver (riswhois.ripe.net)
ASres=AS_resolver_cymru(): use whois.cymru.com whois database
ASres=AS_resolver(server="whois.ra.net")
type: output type (svg, ps, gif, jpg, etc.), passed to dot's "-T" option
target: filename or redirect. Defaults pipe to Imagemagick's display program
prog: which graphviz program to use"""
if ASres is None:
ASres = conf.AS_resolver
if (self.graphdef is None or
self.graphASres != ASres or
self.graphpadding != padding):
self.make_graph(ASres,padding)
return do_graph(self.graphdef, **kargs)
@conf.commands.register
def traceroute(target, dport=80, minttl=1, maxttl=30, sport=RandShort(), l4 = None, filter=None, timeout=2, verbose=None, **kargs):
"""Instant TCP traceroute
traceroute(target, [maxttl=30,] [dport=80,] [sport=RandShort(),] [verbose=conf.verb]) -> (TracerouteResult, PacketList)
"""
if verbose is None:
verbose = conf.verb
if filter is None:
# we only consider ICMP error packets and TCP packets with at
# least the ACK flag set *and* either the SYN or the RST flag
# set
filter="(icmp and (icmp[0]=3 or icmp[0]=4 or icmp[0]=5 or icmp[0]=11 or icmp[0]=12)) or (tcp and (tcp[13] & 0x16 > 0x10))"
if l4 is None:
a,b = sr(IP(dst=target, id=RandShort(), ttl=(minttl,maxttl))/TCP(seq=RandInt(),sport=sport, dport=dport),
timeout=timeout, filter=filter, verbose=verbose, **kargs)
else:
# this should always work
filter="ip"
a,b = sr(IP(dst=target, id=RandShort(), ttl=(minttl,maxttl))/l4,
timeout=timeout, filter=filter, verbose=verbose, **kargs)
a = TracerouteResult(a.res)
if verbose:
a.show()
return a,b
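# Illustrative usage (the target is a placeholder; sending raw packets normally
# requires root privileges):
#   res, unans = traceroute("www.example.com", dport=80, maxttl=20)
#   res.show()    # per-target table of responding hops
#   res.graph()   # dot/graphviz graph, clustered by AS when a resolver is set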
#############################
## Simple TCP client stack ##
#############################
class TCP_client(Automaton):
def parse_args(self, ip, port, *args, **kargs):
self.dst = iter(Net(ip)).next()
self.dport = port
self.sport = random.randrange(0,2**16)
self.l4 = IP(dst=ip)/TCP(sport=self.sport, dport=self.dport, flags=0,
seq=random.randrange(0,2**32))
self.src = self.l4.src
self.swin=self.l4[TCP].window
self.dwin=1
self.rcvbuf=""
bpf = "host %s and host %s and port %i and port %i" % (self.src,
self.dst,
self.sport,
self.dport)
# bpf=None
Automaton.parse_args(self, filter=bpf, **kargs)
def master_filter(self, pkt):
return (IP in pkt and
pkt[IP].src == self.dst and
pkt[IP].dst == self.src and
TCP in pkt and
pkt[TCP].sport == self.dport and
pkt[TCP].dport == self.sport and
self.l4[TCP].seq >= pkt[TCP].ack and # XXX: seq/ack 2^32 wrap up
((self.l4[TCP].ack == 0) or (self.l4[TCP].ack <= pkt[TCP].seq <= self.l4[TCP].ack+self.swin)) )
@ATMT.state(initial=1)
def START(self):
pass
@ATMT.state()
def SYN_SENT(self):
pass
@ATMT.state()
def ESTABLISHED(self):
pass
@ATMT.state()
def LAST_ACK(self):
pass
@ATMT.state(final=1)
def CLOSED(self):
pass
@ATMT.condition(START)
def connect(self):
raise self.SYN_SENT()
@ATMT.action(connect)
def send_syn(self):
self.l4[TCP].flags = "S"
self.send(self.l4)
self.l4[TCP].seq += 1
@ATMT.receive_condition(SYN_SENT)
def synack_received(self, pkt):
if pkt[TCP].flags & 0x3f == 0x12:
raise self.ESTABLISHED().action_parameters(pkt)
@ATMT.action(synack_received)
def send_ack_of_synack(self, pkt):
self.l4[TCP].ack = pkt[TCP].seq+1
self.l4[TCP].flags = "A"
self.send(self.l4)
@ATMT.receive_condition(ESTABLISHED)
def incoming_data_received(self, pkt):
if not isinstance(pkt[TCP].payload, NoPayload) and not isinstance(pkt[TCP].payload, conf.padding_layer):
raise self.ESTABLISHED().action_parameters(pkt)
@ATMT.action(incoming_data_received)
def receive_data(self,pkt):
data = str(pkt[TCP].payload)
if data and self.l4[TCP].ack == pkt[TCP].seq:
self.l4[TCP].ack += len(data)
self.l4[TCP].flags = "A"
self.send(self.l4)
self.rcvbuf += data
if pkt[TCP].flags & 8 != 0: #PUSH
self.oi.tcp.send(self.rcvbuf)
self.rcvbuf = ""
@ATMT.ioevent(ESTABLISHED,name="tcp", as_supersocket="tcplink")
def outgoing_data_received(self, fd):
raise self.ESTABLISHED().action_parameters(fd.recv())
@ATMT.action(outgoing_data_received)
def send_data(self, d):
self.l4[TCP].flags = "PA"
self.send(self.l4/d)
self.l4[TCP].seq += len(d)
@ATMT.receive_condition(ESTABLISHED)
def reset_received(self, pkt):
if pkt[TCP].flags & 4 != 0:
raise self.CLOSED()
@ATMT.receive_condition(ESTABLISHED)
def fin_received(self, pkt):
if pkt[TCP].flags & 0x1 == 1:
raise self.LAST_ACK().action_parameters(pkt)
@ATMT.action(fin_received)
def send_finack(self, pkt):
self.l4[TCP].flags = "FA"
self.l4[TCP].ack = pkt[TCP].seq+1
self.send(self.l4)
self.l4[TCP].seq += 1
@ATMT.receive_condition(LAST_ACK)
def ack_of_fin_received(self, pkt):
if pkt[TCP].flags & 0x3f == 0x10:
raise self.CLOSED()
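    # Illustrative usage (host and port are placeholders): the ATMT.ioevent above
    # registers a "tcplink" supersocket, so the automaton can be driven roughly as
    #   s = TCP_client.tcplink(conf.raw_layer, "192.168.0.1", 80)
    #   s.send("GET / HTTP/1.0\r\n\r\n")
    #   s.recv()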
#####################
## Reporting stuff ##
#####################
def report_ports(target, ports):
"""portscan a target and output a LaTeX table
report_ports(target, ports) -> string"""
ans,unans = sr(IP(dst=target)/TCP(dport=ports),timeout=5)
rep = "\\begin{tabular}{|r|l|l|}\n\\hline\n"
for s,r in ans:
if not r.haslayer(ICMP):
if r.payload.flags == 0x12:
rep += r.sprintf("%TCP.sport% & open & SA \\\\\n")
rep += "\\hline\n"
for s,r in ans:
if r.haslayer(ICMP):
rep += r.sprintf("%TCPerror.dport% & closed & ICMP type %ICMP.type%/%ICMP.code% from %IP.src% \\\\\n")
elif r.payload.flags != 0x12:
rep += r.sprintf("%TCP.sport% & closed & TCP %TCP.flags% \\\\\n")
rep += "\\hline\n"
for i in unans:
rep += i.sprintf("%TCP.dport% & ? & unanswered \\\\\n")
rep += "\\hline\n\\end{tabular}\n"
return rep
def IPID_count(lst, funcID=lambda x:x[1].id, funcpres=lambda x:x[1].summary()):
idlst = map(funcID, lst)
idlst.sort()
classes = [idlst[0]]+map(lambda x:x[1],filter(lambda (x,y): abs(x-y)>50, map(lambda x,y: (x,y),idlst[:-1], idlst[1:])))
lst = map(lambda x:(funcID(x), funcpres(x)), lst)
lst.sort()
print "Probably %i classes:" % len(classes), classes
for id,pr in lst:
print "%5i" % id, pr
def fragleak(target,sport=123, dport=123, timeout=0.2, onlyasc=0):
load = "XXXXYYYYYYYYYY"
# getmacbyip(target)
# pkt = IP(dst=target, id=RandShort(), options="\x22"*40)/UDP()/load
pkt = IP(dst=target, id=RandShort(), options="\x00"*40, flags=1)/UDP(sport=sport, dport=sport)/load
s=conf.L3socket()
intr=0
found={}
try:
while 1:
try:
if not intr:
s.send(pkt)
sin,sout,serr = select([s],[],[],timeout)
if not sin:
continue
ans=s.recv(1600)
if not isinstance(ans, IP): #TODO: IPv6
continue
if not isinstance(ans.payload, ICMP):
continue
if not isinstance(ans.payload.payload, IPerror):
continue
if ans.payload.payload.dst != target:
continue
if ans.src != target:
print "leak from", ans.src,
# print repr(ans)
if not ans.haslayer(conf.padding_layer):
continue
# print repr(ans.payload.payload.payload.payload)
# if not isinstance(ans.payload.payload.payload.payload, conf.raw_layer):
# continue
# leak = ans.payload.payload.payload.payload.load[len(load):]
leak = ans.getlayer(conf.padding_layer).load
if leak not in found:
found[leak]=None
linehexdump(leak, onlyasc=onlyasc)
except KeyboardInterrupt:
if intr:
raise
intr=1
except KeyboardInterrupt:
pass
def fragleak2(target, timeout=0.4, onlyasc=0):
found={}
try:
while 1:
p = sr1(IP(dst=target, options="\x00"*40, proto=200)/"XXXXYYYYYYYYYYYY",timeout=timeout,verbose=0)
if not p:
continue
if conf.padding_layer in p:
leak = p[conf.padding_layer].load
if leak not in found:
found[leak]=None
linehexdump(leak,onlyasc=onlyasc)
except:
pass
conf.stats_classic_protocols += [TCP,UDP,ICMP]
conf.stats_dot11_protocols += [TCP,UDP,ICMP]
if conf.ipv6_enabled:
import scapy.layers.inet6
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import compute
from nova import context
from nova import db
from nova import flags
from nova import quota
from nova import exception
from nova import rpc
from nova import test
from nova import volume
from nova.compute import instance_types
from nova.scheduler import driver as scheduler_driver
FLAGS = flags.FLAGS
class QuotaTestCase(test.TestCase):
class StubImageService(object):
def show(self, *args, **kwargs):
return {"properties": {}}
def setUp(self):
super(QuotaTestCase, self).setUp()
self.flags(connection_type='fake',
quota_instances=2,
quota_cores=4,
quota_volumes=2,
quota_gigabytes=20,
quota_floating_ips=1,
network_manager='nova.network.manager.FlatDHCPManager')
self.network = self.start_service('network')
self.user_id = 'admin'
self.project_id = 'admin'
self.context = context.RequestContext(self.user_id,
self.project_id,
is_admin=True)
orig_rpc_call = rpc.call
def rpc_call_wrapper(context, topic, msg):
"""Stub out the scheduler creating the instance entry"""
if (topic == FLAGS.scheduler_topic and
msg['method'] == 'run_instance'):
scheduler = scheduler_driver.Scheduler
instance = scheduler().create_instance_db_entry(
context,
msg['args']['request_spec'])
return [scheduler_driver.encode_instance(instance)]
else:
return orig_rpc_call(context, topic, msg)
self.stubs.Set(rpc, 'call', rpc_call_wrapper)
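        # Stubbing rpc.call this way keeps the test self-contained: run_instance
        # requests are turned into instance DB entries directly instead of being
        # dispatched to a real scheduler service.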
def _create_instance(self, cores=2):
"""Create a test instance"""
inst = {}
inst['image_id'] = 'cedef40a-ed67-4d10-800e-17455edce175'
inst['reservation_id'] = 'r-fakeres'
inst['user_id'] = self.user_id
inst['project_id'] = self.project_id
inst['instance_type_id'] = '3' # m1.large
inst['vcpus'] = cores
return db.instance_create(self.context, inst)['id']
def _create_volume(self, size=10):
"""Create a test volume"""
vol = {}
vol['user_id'] = self.user_id
vol['project_id'] = self.project_id
vol['size'] = size
return db.volume_create(self.context, vol)['id']
def _get_instance_type(self, name):
instance_types = {
'm1.tiny': dict(memory_mb=512, vcpus=1, root_gb=0, flavorid=1),
'm1.small': dict(memory_mb=2048, vcpus=1, root_gb=20, flavorid=2),
'm1.medium':
dict(memory_mb=4096, vcpus=2, root_gb=40, flavorid=3),
'm1.large': dict(memory_mb=8192, vcpus=4, root_gb=80, flavorid=4),
'm1.xlarge':
dict(memory_mb=16384, vcpus=8, root_gb=160, flavorid=5),
'm1.nocpu': dict(memory_mb=512, vcpus=0, root_gb=0, flavorid=6),
'm1.nomem': dict(memory_mb=0, vcpus=1, root_gb=0, flavorid=7)}
return instance_types[name]
def test_quota_no_mem_no_cpu(self):
num_instances = quota.allowed_instances(self.context, 100,
self._get_instance_type('m1.nocpu'))
self.assertEqual(num_instances, 2)
num_instances = quota.allowed_instances(self.context, 100,
self._get_instance_type('m1.nomem'))
self.assertEqual(num_instances, 2)
def test_quota_overrides(self):
"""Make sure overriding a projects quotas works"""
num_instances = quota.allowed_instances(self.context, 100,
self._get_instance_type('m1.small'))
self.assertEqual(num_instances, 2)
db.quota_create(self.context, self.project_id, 'instances', 10)
num_instances = quota.allowed_instances(self.context, 100,
self._get_instance_type('m1.small'))
self.assertEqual(num_instances, 4)
db.quota_create(self.context, self.project_id, 'cores', 100)
num_instances = quota.allowed_instances(self.context, 100,
self._get_instance_type('m1.small'))
self.assertEqual(num_instances, 10)
db.quota_create(self.context, self.project_id, 'ram', 3 * 2048)
num_instances = quota.allowed_instances(self.context, 100,
self._get_instance_type('m1.small'))
self.assertEqual(num_instances, 3)
# metadata_items
too_many_items = FLAGS.quota_metadata_items + 1000
num_metadata_items = quota.allowed_metadata_items(self.context,
too_many_items)
self.assertEqual(num_metadata_items, FLAGS.quota_metadata_items)
db.quota_create(self.context, self.project_id, 'metadata_items', 5)
num_metadata_items = quota.allowed_metadata_items(self.context,
too_many_items)
self.assertEqual(num_metadata_items, 5)
# Cleanup
db.quota_destroy_all_by_project(self.context, self.project_id)
def test_unlimited_instances(self):
self.flags(quota_instances=2, quota_ram=-1, quota_cores=-1)
instance_type = self._get_instance_type('m1.small')
num_instances = quota.allowed_instances(self.context, 100,
instance_type)
self.assertEqual(num_instances, 2)
db.quota_create(self.context, self.project_id, 'instances', None)
num_instances = quota.allowed_instances(self.context, 100,
instance_type)
self.assertEqual(num_instances, 100)
db.quota_create(self.context, self.project_id, 'instances', -1)
num_instances = quota.allowed_instances(self.context, 100,
instance_type)
self.assertEqual(num_instances, 100)
num_instances = quota.allowed_instances(self.context, 101,
instance_type)
self.assertEqual(num_instances, 101)
def test_unlimited_ram(self):
self.flags(quota_instances=-1, quota_ram=2 * 2048, quota_cores=-1)
instance_type = self._get_instance_type('m1.small')
num_instances = quota.allowed_instances(self.context, 100,
instance_type)
self.assertEqual(num_instances, 2)
db.quota_create(self.context, self.project_id, 'ram', None)
num_instances = quota.allowed_instances(self.context, 100,
instance_type)
self.assertEqual(num_instances, 100)
db.quota_create(self.context, self.project_id, 'ram', -1)
num_instances = quota.allowed_instances(self.context, 100,
instance_type)
self.assertEqual(num_instances, 100)
num_instances = quota.allowed_instances(self.context, 101,
instance_type)
self.assertEqual(num_instances, 101)
def test_unlimited_cores(self):
self.flags(quota_instances=-1, quota_ram=-1, quota_cores=2)
instance_type = self._get_instance_type('m1.small')
num_instances = quota.allowed_instances(self.context, 100,
instance_type)
self.assertEqual(num_instances, 2)
db.quota_create(self.context, self.project_id, 'cores', None)
num_instances = quota.allowed_instances(self.context, 100,
instance_type)
self.assertEqual(num_instances, 100)
db.quota_create(self.context, self.project_id, 'cores', -1)
num_instances = quota.allowed_instances(self.context, 100,
instance_type)
self.assertEqual(num_instances, 100)
num_instances = quota.allowed_instances(self.context, 101,
instance_type)
self.assertEqual(num_instances, 101)
def test_unlimited_volumes(self):
self.flags(quota_volumes=10, quota_gigabytes=-1)
volumes = quota.allowed_volumes(self.context, 100, 1)
self.assertEqual(volumes, 10)
db.quota_create(self.context, self.project_id, 'volumes', None)
volumes = quota.allowed_volumes(self.context, 100, 1)
self.assertEqual(volumes, 100)
db.quota_create(self.context, self.project_id, 'volumes', -1)
volumes = quota.allowed_volumes(self.context, 100, 1)
self.assertEqual(volumes, 100)
volumes = quota.allowed_volumes(self.context, 101, 1)
self.assertEqual(volumes, 101)
def test_unlimited_gigabytes(self):
self.flags(quota_volumes=-1, quota_gigabytes=10)
volumes = quota.allowed_volumes(self.context, 100, 1)
self.assertEqual(volumes, 10)
db.quota_create(self.context, self.project_id, 'gigabytes', None)
volumes = quota.allowed_volumes(self.context, 100, 1)
self.assertEqual(volumes, 100)
db.quota_create(self.context, self.project_id, 'gigabytes', -1)
volumes = quota.allowed_volumes(self.context, 100, 1)
self.assertEqual(volumes, 100)
volumes = quota.allowed_volumes(self.context, 101, 1)
self.assertEqual(volumes, 101)
def test_unlimited_floating_ips(self):
self.flags(quota_floating_ips=10)
floating_ips = quota.allowed_floating_ips(self.context, 100)
self.assertEqual(floating_ips, 10)
db.quota_create(self.context, self.project_id, 'floating_ips', None)
floating_ips = quota.allowed_floating_ips(self.context, 100)
self.assertEqual(floating_ips, 100)
db.quota_create(self.context, self.project_id, 'floating_ips', -1)
floating_ips = quota.allowed_floating_ips(self.context, 100)
self.assertEqual(floating_ips, 100)
floating_ips = quota.allowed_floating_ips(self.context, 101)
self.assertEqual(floating_ips, 101)
def test_unlimited_metadata_items(self):
self.flags(quota_metadata_items=10)
items = quota.allowed_metadata_items(self.context, 100)
self.assertEqual(items, 10)
db.quota_create(self.context, self.project_id, 'metadata_items', None)
items = quota.allowed_metadata_items(self.context, 100)
self.assertEqual(items, 100)
db.quota_create(self.context, self.project_id, 'metadata_items', -1)
items = quota.allowed_metadata_items(self.context, 100)
self.assertEqual(items, 100)
items = quota.allowed_metadata_items(self.context, 101)
self.assertEqual(items, 101)
def test_too_many_instances(self):
instance_ids = []
for i in range(FLAGS.quota_instances):
instance_id = self._create_instance()
instance_ids.append(instance_id)
inst_type = instance_types.get_instance_type_by_name('m1.small')
image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
self.assertRaises(exception.QuotaError, compute.API().create,
self.context,
min_count=1,
max_count=1,
instance_type=inst_type,
image_href=image_uuid)
for instance_id in instance_ids:
db.instance_destroy(self.context, instance_id)
def test_too_many_cores(self):
instance_ids = []
instance_id = self._create_instance(cores=4)
instance_ids.append(instance_id)
inst_type = instance_types.get_instance_type_by_name('m1.small')
image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
self.assertRaises(exception.QuotaError, compute.API().create,
self.context,
min_count=1,
max_count=1,
instance_type=inst_type,
image_href=image_uuid)
for instance_id in instance_ids:
db.instance_destroy(self.context, instance_id)
def test_too_many_volumes(self):
volume_ids = []
for i in range(FLAGS.quota_volumes):
volume_id = self._create_volume()
volume_ids.append(volume_id)
self.assertRaises(exception.QuotaError,
volume.API().create,
self.context, 10, '', '', None)
for volume_id in volume_ids:
db.volume_destroy(self.context, volume_id)
def test_too_many_gigabytes(self):
volume_ids = []
volume_id = self._create_volume(size=20)
volume_ids.append(volume_id)
self.assertRaises(exception.QuotaError,
volume.API().create,
self.context, 10, '', '', None)
for volume_id in volume_ids:
db.volume_destroy(self.context, volume_id)
def test_too_many_addresses(self):
address = '192.168.0.100'
db.floating_ip_create(context.get_admin_context(),
{'address': address,
'project_id': self.project_id})
self.assertRaises(exception.QuotaError,
self.network.allocate_floating_ip,
self.context,
self.project_id)
db.floating_ip_destroy(context.get_admin_context(), address)
def test_too_many_metadata_items(self):
metadata = {}
for i in range(FLAGS.quota_metadata_items + 1):
metadata['key%s' % i] = 'value%s' % i
inst_type = instance_types.get_instance_type_by_name('m1.small')
image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
self.assertRaises(exception.QuotaError, compute.API().create,
self.context,
min_count=1,
max_count=1,
instance_type=inst_type,
image_href=image_uuid,
metadata=metadata)
def test_default_allowed_injected_files(self):
self.flags(quota_max_injected_files=55)
self.assertEqual(quota.allowed_injected_files(self.context, 100), 55)
def test_overridden_allowed_injected_files(self):
self.flags(quota_max_injected_files=5)
db.quota_create(self.context, self.project_id, 'injected_files', 77)
self.assertEqual(quota.allowed_injected_files(self.context, 100), 77)
def test_unlimited_default_allowed_injected_files(self):
self.flags(quota_max_injected_files=-1)
self.assertEqual(quota.allowed_injected_files(self.context, 100), 100)
def test_unlimited_db_allowed_injected_files(self):
self.flags(quota_max_injected_files=5)
db.quota_create(self.context, self.project_id, 'injected_files', None)
self.assertEqual(quota.allowed_injected_files(self.context, 100), 100)
def test_default_allowed_injected_file_content_bytes(self):
self.flags(quota_max_injected_file_content_bytes=12345)
limit = quota.allowed_injected_file_content_bytes(self.context, 23456)
self.assertEqual(limit, 12345)
def test_overridden_allowed_injected_file_content_bytes(self):
self.flags(quota_max_injected_file_content_bytes=12345)
db.quota_create(self.context, self.project_id,
'injected_file_content_bytes', 5678)
limit = quota.allowed_injected_file_content_bytes(self.context, 23456)
self.assertEqual(limit, 5678)
def test_unlimited_default_allowed_injected_file_content_bytes(self):
self.flags(quota_max_injected_file_content_bytes=-1)
limit = quota.allowed_injected_file_content_bytes(self.context, 23456)
self.assertEqual(limit, 23456)
def test_unlimited_db_allowed_injected_file_content_bytes(self):
self.flags(quota_max_injected_file_content_bytes=12345)
db.quota_create(self.context, self.project_id,
'injected_file_content_bytes', None)
limit = quota.allowed_injected_file_content_bytes(self.context, 23456)
self.assertEqual(limit, 23456)
def _create_with_injected_files(self, files):
self.flags(image_service='nova.image.fake.FakeImageService')
api = compute.API(image_service=self.StubImageService())
inst_type = instance_types.get_instance_type_by_name('m1.small')
image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
api.create(self.context, min_count=1, max_count=1,
instance_type=inst_type, image_href=image_uuid,
injected_files=files)
def test_no_injected_files(self):
self.flags(image_service='nova.image.fake.FakeImageService')
api = compute.API(image_service=self.StubImageService())
inst_type = instance_types.get_instance_type_by_name('m1.small')
image_uuid = 'cedef40a-ed67-4d10-800e-17455edce175'
api.create(self.context,
instance_type=inst_type,
image_href=image_uuid)
def test_max_injected_files(self):
files = []
for i in xrange(FLAGS.quota_max_injected_files):
files.append(('/my/path%d' % i, 'config = test\n'))
self._create_with_injected_files(files) # no QuotaError
def test_too_many_injected_files(self):
files = []
for i in xrange(FLAGS.quota_max_injected_files + 1):
files.append(('/my/path%d' % i, 'my\ncontent%d\n' % i))
self.assertRaises(exception.QuotaError,
self._create_with_injected_files, files)
def test_max_injected_file_content_bytes(self):
max = FLAGS.quota_max_injected_file_content_bytes
content = ''.join(['a' for i in xrange(max)])
files = [('/test/path', content)]
self._create_with_injected_files(files) # no QuotaError
def test_too_many_injected_file_content_bytes(self):
max = FLAGS.quota_max_injected_file_content_bytes
content = ''.join(['a' for i in xrange(max + 1)])
files = [('/test/path', content)]
self.assertRaises(exception.QuotaError,
self._create_with_injected_files, files)
def test_allowed_injected_file_path_bytes(self):
self.assertEqual(
quota.allowed_injected_file_path_bytes(self.context),
FLAGS.quota_max_injected_file_path_bytes)
def test_max_injected_file_path_bytes(self):
max = FLAGS.quota_max_injected_file_path_bytes
path = ''.join(['a' for i in xrange(max)])
files = [(path, 'config = quotatest')]
self._create_with_injected_files(files) # no QuotaError
def test_too_many_injected_file_path_bytes(self):
max = FLAGS.quota_max_injected_file_path_bytes
path = ''.join(['a' for i in xrange(max + 1)])
files = [(path, 'config = quotatest')]
self.assertRaises(exception.QuotaError,
self._create_with_injected_files, files)
|
|
import json
import logging
from builtins import str
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.sites.shortcuts import get_current_site
from django.core.cache import cache
from django.core.exceptions import ObjectDoesNotExist
from django.core.exceptions import PermissionDenied
from django.core.urlresolvers import reverse_lazy
from django.db.models import Count
from django.db.models import IntegerField
from django.db.models import OuterRef
from django.db.models import Q
from django.db.models import Subquery
from django.http import HttpResponse
from django.http import HttpResponseBadRequest
from django.http import HttpResponseForbidden
from django.http import HttpResponseNotFound
from django.shortcuts import get_object_or_404
from django.shortcuts import redirect
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from django.views.generic.base import TemplateView
from le_utils.constants import content_kinds
from rest_framework.authentication import BasicAuthentication
from rest_framework.authentication import SessionAuthentication
from rest_framework.authentication import TokenAuthentication
from rest_framework.decorators import api_view
from rest_framework.decorators import authentication_classes
from rest_framework.decorators import permission_classes
from rest_framework.permissions import AllowAny
from rest_framework.permissions import IsAuthenticated
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response
from .json_dump import json_for_parse_from_data
from .json_dump import json_for_parse_from_serializer
from contentcuration.api import activate_channel
from contentcuration.api import get_staged_diff
from contentcuration.db.models.aggregates import ArrayAgg
from contentcuration.decorators import browser_is_supported
from contentcuration.decorators import has_accepted_policies
from contentcuration.models import Channel
from contentcuration.models import ChannelSet
from contentcuration.models import ContentNode
from contentcuration.models import DEFAULT_USER_PREFERENCES
from contentcuration.models import Language
from contentcuration.models import User
from contentcuration.serializers import ContentNodeSerializer
from contentcuration.serializers import CurrentUserSerializer
from contentcuration.serializers import SimplifiedChannelProbeCheckSerializer
from contentcuration.serializers import TaskSerializer
from contentcuration.serializers import UserChannelListSerializer
from contentcuration.tasks import create_async_task
from contentcuration.tasks import generatechannelcsv_task
from contentcuration.utils.messages import get_messages
from contentcuration.viewsets.channelset import PublicChannelSetSerializer
PUBLIC_CHANNELS_CACHE_DURATION = 30 # seconds
MESSAGES = "i18n_messages"
PREFERENCES = "user_preferences"
CURRENT_USER = "current_user"
@browser_is_supported
@permission_classes((AllowAny,))
def base(request):
if settings.LIBRARY_MODE:
return channel_list(request)
elif request.user.is_authenticated():
return redirect(reverse_lazy('channels'))
else:
return redirect(reverse_lazy('accounts'))
""" HEALTH CHECKS """
def health(request):
c = Channel.objects.first()
if c:
return HttpResponse(c.name)
else:
return HttpResponse("No channels created yet!")
def stealth(request):
return HttpResponse("<3")
@api_view(["GET"])
@authentication_classes((TokenAuthentication, SessionAuthentication))
@permission_classes((IsAuthenticated,))
def get_prober_channel(request):
if not request.user.is_admin:
return HttpResponseForbidden()
channel = Channel.objects.filter(editors=request.user).first()
    if not channel:
        # "editors" is a many-to-many field, so it cannot be passed to create()
        channel = Channel.objects.create(name="Prober channel")
        channel.editors.add(request.user)
return Response(SimplifiedChannelProbeCheckSerializer(channel).data)
""" END HEALTH CHECKS """
def get_or_set_cached_constants(constant, serializer):
cached_data = cache.get(constant.__name__)
if cached_data:
return cached_data
constant_objects = constant.objects.all()
constant_serializer = serializer(constant_objects, many=True)
constant_data = JSONRenderer().render(constant_serializer.data)
cache.set(constant.__name__, constant_data, None)
return constant_data
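# A usage sketch for the helper above (LanguageSerializer is hypothetical and
# not imported in this module; it only illustrates the cache-by-model-name
# pattern):
#
#   languages_json = get_or_set_cached_constants(Language, LanguageSerializer)
#   # The first call hits the database and caches the rendered JSON under the
#   # key "Language"; later calls return the cached bytes (no timeout is set).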
@browser_is_supported
@has_accepted_policies
@permission_classes((AllowAny,))
def channel_list(request):
anon = settings.LIBRARY_MODE or request.user.is_anonymous()
current_user = (
None
if anon
else json_for_parse_from_serializer(UserChannelListSerializer(request.user))
)
preferences = DEFAULT_USER_PREFERENCES if anon else request.user.content_defaults
# Get public channel languages
public_lang_query = Language.objects.filter(channel_language__public=True,
channel_language__main_tree__published=True,
channel_language__deleted=False) \
.values('lang_code') \
.annotate(count=Count('lang_code')) \
.order_by('lang_code')
# Get public channel sets
public_channelset_query = ChannelSet.objects.filter(public=True) \
.annotate(count=SQCountDistinct(
Channel.objects.filter(
secret_tokens=OuterRef("secret_token"),
public=True,
main_tree__published=True,
deleted=False
).values_list("id", flat=True),
field="id"
))
return render(
request,
"channel_list.html",
{
CURRENT_USER: current_user,
PREFERENCES: json_for_parse_from_data(preferences),
MESSAGES: json_for_parse_from_data(get_messages()),
"LIBRARY_MODE": settings.LIBRARY_MODE,
'public_languages': json_for_parse_from_data({l['lang_code']: l['count'] for l in public_lang_query}),
'public_collections': json_for_parse_from_serializer(PublicChannelSetSerializer(public_channelset_query, many=True))
},
)
@browser_is_supported
@has_accepted_policies
@permission_classes((AllowAny,))
def accounts(request):
if not request.user.is_anonymous:
return redirect("channels")
return render(
request,
"accounts.html",
{
PREFERENCES: json_for_parse_from_data(DEFAULT_USER_PREFERENCES),
MESSAGES: json_for_parse_from_data(get_messages()),
},
)
@login_required
@browser_is_supported
@has_accepted_policies
@authentication_classes(
(SessionAuthentication, BasicAuthentication, TokenAuthentication)
)
@permission_classes((IsAuthenticated,))
def channel(request, channel_id):
channel = get_object_or_404(Channel, id=channel_id, deleted=False)
# Check user has permission to view channel
try:
request.user.can_view_channel(channel)
except PermissionDenied:
return HttpResponseNotFound("Channel not found")
return render(
request,
"channel_edit.html",
{
"channel_id": channel_id,
CURRENT_USER: json_for_parse_from_serializer(
UserChannelListSerializer(request.user)
),
PREFERENCES: json_for_parse_from_data(request.user.content_defaults),
MESSAGES: json_for_parse_from_data(get_messages()),
},
)
@csrf_exempt
@authentication_classes(
(SessionAuthentication, BasicAuthentication, TokenAuthentication)
)
@permission_classes((IsAuthenticated,))
def publish_channel(request):
logging.debug("Entering the publish_channel endpoint")
if request.method != "POST":
return HttpResponseBadRequest(
"Only POST requests are allowed on this endpoint."
)
data = json.loads(request.body)
try:
channel_id = data["channel_id"]
version_notes = data.get('version_notes')
request.user.can_edit(channel_id)
task_info = {
"user": request.user,
"metadata": {"affects": {"channels": [channel_id]}},
}
task_args = {
"user_id": request.user.pk,
"channel_id": channel_id,
"version_notes": version_notes,
}
task, task_info = create_async_task("export-channel", task_info, task_args)
return HttpResponse(JSONRenderer().render(TaskSerializer(task_info).data))
except KeyError:
raise ObjectDoesNotExist("Missing attribute from data: {}".format(data))
class SQCountDistinct(Subquery):
# Include ALIAS at the end to support Postgres
template = (
"(SELECT COUNT(DISTINCT %(field)s) FROM (%(subquery)s) AS %(field)s__sum)"
)
output_field = IntegerField()
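# SQCountDistinct is used above in channel_list() and below in
# accessible_channels(): wrap a values_list() queryset and pass
# field="<column>" so COUNT(DISTINCT <column>) runs as a single correlated
# subquery in the database instead of counting rows in Python.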
def map_channel_data(channel):
channel["id"] = channel.pop("main_tree__id")
channel["title"] = channel.pop("name")
channel["children"] = [child for child in channel["children"] if child]
return channel
@api_view(["GET"])
@authentication_classes((TokenAuthentication, SessionAuthentication))
@permission_classes((IsAuthenticated,))
def accessible_channels(request, channel_id):
# Used for import modal
# Returns a list of objects with the following parameters:
# id, title, resource_count, children
channels = (
Channel.objects.filter(
Q(deleted=False)
& (Q(public=True) | Q(editors=request.user) | Q(viewers=request.user))
)
.exclude(pk=channel_id)
.select_related("main_tree")
)
channel_main_tree_nodes = ContentNode.objects.filter(
tree_id=OuterRef("main_tree__tree_id")
)
# Add the unique count of distinct non-topic node content_ids
non_topic_content_ids = (
channel_main_tree_nodes.exclude(kind_id=content_kinds.TOPIC)
.order_by("content_id")
.distinct("content_id")
.values_list("content_id", flat=True)
)
channels = channels.annotate(
resource_count=SQCountDistinct(non_topic_content_ids, field="content_id"),
children=ArrayAgg("main_tree__children", distinct=True),
)
channels_data = channels.values(
"name", "resource_count", "children", "main_tree__id"
)
    # Wrap in list() so the result is JSON-serializable on Python 3 as well.
    return Response(list(map(map_channel_data, channels_data)))
def activate_channel_endpoint(request):
if request.method != "POST":
return HttpResponseBadRequest(
"Only POST requests are allowed on this endpoint."
)
data = json.loads(request.body)
channel = Channel.objects.get(pk=data["channel_id"])
changes = []
try:
change = activate_channel(channel, request.user)
changes.append(change)
except PermissionDenied as e:
return HttpResponseForbidden(str(e))
return HttpResponse(json.dumps({"success": True, "changes": changes}))
def get_staged_diff_endpoint(request):
if request.method == "POST":
return HttpResponse(
json.dumps(get_staged_diff(json.loads(request.body)["channel_id"]))
)
return HttpResponseBadRequest("Only POST requests are allowed on this endpoint.")
@authentication_classes(
(SessionAuthentication, BasicAuthentication, TokenAuthentication)
)
@permission_classes((IsAuthenticated,))
def add_bookmark(request):
if request.method != "POST":
return HttpResponseBadRequest(
"Only POST requests are allowed on this endpoint."
)
data = json.loads(request.body)
try:
user = User.objects.get(pk=data["user_id"])
channel = Channel.objects.get(pk=data["channel_id"])
channel.bookmarked_by.add(user)
channel.save()
return HttpResponse(json.dumps({"success": True}))
except ObjectDoesNotExist:
return HttpResponseNotFound(
"Channel with id {} not found".format(data["channel_id"])
)
@authentication_classes(
(SessionAuthentication, BasicAuthentication, TokenAuthentication)
)
@permission_classes((IsAuthenticated,))
def remove_bookmark(request):
if request.method != "POST":
return HttpResponseBadRequest(
"Only POST requests are allowed on this endpoint."
)
data = json.loads(request.body)
try:
user = User.objects.get(pk=data["user_id"])
channel = Channel.objects.get(pk=data["channel_id"])
channel.bookmarked_by.remove(user)
channel.save()
return HttpResponse(json.dumps({"success": True}))
except ObjectDoesNotExist:
return HttpResponseNotFound(
"Channel with id {} not found".format(data["channel_id"])
)
@authentication_classes(
(SessionAuthentication, BasicAuthentication, TokenAuthentication)
)
@permission_classes((IsAuthenticated,))
def set_channel_priority(request):
if request.method != "POST":
return HttpResponseBadRequest(
"Only POST requests are allowed on this endpoint."
)
data = json.loads(request.body)
try:
channel = Channel.objects.get(pk=data["channel_id"])
channel.priority = data["priority"]
channel.save()
return HttpResponse(json.dumps({"success": True}))
except ObjectDoesNotExist:
return HttpResponseNotFound(
"Channel with id {} not found".format(data["channel_id"])
)
@authentication_classes(
(SessionAuthentication, BasicAuthentication, TokenAuthentication)
)
@permission_classes((IsAuthenticated,))
def download_channel_content_csv(request, channel_id):
""" Writes list of channels to csv, which is then emailed """
site = get_current_site(request)
generatechannelcsv_task.delay(channel_id, site.domain, request.user.id)
return HttpResponse({"success": True})
class SandboxView(TemplateView):
template_name = "sandbox.html"
def get_context_data(self, **kwargs):
kwargs = super(SandboxView, self).get_context_data(**kwargs)
active_channels = Channel.objects.filter(
Q(editors=self.request.user) | Q(public=True)
)
active_tree_ids = active_channels.values_list("main_tree__tree_id", flat=True)
active_nodes = ContentNode.objects.filter(tree_id__in=active_tree_ids)
nodes = []
# Get a node of every kind
for kind, _ in reversed(sorted(content_kinds.choices)):
node = active_nodes.filter(
kind_id=kind, freeze_authoring_data=False
).first()
if node:
nodes.append(ContentNodeSerializer(node).data)
# Add an imported node
imported_node = (
active_nodes.filter(freeze_authoring_data=True)
.exclude(kind_id=content_kinds.TOPIC)
.first()
)
if imported_node:
nodes.append(ContentNodeSerializer(imported_node).data)
kwargs.update(
{
"nodes": JSONRenderer().render(nodes),
"channel": active_channels.first().pk,
"current_user": JSONRenderer().render(
CurrentUserSerializer(self.request.user).data
),
"root_id": self.request.user.clipboard_tree.pk,
}
)
return kwargs
@api_view(["GET"])
@authentication_classes((TokenAuthentication, SessionAuthentication))
@permission_classes((IsAuthenticated,))
def get_clipboard_channels(request):
if not request.user:
return Response([])
channel_ids = request.user.clipboard_tree.get_descendants().order_by('original_channel_id').values_list('original_channel_id', flat=True).distinct()
return Response(channel_ids)
|
|
# Copyright 2013 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Type conversion and validation classes for configuration options.
Use these classes as values for the `type` argument to
:class:`oslo_config.cfg.Opt` and its subclasses.
"""
import re
import netaddr
import six
class ConfigType(object):
BASE_TYPES = (None,)
def is_base_type(self, other):
return isinstance(other, self.BASE_TYPES)
class String(ConfigType):
"""String type.
String values do not get transformed and are returned as str objects.
:param choices: Optional sequence of valid values. Mutually
exclusive with 'regex'.
    :param quotes: If True and the string is enclosed in single or double
                   quotes, those quotes are stripped. An error is signalled
                   if the string has a quote at the beginning but none at
                   the end. Turned off by default. Useful if used with
                   container types like List.
:param regex: Optional regular expression (string or compiled
regex) that the value must match on an unanchored
search. Mutually exclusive with 'choices'.
"""
BASE_TYPES = six.string_types
def __init__(self, choices=None, quotes=False, regex=None):
super(String, self).__init__()
if choices and regex:
raise ValueError("'choices' and 'regex' cannot both be specified")
self.choices = choices
self.quotes = quotes
self.regex = re.compile(regex) if regex is not None else None
def __call__(self, value):
value = str(value)
if self.quotes and value:
if value[0] in "\"'":
if value[-1] != value[0]:
raise ValueError('Non-closed quote: %s' % value)
value = value[1:-1]
if self.regex and not self.regex.search(value):
raise ValueError("Value %r doesn't match regex %r" %
(value, self.regex.pattern))
if self.choices is None or value in self.choices:
return value
raise ValueError(
'Valid values are [%s], but found %s' % (
', '.join([str(v) for v in self.choices]),
repr(value)))
def __repr__(self):
details = []
if self.choices:
details.append("choices=%r" % self.choices)
if self.regex:
details.append("regex=%r" % self.regex.pattern)
if details:
return "String(%s)" % ",".join(details)
return 'String'
def __eq__(self, other):
return (
(self.__class__ == other.__class__) and
(self.choices == other.choices) and
(self.quotes == other.quotes) and
(self.regex == other.regex)
)
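# A brief usage sketch for String (illustrative values only):
#
#   protocol = String(choices=['http', 'https'])
#   protocol('https')   # -> 'https'
#   protocol('ftp')     # -> ValueError: not one of the allowed choices
#
#   quoted = String(quotes=True)
#   quoted('"hello"')   # -> 'hello' (surrounding quotes stripped)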
class MultiString(String):
BASE_TYPES = six.string_types + (list,)
class Boolean(ConfigType):
"""Boolean type.
Values are case insensitive and can be set using
1/0, yes/no, true/false or on/off.
"""
TRUE_VALUES = ['true', '1', 'on', 'yes']
FALSE_VALUES = ['false', '0', 'off', 'no']
BASE_TYPES = (bool,)
def __call__(self, value):
if isinstance(value, bool):
return value
s = value.lower()
if s in self.TRUE_VALUES:
return True
elif s in self.FALSE_VALUES:
return False
else:
raise ValueError('Unexpected boolean value %r' % value)
def __repr__(self):
return 'Boolean'
def __eq__(self, other):
return self.__class__ == other.__class__
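# A brief usage sketch for Boolean (illustrative values only):
#
#   flag = Boolean()
#   flag('yes')    # -> True
#   flag('off')    # -> False
#   flag('maybe')  # -> ValueError: unexpected boolean value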
class Integer(ConfigType):
"""Integer type.
    Converts the value to an integer, optionally doing range checking.
    If the value is whitespace or an empty string, None is returned.
:param min: Optional check that value is greater than or equal to min
:param max: Optional check that value is less than or equal to max
"""
BASE_TYPES = six.integer_types
def __init__(self, min=None, max=None):
super(Integer, self).__init__()
self.min = min
self.max = max
if min is not None and max is not None and max < min:
raise ValueError('Max value is less than min value')
def __call__(self, value):
if not isinstance(value, int):
s = str(value).strip()
if s == '':
value = None
else:
value = int(value)
if value is not None:
self._check_range(value)
return value
def _check_range(self, value):
if self.min is not None and value < self.min:
raise ValueError('Should be greater than or equal to %d' %
self.min)
if self.max is not None and value > self.max:
raise ValueError('Should be less than or equal to %d' % self.max)
def __repr__(self):
props = []
if self.min is not None:
props.append('min=%d' % self.min)
if self.max is not None:
props.append('max=%d' % self.max)
if props:
return 'Integer(%s)' % ', '.join(props)
return 'Integer'
def __eq__(self, other):
return (
(self.__class__ == other.__class__) and
(self.min == other.min) and
(self.max == other.max)
)
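# A brief usage sketch for Integer with range checking (illustrative values):
#
#   port = Integer(min=1, max=65535)
#   port('8080')  # -> 8080
#   port('')      # -> None (whitespace/empty input is mapped to None)
#   port('0')     # -> ValueError: below the configured minimum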
class Float(ConfigType):
"""Float type."""
# allow float to be set from int
BASE_TYPES = six.integer_types + (float,)
def __call__(self, value):
if isinstance(value, float):
return value
return float(value)
def __repr__(self):
return 'Float'
def __eq__(self, other):
return self.__class__ == other.__class__
class List(ConfigType):
"""List type.
Represent values of other (item) type, separated by commas.
The resulting value is a list containing those values.
    List doesn't know whether the item type can itself contain commas. To work
    around this it tries the following: if the next part fails item validation,
    it appends a comma and the next part until validation succeeds or no parts
    are left. In the latter case it signals a validation error.
:param item_type: type of list items
:param bounds: if True, value should be inside "[" and "]" pair
"""
BASE_TYPES = (list,)
def __init__(self, item_type=None, bounds=False):
super(List, self).__init__()
if item_type is None:
item_type = String()
if not callable(item_type):
raise TypeError('item_type must be callable')
self.item_type = item_type
self.bounds = bounds
def __call__(self, value):
if isinstance(value, list):
return value
result = []
s = value.strip()
if self.bounds:
if not s.startswith('['):
raise ValueError('Value should start with "["')
if not s.endswith(']'):
raise ValueError('Value should end with "]"')
s = s[1:-1]
if s == '':
return result
values = s.split(',')
while values:
value = values.pop(0)
while True:
first_error = None
try:
validated_value = self.item_type(value.strip())
break
except ValueError as e:
if not first_error:
first_error = e
if len(values) == 0:
raise first_error
value += ',' + values.pop(0)
result.append(validated_value)
return result
def __repr__(self):
return 'List of %s' % repr(self.item_type)
def __eq__(self, other):
return (
(self.__class__ == other.__class__) and
(self.item_type == other.item_type)
)
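# A brief usage sketch for List (illustrative values only):
#
#   ports = List(item_type=Integer(), bounds=True)
#   ports('[80, 443, 8080]')  # -> [80, 443, 8080]
#   ports('[]')               # -> []
#   ports('80, 443')          # -> ValueError: missing the required "[" bound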
class Dict(ConfigType):
"""Dictionary type.
Dictionary type values are key:value pairs separated by commas.
The resulting value is a dictionary of these key/value pairs.
Type of dictionary key is always string, but dictionary value
type can be customized.
:param value_type: type of values in dictionary
:param bounds: if True, value should be inside "{" and "}" pair
"""
BASE_TYPES = (dict,)
def __init__(self, value_type=None, bounds=False):
super(Dict, self).__init__()
if value_type is None:
value_type = String()
if not callable(value_type):
raise TypeError('value_type must be callable')
self.value_type = value_type
self.bounds = bounds
def __call__(self, value):
if isinstance(value, dict):
return value
result = {}
s = value.strip()
if self.bounds:
if not s.startswith('{'):
raise ValueError('Value should start with "{"')
if not s.endswith('}'):
raise ValueError('Value should end with "}"')
s = s[1:-1]
if s == '':
return result
pairs = s.split(',')
while pairs:
pair = pairs.pop(0)
while True:
first_error = None
try:
key_value = pair.split(':', 1)
if len(key_value) < 2:
raise ValueError('Value should be NAME:VALUE pairs '
'separated by ","')
key, value = key_value
key = key.strip()
value = value.strip()
value = self.value_type(value)
break
except ValueError as e:
if not first_error:
first_error = e
if not pairs:
raise first_error
pair += ',' + pairs.pop(0)
if key == '':
raise ValueError('Key name should not be empty')
if key in result:
raise ValueError('Duplicate key %s' % key)
result[key] = value
return result
def __repr__(self):
return 'Dict of %s' % repr(self.value_type)
def __eq__(self, other):
return (
(self.__class__ == other.__class__) and
(self.value_type == other.value_type)
)
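# A brief usage sketch for Dict (illustrative values only):
#
#   weights = Dict(value_type=Integer())
#   weights('a:1,b:2')   # -> {'a': 1, 'b': 2}
#   weights('a:1,a:2')   # -> ValueError: duplicate key
#   weights('broken')    # -> ValueError: expects NAME:VALUE pairs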
class IPAddress(ConfigType):
"""IP address type
    Represents either an IPv4 or an IPv6 address. If no version parameter is
    specified, both versions are accepted.
:param version: defines which version should be explicitly checked (4 or 6)
"""
BASE_TYPES = six.string_types
def __init__(self, version=None):
super(IPAddress, self).__init__()
version_checkers = {
None: self._check_both_versions,
4: self._check_ipv4,
6: self._check_ipv6
}
self.version_checker = version_checkers.get(version)
if self.version_checker is None:
raise TypeError("%s is not a valid IP version." % version)
def __call__(self, value):
value = str(value)
if not value:
raise ValueError("IP address cannot be an empty string")
self.version_checker(value)
return value
def __repr__(self):
return "IPAddress"
def __eq__(self, other):
return self.__class__ == other.__class__
def _check_ipv4(self, address):
if not netaddr.valid_ipv4(address, netaddr.core.INET_PTON):
raise ValueError("%s is not an IPv4 address" % address)
def _check_ipv6(self, address):
if not netaddr.valid_ipv6(address, netaddr.core.INET_PTON):
raise ValueError("%s is not an IPv6 address" % address)
def _check_both_versions(self, address):
if not (netaddr.valid_ipv4(address, netaddr.core.INET_PTON) or
netaddr.valid_ipv6(address, netaddr.core.INET_PTON)):
raise ValueError("%s is not IPv4 or IPv6 address" % address)
|
|
import os.path
import tornado
import tornado.auth
import markdown
import re
from gblog import config
from gblog import utils
from gblog.handlers.basehandler import BaseHandler
class HomeHandler(BaseHandler):
"""Handle URL '/'.
    Subclass of BaseHandler and RequestHandler; supports the standard GET and
    POST methods.
"""
def get(self):
"""Return the home page."""
# Calculate the pages num
        entries_per_page = config.options.entries_per_page
        try:
            entries_per_page = int(entries_per_page)
        except ValueError:
            raise tornado.web.HTTPError(500)
        count = self.db.get("SELECT COUNT(*) FROM entries")
        count = list(count.values())[0]
        pages = int((count - 1) / entries_per_page + 1)  # equivalent to math.ceil(count / entries_per_page)
        pages = pages if pages else 1  # pages cannot be 0
entries = self.db.query("SELECT id,title,slug,abstract,published,\
readtimes,comments FROM entries ORDER BY published DESC \
LIMIT {0}".format(entries_per_page))
if not entries:
pass
tags_list = self.db.query("SELECT * FROM tags")
dates_list = self.db.query("SELECT * FROM dates")
self.render("home.html", entries=entries, tags_list=tags_list,
dates_list=dates_list, pages=pages)
def post(self):
"""Return the entries in home page #."""
page = self.get_argument("page", None)
if not page: raise tornado.web.HTTPError(404)
        entries_per_page = config.options.entries_per_page
        start = int(page) * entries_per_page - entries_per_page
entries = self.db.query("SELECT id,title,slug,abstract,published,\
readtimes,comments FROM entries ORDER BY published DESC \
LIMIT {0},{1}".format(start, entries_per_page))
self.render("modules/entry.html", entries=entries)
class AboutHandler(BaseHandler):
"""Handle URL '/about'.
"""
def get(self):
"""Return the about page."""
about_file_path = self.settings["config_dir"] + '/about.md'
        if os.path.isfile(about_file_path):
            with open(about_file_path) as f:
                content = f.read()
            content = markdown.markdown(content)
else:
content = None
comments = self.db.query("SELECT * FROM comments WHERE \
entry_id={0}".format(0))
#if comments:
        reply_map = {}
        for i, comment in enumerate(comments):
            reply_map[comment["id"]] = i
self.render("about.html", content=content, comments=comments,
entry_id=0, reply_map=reply_map)
class DefaultHandler(BaseHandler):
"""handler of default_handler_class in Application Setting.
"""
def get(self):
"""Return the 404 page."""
# search *.jpg, *.ico, *.css ....
#match = re.search('\.', self.request.uri)
#if match:
#self.send_error(400)
#else:
#self.render("404.html")
raise tornado.web.HTTPError(404)
class EntryHandler(BaseHandler):
"""Handle URL '/entry/[^/]+'.
    Subclass of BaseHandler and RequestHandler; supports the standard GET method.
"""
def get(self, slug):
entry = self.db.get("SELECT * FROM entries WHERE slug = '{0}'"\
.format(slug))
if not entry: raise tornado.web.HTTPError(404)
# Update readtimes
if not self.current_user:
entry.readtimes+=1
self.db.execute( "UPDATE entries SET readtimes = '{0}' WHERE \
id = {1}".format(entry.readtimes, entry.id))
tags_list = self.db.query("SELECT * FROM tags")
dates_list = self.db.query("SELECT * FROM dates")
# Query the pre and next article
query = self.db.query("(SELECT id,slug,title FROM entries WHERE \
id<{0} ORDER BY id DESC LIMIT 1) UNION ALL (SELECT id, \
slug,title FROM entries WHERE id>{0} ORDER BY id LIMIT 1) \
".format(entry.id))
        # Compare with ==; identity checks ('is') only work for small ints by accident.
        if len(query) == 2:
            pre = query[0]
            nex = query[1]
        elif len(query) == 1:
if query[0]["id"] < entry["id"]:
pre = query[0]
nex = None
else:
pre = None
nex = query[0]
else:
pre = None
nex = None
self.render("article.html", entry=entry, tags_list=tags_list,
dates_list=dates_list, pre=pre, nex=nex)
class ArchiveHandler(BaseHandler):
"""Handle URL '/archive'.
    Subclass of BaseHandler and RequestHandler; supports the standard GET method.
"""
def get(self):
entries = self.db.query("SELECT * FROM entries ORDER BY \
published DESC")
self.render("archive.html", entries=entries)
class CategoryHandler(BaseHandler):
"""Handle URL '/category'.
"""
def get(self):
# Check argument
name=self.get_argument("name")
id = self.get_argument("id")
# Check id
try:
            id = int(id)
        except ValueError:
            raise tornado.web.HTTPError(400)
        if name == "tag":
            if id == 0:
                # Look the tag up by name when no id is given.
                tagname = self.get_argument("tagname")
                print(tagname)
                tag = self.db.get("SELECT * FROM tags WHERE name = '{0}' \
                        LIMIT 1".format(tagname))
                entries = self.db.query("SELECT * FROM entries WHERE id IN \
                        (SELECT entry_id FROM tagmaps WHERE tag_id = {0})" \
                        .format(tag["id"]))
                if not entries:
                    raise tornado.web.HTTPError(404)
                self.render("category.html", entries=entries, category="tag",
                        item=tag)
                return  # avoid falling through and rendering a second time
entries = self.db.query("SELECT * FROM entries WHERE id IN (SELECT \
entry_id FROM tagmaps WHERE tag_id = {0})".format(id))
if not entries:
raise tornado.web.HTTPError(404)
tag = self.db.get("SELECT * FROM tags WHERE id = {0} LIMIT 1"\
.format(id))
self.render("category.html", entries=entries, category="tag",
item=tag)
elif name == "date":
entries = self.db.query("SELECT * FROM entries WHERE id IN (SELECT\
entry_id FROM datemaps WHERE date_id = {0})".format(id))
if not entries:
raise tornado.web.HTTPError(404)
date = self.db.get("SELECT * FROM dates WHERE id = {0} LIMIT 1"\
.format(id))
self.render("category.html", entries=entries, category="date",
item=date)
else:
raise tornado.web.HTTPError(404)
|
|
import numpy as np
import py_paddle.swig_paddle as api
from paddle.proto.ParameterConfig_pb2 import ParameterConfig
import struct
import tarfile
import cStringIO
from topology import Topology
__all__ = ['Parameters', 'create']
def create(layers):
"""
Create parameter pool by topology.
:param layers:
:return:
"""
topology = Topology(layers)
pool = Parameters()
for param in topology.proto().parameters:
pool.__append_config__(param)
return pool
class Parameters(object):
"""
Parameters is a dictionary contains Paddle's parameter. The key of
Parameters is the name of parameter. The value of Parameters is a plain
:code:`numpy.ndarry` .
Basically usage is
.. code-block:: python
data = paddle.layers.data(...)
...
out = paddle.layers.fc(...)
parameters = paddle.parameters.create(out)
parameter_names = parameters.names()
fc_mat = parameters.get('fc')
print fc_mat
"""
def __init__(self):
self.__param_conf__ = dict()
self.__gradient_machines__ = []
self.__tmp_params__ = []
def __append_config__(self, param_conf):
"""
        Append a parameter configuration. It is used to initialize Parameters
        and should be invoked only in paddle.parameters.create.
:param param_conf: The parameter configuration in protobuf
:type param_conf: ParameterConfig
:return: Nothing
"""
if not isinstance(param_conf, ParameterConfig):
raise ValueError("param_conf must be paddle.proto.ParameterConfig")
if param_conf.name in self.__param_conf__:
raise ValueError("duplicated parameter %s" % param_conf.name)
self.__param_conf__[param_conf.name] = param_conf
def keys(self):
"""
keys are the names of each parameter.
:return: list of parameter name
:rtype: list
"""
return self.__param_conf__.keys()
def names(self):
"""
names of each parameter.
:return: list of parameter name
:rtype: list
"""
return self.keys()
def has_key(self, key):
"""
        has_key returns True if there is a parameter whose name equals key.
:param key: Parameter name
:type key: basestring
:return: True if contains such key
"""
return key in self.__param_conf__.keys()
def __iter__(self):
"""
Return an iterator of parameter name. It is used by `for loop`
or `in` operator.
.. code-block:: python
parameters = paddle.parameters.create(...)
if "fc_param" in parameters:
print 'OK'
:return: an iterator of parameter name
:rtype: iterator
"""
return iter(self.__param_conf__)
def __getitem__(self, key):
"""
Get parameter by parameter name. It uses Python dict syntax.
:note: It will always copy the parameter from C++ side.
:param key: Parameter name
:type key: basestring
:return: parameter value
:rtype: np.ndarray
"""
shape = self.get_shape(key)
if len(self.__gradient_machines__) == 0:
# create new parameter in python numpy.
if len(self.__tmp_params__) != 0:
ret_list = [
mat for name, mat in self.__tmp_params__ if name == key
]
if len(ret_list) == 1:
return ret_list[0]
return np.ndarray(shape=shape, dtype=np.float32)
else:
for each_gradient_machine in self.__gradient_machines__:
param = __get_parameter_in_gradient_machine__(
each_gradient_machine, key)
# for simplify implementation now, we always copy from C++
assert isinstance(param, api.Parameter)
val = param.getBuf(api.PARAMETER_VALUE)
assert isinstance(val, api.Vector)
val = val.copyToNumpyArray()
return val
# else continue
raise RuntimeError("Unexpected branch")
def get_shape(self, key):
"""
get shape of the parameter.
:param key: parameter name
:type key: basestring
:return: parameter's shape
:rtype: tuple
"""
if not isinstance(key, basestring):
raise ValueError("parameter name should be string")
if not self.has_key(key):
raise ValueError("No such parameter %s" % key)
conf = self.__param_conf__[key]
dims = conf.dims if conf.dims else (1, conf.size)
return tuple(map(int, dims))
def __setitem__(self, key, value):
"""
        Set parameter by parameter name & value. It uses Python dict syntax.
:note: It will always copy the parameter to C++ side.
:param key: Parameter name
:type key: basestring
:param value: Parameter matrix.
:type value: np.ndarray
:return: Nothing
"""
        if not isinstance(value, np.ndarray):
            raise ValueError("value must be a numpy.ndarray")
        value = value.astype(dtype=np.float32)
        shape = self.get_shape(key)
        if value.shape != shape:
            raise ValueError("Value shape mismatch, expected %s, got %s" %
                             (shape, value.shape))
if len(self.__gradient_machines__) == 0:
self.__tmp_params__.append((key, value))
else:
for each_gradient_machine in self.__gradient_machines__:
__copy_parameter_to_gradient_machine__(each_gradient_machine,
key, value)
def get(self, parameter_name):
"""
Get parameter by parameter name.
:note: It will always copy the parameter from C++ side.
:param parameter_name: parameter name
:type parameter_name: basestring
:return: The parameter matrix.
:rtype: np.ndarray
"""
return self.__getitem__(key=parameter_name)
def set(self, parameter_name, value):
"""
Set parameter by parameter name & matrix.
:param parameter_name: parameter name
:type parameter_name: basestring
:param value: parameter matrix
:type value: np.ndarray
:return: Nothing.
"""
self.__setitem__(key=parameter_name, value=value)
def append_gradient_machine(self, gradient_machine):
"""
append gradient machine to parameters. This method is used internally in
Trainer.train.
:param gradient_machine: Paddle C++ GradientMachine object.
:type gradient_machine: api.GradientMachine
:return:
"""
if not isinstance(gradient_machine, api.GradientMachine):
raise ValueError("gradient_machine should be api.GradientMachine")
if len(self.__tmp_params__) != 0:
for name, val in self.__tmp_params__:
try:
__copy_parameter_to_gradient_machine__(gradient_machine,
name, val)
except ValueError:
# If no such parameter in gradient machine, then don't copy
pass
self.__gradient_machines__.append(gradient_machine)
    def serialize(self, name, f):
        """
        Write one parameter to a file-like object as a 16-byte header
        followed by the raw float32 buffer.
        :param name: parameter name
        :param f: writable binary file-like object
        :type f: file
        :return: Nothing
        """
        param = self.get(name)
        size = reduce(lambda a, b: a * b, param.shape)
        # Header: uint32 version (0), uint32 element size in bytes (4 for
        # float32), uint64 element count -- 16 bytes in total, matching the
        # f.read(16) skip in deserialize().
        f.write(struct.pack("IIQ", 0, 4, size))
        param = param.astype(np.float32)
        f.write(param.tobytes())
def deserialize(self, name, f):
"""
:param name:
:param f:
:type f: file
:return:
"""
f.read(16) # header
arr = np.frombuffer(f.read(), dtype=np.float32)
self.set(name, arr.reshape(self.get_shape(name)))
def to_tar(self, f):
tar = tarfile.TarFile(fileobj=f, mode='w')
for nm in self.names():
buf = cStringIO.StringIO()
self.serialize(nm, buf)
tarinfo = tarfile.TarInfo(name=nm)
buf.seek(0)
tarinfo.size = len(buf.getvalue())
tar.addfile(tarinfo, buf)
conf = self.__param_conf__[nm]
confStr = conf.SerializeToString()
tarinfo = tarfile.TarInfo(name="%s.protobuf" % nm)
tarinfo.size = len(confStr)
buf = cStringIO.StringIO(confStr)
buf.seek(0)
tar.addfile(tarinfo, fileobj=buf)
@staticmethod
def from_tar(f):
params = Parameters()
tar = tarfile.TarFile(fileobj=f, mode='r')
for finfo in tar:
assert isinstance(finfo, tarfile.TarInfo)
if finfo.name.endswith('.protobuf'):
f = tar.extractfile(finfo)
conf = ParameterConfig()
conf.ParseFromString(f.read())
params.__append_config__(conf)
for param_name in params.names():
f = tar.extractfile(param_name)
params.deserialize(param_name, f)
return params
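# A minimal round-trip sketch for the tar helpers above (the file name is
# illustrative only):
#
#   with open('params.tar', 'wb') as f:
#       parameters.to_tar(f)
#   with open('params.tar', 'rb') as f:
#       parameters = Parameters.from_tar(f)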
def __get_parameter_in_gradient_machine__(gradient_machine, name):
"""
:param gradient_machine:
:type gradient_machine: api.GradientMachine
:param name:
:return:
:rtype: api.Parameter
"""
params = filter(lambda p: p.getName() == name,
gradient_machine.getParameters())
if len(params) == 0:
raise ValueError("No such parameter")
elif len(params) > 1:
raise ValueError("Unexpected branch")
else:
return params[0]
def __copy_parameter_to_gradient_machine__(gradient_machine, name, arr):
"""
Copy a python ndarray into the gradient machine.
:param gradient_machine:
:type gradient_machine: api.GradientMachine
:param name:
:param arr:
:type arr: np.ndarray
:return:
:rtype: api.Parameter
"""
param = __get_parameter_in_gradient_machine__(gradient_machine, name)
vec = param.getBuf(api.PARAMETER_VALUE)
assert isinstance(vec, api.Vector)
vec.copyFromNumpyArray(arr.flatten())
|
|
###############################################################################
# Server process to keep track of unlinked resources (like shared memory
# segments, semaphores etc.) and clean them.
#
# On Unix we run a server process which keeps track of unlinked
# resources. The server ignores SIGINT and SIGTERM and reads from a
# pipe. Every other process of the program has a copy of the writable
# end of the pipe, so we get EOF when all other processes have exited.
# Then the server process unlinks any remaining resource names.
#
# This is important because there may be system limits for such resources: for
# instance, the system only supports a limited number of named semaphores, and
# shared-memory segments live in the RAM. If a python process leaks such a
# resource, this resource will not be removed till the next reboot. Without
# this resource tracker process, "killall python" would probably leave unlinked
# resources.
import os
import signal
import sys
import threading
import warnings
from . import spawn
from . import util
__all__ = ['ensure_running', 'register', 'unregister']
_HAVE_SIGMASK = hasattr(signal, 'pthread_sigmask')
_IGNORED_SIGNALS = (signal.SIGINT, signal.SIGTERM)
_CLEANUP_FUNCS = {
'noop': lambda: None,
}
if os.name == 'posix':
import _multiprocessing
import _posixshmem
_CLEANUP_FUNCS.update({
'semaphore': _multiprocessing.sem_unlink,
'shared_memory': _posixshmem.shm_unlink,
})
class ResourceTracker(object):
def __init__(self):
self._lock = threading.Lock()
self._fd = None
self._pid = None
def _stop(self):
with self._lock:
if self._fd is None:
# not running
return
# closing the "alive" file descriptor stops main()
os.close(self._fd)
self._fd = None
os.waitpid(self._pid, 0)
self._pid = None
def getfd(self):
self.ensure_running()
return self._fd
def ensure_running(self):
'''Make sure that resource tracker process is running.
This can be run from any process. Usually a child process will use
the resource created by its parent.'''
with self._lock:
if self._fd is not None:
# resource tracker was launched before, is it still running?
if self._check_alive():
# => still alive
return
# => dead, launch it again
os.close(self._fd)
# Clean-up to avoid dangling processes.
try:
# _pid can be None if this process is a child from another
# python process, which has started the resource_tracker.
if self._pid is not None:
os.waitpid(self._pid, 0)
except ChildProcessError:
# The resource_tracker has already been terminated.
pass
self._fd = None
self._pid = None
warnings.warn('resource_tracker: process died unexpectedly, '
'relaunching. Some resources might leak.')
fds_to_pass = []
try:
fds_to_pass.append(sys.stderr.fileno())
except Exception:
pass
cmd = 'from multiprocessing.resource_tracker import main;main(%d)'
r, w = os.pipe()
try:
fds_to_pass.append(r)
                # process will outlive us, so no need to wait on pid
exe = spawn.get_executable()
args = [exe] + util._args_from_interpreter_flags()
args += ['-c', cmd % r]
# bpo-33613: Register a signal mask that will block the signals.
# This signal mask will be inherited by the child that is going
# to be spawned and will protect the child from a race condition
# that can make the child die before it registers signal handlers
# for SIGINT and SIGTERM. The mask is unregistered after spawning
# the child.
try:
if _HAVE_SIGMASK:
signal.pthread_sigmask(signal.SIG_BLOCK, _IGNORED_SIGNALS)
pid = util.spawnv_passfds(exe, args, fds_to_pass)
finally:
if _HAVE_SIGMASK:
signal.pthread_sigmask(signal.SIG_UNBLOCK, _IGNORED_SIGNALS)
except:
os.close(w)
raise
else:
self._fd = w
self._pid = pid
finally:
os.close(r)
def _check_alive(self):
'''Check that the pipe has not been closed by sending a probe.'''
try:
# We cannot use send here as it calls ensure_running, creating
# a cycle.
os.write(self._fd, b'PROBE:0:noop\n')
except OSError:
return False
else:
return True
def register(self, name, rtype):
'''Register name of resource with resource tracker.'''
self._send('REGISTER', name, rtype)
def unregister(self, name, rtype):
'''Unregister name of resource with resource tracker.'''
self._send('UNREGISTER', name, rtype)
def _send(self, cmd, name, rtype):
self.ensure_running()
msg = '{0}:{1}:{2}\n'.format(cmd, name, rtype).encode('ascii')
if len(name) > 512:
# posix guarantees that writes to a pipe of less than PIPE_BUF
# bytes are atomic, and that PIPE_BUF >= 512
raise ValueError('name too long')
nbytes = os.write(self._fd, msg)
assert nbytes == len(msg), "nbytes {0:n} but len(msg) {1:n}".format(
nbytes, len(msg))
_resource_tracker = ResourceTracker()
ensure_running = _resource_tracker.ensure_running
register = _resource_tracker.register
unregister = _resource_tracker.unregister
getfd = _resource_tracker.getfd
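# Minimal usage sketch (assumes a POSIX platform, so the 'semaphore' and
# 'shared_memory' cleanup functions registered above are available):
#
#   ensure_running()
#   register('/psm_demo', 'shared_memory')    # remembered by the tracker
#   unregister('/psm_demo', 'shared_memory')  # forgotten again
#
# Each call writes one "CMD:name:rtype\n" line to the tracker's pipe; names
# longer than 512 bytes are rejected so a single write stays within the
# POSIX PIPE_BUF atomicity guarantee.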
def main(fd):
'''Run resource tracker.'''
# protect the process from ^C and "killall python" etc
signal.signal(signal.SIGINT, signal.SIG_IGN)
signal.signal(signal.SIGTERM, signal.SIG_IGN)
if _HAVE_SIGMASK:
signal.pthread_sigmask(signal.SIG_UNBLOCK, _IGNORED_SIGNALS)
for f in (sys.stdin, sys.stdout):
try:
f.close()
except Exception:
pass
cache = {rtype: set() for rtype in _CLEANUP_FUNCS.keys()}
try:
# keep track of registered/unregistered resources
with open(fd, 'rb') as f:
for line in f:
try:
cmd, name, rtype = line.strip().decode('ascii').split(':')
cleanup_func = _CLEANUP_FUNCS.get(rtype, None)
if cleanup_func is None:
raise ValueError(
f'Cannot register {name} for automatic cleanup: '
f'unknown resource type {rtype}')
if cmd == 'REGISTER':
cache[rtype].add(name)
elif cmd == 'UNREGISTER':
cache[rtype].remove(name)
elif cmd == 'PROBE':
pass
else:
raise RuntimeError('unrecognized command %r' % cmd)
except Exception:
try:
sys.excepthook(*sys.exc_info())
except:
pass
finally:
# all processes have terminated; cleanup any remaining resources
for rtype, rtype_cache in cache.items():
if rtype_cache:
try:
warnings.warn('resource_tracker: There appear to be %d '
'leaked %s objects to clean up at shutdown' %
(len(rtype_cache), rtype))
except Exception:
pass
for name in rtype_cache:
# For some reason the process which created and registered this
# resource has failed to unregister it. Presumably it has
# died. We therefore unlink it.
try:
try:
_CLEANUP_FUNCS[rtype](name)
except Exception as e:
warnings.warn('resource_tracker: %r: %s' % (name, e))
finally:
pass
|
|
from math import sqrt, pi, sin, cos
import numpy as np
from lib.fourier import direct_f, inverse_f
from lib import cmbplot
L_max_field = 4
L_max_polynom = 4
L_max_back = 4
N = 32
def coef_1(in_l, in_m):
if in_l != 0:
return sqrt((in_l - in_m) * (2.0 * in_l + 1.0)
/ ((in_l + in_m) * (2.0 * in_l - 1.0)))
if in_l == 0:
return 0.0
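# coef_1(l, m) is the normalisation ratio sqrt[(l - m)(2l + 1) / ((l + m)(2l - 1))]
# shared by the associated-Legendre recurrences below; it is defined as zero for l = 0.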
# P_ generation
P_ = np.zeros((N / 2 + 1, L_max_polynom + 1, L_max_polynom + 1))
for j in xrange(1, N / 2):
theta = 2.0 * pi * j / N
P_[j][0][0] = 1.0 / sqrt(4.0 * pi)
for m in xrange(0, L_max_polynom):
P_[j][m + 1][m + 1] = - P_[j][m][m] * sin(theta) * sqrt(2.0 * m + 3.0) / sqrt(2.0 * m + 2.0)
for m in xrange(0, L_max_polynom):
P_[j][m][m + 1] = P_[j][m][m] * cos(theta) * sqrt(2.0 * m + 3.0)
for m in xrange(0, L_max_polynom - 1):
for l in xrange(m + 2, L_max_polynom + 1):
P_[j][m][l] = ((2.0 * l - 1.0) * sqrt((l - m) * (2.0 * l + 1.0)) / sqrt((l + m) * (2.0 * l - 1.0)) *
P_[j][m][l - 1] * cos(theta) - (l + m - 1.0) * sqrt((l - m) * (l - 1.0 - m) *
(2.0 * l + 1.0)) / sqrt((l + m) * (l - 1.0 + m) * (2.0 * l - 3.0)) * P_[j][m][l - 2]) / \
(l - m)
for m in xrange(1, L_max_polynom + 1):
for l in xrange(m, L_max_polynom + 1):
P_[j][m][l] *= sqrt(2.0)
# F_x generation - np.imag + np.real
F_x = np.zeros((N / 2 + 1, L_max_polynom + 1, L_max_polynom + 1))
for j in xrange(1, N / 2):
theta = 2.0 * pi * j / N
for l in xrange(2, L_max_polynom + 1):
for m in xrange(0, l + 1):
F_x[j][m][l] = m * P_[j][m][l] / sin(theta)
# F_y generation - np.real + np.imag
F_y = np.zeros((N / 2 + 1, L_max_polynom + 1, L_max_polynom + 1))
for j in xrange(1, N / 2):
theta = 2.0 * pi * j / N
for l in xrange(2, L_max_polynom + 1):
for m in xrange(0, l + 1):
F_y[j][m][l] = l * cos(theta) / sin(theta) * P_[j][m][l] - \
1.0 / sin(theta) * (l + m) * coef_1(l, m) * P_[j][m][l - 1]
# F_xy generation - np.imag + np.real
F_xy = np.zeros((N / 2 + 1, L_max_polynom + 1, L_max_polynom + 1))
for j in xrange(1, N / 2):
theta = 2.0 * pi * j / N
for l in xrange(2, L_max_polynom + 1):
for m in xrange(0, l + 1):
F_xy[j][m][l] = m / sin(theta) * ((1.0 / sin(theta)) * (l + m) * P_[j][m][l - 1] * coef_1(l, m) -
(l - 1.0) * cos(theta) / sin(theta) * P_[j][m][l])
# F_xx_1 generation - np.real + np.real
F_xx_1 = np.zeros((N / 2 + 1, L_max_polynom + 1, L_max_polynom + 1))
for j in xrange(1, N / 2):
theta = 2.0 * pi * j / N
for l in xrange(2, L_max_polynom + 1):
for m in xrange(0, l + 1):
F_xx_1[j][m][l] = - m * m * P_[j][m][l] / (sin(theta) * sin(theta))
# F_xx_2 generation - np.real + np.real
F_xx_2 = np.zeros((N / 2 + 1, L_max_polynom + 1, L_max_polynom + 1))
for j in xrange(1, N / 2):
theta = 2.0 * pi * j / N
for l in xrange(2, L_max_polynom + 1):
for m in xrange(0, l + 1):
F_xx_2[j][m][l] = (l * cos(theta) / sin(theta) * P_[j][m][l] - 1.0 / sin(theta) * (l + m) * coef_1(l, m) *
P_[j][m][l - 1]) * cos(theta) / sin(theta)
# F_yy generation - np.real + np.real
F_yy = np.zeros((N / 2 + 1, L_max_polynom + 1, L_max_polynom + 1))
for j in xrange(1, N / 2):
theta = 2.0 * pi * j / N
for l in xrange(2, L_max_polynom + 1):
for m in xrange(0, l + 1):
F_yy[j][m][l] = 0.5 / sin(theta) * ((1.0 / sin(theta)) * (l * l * cos(2.0 * theta) -
(l + 2.0) * l + 2.0 * m * m) * P_[j][m][l] + 2.0 * (l + m) * cos(theta) /
sin(theta) * coef_1(l, m) * P_[j][m][l - 1])
x = np.zeros((N + 1, N / 2 + 1))
y = np.zeros((N + 1, N / 2 + 1))
for i in xrange(0, N + 1):
for j in xrange(0, N / 2 + 1):
x[i][j] = (2.0 * i - N) / N * pi
y[i][j] = 2.0 * j / N * pi - pi / 2.0
Fa = np.zeros((N / 2 + 1, N))
Fb = np.zeros((N / 2 + 1, N))
T = np.zeros(N)
a_coef = np.random.normal(size=(L_max_polynom + 1, L_max_polynom + 1))
b_coef = np.random.normal(size=(L_max_polynom + 1, L_max_polynom + 1))
# a_coef = np.zeros((L_max_field + 1, L_max_field + 1))
# b_coef = np.zeros((L_max_field + 1, L_max_field + 1))
for m in xrange(0, L_max_field + 1):
for l in xrange(0, m):
a_coef[m][l] = 0.0
b_coef[m][l] = 0.0
for l in xrange(0, L_max_field + 1):
b_coef[0][l] = 0.0
a_coef[0][0] = 0.0
b_coef[0][0] = 0.0
a_coef[0][1] = 0.0
a_coef[1][1] = 0.0
b_coef[0][1] = 0.0
b_coef[1][1] = 0.0
C = np.zeros((L_max_field + 1))
for l in xrange(0, L_max_field + 1):
C_sum = 0.0
for m in xrange(0, l + 1):
C_sum = C_sum + a_coef[m][l] * a_coef[m][l] + b_coef[m][l] * b_coef[m][l]
C[l] = C_sum / (2.0 * l + 1.0)
sigma_0 = 0.0
for l in xrange(0, L_max_field + 1):
sigma_0 += (2.0 * l + 1.0) * C[l]
sigma_0 = sqrt(sigma_0 / 4.0 / pi)
sigma_1 = 0.0
for l in xrange(0, L_max_field + 1):
sigma_1 += l * (l + 1.0) * (2.0 * l + 1.0) * C[l]
sigma_1 = sqrt(sigma_1 / 4.0 / pi)  # normalise by 4*pi, consistent with sigma_0
sigma_2 = 0.0
for l in xrange(0, L_max_field + 1):
sigma_2 += (l + 2.0) * (l - 1.0) * l * (l + 1.0) * (2.0 * l + 1.0) * C[l]
sigma_2 = sqrt(sigma_2 / 4.0 / pi)  # normalise by 4*pi, consistent with sigma_0
func1 = 0.0
func2 = 0.0
field = direct_f(N, P_, Fa, Fb, a_coef, b_coef, L_max_field)
field_x = direct_f(N, F_x, Fa, Fb, a_coef, b_coef, L_max_field, True)
field_y = direct_f(N, F_y, Fa, Fb, a_coef, b_coef, L_max_field)
field_xx = direct_f(N, F_xx_1 + F_xx_2, Fa, Fb, a_coef, b_coef, L_max_field)
field_yy = direct_f(N, F_yy, Fa, Fb, a_coef, b_coef, L_max_field)
field_xy = direct_f(N, F_xy, Fa, Fb, a_coef, b_coef, L_max_field, True)
a = 0.0
na = 0.0
for i in xrange(0, N):
for j in xrange(1, N / 2):
a += cos(y[i][j]) * field[i][j] * field[i][j]
na += cos(y[i][j])
sigma_0_map = sqrt(a / na)
if sigma_0_map == 0:
print 'There is no map!'
# field /= sigma_0_map
U = 2 * field_xy
Q = field_xx - field_yy
P = U*U + Q*Q
back_F = np.zeros((N / 2 + 1, N), dtype=complex)
back_F_a = np.zeros((N / 2 + 1, N))
back_F_b = np.zeros((N / 2 + 1, N))
back_a_coef, back_b_coef = inverse_f(N, field, P_, back_F, back_F_a, back_F_b, L_max_back)
my_map = cmbplot.flat(x, y, P)
cmbplot.polarization(N, my_map, U, Q, x, y)
cmbplot.show()
|
|
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import uuid
from six.moves import http_client
from testtools import matchers
from keystone.tests import unit
from keystone.tests.unit.ksfixtures import database
from keystone.tests.unit import test_v3
class CatalogTestCase(test_v3.RestfulTestCase):
"""Test service & endpoint CRUD."""
# region crud tests
def test_create_region_with_id(self):
"""Call ``PUT /regions/{region_id}`` w/o an ID in the request body."""
ref = unit.new_region_ref()
region_id = ref.pop('id')
r = self.put(
'/regions/%s' % region_id,
body={'region': ref},
expected_status=http_client.CREATED)
self.assertValidRegionResponse(r, ref)
# Double-check that the region ID was kept as-is and not
# populated with a UUID, as is the case with POST /v3/regions
self.assertEqual(region_id, r.json['region']['id'])
def test_create_region_with_matching_ids(self):
"""Call ``PUT /regions/{region_id}`` with an ID in the request body."""
ref = unit.new_region_ref()
region_id = ref['id']
r = self.put(
'/regions/%s' % region_id,
body={'region': ref},
expected_status=http_client.CREATED)
self.assertValidRegionResponse(r, ref)
# Double-check that the region ID was kept as-is and not
# populated with a UUID, as is the case with POST /v3/regions
self.assertEqual(region_id, r.json['region']['id'])
def test_create_region_with_duplicate_id(self):
"""Call ``PUT /regions/{region_id}``."""
ref = unit.new_region_ref()
region_id = ref['id']
self.put(
'/regions/%s' % region_id,
body={'region': ref}, expected_status=http_client.CREATED)
# Create region again with duplicate id
self.put(
'/regions/%s' % region_id,
body={'region': ref}, expected_status=http_client.CONFLICT)
def test_create_region(self):
"""Call ``POST /regions`` with an ID in the request body."""
# the ref will have an ID defined on it
ref = unit.new_region_ref()
r = self.post(
'/regions',
body={'region': ref})
self.assertValidRegionResponse(r, ref)
# we should be able to get the region, having defined the ID ourselves
r = self.get(
'/regions/%(region_id)s' % {
'region_id': ref['id']})
self.assertValidRegionResponse(r, ref)
def test_create_region_with_empty_id(self):
"""Call ``POST /regions`` with an empty ID in the request body."""
ref = unit.new_region_ref(id='')
r = self.post('/regions', body={'region': ref})
self.assertValidRegionResponse(r, ref)
self.assertNotEmpty(r.result['region'].get('id'))
def test_create_region_without_id(self):
"""Call ``POST /regions`` without an ID in the request body."""
ref = unit.new_region_ref()
# instead of defining the ID ourselves...
del ref['id']
# let the service define the ID
r = self.post('/regions', body={'region': ref})
self.assertValidRegionResponse(r, ref)
def test_create_region_without_description(self):
"""Call ``POST /regions`` without description in the request body."""
ref = unit.new_region_ref(description=None)
del ref['description']
r = self.post('/regions', body={'region': ref})
# Create the description in the reference to compare to since the
# response should now have a description, even though we didn't send
# it with the original reference.
ref['description'] = ''
self.assertValidRegionResponse(r, ref)
def test_create_regions_with_same_description_string(self):
"""Call ``POST /regions`` with duplicate descriptions."""
# NOTE(lbragstad): Make sure we can create two regions that have the
# same description.
region_desc = 'Some Region Description'
ref1 = unit.new_region_ref(description=region_desc)
ref2 = unit.new_region_ref(description=region_desc)
resp1 = self.post('/regions', body={'region': ref1})
self.assertValidRegionResponse(resp1, ref1)
resp2 = self.post('/regions', body={'region': ref2})
self.assertValidRegionResponse(resp2, ref2)
def test_create_regions_without_descriptions(self):
"""Call ``POST /regions`` with no description."""
# NOTE(lbragstad): Make sure we can create two regions that have
# no description in the request body. The description should be
# populated by Catalog Manager.
ref1 = unit.new_region_ref()
ref2 = unit.new_region_ref()
del ref1['description']
ref2['description'] = None
resp1 = self.post('/regions', body={'region': ref1})
resp2 = self.post('/regions', body={'region': ref2})
# Create the descriptions in the references to compare to since the
# responses should now have descriptions, even though we didn't send
# a description with the original references.
ref1['description'] = ''
ref2['description'] = ''
self.assertValidRegionResponse(resp1, ref1)
self.assertValidRegionResponse(resp2, ref2)
def test_create_region_with_conflicting_ids(self):
"""Call ``PUT /regions/{region_id}`` with conflicting region IDs."""
# the region ref is created with an ID
ref = unit.new_region_ref()
# but instead of using that ID, make up a new, conflicting one
self.put(
'/regions/%s' % uuid.uuid4().hex,
body={'region': ref},
expected_status=http_client.BAD_REQUEST)
def test_list_head_regions(self):
"""Call ``GET & HEAD /regions``."""
resource_url = '/regions'
r = self.get(resource_url)
self.assertValidRegionListResponse(r, ref=self.region)
self.head(resource_url, expected_status=http_client.OK)
def _create_region_with_parent_id(self, parent_id=None):
ref = unit.new_region_ref(parent_region_id=parent_id)
return self.post(
'/regions',
body={'region': ref})
def test_list_regions_filtered_by_parent_region_id(self):
"""Call ``GET /regions?parent_region_id={parent_region_id}``."""
new_region = self._create_region_with_parent_id()
parent_id = new_region.result['region']['id']
new_region = self._create_region_with_parent_id(parent_id)
new_region = self._create_region_with_parent_id(parent_id)
r = self.get('/regions?parent_region_id=%s' % parent_id)
for region in r.result['regions']:
self.assertEqual(parent_id, region['parent_region_id'])
def test_get_head_region(self):
"""Call ``GET & HEAD /regions/{region_id}``."""
resource_url = '/regions/%(region_id)s' % {
'region_id': self.region_id}
r = self.get(resource_url)
self.assertValidRegionResponse(r, self.region)
self.head(resource_url, expected_status=http_client.OK)
def test_update_region(self):
"""Call ``PATCH /regions/{region_id}``."""
region = unit.new_region_ref()
del region['id']
r = self.patch('/regions/%(region_id)s' % {
'region_id': self.region_id},
body={'region': region})
self.assertValidRegionResponse(r, region)
def test_update_region_without_description_keeps_original(self):
"""Call ``PATCH /regions/{region_id}``."""
region_ref = unit.new_region_ref()
resp = self.post('/regions', body={'region': region_ref})
region_updates = {
# update with something that's not the description
'parent_region_id': self.region_id,
}
resp = self.patch('/regions/%s' % region_ref['id'],
body={'region': region_updates})
# NOTE(dstanek): Keystone should keep the original description.
self.assertEqual(region_ref['description'],
resp.result['region']['description'])
def test_update_region_with_null_description(self):
"""Call ``PATCH /regions/{region_id}``."""
region = unit.new_region_ref(description=None)
del region['id']
r = self.patch('/regions/%(region_id)s' % {
'region_id': self.region_id},
body={'region': region})
# NOTE(dstanek): Keystone should turn the provided None value into
# an empty string before storing in the backend.
region['description'] = ''
self.assertValidRegionResponse(r, region)
def test_delete_region(self):
"""Call ``DELETE /regions/{region_id}``."""
ref = unit.new_region_ref()
r = self.post(
'/regions',
body={'region': ref})
self.assertValidRegionResponse(r, ref)
self.delete('/regions/%(region_id)s' % {
'region_id': ref['id']})
# service crud tests
def test_create_service(self):
"""Call ``POST /services``."""
ref = unit.new_service_ref()
r = self.post(
'/services',
body={'service': ref})
self.assertValidServiceResponse(r, ref)
def test_create_service_no_name(self):
"""Call ``POST /services``."""
ref = unit.new_service_ref()
del ref['name']
r = self.post(
'/services',
body={'service': ref})
ref['name'] = ''
self.assertValidServiceResponse(r, ref)
def test_create_service_no_enabled(self):
"""Call ``POST /services``."""
ref = unit.new_service_ref()
del ref['enabled']
r = self.post(
'/services',
body={'service': ref})
ref['enabled'] = True
self.assertValidServiceResponse(r, ref)
self.assertIs(True, r.result['service']['enabled'])
def test_create_service_enabled_false(self):
"""Call ``POST /services``."""
ref = unit.new_service_ref(enabled=False)
r = self.post(
'/services',
body={'service': ref})
self.assertValidServiceResponse(r, ref)
self.assertIs(False, r.result['service']['enabled'])
def test_create_service_enabled_true(self):
"""Call ``POST /services``."""
ref = unit.new_service_ref(enabled=True)
r = self.post(
'/services',
body={'service': ref})
self.assertValidServiceResponse(r, ref)
self.assertIs(True, r.result['service']['enabled'])
def test_create_service_enabled_str_true(self):
"""Call ``POST /services``."""
ref = unit.new_service_ref(enabled='True')
self.post('/services', body={'service': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_service_enabled_str_false(self):
"""Call ``POST /services``."""
ref = unit.new_service_ref(enabled='False')
self.post('/services', body={'service': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_service_enabled_str_random(self):
"""Call ``POST /services``."""
ref = unit.new_service_ref(enabled='puppies')
self.post('/services', body={'service': ref},
expected_status=http_client.BAD_REQUEST)
def test_list_head_services(self):
"""Call ``GET & HEAD /services``."""
resource_url = '/services'
r = self.get(resource_url)
self.assertValidServiceListResponse(r, ref=self.service)
self.head(resource_url, expected_status=http_client.OK)
def _create_random_service(self):
ref = unit.new_service_ref()
response = self.post(
'/services',
body={'service': ref})
return response.json['service']
def test_filter_list_services_by_type(self):
"""Call ``GET /services?type=<some type>``."""
target_ref = self._create_random_service()
# create unrelated services
self._create_random_service()
self._create_random_service()
response = self.get('/services?type=' + target_ref['type'])
self.assertValidServiceListResponse(response, ref=target_ref)
filtered_service_list = response.json['services']
self.assertEqual(1, len(filtered_service_list))
filtered_service = filtered_service_list[0]
self.assertEqual(target_ref['type'], filtered_service['type'])
def test_filter_list_services_by_name(self):
"""Call ``GET /services?name=<some name>``."""
# create unrelated services
self._create_random_service()
self._create_random_service()
# create the desired service
target_ref = self._create_random_service()
response = self.get('/services?name=' + target_ref['name'])
self.assertValidServiceListResponse(response, ref=target_ref)
filtered_service_list = response.json['services']
self.assertEqual(1, len(filtered_service_list))
filtered_service = filtered_service_list[0]
self.assertEqual(target_ref['name'], filtered_service['name'])
def test_filter_list_services_by_name_with_list_limit(self):
"""Call ``GET /services?name=<some name>``."""
self.config_fixture.config(list_limit=1)
self.test_filter_list_services_by_name()
def test_get_head_service(self):
"""Call ``GET & HEAD /services/{service_id}``."""
resource_url = '/services/%(service_id)s' % {
'service_id': self.service_id}
r = self.get(resource_url)
self.assertValidServiceResponse(r, self.service)
self.head(resource_url, expected_status=http_client.OK)
def test_update_service(self):
"""Call ``PATCH /services/{service_id}``."""
service = unit.new_service_ref()
del service['id']
r = self.patch('/services/%(service_id)s' % {
'service_id': self.service_id},
body={'service': service})
self.assertValidServiceResponse(r, service)
def test_delete_service(self):
"""Call ``DELETE /services/{service_id}``."""
self.delete('/services/%(service_id)s' % {
'service_id': self.service_id})
# endpoint crud tests
def test_list_head_endpoints(self):
"""Call ``GET & HEAD /endpoints``."""
resource_url = '/endpoints'
r = self.get(resource_url)
self.assertValidEndpointListResponse(r, ref=self.endpoint)
self.head(resource_url, expected_status=http_client.OK)
def _create_random_endpoint(self, interface='public',
parent_region_id=None):
region = self._create_region_with_parent_id(
parent_id=parent_region_id)
service = self._create_random_service()
ref = unit.new_endpoint_ref(
service_id=service['id'],
interface=interface,
region_id=region.result['region']['id'])
response = self.post(
'/endpoints',
body={'endpoint': ref})
return response.json['endpoint']
def test_list_endpoints_filtered_by_interface(self):
"""Call ``GET /endpoints?interface={interface}``."""
ref = self._create_random_endpoint(interface='internal')
response = self.get('/endpoints?interface=%s' % ref['interface'])
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['interface'], endpoint['interface'])
def test_list_endpoints_filtered_by_service_id(self):
"""Call ``GET /endpoints?service_id={service_id}``."""
ref = self._create_random_endpoint()
response = self.get('/endpoints?service_id=%s' % ref['service_id'])
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['service_id'], endpoint['service_id'])
def test_list_endpoints_filtered_by_region_id(self):
"""Call ``GET /endpoints?region_id={region_id}``."""
ref = self._create_random_endpoint()
response = self.get('/endpoints?region_id=%s' % ref['region_id'])
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['region_id'], endpoint['region_id'])
def test_list_endpoints_filtered_by_parent_region_id(self):
"""Call ``GET /endpoints?region_id={region_id}``.
Ensure passing the parent_region_id as filter returns an
empty list.
"""
parent_region = self._create_region_with_parent_id()
parent_region_id = parent_region.result['region']['id']
self._create_random_endpoint(parent_region_id=parent_region_id)
response = self.get('/endpoints?region_id=%s' % parent_region_id)
self.assertEqual(0, len(response.json['endpoints']))
def test_list_endpoints_with_multiple_filters(self):
"""Call ``GET /endpoints?interface={interface}...``.
Ensure passing different combinations of interface, region_id and
service_id as filters will return the correct result.
"""
# interface and region_id specified
ref = self._create_random_endpoint(interface='internal')
response = self.get('/endpoints?interface=%s&region_id=%s' %
(ref['interface'], ref['region_id']))
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['interface'], endpoint['interface'])
self.assertEqual(ref['region_id'], endpoint['region_id'])
# interface and service_id specified
ref = self._create_random_endpoint(interface='internal')
response = self.get('/endpoints?interface=%s&service_id=%s' %
(ref['interface'], ref['service_id']))
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['interface'], endpoint['interface'])
self.assertEqual(ref['service_id'], endpoint['service_id'])
# region_id and service_id specified
ref = self._create_random_endpoint(interface='internal')
response = self.get('/endpoints?region_id=%s&service_id=%s' %
(ref['region_id'], ref['service_id']))
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['region_id'], endpoint['region_id'])
self.assertEqual(ref['service_id'], endpoint['service_id'])
# interface, region_id and service_id specified
ref = self._create_random_endpoint(interface='internal')
response = self.get(('/endpoints?interface=%s&region_id=%s'
'&service_id=%s') %
(ref['interface'], ref['region_id'],
ref['service_id']))
self.assertValidEndpointListResponse(response, ref=ref)
for endpoint in response.json['endpoints']:
self.assertEqual(ref['interface'], endpoint['interface'])
self.assertEqual(ref['region_id'], endpoint['region_id'])
self.assertEqual(ref['service_id'], endpoint['service_id'])
def test_list_endpoints_with_random_filter_values(self):
"""Call ``GET /endpoints?interface={interface}...``.
Ensure passing random values for: interface, region_id and
service_id will return an empty list.
"""
self._create_random_endpoint(interface='internal')
response = self.get('/endpoints?interface=%s' % uuid.uuid4().hex)
self.assertEqual(0, len(response.json['endpoints']))
response = self.get('/endpoints?region_id=%s' % uuid.uuid4().hex)
self.assertEqual(0, len(response.json['endpoints']))
response = self.get('/endpoints?service_id=%s' % uuid.uuid4().hex)
self.assertEqual(0, len(response.json['endpoints']))
def test_create_endpoint_no_enabled(self):
"""Call ``POST /endpoints``."""
ref = unit.new_endpoint_ref(service_id=self.service_id,
interface='public',
region_id=self.region_id)
r = self.post('/endpoints', body={'endpoint': ref})
ref['enabled'] = True
self.assertValidEndpointResponse(r, ref)
def test_create_endpoint_enabled_true(self):
"""Call ``POST /endpoints`` with enabled: true."""
ref = unit.new_endpoint_ref(service_id=self.service_id,
interface='public',
region_id=self.region_id,
enabled=True)
r = self.post('/endpoints', body={'endpoint': ref})
self.assertValidEndpointResponse(r, ref)
def test_create_endpoint_enabled_false(self):
"""Call ``POST /endpoints`` with enabled: false."""
ref = unit.new_endpoint_ref(service_id=self.service_id,
interface='public',
region_id=self.region_id,
enabled=False)
r = self.post('/endpoints', body={'endpoint': ref})
self.assertValidEndpointResponse(r, ref)
def test_create_endpoint_enabled_str_true(self):
"""Call ``POST /endpoints`` with enabled: 'True'."""
ref = unit.new_endpoint_ref(service_id=self.service_id,
interface='public',
region_id=self.region_id,
enabled='True')
self.post('/endpoints', body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_enabled_str_false(self):
"""Call ``POST /endpoints`` with enabled: 'False'."""
ref = unit.new_endpoint_ref(service_id=self.service_id,
interface='public',
region_id=self.region_id,
enabled='False')
self.post('/endpoints', body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_enabled_str_random(self):
"""Call ``POST /endpoints`` with enabled: 'puppies'."""
ref = unit.new_endpoint_ref(service_id=self.service_id,
interface='public',
region_id=self.region_id,
enabled='puppies')
self.post('/endpoints', body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_with_invalid_region_id(self):
"""Call ``POST /endpoints``."""
ref = unit.new_endpoint_ref(service_id=self.service_id)
self.post('/endpoints', body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_create_endpoint_with_region(self):
"""EndpointV3 creates the region before creating the endpoint.
This occurs when the endpoint is provided with a 'region' and no 'region_id'.
"""
ref = unit.new_endpoint_ref_with_region(service_id=self.service_id,
region=uuid.uuid4().hex)
self.post('/endpoints', body={'endpoint': ref})
# Make sure the region is created
self.get('/regions/%(region_id)s' % {'region_id': ref["region"]})
def test_create_endpoint_with_no_region(self):
"""EndpointV3 allows to creates the endpoint without region."""
ref = unit.new_endpoint_ref(service_id=self.service_id, region_id=None)
del ref['region_id'] # cannot just be None, it needs to not exist
self.post('/endpoints', body={'endpoint': ref})
def test_create_endpoint_with_empty_url(self):
"""Call ``POST /endpoints``."""
ref = unit.new_endpoint_ref(service_id=self.service_id, url='')
self.post('/endpoints', body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
def test_get_head_endpoint(self):
"""Call ``GET & HEAD /endpoints/{endpoint_id}``."""
resource_url = '/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id}
r = self.get(resource_url)
self.assertValidEndpointResponse(r, self.endpoint)
self.head(resource_url, expected_status=http_client.OK)
def test_update_endpoint(self):
"""Call ``PATCH /endpoints/{endpoint_id}``."""
ref = unit.new_endpoint_ref(service_id=self.service_id,
interface='public',
region_id=self.region_id)
del ref['id']
r = self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': ref})
ref['enabled'] = True
self.assertValidEndpointResponse(r, ref)
def test_update_endpoint_enabled_true(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: True."""
r = self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': True}})
self.assertValidEndpointResponse(r, self.endpoint)
def test_update_endpoint_enabled_false(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: False."""
r = self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': False}})
exp_endpoint = copy.copy(self.endpoint)
exp_endpoint['enabled'] = False
self.assertValidEndpointResponse(r, exp_endpoint)
def test_update_endpoint_enabled_str_true(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'True'."""
self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': 'True'}},
expected_status=http_client.BAD_REQUEST)
def test_update_endpoint_enabled_str_false(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'False'."""
self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': 'False'}},
expected_status=http_client.BAD_REQUEST)
def test_update_endpoint_enabled_str_random(self):
"""Call ``PATCH /endpoints/{endpoint_id}`` with enabled: 'kitties'."""
self.patch(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id},
body={'endpoint': {'enabled': 'kitties'}},
expected_status=http_client.BAD_REQUEST)
def test_delete_endpoint(self):
"""Call ``DELETE /endpoints/{endpoint_id}``."""
self.delete(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id})
def test_create_endpoint_on_v2(self):
# clear the v3 endpoint so we only have endpoints created on v2
self.delete(
'/endpoints/%(endpoint_id)s' % {
'endpoint_id': self.endpoint_id})
# create a v3 endpoint ref, and then tweak it back to a v2-style ref
ref = unit.new_endpoint_ref_with_region(service_id=self.service['id'],
region=uuid.uuid4().hex,
internalurl=None)
del ref['id']
del ref['interface']
ref['publicurl'] = ref.pop('url')
# don't set adminurl to ensure its absence is handled like internalurl
# create the endpoint on v2 (using a v3 token)
r = self.admin_request(
method='POST',
path='/v2.0/endpoints',
token=self.get_scoped_token(),
body={'endpoint': ref})
endpoint_v2 = r.result['endpoint']
# test the endpoint on v3
r = self.get('/endpoints')
endpoints = self.assertValidEndpointListResponse(r)
self.assertEqual(1, len(endpoints))
endpoint_v3 = endpoints.pop()
# these attributes are identical between both APIs
self.assertEqual(ref['region'], endpoint_v3['region_id'])
self.assertEqual(ref['service_id'], endpoint_v3['service_id'])
self.assertEqual(ref['description'], endpoint_v3['description'])
# a v2 endpoint is not quite the same concept as a v3 endpoint, so they
# receive different identifiers
self.assertNotEqual(endpoint_v2['id'], endpoint_v3['id'])
# v2 has a publicurl; v3 has a url + interface type
self.assertEqual(ref['publicurl'], endpoint_v3['url'])
self.assertEqual('public', endpoint_v3['interface'])
# tests for bug 1152632 -- these attributes were being returned by v3
self.assertNotIn('publicurl', endpoint_v3)
self.assertNotIn('adminurl', endpoint_v3)
self.assertNotIn('internalurl', endpoint_v3)
# test for bug 1152635 -- this attribute was being returned by v3
self.assertNotIn('legacy_endpoint_id', endpoint_v3)
self.assertEqual(endpoint_v2['region'], endpoint_v3['region_id'])
def test_deleting_endpoint_with_space_in_url(self):
# add a space to all urls (intentional "i d" to test bug)
url_with_space = "http://127.0.0.1:8774 /v1.1/\$(tenant_i d)s"
# create a v3 endpoint ref
ref = unit.new_endpoint_ref(service_id=self.service['id'],
region_id=None,
publicurl=url_with_space,
internalurl=url_with_space,
adminurl=url_with_space,
url=url_with_space)
# add the endpoint to the database
self.catalog_api.create_endpoint(ref['id'], ref)
# delete the endpoint
self.delete('/endpoints/%s' % ref['id'])
# make sure it's deleted (GET should return Not Found)
self.get('/endpoints/%s' % ref['id'],
expected_status=http_client.NOT_FOUND)
def test_endpoint_create_with_valid_url(self):
"""Create endpoint with valid url should be tested,too."""
# list one valid url is enough, no need to list too much
valid_url = 'http://127.0.0.1:8774/v1.1/$(project_id)s'
ref = unit.new_endpoint_ref(self.service_id,
interface='public',
region_id=self.region_id,
url=valid_url)
self.post('/endpoints', body={'endpoint': ref})
def test_endpoint_create_with_valid_url_project_id(self):
"""Create endpoint with valid url should be tested,too."""
valid_url = 'http://127.0.0.1:8774/v1.1/$(project_id)s'
ref = unit.new_endpoint_ref(self.service_id,
interface='public',
region_id=self.region_id,
url=valid_url)
self.post('/endpoints', body={'endpoint': ref})
def test_endpoint_create_with_invalid_url(self):
"""Test the invalid cases: substitutions is not exactly right."""
invalid_urls = [
# using a substitution that is not whitelisted - KeyError
'http://127.0.0.1:8774/v1.1/$(nonexistent)s',
# invalid formatting - ValueError
'http://127.0.0.1:8774/v1.1/$(project_id)',
'http://127.0.0.1:8774/v1.1/$(project_id)t',
'http://127.0.0.1:8774/v1.1/$(project_id',
# invalid type specifier - TypeError
# admin_url is a string not an int
'http://127.0.0.1:8774/v1.1/$(admin_url)d',
]
ref = unit.new_endpoint_ref(self.service_id)
for invalid_url in invalid_urls:
ref['url'] = invalid_url
self.post('/endpoints',
body={'endpoint': ref},
expected_status=http_client.BAD_REQUEST)
class TestCatalogAPISQL(unit.TestCase):
"""Test for the catalog Manager against the SQL backend."""
def setUp(self):
super(TestCatalogAPISQL, self).setUp()
self.useFixture(database.Database())
self.load_backends()
service = unit.new_service_ref()
self.service_id = service['id']
self.catalog_api.create_service(self.service_id, service)
self.create_endpoint(service_id=self.service_id)
def create_endpoint(self, service_id, **kwargs):
endpoint = unit.new_endpoint_ref(service_id=service_id,
region_id=None, **kwargs)
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
return endpoint
def config_overrides(self):
super(TestCatalogAPISQL, self).config_overrides()
self.config_fixture.config(group='catalog', driver='sql')
def test_get_catalog_ignores_endpoints_with_invalid_urls(self):
user_id = uuid.uuid4().hex
# create a project since the project should exist if we want to
# filter the catalog by the project or replace the url with a
# valid project id.
domain = unit.new_domain_ref()
self.resource_api.create_domain(domain['id'], domain)
project = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project['id'], project)
# the only endpoint in the catalog is the one created in setUp
catalog = self.catalog_api.get_v3_catalog(user_id, project['id'])
self.assertEqual(1, len(catalog[0]['endpoints']))
# it's also the only endpoint in the backend
self.assertEqual(1, len(self.catalog_api.list_endpoints()))
# create a new, invalid endpoint - malformed type declaration
self.create_endpoint(self.service_id,
url='http://keystone/%(project_id)')
# create a new, invalid endpoint - nonexistent key
self.create_endpoint(self.service_id,
url='http://keystone/%(you_wont_find_me)s')
# verify that the invalid endpoints don't appear in the catalog
catalog = self.catalog_api.get_v3_catalog(user_id, project['id'])
self.assertEqual(1, len(catalog[0]['endpoints']))
# all three appear in the backend
self.assertEqual(3, len(self.catalog_api.list_endpoints()))
# create another valid endpoint - project_id will be replaced
self.create_endpoint(self.service_id,
url='http://keystone/%(project_id)s')
# there are two valid endpoints, positive check
catalog = self.catalog_api.get_v3_catalog(user_id, project['id'])
self.assertThat(catalog[0]['endpoints'], matchers.HasLength(2))
# If there is no 'project_id' to substitute into the URL, the endpoint
# containing that kind of URL is skipped (negative check).
project_id = None
catalog = self.catalog_api.get_v3_catalog(user_id, project_id)
self.assertThat(catalog[0]['endpoints'], matchers.HasLength(1))
def test_get_catalog_always_returns_service_name(self):
user_id = uuid.uuid4().hex
# create a project since the project should exist if we want to
# filter the catalog by the project or replace the url with a
# valid project id.
domain = unit.new_domain_ref()
self.resource_api.create_domain(domain['id'], domain)
project = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project['id'], project)
# create a service, with a name
named_svc = unit.new_service_ref()
self.catalog_api.create_service(named_svc['id'], named_svc)
self.create_endpoint(service_id=named_svc['id'])
# create a service, with no name
unnamed_svc = unit.new_service_ref(name=None)
del unnamed_svc['name']
self.catalog_api.create_service(unnamed_svc['id'], unnamed_svc)
self.create_endpoint(service_id=unnamed_svc['id'])
catalog = self.catalog_api.get_v3_catalog(user_id, project['id'])
named_endpoint = [ep for ep in catalog
if ep['type'] == named_svc['type']][0]
self.assertEqual(named_svc['name'], named_endpoint['name'])
unnamed_endpoint = [ep for ep in catalog
if ep['type'] == unnamed_svc['type']][0]
self.assertEqual('', unnamed_endpoint['name'])
# TODO(dstanek): this needs refactoring with the test above, but we are in a
# crunch so that will happen in a future patch.
class TestCatalogAPISQLRegions(unit.TestCase):
"""Test for the catalog Manager against the SQL backend."""
def setUp(self):
super(TestCatalogAPISQLRegions, self).setUp()
self.useFixture(database.Database())
self.load_backends()
def config_overrides(self):
super(TestCatalogAPISQLRegions, self).config_overrides()
self.config_fixture.config(group='catalog', driver='sql')
def test_get_catalog_returns_proper_endpoints_with_no_region(self):
service = unit.new_service_ref()
service_id = service['id']
self.catalog_api.create_service(service_id, service)
endpoint = unit.new_endpoint_ref(service_id=service_id,
region_id=None)
del endpoint['region_id']
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
# create a project since the project should exist if we want to
# filter the catalog by the project or replace the url with a
# valid project id.
domain = unit.new_domain_ref()
self.resource_api.create_domain(domain['id'], domain)
project = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project['id'], project)
user_id = uuid.uuid4().hex
catalog = self.catalog_api.get_v3_catalog(user_id, project['id'])
self.assertValidCatalogEndpoint(
catalog[0]['endpoints'][0], ref=endpoint)
def test_get_catalog_returns_proper_endpoints_with_region(self):
service = unit.new_service_ref()
service_id = service['id']
self.catalog_api.create_service(service_id, service)
endpoint = unit.new_endpoint_ref(service_id=service_id)
region = unit.new_region_ref(id=endpoint['region_id'])
self.catalog_api.create_region(region)
self.catalog_api.create_endpoint(endpoint['id'], endpoint)
endpoint = self.catalog_api.get_endpoint(endpoint['id'])
user_id = uuid.uuid4().hex
# create a project since the project should exist if we want to
# filter the catalog by the project or replace the url with a
# valid project id.
domain = unit.new_domain_ref()
self.resource_api.create_domain(domain['id'], domain)
project = unit.new_project_ref(domain_id=domain['id'])
self.resource_api.create_project(project['id'], project)
catalog = self.catalog_api.get_v3_catalog(user_id, project['id'])
self.assertValidCatalogEndpoint(
catalog[0]['endpoints'][0], ref=endpoint)
def assertValidCatalogEndpoint(self, entity, ref=None):
keys = ['description', 'id', 'interface', 'name', 'region_id', 'url']
for k in keys:
self.assertEqual(ref.get(k), entity[k], k)
self.assertEqual(entity['region_id'], entity['region'])
class TestCatalogAPITemplatedProject(test_v3.RestfulTestCase):
"""Templated Catalog doesn't support full API.
E.g. no region/endpoint creation.
"""
def config_overrides(self):
super(TestCatalogAPITemplatedProject, self).config_overrides()
self.config_fixture.config(group='catalog', driver='templated')
def load_fixtures(self, fixtures):
self.load_sample_data(create_region_and_endpoints=False)
def test_project_delete(self):
"""Deleting a project should not result in an 500 ISE.
Deleting a project will create a notification, which the EndpointFilter
functionality will use to clean up any project->endpoint and
project->endpoint_group relationships. The templated catalog does not
support such relationships, but the act of attempting to delete them
should not cause a NotImplemented exception to be exposed to an API
caller.
Deleting an endpoint has a similar notification and clean up
mechanism, but since we do not allow deletion of endpoints with the
templated catalog, there is no testing to do for that action.
"""
self.delete(
'/projects/%(project_id)s' % {
'project_id': self.project_id})
|
|
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
import functools
import logging
import os
import shutil
import sys
import uuid
import zipfile
from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union
from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pep517.wrappers import Pep517HookCaller
from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
from pip._internal.exceptions import InstallationError, LegacyInstallFailure
from pip._internal.locations import get_scheme
from pip._internal.metadata import (
BaseDistribution,
get_default_environment,
get_directory_distribution,
)
from pip._internal.models.link import Link
from pip._internal.operations.build.metadata import generate_metadata
from pip._internal.operations.build.metadata_editable import generate_editable_metadata
from pip._internal.operations.build.metadata_legacy import (
generate_metadata as generate_metadata_legacy,
)
from pip._internal.operations.install.editable_legacy import (
install_editable as install_editable_legacy,
)
from pip._internal.operations.install.legacy import install as install_legacy
from pip._internal.operations.install.wheel import install_wheel
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
from pip._internal.req.req_uninstall import UninstallPathSet
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.direct_url_helpers import (
direct_url_for_editable,
direct_url_from_link,
)
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.misc import (
ask_path_exists,
backup_dir,
display_path,
hide_url,
redact_auth_from_url,
)
from pip._internal.utils.packaging import safe_extra
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.virtualenv import running_under_virtualenv
from pip._internal.vcs import vcs
logger = logging.getLogger(__name__)
class InstallRequirement:
"""
Represents something that may be installed later on, may have information
about where to fetch the relevant requirement and also contains logic for
installing the said requirement.
"""
def __init__(
self,
req: Optional[Requirement],
comes_from: Optional[Union[str, "InstallRequirement"]],
editable: bool = False,
link: Optional[Link] = None,
markers: Optional[Marker] = None,
use_pep517: Optional[bool] = None,
isolated: bool = False,
install_options: Optional[List[str]] = None,
global_options: Optional[List[str]] = None,
hash_options: Optional[Dict[str, List[str]]] = None,
constraint: bool = False,
extras: Collection[str] = (),
user_supplied: bool = False,
permit_editable_wheels: bool = False,
) -> None:
assert req is None or isinstance(req, Requirement), req
self.req = req
self.comes_from = comes_from
self.constraint = constraint
self.editable = editable
self.permit_editable_wheels = permit_editable_wheels
self.legacy_install_reason: Optional[int] = None
# source_dir is the local directory where the linked requirement is
# located, or unpacked. In case unpacking is needed, creating and
# populating source_dir is done by the RequirementPreparer. Note this
# is not necessarily the directory where pyproject.toml or setup.py is
# located - that one is obtained via unpacked_source_directory.
self.source_dir: Optional[str] = None
if self.editable:
assert link
if link.is_file:
self.source_dir = os.path.normpath(os.path.abspath(link.file_path))
if link is None and req and req.url:
# PEP 508 URL requirement
link = Link(req.url)
self.link = self.original_link = link
self.original_link_is_in_wheel_cache = False
# Path to any downloaded or already-existing package.
self.local_file_path: Optional[str] = None
if self.link and self.link.is_file:
self.local_file_path = self.link.file_path
if extras:
self.extras = extras
elif req:
self.extras = {safe_extra(extra) for extra in req.extras}
else:
self.extras = set()
if markers is None and req:
markers = req.marker
self.markers = markers
# This holds the Distribution object if this requirement is already installed.
self.satisfied_by: Optional[BaseDistribution] = None
# Whether the installation process should try to uninstall an existing
# distribution before installing this requirement.
self.should_reinstall = False
# Temporary build location
self._temp_build_dir: Optional[TempDirectory] = None
# Set to True after successful installation
self.install_succeeded: Optional[bool] = None
# Supplied options
self.install_options = install_options if install_options else []
self.global_options = global_options if global_options else []
self.hash_options = hash_options if hash_options else {}
# Set to True after successful preparation of this requirement
self.prepared = False
# User-supplied requirements are explicitly requested for installation
# by the user via CLI arguments or requirements files, as opposed to,
# e.g. dependencies, extras or constraints.
self.user_supplied = user_supplied
self.isolated = isolated
self.build_env: BuildEnvironment = NoOpBuildEnvironment()
# For PEP 517, the directory where we request the project metadata
# gets stored. We need this to pass to build_wheel, so the backend
# can ensure that the wheel matches the metadata (see the PEP for
# details).
self.metadata_directory: Optional[str] = None
# The static build requirements (from pyproject.toml)
self.pyproject_requires: Optional[List[str]] = None
# Build requirements that we will check are available
self.requirements_to_check: List[str] = []
# The PEP 517 backend we should use to build the project
self.pep517_backend: Optional[Pep517HookCaller] = None
# Are we using PEP 517 for this requirement?
# After pyproject.toml has been loaded, the only valid values are True
# and False. Before loading, None is valid (meaning "use the default").
# Setting an explicit value before loading pyproject.toml is supported,
# but after loading this flag should be treated as read only.
self.use_pep517 = use_pep517
# This requirement needs more preparation before it can be built
self.needs_more_preparation = False
def __str__(self) -> str:
if self.req:
s = str(self.req)
if self.link:
s += " from {}".format(redact_auth_from_url(self.link.url))
elif self.link:
s = redact_auth_from_url(self.link.url)
else:
s = "<InstallRequirement>"
if self.satisfied_by is not None:
s += " in {}".format(display_path(self.satisfied_by.location))
if self.comes_from:
if isinstance(self.comes_from, str):
comes_from: Optional[str] = self.comes_from
else:
comes_from = self.comes_from.from_path()
if comes_from:
s += f" (from {comes_from})"
return s
def __repr__(self) -> str:
return "<{} object: {} editable={!r}>".format(
self.__class__.__name__, str(self), self.editable
)
def format_debug(self) -> str:
"""An un-tested helper for getting state, for debugging."""
attributes = vars(self)
names = sorted(attributes)
state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names))
return "<{name} object: {{{state}}}>".format(
name=self.__class__.__name__,
state=", ".join(state),
)
# Things that are valid for all kinds of requirements?
@property
def name(self) -> Optional[str]:
if self.req is None:
return None
return self.req.name
@functools.lru_cache() # use cached_property in python 3.8+
def supports_pyproject_editable(self) -> bool:
if not self.use_pep517:
return False
assert self.pep517_backend
with self.build_env:
runner = runner_with_spinner_message(
"Checking if build backend supports build_editable"
)
with self.pep517_backend.subprocess_runner(runner):
return "build_editable" in self.pep517_backend._supported_features()
@property
def specifier(self) -> SpecifierSet:
return self.req.specifier
@property
def is_pinned(self) -> bool:
"""Return whether I am pinned to an exact version.
For example, some-package==1.2 is pinned; some-package>1.2 is not.
"""
specifiers = self.specifier
return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}
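# Illustrative note (not part of pip itself): a SpecifierSet of "==1.4.2"
# counts as pinned here, while ">=1.4" or "!=1.3,<2" do not, since only a
# single "==" / "===" specifier qualifies.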
def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
if not extras_requested:
# Provide an extra to safely evaluate the markers
# without matching any extra
extras_requested = ("",)
if self.markers is not None:
return any(
self.markers.evaluate({"extra": extra}) for extra in extras_requested
)
else:
return True
@property
def has_hash_options(self) -> bool:
"""Return whether any known-good hashes are specified as options.
These activate --require-hashes mode; hashes specified as part of a
URL do not.
"""
return bool(self.hash_options)
def hashes(self, trust_internet: bool = True) -> Hashes:
"""Return a hash-comparer that considers my option- and URL-based
hashes to be known-good.
Hashes in URLs--ones embedded in the requirements file, not ones
downloaded from an index server--are almost peers with ones from
flags. They satisfy --require-hashes (whether it was implicitly or
explicitly activated) but do not activate it. md5 and sha224 are not
allowed in flags, which should nudge people toward good algos. We
always OR all hashes together, even ones from URLs.
:param trust_internet: Whether to trust URL-based (#md5=...) hashes
downloaded from the internet, as by populate_link()
"""
good_hashes = self.hash_options.copy()
link = self.link if trust_internet else self.original_link
if link and link.hash:
good_hashes.setdefault(link.hash_name, []).append(link.hash)
return Hashes(good_hashes)
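# Illustrative sketch (assumed digests, not from pip itself): with
# hash_options == {"sha256": ["<digest-a>"]} and a trusted link ending in
# "#sha256=<digest-b>", the returned Hashes accepts either digest, because
# option- and URL-based hashes are OR-ed together as described above.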
def from_path(self) -> Optional[str]:
"""Format a nice indicator to show where this "comes from" """
if self.req is None:
return None
s = str(self.req)
if self.comes_from:
if isinstance(self.comes_from, str):
comes_from = self.comes_from
else:
comes_from = self.comes_from.from_path()
if comes_from:
s += "->" + comes_from
return s
def ensure_build_location(
self, build_dir: str, autodelete: bool, parallel_builds: bool
) -> str:
assert build_dir is not None
if self._temp_build_dir is not None:
assert self._temp_build_dir.path
return self._temp_build_dir.path
if self.req is None:
# Some systems have /tmp as a symlink which confuses custom
# builds (such as numpy). Thus, we ensure that the real path
# is returned.
self._temp_build_dir = TempDirectory(
kind=tempdir_kinds.REQ_BUILD, globally_managed=True
)
return self._temp_build_dir.path
# This is the only remaining place where we manually determine the path
# for the temporary directory. It is only needed for editables where
# it is the value of the --src option.
# When parallel builds are enabled, add a UUID to the build directory
# name so multiple builds do not interfere with each other.
dir_name: str = canonicalize_name(self.name)
if parallel_builds:
dir_name = f"{dir_name}_{uuid.uuid4().hex}"
# FIXME: Is there a better place to create the build_dir? (hg and bzr
# need this)
if not os.path.exists(build_dir):
logger.debug("Creating directory %s", build_dir)
os.makedirs(build_dir)
actual_build_dir = os.path.join(build_dir, dir_name)
# `None` indicates that we respect the globally-configured deletion
# settings, which is what we actually want when auto-deleting.
delete_arg = None if autodelete else False
return TempDirectory(
path=actual_build_dir,
delete=delete_arg,
kind=tempdir_kinds.REQ_BUILD,
globally_managed=True,
).path
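# Illustrative example (assumed project name): a requirement named
# "Some_Project" builds in "<build_dir>/some-project", or in
# "<build_dir>/some-project_<hex uuid>" when parallel builds are enabled.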
def _set_requirement(self) -> None:
"""Set requirement after generating metadata."""
assert self.req is None
assert self.metadata is not None
assert self.source_dir is not None
# Construct a Requirement object from the generated metadata
if isinstance(parse_version(self.metadata["Version"]), Version):
op = "=="
else:
op = "==="
self.req = Requirement(
"".join(
[
self.metadata["Name"],
op,
self.metadata["Version"],
]
)
)
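# Illustrative example (assumed metadata): Name "demo" with the PEP 440
# version "1.2.3" produces Requirement("demo==1.2.3"); a legacy, non-PEP 440
# version string falls back to the arbitrary-equality operator "===".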
def warn_on_mismatching_name(self) -> None:
metadata_name = canonicalize_name(self.metadata["Name"])
if canonicalize_name(self.req.name) == metadata_name:
# Everything is fine.
return
# If we're here, there's a mismatch. Log a warning about it.
logger.warning(
"Generating metadata for package %s "
"produced metadata for project name %s. Fix your "
"#egg=%s fragments.",
self.name,
metadata_name,
self.name,
)
self.req = Requirement(metadata_name)
def check_if_exists(self, use_user_site: bool) -> None:
"""Find an installed distribution that satisfies or conflicts
with this requirement, and set self.satisfied_by or
self.should_reinstall appropriately.
"""
if self.req is None:
return
existing_dist = get_default_environment().get_distribution(self.req.name)
if not existing_dist:
return
version_compatible = self.req.specifier.contains(
existing_dist.version,
prereleases=True,
)
if not version_compatible:
self.satisfied_by = None
if use_user_site:
if existing_dist.in_usersite:
self.should_reinstall = True
elif running_under_virtualenv() and existing_dist.in_site_packages:
raise InstallationError(
f"Will not install to the user site because it will "
f"lack sys.path precedence to {existing_dist.raw_name} "
f"in {existing_dist.location}"
)
else:
self.should_reinstall = True
else:
if self.editable:
self.should_reinstall = True
# when installing editables, nothing pre-existing should ever
# satisfy
self.satisfied_by = None
else:
self.satisfied_by = existing_dist
# Things valid for wheels
@property
def is_wheel(self) -> bool:
if not self.link:
return False
return self.link.is_wheel
# Things valid for sdists
@property
def unpacked_source_directory(self) -> str:
return os.path.join(
self.source_dir, self.link and self.link.subdirectory_fragment or ""
)
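# Illustrative example (assumed URL): for a link such as
# "git+https://example.com/repo.git#subdirectory=python/pkg" this resolves to
# "<source_dir>/python/pkg"; without a subdirectory fragment it is simply
# source_dir.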
@property
def setup_py_path(self) -> str:
assert self.source_dir, f"No source dir for {self}"
setup_py = os.path.join(self.unpacked_source_directory, "setup.py")
return setup_py
@property
def setup_cfg_path(self) -> str:
assert self.source_dir, f"No source dir for {self}"
setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")
return setup_cfg
@property
def pyproject_toml_path(self) -> str:
assert self.source_dir, f"No source dir for {self}"
return make_pyproject_path(self.unpacked_source_directory)
def load_pyproject_toml(self) -> None:
"""Load the pyproject.toml file.
After calling this routine, all of the attributes related to PEP 517
processing for this requirement have been set. In particular, the
use_pep517 attribute can be used to determine whether we should
follow the PEP 517 or legacy (setup.py) code path.
"""
pyproject_toml_data = load_pyproject_toml(
self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self)
)
if pyproject_toml_data is None:
self.use_pep517 = False
return
self.use_pep517 = True
requires, backend, check, backend_path = pyproject_toml_data
self.requirements_to_check = check
self.pyproject_requires = requires
self.pep517_backend = Pep517HookCaller(
self.unpacked_source_directory,
backend,
backend_path=backend_path,
)
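# Illustrative example (assumed file contents): a pyproject.toml declaring
#   [build-system]
#   requires = ["setuptools>=40.8.0", "wheel"]
#   build-backend = "setuptools.build_meta"
# leaves pyproject_requires as that requires list and creates a
# Pep517HookCaller driving the "setuptools.build_meta" backend.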
def isolated_editable_sanity_check(self) -> None:
"""Check that an editable requirement if valid for use with PEP 517/518.
This verifies that an editable that has a pyproject.toml either supports PEP 660
or as a setup.py or a setup.cfg
"""
if (
self.editable
and self.use_pep517
and not self.supports_pyproject_editable()
and not os.path.isfile(self.setup_py_path)
and not os.path.isfile(self.setup_cfg_path)
):
raise InstallationError(
f"Project {self} has a 'pyproject.toml' and its build "
f"backend is missing the 'build_editable' hook. Since it does not "
f"have a 'setup.py' nor a 'setup.cfg', "
f"it cannot be installed in editable mode. "
f"Consider using a build backend that supports PEP 660."
)
def prepare_metadata(self) -> None:
"""Ensure that project metadata is available.
Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
Under legacy processing, call setup.py egg-info.
"""
assert self.source_dir
details = self.name or f"from {self.link}"
if self.use_pep517:
assert self.pep517_backend is not None
if (
self.editable
and self.permit_editable_wheels
and self.supports_pyproject_editable()
):
self.metadata_directory = generate_editable_metadata(
build_env=self.build_env,
backend=self.pep517_backend,
details=details,
)
else:
self.metadata_directory = generate_metadata(
build_env=self.build_env,
backend=self.pep517_backend,
details=details,
)
else:
self.metadata_directory = generate_metadata_legacy(
build_env=self.build_env,
setup_py_path=self.setup_py_path,
source_dir=self.unpacked_source_directory,
isolated=self.isolated,
details=details,
)
# Act on the newly generated metadata, based on the name and version.
if not self.name:
self._set_requirement()
else:
self.warn_on_mismatching_name()
self.assert_source_matches_version()
@property
def metadata(self) -> Any:
if not hasattr(self, "_metadata"):
self._metadata = self.get_dist().metadata
return self._metadata
def get_dist(self) -> BaseDistribution:
return get_directory_distribution(self.metadata_directory)
def assert_source_matches_version(self) -> None:
assert self.source_dir
version = self.metadata["version"]
if self.req.specifier and version not in self.req.specifier:
logger.warning(
"Requested %s, but installing version %s",
self,
version,
)
else:
logger.debug(
"Source in %s has version %s, which satisfies requirement %s",
display_path(self.source_dir),
version,
self,
)
# For both source distributions and editables
def ensure_has_source_dir(
self,
parent_dir: str,
autodelete: bool = False,
parallel_builds: bool = False,
) -> None:
"""Ensure that a source_dir is set.
This will create a temporary build dir if the name of the requirement
isn't known yet.
:param parent_dir: The ideal pip parent_dir for the source_dir.
Generally src_dir for editables and build_dir for sdists.
:return: self.source_dir
"""
if self.source_dir is None:
self.source_dir = self.ensure_build_location(
parent_dir,
autodelete=autodelete,
parallel_builds=parallel_builds,
)
# For editable installations
def update_editable(self) -> None:
if not self.link:
logger.debug(
"Cannot update repository at %s; repository location is unknown",
self.source_dir,
)
return
assert self.editable
assert self.source_dir
if self.link.scheme == "file":
# Static paths don't get updated
return
vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
# Editable requirements are validated in Requirement constructors.
# So here, if it's neither a path nor a valid VCS URL, it's a bug.
assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
hidden_url = hide_url(self.link.url)
vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)
# Top-level Actions
def uninstall(
self, auto_confirm: bool = False, verbose: bool = False
) -> Optional[UninstallPathSet]:
"""
Uninstall the distribution currently satisfying this requirement.
Prompts before removing or modifying files unless
``auto_confirm`` is True.
Refuses to delete or modify files outside of ``sys.prefix`` -
thus uninstallation within a virtual environment can only
modify that virtual environment, even if the virtualenv is
linked to global site-packages.
"""
assert self.req
dist = get_default_environment().get_distribution(self.req.name)
if not dist:
logger.warning("Skipping %s as it is not installed.", self.name)
return None
logger.info("Found existing installation: %s", dist)
uninstalled_pathset = UninstallPathSet.from_dist(dist)
uninstalled_pathset.remove(auto_confirm, verbose)
return uninstalled_pathset
def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
def _clean_zip_name(name: str, prefix: str) -> str:
assert name.startswith(
prefix + os.path.sep
), f"name {name!r} doesn't start with prefix {prefix!r}"
name = name[len(prefix) + 1 :]
name = name.replace(os.path.sep, "/")
return name
path = os.path.join(parentdir, path)
name = _clean_zip_name(path, rootdir)
return self.name + "/" + name
def archive(self, build_dir: Optional[str]) -> None:
"""Saves archive to provided build_dir.
Used for saving downloaded VCS requirements as part of `pip download`.
"""
assert self.source_dir
if build_dir is None:
return
create_archive = True
archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
archive_path = os.path.join(build_dir, archive_name)
if os.path.exists(archive_path):
response = ask_path_exists(
"The file {} exists. (i)gnore, (w)ipe, "
"(b)ackup, (a)bort ".format(display_path(archive_path)),
("i", "w", "b", "a"),
)
if response == "i":
create_archive = False
elif response == "w":
logger.warning("Deleting %s", display_path(archive_path))
os.remove(archive_path)
elif response == "b":
dest_file = backup_dir(archive_path)
logger.warning(
"Backing up %s to %s",
display_path(archive_path),
display_path(dest_file),
)
shutil.move(archive_path, dest_file)
elif response == "a":
sys.exit(-1)
if not create_archive:
return
zip_output = zipfile.ZipFile(
archive_path,
"w",
zipfile.ZIP_DEFLATED,
allowZip64=True,
)
with zip_output:
dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
for dirpath, dirnames, filenames in os.walk(dir):
for dirname in dirnames:
dir_arcname = self._get_archive_name(
dirname,
parentdir=dirpath,
rootdir=dir,
)
zipdir = zipfile.ZipInfo(dir_arcname + "/")
zipdir.external_attr = 0x1ED << 16 # 0o755
zip_output.writestr(zipdir, "")
for filename in filenames:
file_arcname = self._get_archive_name(
filename,
parentdir=dirpath,
rootdir=dir,
)
filename = os.path.join(dirpath, filename)
zip_output.write(filename, file_arcname)
logger.info("Saved %s", display_path(archive_path))
def install(
self,
install_options: List[str],
global_options: Optional[Sequence[str]] = None,
root: Optional[str] = None,
home: Optional[str] = None,
prefix: Optional[str] = None,
warn_script_location: bool = True,
use_user_site: bool = False,
pycompile: bool = True,
) -> None:
scheme = get_scheme(
self.name,
user=use_user_site,
home=home,
root=root,
isolated=self.isolated,
prefix=prefix,
)
global_options = global_options if global_options is not None else []
if self.editable and not self.is_wheel:
install_editable_legacy(
install_options,
global_options,
prefix=prefix,
home=home,
use_user_site=use_user_site,
name=self.name,
setup_py_path=self.setup_py_path,
isolated=self.isolated,
build_env=self.build_env,
unpacked_source_directory=self.unpacked_source_directory,
)
self.install_succeeded = True
return
if self.is_wheel:
assert self.local_file_path
direct_url = None
if self.editable:
direct_url = direct_url_for_editable(self.unpacked_source_directory)
elif self.original_link:
direct_url = direct_url_from_link(
self.original_link,
self.source_dir,
self.original_link_is_in_wheel_cache,
)
install_wheel(
self.name,
self.local_file_path,
scheme=scheme,
req_description=str(self.req),
pycompile=pycompile,
warn_script_location=warn_script_location,
direct_url=direct_url,
requested=self.user_supplied,
)
self.install_succeeded = True
return
# TODO: Why don't we do this for editable installs?
# Extend the list of global and install options passed on to
# the setup.py call with the ones from the requirements file.
# Options specified in requirements file override those
# specified on the command line, since the last option given
# to setup.py is the one that is used.
global_options = list(global_options) + self.global_options
install_options = list(install_options) + self.install_options
try:
success = install_legacy(
install_options=install_options,
global_options=global_options,
root=root,
home=home,
prefix=prefix,
use_user_site=use_user_site,
pycompile=pycompile,
scheme=scheme,
setup_py_path=self.setup_py_path,
isolated=self.isolated,
req_name=self.name,
build_env=self.build_env,
unpacked_source_directory=self.unpacked_source_directory,
req_description=str(self.req),
)
except LegacyInstallFailure as exc:
self.install_succeeded = False
raise exc
except Exception:
self.install_succeeded = True
raise
self.install_succeeded = success
if success and self.legacy_install_reason == 8368:
deprecated(
reason=(
"{} was installed using the legacy 'setup.py install' "
"method, because a wheel could not be built for it.".format(
self.name
)
),
replacement="to fix the wheel build issue reported above",
gone_in=None,
issue=8368,
)
def check_invalid_constraint_type(req: InstallRequirement) -> str:
# Check for unsupported forms
problem = ""
if not req.name:
problem = "Unnamed requirements are not allowed as constraints"
elif req.editable:
problem = "Editable requirements are not allowed as constraints"
elif req.extras:
problem = "Constraints cannot have extras"
if problem:
deprecated(
reason=(
"Constraints are only allowed to take the form of a package "
"name and a version specifier. Other forms were originally "
"permitted as an accident of the implementation, but were "
"undocumented. The new implementation of the resolver no "
"longer supports these forms."
),
replacement="replacing the constraint with a requirement",
# No plan yet for when the new resolver becomes default
gone_in=None,
issue=8210,
)
return problem
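# Illustrative examples (not from pip itself): constraint lines such as
# "./local-dir/" (unnamed), "-e git+https://example.com/repo.git#egg=pkg"
# (editable) or "pkg[extra]==1.0" (extras) each set a non-empty problem
# string above and trigger the deprecation warning.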
|
|
##########################################################################
#
# Copyright (c) 2008-2015, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import maya.cmds
import maya.OpenMaya as OpenMaya
import imath
import IECore
import IECoreScene
import IECoreMaya
class ToMayaMeshConverterTest( IECoreMaya.TestCase ) :
def testConversion( self ) :
coreMesh = IECoreScene.MeshPrimitive.createBox( imath.Box3f( imath.V3f( -10 ), imath.V3f( 10 ) ) )
converter = IECoreMaya.ToMayaObjectConverter.create( coreMesh )
self.assert_( converter.isInstanceOf( IECoreMaya.ToMayaObjectConverter.staticTypeId() ) )
self.assert_( converter.isInstanceOf( IECoreMaya.ToMayaConverter.staticTypeId() ) )
self.assert_( converter.isInstanceOf( IECore.FromCoreConverter.staticTypeId() ) )
transform = maya.cmds.createNode( "transform" )
self.assert_( converter.convert( transform ) )
mayaMesh = maya.cmds.listRelatives( transform, shapes=True )[0]
self.assertEqual( maya.cmds.polyEvaluate( mayaMesh, vertex=True ), 8 )
self.assertEqual( maya.cmds.polyEvaluate( mayaMesh, face=True ), 6 )
self.assertEqual( maya.cmds.polyEvaluate( mayaMesh, boundingBox=True ), ( (-10, 10), (-10, 10), (-10, 10) ) )
def testUVConversion( self ) :
coreMesh = IECore.Reader.create( "test/IECore/data/cobFiles/pSphereShape1.cob" ).read()
self.assertTrue( "uv" in coreMesh )
coreMesh[ "testUVSet" ] = IECoreScene.PrimitiveVariable( coreMesh["uv"].interpolation, coreMesh["uv"].data.copy() )
converter = IECoreMaya.ToMayaObjectConverter.create( coreMesh )
self.assert_( converter.isInstanceOf( IECoreMaya.ToMayaObjectConverter.staticTypeId() ) )
self.assert_( converter.isInstanceOf( IECoreMaya.ToMayaConverter.staticTypeId() ) )
self.assert_( converter.isInstanceOf( IECore.FromCoreConverter.staticTypeId() ) )
transform = maya.cmds.createNode( "transform" )
self.assert_( converter.convert( transform ) )
mayaMesh = maya.cmds.listRelatives( transform, shapes=True )[0]
self.assertEqual( maya.cmds.polyEvaluate( mayaMesh, vertex=True ), 382 )
self.assertEqual( maya.cmds.polyEvaluate( mayaMesh, face=True ), 760 )
bb = maya.cmds.polyEvaluate( mayaMesh, boundingBox=True )
self.assertAlmostEqual( bb[0][0], -1, 4 )
self.assertAlmostEqual( bb[0][1], 1, 4 )
self.assertAlmostEqual( bb[1][0], -1, 4 )
self.assertAlmostEqual( bb[1][1], 1, 4 )
self.assertAlmostEqual( bb[2][0], -1, 4 )
self.assertAlmostEqual( bb[2][1], 1, 4 )
l = OpenMaya.MSelectionList()
l.add( mayaMesh )
p = OpenMaya.MDagPath()
l.getDagPath( 0, p )
fnMesh = OpenMaya.MFnMesh( p )
u = OpenMaya.MFloatArray()
v = OpenMaya.MFloatArray()
fnMesh.getUVs( u, v )
self.assertEqual( u.length(), 2280 )
self.assertEqual( v.length(), 2280 )
self.assertEqual( u[0], coreMesh[ "uv" ].data[0][0] )
self.assertEqual( v[0], coreMesh[ "uv" ].data[0][1] )
fnMesh.getUVs( u, v, "testUVSet" )
self.assertEqual( u.length(), 2280 )
self.assertEqual( v.length(), 2280 )
self.assertEqual( u[12], coreMesh[ "testUVSet" ].data[12][0] )
self.assertEqual( v[12], coreMesh[ "testUVSet" ].data[12][1] )
def testUVConversionFromPlug( self ) :
coreMesh = IECore.Reader.create( "test/IECore/data/cobFiles/pSphereShape1.cob" ).read()
self.assertTrue( "uv" in coreMesh )
coreMesh[ "testUVSet" ] = IECoreScene.PrimitiveVariable( coreMesh["uv"].interpolation, coreMesh["uv"].data.copy() )
fn = IECoreMaya.FnOpHolder.create( "test", "meshMerge" )
op = fn.getOp()
with fn.parameterModificationContext() :
op["input"].setValue( coreMesh )
mayaMesh = maya.cmds.ls( maya.cmds.polyPlane(), dag=True, type="mesh" )[0]
maya.cmds.connectAttr( fn.name()+".result", mayaMesh+".inMesh", force=True )
self.assertEqual( maya.cmds.polyEvaluate( mayaMesh, vertex=True ), 382 )
self.assertEqual( maya.cmds.polyEvaluate( mayaMesh, face=True ), 760 )
bb = maya.cmds.polyEvaluate( mayaMesh, boundingBox=True )
self.assertAlmostEqual( bb[0][0], -1, 4 )
self.assertAlmostEqual( bb[0][1], 1, 4 )
self.assertAlmostEqual( bb[1][0], -1, 4 )
self.assertAlmostEqual( bb[1][1], 1, 4 )
self.assertAlmostEqual( bb[2][0], -1, 4 )
self.assertAlmostEqual( bb[2][1], 1, 4 )
l = OpenMaya.MSelectionList()
l.add( mayaMesh )
p = OpenMaya.MDagPath()
l.getDagPath( 0, p )
fnMesh = OpenMaya.MFnMesh( p )
u = OpenMaya.MFloatArray()
v = OpenMaya.MFloatArray()
fnMesh.getUVs( u, v )
self.assertEqual( u.length(), 2280 )
self.assertEqual( v.length(), 2280 )
self.assertEqual( u[0], coreMesh[ "uv" ].data[0][0] )
self.assertEqual( v[0], coreMesh[ "uv" ].data[0][1] )
fnMesh.getUVs( u, v, "testUVSet" )
self.assertEqual( u.length(), 2280 )
self.assertEqual( v.length(), 2280 )
self.assertEqual( u[12], coreMesh[ "testUVSet" ].data[12][0] )
self.assertEqual( v[12], coreMesh[ "testUVSet" ].data[12][1] )
@unittest.skipIf( maya.OpenMaya.MGlobal.apiVersion() < 201600, "Invisible meshes with 6+ UV sets cause seg faults prior to Maya 2016" )
def testManyUVConversionsFromPlug( self ) :
coreMesh = IECore.Reader.create( "test/IECore/data/cobFiles/pSphereShape1.cob" ).read()
self.assertTrue( "uv" in coreMesh )
for i in range( 0, 7 ) :
coreMesh[ "testUVSet%d" % i ] = IECoreScene.PrimitiveVariable( coreMesh["uv"].interpolation, coreMesh["uv"].data.copy() )
fn = IECoreMaya.FnOpHolder.create( "test", "meshMerge" )
mayaMesh = maya.cmds.ls( maya.cmds.polyPlane(), dag=True, type="mesh" )[0]
maya.cmds.connectAttr( fn.name()+".result", mayaMesh+".inMesh", force=True )
op = fn.getOp()
with fn.parameterModificationContext() :
op["input"].setValue( coreMesh )
maya.cmds.file( rename="/tmp/test.ma" )
maya.cmds.file( save=True )
maya.cmds.file( new=True, f=True )
maya.cmds.file( "/tmp/test.ma", open=True )
fnMesh = OpenMaya.MFnMesh( IECoreMaya.dagPathFromString( mayaMesh ) )
self.assertEqual( fnMesh.numPolygons(), 760 )
# When calling fnMesh.numFaceVertices() (and other MFnMesh API calls), given a mesh with 6
# or more UV sets, which has never been evaluated before, the first call throws kFailure.
# From within the ToMayaMeshConverter itself, the output plug appears fine, and the API calls
# evaluate as expected. Despite this, the resulting mesh cannot be evaluated on the first try.
# Making the mesh visible, or making any attempt to evaluate it, will trigger some unknown
# internal updating, and subsequent attempts to evaluate it will succeed. Meshes with 5 or fewer
# UV sets do not suffer from this problem. This was fixed in Maya 2016, but I'll leave
# this explanation so users of ToMayaMeshConverter have breadcrumbs to follow.
self.assertEqual( fnMesh.numFaceVertices(), 2280 )
self.assertEqual( maya.cmds.polyEvaluate( mayaMesh, vertex=True ), 382 )
self.assertEqual( maya.cmds.polyEvaluate( mayaMesh, face=True ), 760 )
u = OpenMaya.MFloatArray()
v = OpenMaya.MFloatArray()
fnMesh.getUVs( u, v )
self.assertEqual( u.length(), 2280 )
self.assertEqual( v.length(), 2280 )
self.assertEqual( u[0], coreMesh[ "uv" ].data[0][0] )
self.assertEqual( v[0], coreMesh[ "uv" ].data[0][1] )
for i in range( 0, 7 ) :
fnMesh.getUVs( u, v, "testUVSet%d" % i )
self.assertEqual( u.length(), 2280 )
self.assertEqual( v.length(), 2280 )
self.assertEqual( u[12], coreMesh[ "testUVSet%d" % i ].data[12][0] )
self.assertEqual( v[12], coreMesh[ "testUVSet%d" % i ].data[12][1] )
def testUVConversionFromMayaMesh( self ) :
mayaMesh = maya.cmds.ls( maya.cmds.polyPlane(), dag=True, type="mesh" )[0]
coreMesh = IECoreMaya.FromMayaMeshConverter( mayaMesh ).convert()
transform = maya.cmds.createNode( "transform" )
self.failUnless( IECoreMaya.ToMayaMeshConverter( coreMesh ).convert( transform ) )
mayaMesh2 = maya.cmds.listRelatives( transform, shapes=True )[0]
l = OpenMaya.MSelectionList()
l.add( mayaMesh )
l.add( mayaMesh2 )
p = OpenMaya.MDagPath()
p2 = OpenMaya.MDagPath()
l.getDagPath( 0, p )
l.getDagPath( 1, p2 )
uvSets = []
fnMesh = OpenMaya.MFnMesh( p )
fnMesh.getUVSetNames( uvSets )
uvSets2 = []
fnMesh2 = OpenMaya.MFnMesh( p2 )
fnMesh2.getUVSetNames( uvSets2 )
self.assertEqual( uvSets, uvSets2 )
# Check uvIndices
coreMesh2 = IECoreMaya.FromMayaMeshConverter( mayaMesh2 ).convert()
# self.assertEqual( coreMesh["uv"].data, coreMesh2["uv"].data )
self.assertEqual( coreMesh["uv"].indices, coreMesh2["uv"].indices )
def testShadingGroup( self ) :
coreMesh = IECoreScene.MeshPrimitive.createBox( imath.Box3f( imath.V3f( -10 ), imath.V3f( 10 ) ) )
converter = IECoreMaya.ToMayaObjectConverter.create( coreMesh )
transform = maya.cmds.createNode( "transform" )
converter.convert( transform )
mayaMesh = maya.cmds.listRelatives( transform, shapes=True )[0]
self.failUnless( mayaMesh in maya.cmds.sets( "initialShadingGroup", query=True ) )
def testConstructor( self ) :
coreMesh = IECoreScene.MeshPrimitive.createBox( imath.Box3f( imath.V3f( -10 ), imath.V3f( 10 ) ) )
converter = IECoreMaya.ToMayaMeshConverter( coreMesh )
transform = maya.cmds.createNode( "transform" )
converter.convert( transform )
self.assertEqual( maya.cmds.nodeType( maya.cmds.listRelatives( transform, shapes=True )[0] ), "mesh" )
def testNormals( self ) :
sphere = maya.cmds.polySphere( subdivisionsX=4, subdivisionsY=3, constructionHistory=False )
sphere = maya.cmds.listRelatives( sphere, shapes=True )[0]
maya.cmds.polySoftEdge( sphere, angle=145 )
mesh = IECoreMaya.FromMayaShapeConverter.create( sphere ).convert()
self.failUnless( "N" in mesh )
self.failUnless( mesh.arePrimitiveVariablesValid() )
self.assertEqual( mesh["N"].interpolation, IECoreScene.PrimitiveVariable.Interpolation.FaceVarying )
self.failUnless( isinstance( mesh["N"].data, IECore.V3fVectorData ) )
transform = maya.cmds.createNode( "transform" )
IECoreMaya.ToMayaObjectConverter.create( mesh ).convert( transform )
newSphere = maya.cmds.listRelatives( transform, shapes=True )[0]
normals3d = IECore.DataConvertOp()( data=mesh["N"].data, targetType=IECore.TypeId.V3dVectorData )
del mesh["N"]
mesh["N"] = IECoreScene.PrimitiveVariable( IECoreScene.PrimitiveVariable.Interpolation.FaceVarying, normals3d )
self.failUnless( mesh.arePrimitiveVariablesValid() )
self.failUnless( isinstance( mesh["N"].data, IECore.V3dVectorData ) )
transform2 = maya.cmds.createNode( "transform" )
IECoreMaya.ToMayaObjectConverter.create( mesh ).convert( transform2 )
newSphere2 = maya.cmds.listRelatives( transform2, shapes=True )[0]
for i in range( 0, len(maya.cmds.ls( sphere+'.vtx[*]', fl=True )) ) :
origNormal = maya.cmds.polyNormalPerVertex( sphere+'.vtx['+str(i)+']', query=True, xyz=True )
normal3f = maya.cmds.polyNormalPerVertex( newSphere+'.vtx['+str(i)+']', query=True, xyz=True )
normal3d = maya.cmds.polyNormalPerVertex( newSphere2+'.vtx['+str(i)+']', query=True, xyz=True )
for j in range( 0, len(origNormal) ) :
self.assertAlmostEqual( origNormal[j], normal3f[j], 6 )
self.assertAlmostEqual( origNormal[j], normal3d[j], 6 )
def testSetMeshInterpolation( self ) :
sphere = maya.cmds.polySphere( subdivisionsX=10, subdivisionsY=5, constructionHistory=False )
sphere = maya.cmds.listRelatives( sphere, shapes=True )[0]
self.assertRaises( ValueError, maya.cmds.getAttr, sphere + ".ieMeshInterpolation" )
IECoreMaya.ToMayaMeshConverter.setMeshInterpolationAttribute( sphere )
self.assertEqual( maya.cmds.getAttr( sphere + ".ieMeshInterpolation" ), 0 )
coreMesh = IECoreScene.MeshPrimitive.createBox( imath.Box3f( imath.V3f( -10 ), imath.V3f( 10 ) ) )
coreMesh.interpolation = "catmullClark"
converter = IECoreMaya.ToMayaObjectConverter.create( coreMesh )
transform = maya.cmds.createNode( "transform" )
self.assert_( converter.convert( transform ) )
mayaMesh = maya.cmds.listRelatives( transform, shapes=True )[0]
self.assertEqual( maya.cmds.getAttr( mayaMesh + ".ieMeshInterpolation" ), 1 )
def testCreases( self ) :
cortexCube = IECoreScene.MeshPrimitive.createBox( imath.Box3f( imath.V3f( -1 ), imath.V3f( 1 ) ) )
cornerIds = [ 5 ]
cornerSharpnesses = [ 10.0 ]
cortexCube.setCorners( IECore.IntVectorData( cornerIds ), IECore.FloatVectorData( cornerSharpnesses ) )
creaseLengths = [ 3, 2 ]
creaseIds = [ 1, 2, 3, 4, 5 ] # note that these are vertex ids
creaseSharpnesses = [ 1, 5 ]
cortexCube.setCreases( IECore.IntVectorData( creaseLengths ), IECore.IntVectorData( creaseIds ), IECore.FloatVectorData( creaseSharpnesses ) )
converter = IECoreMaya.ToMayaObjectConverter.create( cortexCube )
transform = maya.cmds.createNode( "transform" )
self.assert_( converter.convert( transform ) )
mayaMesh = maya.cmds.listRelatives( transform, shapes=True )[0]
l = OpenMaya.MSelectionList()
l.add( mayaMesh )
p = OpenMaya.MDagPath()
l.getDagPath( 0, p )
fnMesh = OpenMaya.MFnMesh( p )
# Test corners
cornerIds = OpenMaya.MUintArray()
cornerSharpnesses = OpenMaya.MDoubleArray()
fnMesh.getCreaseVertices( cornerIds, cornerSharpnesses )
testIds = OpenMaya.MUintArray()
testIds.append( 5 )
self.assertEqual( cornerIds, testIds )
testSharpnesses = OpenMaya.MFloatArray()
testSharpnesses.append( 10 )
self.assertEqual( cornerSharpnesses, testSharpnesses )
# Test edges
edgeIds = OpenMaya.MUintArray()
edgeSharpnesses = OpenMaya.MDoubleArray()
fnMesh.getCreaseEdges( edgeIds, edgeSharpnesses )
util = OpenMaya.MScriptUtil()
result = []
for edgeId, sharpness in zip( edgeIds, edgeSharpnesses ) :
edgeVertices = util.asInt2Ptr()
fnMesh.getEdgeVertices( edgeId, edgeVertices )
result.append( (util.getInt2ArrayItem( edgeVertices, 0, 1 ),
util.getInt2ArrayItem( edgeVertices, 0, 0 ),
sharpness) )
# we compare sets because maya reorders by edge index
self.assertEqual( set( result ), set( [ ( 1, 2, 1.0 ), ( 2, 3, 1.0 ), ( 4, 5, 5.0 ) ] ) )
if __name__ == "__main__":
IECoreMaya.TestProgram( plugins = [ "ieCore" ] )
|
|
# Copyright (c) 2016 Zadara Storage, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Volume driver for Zadara Virtual Private Storage Array (VPSA).
This driver requires VPSA with API version 15.07 or higher.
"""
from lxml import etree
from oslo_config import cfg
from oslo_log import log as logging
from six.moves import http_client
from cinder import exception
from cinder.i18n import _, _LE, _LW
from cinder import interface
from cinder.volume import driver
LOG = logging.getLogger(__name__)
zadara_opts = [
cfg.BoolOpt('zadara_use_iser',
default=True,
help='VPSA - Use ISER instead of iSCSI'),
cfg.StrOpt('zadara_vpsa_host',
default=None,
help='VPSA - Management Host name or IP address'),
cfg.PortOpt('zadara_vpsa_port',
default=None,
help='VPSA - Port number'),
cfg.BoolOpt('zadara_vpsa_use_ssl',
default=False,
help='VPSA - Use SSL connection'),
cfg.StrOpt('zadara_user',
default=None,
help='VPSA - Username'),
cfg.StrOpt('zadara_password',
default=None,
help='VPSA - Password',
secret=True),
cfg.StrOpt('zadara_vpsa_poolname',
default=None,
help='VPSA - Storage Pool assigned for volumes'),
cfg.BoolOpt('zadara_vol_encrypt',
default=False,
help='VPSA - Default encryption policy for volumes'),
cfg.StrOpt('zadara_vol_name_template',
default='OS_%s',
help='VPSA - Default template for VPSA volume names'),
cfg.BoolOpt('zadara_default_snap_policy',
default=False,
help="VPSA - Attach snapshot policy for volumes")]
CONF = cfg.CONF
CONF.register_opts(zadara_opts)
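# Illustrative backend configuration (a sketch only; the section name, host,
# pool and credential values are placeholders, and the volume_driver path is
# assumed from this module's usual location in the Cinder tree):
#
#   [zadara_backend]
#   volume_driver = cinder.volume.drivers.zadara.ZadaraVPSAISCSIDriver
#   zadara_vpsa_host = vpsa.example.com
#   zadara_vpsa_port = 443
#   zadara_vpsa_use_ssl = True
#   zadara_user = admin
#   zadara_password = secret
#   zadara_vpsa_poolname = pool-00010001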
class ZadaraVPSAConnection(object):
"""Executes volume driver commands on VPSA."""
def __init__(self, conf):
self.conf = conf
self.access_key = None
self.ensure_connection()
def _generate_vpsa_cmd(self, cmd, **kwargs):
"""Generate command to be sent to VPSA."""
def _joined_params(params):
param_str = []
for k, v in params.items():
param_str.append("%s=%s" % (k, v))
return '&'.join(param_str)
# Dictionary of applicable VPSA commands in the following format:
# 'command': (method, API_URL, {optional parameters})
vpsa_commands = {
'login': ('POST',
'/api/users/login.xml',
{'user': self.conf.zadara_user,
'password': self.conf.zadara_password}),
# Volume operations
'create_volume': ('POST',
'/api/volumes.xml',
{'name': kwargs.get('name'),
'capacity': kwargs.get('size'),
'pool': self.conf.zadara_vpsa_poolname,
'thin': 'YES',
'crypt': 'YES'
if self.conf.zadara_vol_encrypt else 'NO',
'attachpolicies': 'NO'
if not self.conf.zadara_default_snap_policy
else 'YES'}),
'delete_volume': ('DELETE',
'/api/volumes/%s.xml' % kwargs.get('vpsa_vol'),
{'force': 'YES'}),
'expand_volume': ('POST',
'/api/volumes/%s/expand.xml'
% kwargs.get('vpsa_vol'),
{'capacity': kwargs.get('size')}),
# Snapshot operations
# A snapshot request is issued for a single volume, even though the
# legacy API path refers to a consistency group (CG).
'create_snapshot': ('POST',
'/api/consistency_groups/%s/snapshots.xml'
% kwargs.get('cg_name'),
{'display_name': kwargs.get('snap_name')}),
'delete_snapshot': ('DELETE',
'/api/snapshots/%s.xml'
% kwargs.get('snap_id'),
{}),
'create_clone_from_snap': ('POST',
'/api/consistency_groups/%s/clone.xml'
% kwargs.get('cg_name'),
{'name': kwargs.get('name'),
'snapshot': kwargs.get('snap_id')}),
'create_clone': ('POST',
'/api/consistency_groups/%s/clone.xml'
% kwargs.get('cg_name'),
{'name': kwargs.get('name')}),
# Server operations
'create_server': ('POST',
'/api/servers.xml',
{'display_name': kwargs.get('initiator'),
'iqn': kwargs.get('initiator')}),
# Attach/Detach operations
'attach_volume': ('POST',
'/api/servers/%s/volumes.xml'
% kwargs.get('vpsa_srv'),
{'volume_name[]': kwargs.get('vpsa_vol'),
'force': 'NO'}),
'detach_volume': ('POST',
'/api/volumes/%s/detach.xml'
% kwargs.get('vpsa_vol'),
{'server_name[]': kwargs.get('vpsa_srv'),
'force': 'NO'}),
# Get operations
'list_volumes': ('GET',
'/api/volumes.xml',
{}),
'list_pools': ('GET',
'/api/pools.xml',
{}),
'list_controllers': ('GET',
'/api/vcontrollers.xml',
{}),
'list_servers': ('GET',
'/api/servers.xml',
{}),
'list_vol_attachments': ('GET',
'/api/volumes/%s/servers.xml'
% kwargs.get('vpsa_vol'),
{}),
'list_vol_snapshots': ('GET',
'/api/consistency_groups/%s/snapshots.xml'
% kwargs.get('cg_name'),
{})}
if cmd not in vpsa_commands:
raise exception.UnknownCmd(cmd=cmd)
else:
(method, url, params) = vpsa_commands[cmd]
if method == 'GET':
# For GET commands add parameters to the URL
params.update(dict(access_key=self.access_key,
page=1, start=0, limit=0))
url += '?' + _joined_params(params)
body = ''
elif method == 'DELETE':
# For DELETE commands add parameters to the URL
params.update(dict(access_key=self.access_key))
url += '?' + _joined_params(params)
body = ''
elif method == 'POST':
if self.access_key:
params.update(dict(access_key=self.access_key))
body = _joined_params(params)
else:
msg = (_('Method %(method)s is not defined') %
{'method': method})
LOG.error(msg)
raise AssertionError(msg)
return (method, url, body)
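# For illustration only (argument values below are placeholders): a call like
#   self._generate_vpsa_cmd('create_volume', name='OS_vol-1', size=1)
# returns roughly
#   ('POST', '/api/volumes.xml',
#    'name=OS_vol-1&capacity=1&pool=<poolname>&thin=YES&crypt=NO'
#    '&attachpolicies=NO&access_key=<key>')
# with the default configuration; parameter order in the body is not
# guaranteed because _joined_params iterates a plain dict.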
def ensure_connection(self, cmd=None):
"""Retrieve access key for VPSA connection."""
if self.access_key or cmd == 'login':
return
cmd = 'login'
xml_tree = self.send_cmd(cmd)
user = xml_tree.find('user')
if user is None:
raise (exception.MalformedResponse(cmd=cmd,
reason=_('no "user" field')))
access_key = user.findtext('access-key')
if access_key is None:
raise (exception.MalformedResponse(cmd=cmd,
reason=_('no "access-key" field')))
self.access_key = access_key
def send_cmd(self, cmd, **kwargs):
"""Send command to VPSA Controller."""
self.ensure_connection(cmd)
(method, url, body) = self._generate_vpsa_cmd(cmd, **kwargs)
LOG.debug('Invoking %(cmd)s using %(method)s request.',
{'cmd': cmd, 'method': method})
if self.conf.zadara_vpsa_use_ssl:
connection = (http_client.HTTPSConnection(
self.conf.zadara_vpsa_host,
self.conf.zadara_vpsa_port))
else:
connection = http_client.HTTPConnection(self.conf.zadara_vpsa_host,
self.conf.zadara_vpsa_port)
connection.request(method, url, body)
response = connection.getresponse()
if response.status != 200:
connection.close()
raise exception.BadHTTPResponseStatus(status=response.status)
data = response.read()
connection.close()
xml_tree = etree.fromstring(data)
status = xml_tree.findtext('status')
if status != '0':
raise exception.FailedCmdWithDump(status=status, data=data)
if method in ['POST', 'DELETE']:
LOG.debug('Operation completed with status code %(status)s',
{'status': status})
return xml_tree
@interface.volumedriver
class ZadaraVPSAISCSIDriver(driver.ISCSIDriver):
"""Zadara VPSA iSCSI/iSER volume driver."""
VERSION = '15.07'
# ThirdPartySystems wiki page
CI_WIKI_NAME = "ZadaraStorage_VPSA_CI"
def __init__(self, *args, **kwargs):
super(ZadaraVPSAISCSIDriver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(zadara_opts)
def do_setup(self, context):
"""Any initialization the volume driver does while starting.
Establishes initial connection with VPSA and retrieves access_key.
"""
self.vpsa = ZadaraVPSAConnection(self.configuration)
def check_for_setup_error(self):
"""Returns an error (exception) if prerequisites aren't met."""
self.vpsa.ensure_connection()
def local_path(self, volume):
"""Return local path to existing local volume."""
raise NotImplementedError()
def _xml_parse_helper(self, xml_tree, first_level, search_tuple,
first=True):
"""Helper for parsing VPSA's XML output.
Returns a single item when first==True, otherwise a list of all matches.
If the second element of search_tuple is None, every item that has the
given key is returned.
"""
objects = xml_tree.find(first_level)
if objects is None:
return None
result_list = []
(key, value) = search_tuple
for object in objects.getchildren():
found_value = object.findtext(key)
if found_value and (found_value == value or value is None):
if first:
return object
else:
result_list.append(object)
return result_list if result_list else None
def _get_vpsa_volume_name_and_size(self, name):
"""Return VPSA's name & size for the volume."""
xml_tree = self.vpsa.send_cmd('list_volumes')
volume = self._xml_parse_helper(xml_tree, 'volumes',
('display-name', name))
if volume is not None:
return (volume.findtext('name'),
int(volume.findtext('virtual-capacity')))
return (None, None)
def _get_vpsa_volume_name(self, name):
"""Return VPSA's name for the volume."""
(vol_name, size) = self._get_vpsa_volume_name_and_size(name)
return vol_name
def _get_volume_cg_name(self, name):
"""Return name of the consistency group for the volume.
cg-name is a unique identifier of the volume (legacy attribute)
and not a consistency group, as the name may imply.
"""
xml_tree = self.vpsa.send_cmd('list_volumes')
volume = self._xml_parse_helper(xml_tree, 'volumes',
('display-name', name))
if volume is not None:
return volume.findtext('cg-name')
return None
def _get_snap_id(self, cg_name, snap_name):
"""Return snapshot ID for particular volume."""
xml_tree = self.vpsa.send_cmd('list_vol_snapshots',
cg_name=cg_name)
snap = self._xml_parse_helper(xml_tree, 'snapshots',
('display-name', snap_name))
if snap is not None:
return snap.findtext('name')
return None
def _get_pool_capacity(self, pool_name):
"""Return pool's total and available capacities."""
xml_tree = self.vpsa.send_cmd('list_pools')
pool = self._xml_parse_helper(xml_tree, 'pools',
('name', pool_name))
if pool is not None:
total = int(pool.findtext('capacity'))
free = int(float(pool.findtext('available-capacity')))
LOG.debug('Pool %(name)s: %(total)sGB total, %(free)sGB free',
{'name': pool_name, 'total': total, 'free': free})
return (total, free)
return ('unknown', 'unknown')
def _get_active_controller_details(self):
"""Return details of VPSA's active controller."""
xml_tree = self.vpsa.send_cmd('list_controllers')
ctrl = self._xml_parse_helper(xml_tree, 'vcontrollers',
('state', 'active'))
if ctrl is not None:
return dict(target=ctrl.findtext('target'),
ip=ctrl.findtext('iscsi-ip'),
chap_user=ctrl.findtext('vpsa-chap-user'),
chap_passwd=ctrl.findtext('vpsa-chap-secret'))
return None
def _get_server_name(self, initiator):
"""Return VPSA's name for server object with given IQN."""
xml_tree = self.vpsa.send_cmd('list_servers')
server = self._xml_parse_helper(xml_tree, 'servers',
('iqn', initiator))
if server is not None:
return server.findtext('name')
return None
def _create_vpsa_server(self, initiator):
"""Create server object within VPSA (if doesn't exist)."""
vpsa_srv = self._get_server_name(initiator)
if not vpsa_srv:
xml_tree = self.vpsa.send_cmd('create_server', initiator=initiator)
vpsa_srv = xml_tree.findtext('server-name')
return vpsa_srv
def create_volume(self, volume):
"""Create volume."""
self.vpsa.send_cmd(
'create_volume',
name=self.configuration.zadara_vol_name_template % volume['name'],
size=volume['size'])
def delete_volume(self, volume):
"""Delete volume.
Succeeds even if the volume doesn't exist. Automatically detaches the
volume from all servers before deletion.
"""
# Get volume name
name = self.configuration.zadara_vol_name_template % volume['name']
vpsa_vol = self._get_vpsa_volume_name(name)
if not vpsa_vol:
LOG.warning(_LW('Volume %s could not be found. '
'It might be already deleted'), name)
return
# Check attachment info and detach from all
xml_tree = self.vpsa.send_cmd('list_vol_attachments',
vpsa_vol=vpsa_vol)
servers = self._xml_parse_helper(xml_tree, 'servers',
('iqn', None), first=False)
if servers:
for server in servers:
vpsa_srv = server.findtext('name')
if vpsa_srv:
self.vpsa.send_cmd('detach_volume',
vpsa_srv=vpsa_srv,
vpsa_vol=vpsa_vol)
# Delete volume
self.vpsa.send_cmd('delete_volume', vpsa_vol=vpsa_vol)
def create_snapshot(self, snapshot):
"""Creates a snapshot."""
LOG.debug('Create snapshot: %s', snapshot['name'])
# Retrieve the CG name for the base volume
volume_name = (self.configuration.zadara_vol_name_template
% snapshot['volume_name'])
cg_name = self._get_volume_cg_name(volume_name)
if not cg_name:
msg = _('Volume %(name)s not found') % {'name': volume_name}
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
self.vpsa.send_cmd('create_snapshot',
cg_name=cg_name,
snap_name=snapshot['name'])
def delete_snapshot(self, snapshot):
"""Deletes a snapshot."""
LOG.debug('Delete snapshot: %s', snapshot['name'])
# Retrieve the CG name for the base volume
volume_name = (self.configuration.zadara_vol_name_template
% snapshot['volume_name'])
cg_name = self._get_volume_cg_name(volume_name)
if not cg_name:
# If the volume isn't present, then don't attempt to delete
LOG.warning(_LW('snapshot: original volume %s not found, '
'skipping delete operation'),
volume_name)
return
snap_id = self._get_snap_id(cg_name, snapshot['name'])
if not snap_id:
# If the snapshot isn't present, then don't attempt to delete
LOG.warning(_LW('snapshot: snapshot %s not found, '
'skipping delete operation'), snapshot['name'])
return
self.vpsa.send_cmd('delete_snapshot',
snap_id=snap_id)
def create_volume_from_snapshot(self, volume, snapshot):
"""Creates a volume from a snapshot."""
LOG.debug('Creating volume from snapshot: %s', snapshot['name'])
# Retrieve the CG name for the base volume
volume_name = (self.configuration.zadara_vol_name_template
% snapshot['volume_name'])
cg_name = self._get_volume_cg_name(volume_name)
if not cg_name:
LOG.error(_LE('Volume %(name)s not found'), {'name': volume_name})
raise exception.VolumeNotFound(volume_id=volume['id'])
snap_id = self._get_snap_id(cg_name, snapshot['name'])
if not snap_id:
LOG.error(_LE('Snapshot %(name)s not found'),
{'name': snapshot['name']})
raise exception.SnapshotNotFound(snapshot_id=snapshot['id'])
self.vpsa.send_cmd('create_clone_from_snap',
cg_name=cg_name,
name=self.configuration.zadara_vol_name_template
% volume['name'],
snap_id=snap_id)
if (volume['size'] > snapshot['volume_size']):
self.extend_volume(volume, volume['size'])
def create_cloned_volume(self, volume, src_vref):
"""Creates a clone of the specified volume."""
LOG.debug('Creating clone of volume: %s', src_vref['name'])
# Retrieve the CG name for the base volume
volume_name = (self.configuration.zadara_vol_name_template
% src_vref['name'])
cg_name = self._get_volume_cg_name(volume_name)
if not cg_name:
LOG.error(_LE('Volume %(name)s not found'), {'name': volume_name})
raise exception.VolumeNotFound(volume_id=volume['id'])
self.vpsa.send_cmd('create_clone',
cg_name=cg_name,
name=self.configuration.zadara_vol_name_template
% volume['name'])
if (volume['size'] > src_vref['size']):
self.extend_volume(volume, volume['size'])
def extend_volume(self, volume, new_size):
"""Extend an existing volume."""
# Get volume name
name = self.configuration.zadara_vol_name_template % volume['name']
(vpsa_vol, size) = self._get_vpsa_volume_name_and_size(name)
if not vpsa_vol:
msg = (_('Volume %(name)s could not be found. '
'It might be already deleted') % {'name': name})
LOG.error(msg)
raise exception.ZadaraVolumeNotFound(reason=msg)
if new_size < size:
raise exception.InvalidInput(
reason=_('%(new_size)s < current size %(size)s') %
{'new_size': new_size, 'size': size})
expand_size = new_size - size
self.vpsa.send_cmd('expand_volume',
vpsa_vol=vpsa_vol,
size=expand_size)
def create_export(self, context, volume, vg=None):
"""Irrelevant for VPSA volumes. Export created during attachment."""
pass
def ensure_export(self, context, volume):
"""Irrelevant for VPSA volumes. Export created during attachment."""
pass
def remove_export(self, context, volume):
"""Irrelevant for VPSA volumes. Export removed during detach."""
pass
def initialize_connection(self, volume, connector):
"""Attach volume to initiator/host.
During this call the VPSA exposes the volume to the given initiator. It also
creates a 'server' entity for the initiator (if one was not created before).
All necessary connection information is returned, including auth data.
Connection data (target, LUN) is not stored in the DB.
"""
# Get/Create server name for IQN
initiator_name = connector['initiator']
vpsa_srv = self._create_vpsa_server(initiator_name)
if not vpsa_srv:
raise exception.ZadaraServerCreateFailure(name=initiator_name)
# Get volume name
name = self.configuration.zadara_vol_name_template % volume['name']
vpsa_vol = self._get_vpsa_volume_name(name)
if not vpsa_vol:
raise exception.VolumeNotFound(volume_id=volume['id'])
# Get Active controller details
ctrl = self._get_active_controller_details()
if not ctrl:
raise exception.ZadaraVPSANoActiveController()
xml_tree = self.vpsa.send_cmd('list_vol_attachments',
vpsa_vol=vpsa_vol)
attach = self._xml_parse_helper(xml_tree, 'servers',
('name', vpsa_srv))
# Attach volume to server
if attach is None:
self.vpsa.send_cmd('attach_volume',
vpsa_srv=vpsa_srv,
vpsa_vol=vpsa_vol)
# Get connection info
xml_tree = self.vpsa.send_cmd('list_vol_attachments',
vpsa_vol=vpsa_vol)
server = self._xml_parse_helper(xml_tree, 'servers',
('iqn', initiator_name))
if server is None:
raise exception.ZadaraAttachmentsNotFound(name=name)
target = server.findtext('target')
lun = int(server.findtext('lun'))
if target is None or lun is None:
raise exception.ZadaraInvalidAttachmentInfo(
name=name,
reason=_('target=%(target)s, lun=%(lun)s') %
{'target': target, 'lun': lun})
properties = {}
properties['target_discovered'] = False
properties['target_portal'] = '%s:%s' % (ctrl['ip'], '3260')
properties['target_iqn'] = target
properties['target_lun'] = lun
properties['volume_id'] = volume['id']
properties['auth_method'] = 'CHAP'
properties['auth_username'] = ctrl['chap_user']
properties['auth_password'] = ctrl['chap_passwd']
LOG.debug('Attach properties: %(properties)s',
{'properties': properties})
return {'driver_volume_type':
('iser' if (self.configuration.safe_get('zadara_use_iser'))
else 'iscsi'), 'data': properties}
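# The dict returned above is consumed by the generic iSCSI/iSER connector;
# as a sketch (all values are placeholders), its 'data' payload looks like:
#   {'target_discovered': False, 'target_portal': '10.0.0.5:3260',
#    'target_iqn': 'iqn...', 'target_lun': 0, 'volume_id': '<uuid>',
#    'auth_method': 'CHAP', 'auth_username': '<chap-user>',
#    'auth_password': '<chap-secret>'}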
def terminate_connection(self, volume, connector, **kwargs):
"""Detach volume from the initiator."""
# Get server name for IQN
initiator_name = connector['initiator']
vpsa_srv = self._get_server_name(initiator_name)
if not vpsa_srv:
raise exception.ZadaraServerNotFound(name=initiator_name)
# Get volume name
name = self.configuration.zadara_vol_name_template % volume['name']
vpsa_vol = self._get_vpsa_volume_name(name)
if not vpsa_vol:
raise exception.VolumeNotFound(volume_id=volume['id'])
# Detach volume from server
self.vpsa.send_cmd('detach_volume',
vpsa_srv=vpsa_srv,
vpsa_vol=vpsa_vol)
def get_volume_stats(self, refresh=False):
"""Get volume stats.
If 'refresh' is True, update the stats first.
"""
if refresh:
self._update_volume_stats()
return self._stats
def _update_volume_stats(self):
"""Retrieve stats info from volume group."""
LOG.debug("Updating volume stats")
data = {}
backend_name = self.configuration.safe_get('volume_backend_name')
storage_protocol = ('iSER' if
(self.configuration.safe_get('zadara_use_iser'))
else 'iSCSI')
data["volume_backend_name"] = backend_name or self.__class__.__name__
data["vendor_name"] = 'Zadara Storage'
data["driver_version"] = self.VERSION
data["storage_protocol"] = storage_protocol
data['reserved_percentage'] = self.configuration.reserved_percentage
data['QoS_support'] = False
(total, free) = self._get_pool_capacity(self.configuration.
zadara_vpsa_poolname)
data['total_capacity_gb'] = total
data['free_capacity_gb'] = free
self._stats = data
|
|
import inspect
import mixins
from xml.etree import ElementTree as ET
def capfirst(s):
return s[0].upper() + s[1:]
_mixin_classes = None
def load_mixin_classes():
return dict(inspect.getmembers(mixins, inspect.isclass))
def get_mixin_classes():
global _mixin_classes
if _mixin_classes is None:
_mixin_classes = load_mixin_classes()
return _mixin_classes
class SpecList(object):
def __init__(self, module_specs=[], custom_code=""):
self.module_specs = module_specs
self.custom_code = custom_code
def write_to_xml(self, fname):
root = ET.Element("specs")
subelt = ET.Element("customCode")
subelt.text = self.custom_code
root.append(subelt)
for spec in self.module_specs:
root.append(spec.to_xml())
tree = ET.ElementTree(root)
def indent(elem, level=0):
i = "\n" + level*" "
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent(elem, level+1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
indent(tree.getroot())
tree.write(fname)
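# Sketch of the XML produced above (element and attribute names are taken
# from the to_xml() methods below; actual content depends on the specs):
#   <specs>
#     <customCode>...</customCode>
#     <moduleSpec name="..." superclass="..." code_ref="...">
#       <docstring>...</docstring>
#       <inputPortSpec arg="..." .../>
#       <outputPortSpec arg="..." .../>
#     </moduleSpec>
#   </specs>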
@staticmethod
def read_from_xml(fname):
module_specs = []
custom_code = ""
tree = ET.parse(fname)
for elt in tree.getroot():
if elt.tag == "moduleSpec":
module_specs.append(ModuleSpec.from_xml(elt))
elif elt.tag == "customCode":
custom_code = elt.text
retval = SpecList(module_specs, custom_code)
return retval
class ModuleSpec(object):
attrs = ["name", "superklass", "docstring", "output_type"]
def __init__(self, name, superklass, code_ref, docstring="", port_specs=[],
output_port_specs=[], output_type=None):
self.name = name
self.superklass = superklass
self.code_ref = code_ref
self.docstring = docstring
self.port_specs = port_specs
self.output_port_specs = output_port_specs
self.output_type = output_type
self._mixin_class = None
self._mixin_functions = None
def to_xml(self, elt=None):
if elt is None:
elt = ET.Element("moduleSpec")
elt.set("name", self.name)
elt.set("superclass", self.superklass)
elt.set("code_ref", self.code_ref)
if self.output_type is not None:
elt.set("output_type", self.output_type)
subelt = ET.Element("docstring")
subelt.text = str(self.docstring)
elt.append(subelt)
for port_spec in self.port_specs:
subelt = port_spec.to_xml()
elt.append(subelt)
for port_spec in self.output_port_specs:
subelt = port_spec.to_xml()
elt.append(subelt)
return elt
@classmethod
def from_xml(cls, elt):
name = elt.get("name", "")
superklass = elt.get("superclass", "")
code_ref = elt.get("code_ref", "")
output_type = elt.get("output_type", None)
docstring = ""
port_specs = []
output_port_specs = []
for child in elt.getchildren():
if child.tag == "inputPortSpec":
port_specs.append(InputPortSpec.from_xml(child))
elif child.tag == "outputPortSpec":
output_port_specs.append(OutputPortSpec.from_xml(child))
elif child.tag == "docstring":
if child.text:
docstring = child.text
return cls(name, superklass, code_ref, docstring, port_specs,
output_port_specs, output_type)
def get_returned_output_port_specs(self):
return [ps for ps in self.output_port_specs
if ps.property_key is not None]
def get_input_args(self):
args = [ps for ps in self.port_specs if ps.in_args]
args.sort(key=lambda ps: ps.arg_pos)
if len(args) > 1 and len(args) != (args[-1].arg_pos + 1):
raise ValueError("Argument positions are numbered incorrectly")
return args
def get_output_port_spec(self, compute_name):
for ps in self.output_port_specs:
if ps.compute_name == compute_name:
return ps
return None
def get_mixin_name(self):
return self.name + "Mixin"
def has_mixin(self):
if self._mixin_class is None:
mixin_classes = get_mixin_classes()
if self.get_mixin_name() in mixin_classes:
self._mixin_class = mixin_classes[self.get_mixin_name()]
else:
self._mixin_class = False
return (self._mixin_class is not False)
def get_mixin_function(self, f_name):
if not self.has_mixin():
return None
if self._mixin_functions is None:
self._mixin_functions = \
dict(inspect.getmembers(self._mixin_class, inspect.ismethod))
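# inspect.getsource() returns the full "def name(...): ..." text of the mixin
# method; the slice below keeps only what follows the first ':' (the body),
# which the get_compute_*/get_init helpers hand back to the spec's consumers.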
if f_name in self._mixin_functions:
s = inspect.getsource(self._mixin_functions[f_name])
return s[s.find(':')+1:].strip()
return None
def get_compute_before(self):
return self.get_mixin_function("compute_before")
def get_compute_inner(self):
return self.get_mixin_function("compute_inner")
def get_compute_after(self):
return self.get_mixin_function("compute_after")
def get_init(self):
return self.get_mixin_function("__init__")
class PortSpec(object):
xml_name = "portSpec"
attrs = {"name": "",
"port_type": None,
"docstring": ("", True),
"required": (False, False, True),
"show_port": (False, False, True),
"hide": (False, False, True),
"property_type": "",}
def __init__(self, arg, **kwargs):
self.arg = arg
self.set_defaults(**kwargs)
def set_defaults(self, **kwargs):
for attr, props in self.attrs.iteritems():
if isinstance(props, tuple):
default_val = props[0]
else:
default_val = props
if attr in kwargs:
setattr(self, attr, kwargs[attr])
else:
setattr(self, attr, default_val)
if not self.name:
if self.port_type == "__property__":
self.name = self.arg + "Properties"
else:
self.name = self.arg
def to_xml(self, elt=None):
if elt is None:
elt = ET.Element(self.xml_name)
elt.set("arg", self.arg)
for attr, props in self.attrs.iteritems():
attr_val = getattr(self, attr)
is_subelt = False
if isinstance(props, tuple):
default_val = props[0]
if len(props) > 1:
is_subelt = props[1]
else:
default_val = props
if default_val != attr_val:
if is_subelt:
subelt = ET.Element(attr)
subelt.text = str(getattr(self, attr))
elt.append(subelt)
else:
elt.set(attr, str(attr_val))
return elt
@classmethod
def internal_from_xml(cls, elt, obj=None):
arg = elt.get("arg", "")
if obj is None:
obj = cls(arg)
else:
obj.arg = arg
child_elts = {}
for child in elt.getchildren():
if child.tag not in child_elts:
child_elts[child.tag] = []
child_elts[child.tag].append(child)
kwargs = {}
for attr, props in obj.attrs.iteritems():
is_subelt = False
run_eval = False
if isinstance(props, tuple):
if len(props) > 1:
is_subelt = props[1]
if len(props) > 2:
run_eval = props[2]
attr_vals = []
if is_subelt:
if attr in child_elts:
attr_vals = [c.text for c in child_elts[attr]
if c.text is not None]
if attr == "docstring":
print "()() docstring attr_vals:", attr_vals
else:
attr_val = elt.get(attr)
if attr_val is not None:
attr_vals = [attr_val]
if len(attr_vals) > 1:
raise ValueError('Should have only one value for '
'attribute "%s"' % attr)
if len(attr_vals) > 0:
attr_val = attr_vals[0]
if run_eval:
try:
kwargs[attr] = eval(attr_val)
except (NameError, SyntaxError):
kwargs[attr] = attr_val
else:
kwargs[attr] = attr_val
obj.set_defaults(**kwargs)
return obj, child_elts
@classmethod
def from_xml(cls, elt, obj=None):
obj, child_elts = cls.internal_from_xml(elt, obj)
return obj
@staticmethod
def create_from_xml(elt):
if elt.tag == "inputPortSpec":
return InputPortSpec.from_xml(elt)
elif elt.tag == "outputPortSpec":
return OutputPortSpec.from_xml(elt)
elif elt.tag == "alternateSpec":
return AlternatePortSpec.from_xml(elt)
raise TypeError('Cannot create spec from element of type "%s"' %
elt.tag)
def is_property(self):
return self.port_type == "__property__"
def get_property_type(self):
return "Mpl%sProperties" % \
capfirst(self.property_type.rsplit('.', 1)[1])
def get_port_type(self):
if self.port_type is None:
return "basic:String"
return self.port_type
class InputPortSpec(PortSpec):
xml_name = "inputPortSpec"
attrs = {"entry_types": (None, True, True),
"values": (None, True, True),
"defaults": (None, True, True),
"translations": (None, True, True),
"in_kwargs": (True, False, True),
"in_args": (False, False, True),
"constructor_arg": (False, False, True),
"not_setp": (False, False, True),
"arg_pos": (-1, False, True),
}
attrs.update(PortSpec.attrs)
def __init__(self, arg, **kwargs):
if "alternate_specs" in kwargs and kwargs["alternate_specs"]:
self.alternate_specs = kwargs.pop("alternate_specs")
else:
self.alternate_specs = []
PortSpec.__init__(self, arg, **kwargs)
for spec in self.alternate_specs:
spec.set_parent(self)
def to_xml(self, elt=None):
elt = PortSpec.to_xml(self, elt)
for spec in self.alternate_specs:
# write the spec
subelt = spec.to_xml()
elt.append(subelt)
return elt
@classmethod
def from_xml(cls, elt, obj=None):
obj, child_elts = cls.internal_from_xml(elt, obj)
if "alternateSpec" in child_elts:
for child_elt in child_elts["alternateSpec"]:
spec = AlternatePortSpec.from_xml(child_elt)
spec.set_parent(obj)
obj.alternate_specs.append(spec)
return obj
def get_port_attr_dict(self):
attrs = {}
if self.values:
attrs["values"] = str(self.values)
if self.entry_types:
attrs["entry_types"] = str(self.entry_types)
if self.defaults:
attrs["defaults"] = str(self.defaults)
if self.docstring:
attrs["docstring"] = self.docstring
if not self.required and not self.show_port:
attrs["optional"] = True
return attrs
def get_port_attrs(self):
return str(self.get_port_attr_dict())
def has_alternate_versions(self):
return len(self.alternate_specs) > 0
class AlternatePortSpec(InputPortSpec):
xml_name = "alternateSpec"
def __init__(self, *args, **kwargs):
if len(args) < 1:
args = [""]
InputPortSpec.__init__(self, *args, **kwargs)
self._parent = None
def set_parent(self, parent):
self._parent = parent
if not self.name:
if self._parent.name.endswith("Sequence"):
base_name = self._parent.name[:-8]
elif self._parent.name.endswith("Scalar"):
base_name = self._parent.name[:-6]
else:
base_name = self._parent.name
if self.port_type == "basic:List":
self.name = base_name + "Sequence"
else:
self.name = base_name + "Scalar"
self.arg = self._parent.arg
def get_port_attr_dict(self):
print "CALLING AlternatePortSpec.get_port_attr_dict", self.arg
my_attrs = InputPortSpec.get_port_attr_dict(self)
print "=> my_attrs:", my_attrs
par_attrs = self._parent.get_port_attr_dict()
print "=> par_attrs:", par_attrs
for k, v in par_attrs.iteritems():
if k == 'defaults' or k == "values" or k == "entry_types" or \
k == "translations":
continue
if k not in my_attrs or my_attrs[k] is None:
my_attrs[k] = v
print my_attrs
return my_attrs
class OutputPortSpec(PortSpec):
xml_name = "outputPortSpec"
attrs = {"compute_name": "",
"property_key": None,
"plural": (False, False, True),
"compute_parent": "",
}
attrs.update(PortSpec.attrs)
def set_defaults(self, **kwargs):
PortSpec.set_defaults(self, **kwargs)
if self.compute_name == "":
if self.plural and self.is_property():
self.compute_name = self.arg + 's'
else:
self.compute_name = self.arg
@classmethod
def from_xml(cls, elt, obj=None):
obj, child_elts = cls.internal_from_xml(elt, obj)
output_type = elt.get("output_type")
if output_type is not None:
obj.port_type = output_type
return obj
def get_port_attrs(self):
attrs = {}
if self.docstring:
attrs["docstring"] = self.docstring
return str(attrs)
# class OutputPortSpec(object):
# attrs = ["name", "compute_name", "output_type", "docstring",
# "property_type", "property_key", "plural", "compute_parent"]
# def __init__(self, arg, name, compute_name, output_type, docstring="",
# property_type="", property_key=None, plural=False,
# compute_parent=""):
# self.arg = arg
# self.name = name
# self.compute_name = compute_name
# self.output_type = output_type
# self.docstring = docstring
# self.property_type = property_type
# self.property_key = property_key
# self.plural = plural
# self.compute_parent = compute_parent
# self._property_name = None
# def to_xml(self, elt=None):
# if elt is None:
# elt = ET.Element("outputPortSpec")
# elt.set("arg", self.arg)
# elt.set("name", self.name)
# elt.set("compute_name", self.compute_name)
# if self.output_type is not None:
# elt.set("output_type", self.output_type)
# else:
# elt.set("output_type", "__unknown__")
# elt.set("property_type", self.property_type)
# if self.property_key is None:
# elt.set("property_key", "__none__")
# else:
# elt.set("property_key", str(self.property_key))
# elt.set("plural", str(self.plural))
# elt.set("compute_parent", self.compute_parent)
# subelt = ET.Element("docstring")
# subelt.text = str(self.docstring)
# elt.append(subelt)
# return elt
# @classmethod
# def from_xml(cls, elt):
# arg = elt.get("arg", "")
# output_type = elt.get("output_type", "")
# if output_type == "__unknown__":
# output_type = None
# plural = eval(elt.get("plural", "False"))
# if output_type.lower() == "__property__":
# name = elt.get("name", arg + "Properties")
# compute_name = elt.get("compute_name", arg +
# ("s" if plural else ""))
# else:
# name = elt.get("name", arg)
# compute_name = elt.get("name", arg)
# property_type = elt.get("property_type", "")
# property_key = elt.get("property_key", None)
# if property_key is not None:
# if property_key == "__none__":
# property_key = None
# else:
# try:
# property_key = int(property_key)
# except ValueError:
# pass
# compute_parent = elt.get("compute_parent", "")
# docstring = ""
# for child in elt.getchildren():
# if child.tag == "docstring" and child.text:
# docstring = child.text
# return cls(arg, name, compute_name, output_type, docstring,
# property_type, property_key, plural, compute_parent)
# def is_property_output(self):
# return self.output_type.lower() == "__property__"
# def get_property_type(self):
# return "Mpl%sProperties" % \
# capfirst(self.property_type.rsplit('.', 1)[1])
# def get_port_type(self):
# if self.output_type is None:
# return "basic:String"
# return self.output_type
# class InputPortSpec(PortSpec):
# def __init__(self, arg="", name="", port_type=None, docstring="",
# required=False, show_port=False, hide=False, property_type="",
# entry_types=None, values=None, defaults=None,
# translations=None, alternate_specs=None, in_kwargs=True,
# in_args=False, constructor_arg=False):
# PortSpec.__init__(self, arg, name, port_type, docstring, required,
# show_port, hide, property_type)
# self.entry_types = entry_types
# self.values = values
# self.defaults = defaults
# self.translations = translations
# self.in_kwargs = in_kwargs
# self.in_args = in_args
# self.constructor_arg = constructor_arg
# if alternate_specs is None:
# self.alternate_specs = []
# else:
# self.alternate_specs = alternate_specs
# for spec in self.alternate_specs:
# spec.set_parent(self)
# def to_xml(self, elt=None):
# if elt is None:
# elt = ET.Element("inputPortSpec")
# PortSpec.to_xml(self, elt)
# elt.set("in_kwargs", str(self.in_kwargs))
# elt.set("in_args", str(self.in_args))
# elt.set("constructor_arg", str(self.constructor_arg))
# if self.entry_types is not None:
# subelt = ET.Element("entry_types")
# subelt.text = str(self.entry_types)
# elt.append(subelt)
# if self.values is not None:
# subelt = ET.Element("values")
# subelt.text = str(self.values)
# elt.append(subelt)
# if self.translations is not None:
# subelt = ET.Element("translations")
# subelt.text = str(self.translations)
# elt.append(subelt)
# if self.defaults is not None:
# subelt = ET.Element("defaults")
# subelt.text = str(self.defaults)
# elt.append(subelt)
# for spec in self.alternate_specs:
# # print "FOUND ALT:", spec.name, spec.alternate_specs, spec
# subelt = ET.Element("alternateSpec")
# spec.to_xml(subelt)
# elt.append(subelt)
# # if self.entry_types is not None and self.values is not None and \
# # self.defaults is not None and self.translations is not None:
# # for entry_type, value, default, translation in \
# # izip(self.entry_types, self.values, self.defaults,
# # self.translations):
# # subelt = ET.Element("entry")
# # subelt.set("type", str(entry_type))
# # valueselt = ET.Element("values")
# # valueselt.text = str(value)
# # subelt.append(valueselt)
# # transelt = ET.Element("translation")
# # transelt.text = str(translation)
# # subelt.append(transelt)
# # defaultelt = ET.Element("default")
# # if isinstance(default, basestring):
# # defaultelt.text = "'%s'" % default
# # else:
# # defaultelt.text = str(default)
# # subelt.append(defaultelt)
# # elt.append(subelt)
# docelt = ET.Element("docstring")
# docelt.text = self.docstring
# elt.append(docelt)
# return elt
# @classmethod
# def from_xml(cls, elt):
# arg = elt.get("arg", "")
# port_type = elt.get("port_type", "")
# if port_type == "__unknown__":
# port_type = None
# required = eval(elt.get("required", "False"))
# hide = eval(elt.get("hide", "False"))
# in_kwargs = eval(elt.get("in_kwargs", "True"))
# property_type = elt.get("property_type", "")
# constructor_arg = eval(elt.get("constructor_arg", "False"))
# if port_type is not None and port_type.lower() == "__property__":
# name = elt.get("name", arg + "Properties")
# else:
# name = elt.get("name", arg)
# entry_types = None
# values = None
# defaults = None
# translations = None
# docstring = ""
# alternate_specs = []
# for child in elt.getchildren():
# if child.tag == "entry_types":
# entry_types = eval(child.text)
# elif child.tag == "values":
# try:
# values = eval(child.text)
# except SyntaxError:
# values = [[child.text[2:-2]]]
# elif child.tag == "translations":
# try:
# translations = eval(child.text)
# except NameError:
# translations = child.text
# elif child.tag == "defaults":
# if child.text:
# defaults = eval(child.text)
# elif child.tag == "docstring":
# if child.text:
# docstring = child.text
# elif child.tag == "alternateSpec":
# alternate_specs.append(AlternatePortSpec.from_xml(child))
# # if child.tag == "entry":
# # if entry_types is None:
# # entry_types = []
# # values = []
# # defaults = []
# # translations = []
# # entry_types.append(child.get("type", None))
# # for subchild in child.getchildren():
# # if subchild.tag == "values":
# # values.append(eval(subchild.text))
# # elif subchild.tag == "translation":
# # try:
# # translation = eval(subchild.text)
# # except NameError:
# # translation = subchild.text
# # translations.append(translation)
# # elif subchild.tag == "default":
# # defaults.append(eval(subchild.text))
# # elif child.tag == "docstring":
# # docstring = child.text
# return cls(arg, name, port_type, docstring, required, hide,
# entry_types, values, defaults, translations,
# alternate_specs, in_kwargs, property_type, constructor_arg)
# # def has_scalar_version(self):
# # return self.scalar_type and self.scalar_type != self.port_type
# # def get_scalar_name(self):
# # return self.name + "Scalar"
# # def has_sequence_version(self):
# # return self.sequence_type and self.sequence_type != self.port_type
# # def get_sequence_name(self):
# # return self.name + "Sequence"
# # def has_other_version(self):
# # return self.has_scalar_version() or self.has_sequence_version()
# # def get_other_name(self):
# # if self.has_scalar_version():
# # return self.get_scalar_name()
# # elif self.has_sequence_version():
# # return self.get_sequence_name()
# # return None
# # def get_other_type(self):
# # if self.has_scalar_version():
# # return self.scalar_type
# # elif self.has_sequence_version():
# # return self.sequence_type
# # return None
def run():
specs = SpecList.read_from_xml("mpl_plots_raw.xml")
specs.write_to_xml("mpl_plots_raw_out.xml")
if __name__ == '__main__':
run()
|
|
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License
# Version 1.1 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS"
# basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
# License for the specific language governing rights and limitations
# under the License.
#
# The Original Code is Komodo code.
#
# The Initial Developer of the Original Code is ActiveState Software Inc.
# Portions created by ActiveState Software Inc are Copyright (C) 2000-2007
# ActiveState Software Inc. All Rights Reserved.
#
# Contributor(s):
# ActiveState Software Inc
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
#
# Contributors:
# Eric Promislow ([email protected])
"""Tcl parsing support for codeintel/tclcile.py"""
import string
import sys
import re
import textwrap
import logging
log = logging.getLogger("tcl_parser")
from SilverCity import ScintillaConstants
from codeintel2 import tcl_lexer
from codeintel2 import shared_lexer
from codeintel2 import shared_parser
from codeintel2.parser_data import Name_LineNum, VarInfo, Node, ClassNode, \
FileNode, ArgNode, MethodNode, ModuleNode, VariableNode, BlockNode, \
update_collection
from codeintel2.parser_data import VAR_KIND_GLOBAL, VAR_KIND_LOCAL
class TclClassifier(shared_parser.CommonClassifier):
_quote_patterns = {ScintillaConstants.SCE_TCL_STRING: re.compile(
'^\"(.*)\"$'),
# quote_patterns_cb() below also looks up SCE_TCL_CHARACTER, so provide it
ScintillaConstants.SCE_TCL_CHARACTER: re.compile("^'(.*)'$"),
ScintillaConstants.SCE_TCL_DEFAULT: re.compile('^.(.*).$'),
}
def get_builtin_type(self, tok, callback):
if self.is_number(tok):
numval = tok['text']
if numval.find(".") >= 0:
return "Float"
else:
return "Fixnum"
elif self.is_string(tok):
return "String"
return None
def is_any_operator(self, tok):
return tok['style'] == ScintillaConstants.SCE_TCL_OPERATOR
def is_comment(self, tok):
return tok['style'] == ScintillaConstants.SCE_TCL_COMMENT
def is_comment_structured(self, tok, callback):
return False
def is_identifier(self, tok, allow_keywords=False):
return (tok['style'] == ScintillaConstants.SCE_TCL_IDENTIFIER or
(allow_keywords and
tok['style'] == ScintillaConstants.SCE_TCL_WORD))
def is_interpolating_string(self, tok, callback):
return tok['style'] == ScintillaConstants.SCE_TCL_STRING
def is_keyword(self, tok, target):
return tok['style'] == ScintillaConstants.SCE_TCL_WORD and tok['text'] == target
def is_number(self, tok):
return tok['style'] == ScintillaConstants.SCE_TCL_NUMBER
def is_operator(self, tok, target):
return tok['style'] == ScintillaConstants.SCE_TCL_OPERATOR and tok['text'] == target
def is_string(self, tok):
return tok['style'] in [ScintillaConstants.SCE_TCL_STRING,
ScintillaConstants.SCE_TCL_CHARACTER,
ScintillaConstants.SCE_TCL_LITERAL
]
def is_symbol(self, tok):
return False
def quote_patterns_cb(self, tok):
tval = tok['text']
if tval[0] == '"':
return self._quote_patterns[ScintillaConstants.SCE_TCL_STRING]
elif tval[0] == '\'':
return self._quote_patterns[ScintillaConstants.SCE_TCL_CHARACTER]
else:
return self._quote_patterns[ScintillaConstants.SCE_TCL_DEFAULT] # Fallback
# Accessors for where we'd rather work with a style than call a predicate
# fn
@property
def style_identifier(self):
return ScintillaConstants.SCE_TCL_IDENTIFIER
@property
def style_operator(self):
return ScintillaConstants.SCE_TCL_OPERATOR
@property
def style_word(self):
return ScintillaConstants.SCE_TCL_WORD
lang_specific_classes = {"Tcl": TclClassifier,
"AOL": shared_parser.UDLClassifier}
leading_hash_re = re.compile(r'^\s*\#+\s*')
mostly_dashes = re.compile(r'\s*-{10}')
spaces_and_braces_re = re.compile(r'\s*\}\s*$')
def remove_hashes(lines):
len1 = len(lines)
if len1 == 0:
return []
set1 = [leading_hash_re.sub("", s) for s in lines]
if len1 > 0 and mostly_dashes.match(set1[0]):
del set1[0]
if len1 > 1 and mostly_dashes.match(set1[-1]):
del set1[-1]
return set1
# Parse Tcl code
class Parser:
def __init__(self, tokenizer, lang):
self.tokenizer = tokenizer
self.block_stack = []
self.tree = FileNode()
self.curr_node = self.tree
self.classifier = lang_specific_classes[lang]()
self.containers = {VAR_KIND_GLOBAL: [self.tree.global_vars],
VAR_KIND_LOCAL: [self.tree.local_vars]} # locals
def _get_fully_qualified_braced_name(self, start_line, start_column):
brace_count = 1
name_parts = []
while 1:
tok = self.tokenizer.get_next_token(skip_ws=0)
if tok['style'] == shared_lexer.EOF_STYLE:
break
elif self.classifier.is_any_operator(tok):
if tok['text'] == "{":
brace_count += 1
elif tok['text'] == "}":
brace_count -= 1
if brace_count == 0:
break
if tok['start_line'] > start_line or tok['start_column'] > start_column:
name_parts.append(" ")
start_column = tok['end_column'] + 1
start_line = tok['start_line']
name_parts.append(tok['text']) # XXX backslashes?
return "".join(name_parts)
def get_fully_qualified_name(self):
tok = self.tokenizer.get_next_token()
if tok['style'] == shared_lexer.EOF_STYLE:
return (None, None)
line_start = tok['start_line']
if self.classifier.is_operator(tok, "{"):
return (self._get_fully_qualified_braced_name(line_start, tok['end_column'] + 1), line_start)
name_start = tok['text']
# Watch out if it starts with a "::"
if name_start == "::":
col = tok['end_column'] + 1
tok = self.tokenizer.get_next_token()
if tok['start_column'] != col or not self.classifier.is_identifier(tok):
self.tokenizer.put_back(tok)
return (name_start, line_start)
name_start += tok['text']
col = tok['end_column'] + 1
while 1:
# Collect operator-type methods
tok = self.tokenizer.get_next_token()
if tok['start_column'] == col and self.classifier.is_operator(tok, "::"):
name_start += tok['text']
col += 2
else:
self.tokenizer.put_back(tok)
break
tok = self.tokenizer.get_next_token()
if tok['start_column'] == col and self.classifier.is_identifier(tok, True):
name_start += tok['text']
col = tok['end_column'] + 1
else:
self.tokenizer.put_back(tok)
break
return (name_start, line_start)
def parse(self):
while self.parse_aux(self.tree):
pass
return self.tree
def get_parsing_objects(self, kwd):
return {
"namespace": [ModuleNode, self.parse_aux],
"proc": [MethodNode, self.parse_method]
}.get(kwd, [None, None])
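# Illustrative Tcl input and the nodes produced (a sketch, not a fixture):
#   namespace eval ::mylib {      -> ModuleNode "mylib", parsed by parse_aux
#       proc greet {name args} {  -> MethodNode "greet" with arg "name" and varargs
#           ...
#       }
#   }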
def _parse_name_list(self):
vars = []
while True:
tok = self.tokenizer.get_next_token()
if tok['style'] == shared_lexer.EOF_STYLE or \
self.classifier.is_operator(tok, "}"):
break
if self.classifier.is_identifier(tok):
vars.append(tok['text'])
return vars
def parse_method(self, curr_node):
# Syntax: proc name { args } { body }
tok = self.tokenizer.get_next_token()
if self.classifier.is_operator(tok, "{"):
# Standard, keep going
do_regular_args = True
elif self.classifier.is_identifier(tok):
# Assume it's the one arg
if tok['text'] == "args":
curr_node.add_arg(tok['text'], None, "varargs")
else:
curr_node.add_arg(tok['text'])
curr_node.signature = "%s {%s}" % (curr_node.name, tok['text'])
do_regular_args = False
else:
self.tokenizer.put_back(tok)
return
if do_regular_args:
braceCount = 1
init_indentation = curr_node.indentation
tok_count = 0
tok_lim = 1000
self.tokenizer.start_sig()
argStart = True
while 1:
tok = self.tokenizer.get_next_token()
if self.classifier.is_any_operator(tok):
argStart = False
tval = tok['text']
if tval == "{":
braceCount += 1
if braceCount == 2:
argStart = True
elif tval == "}":
braceCount -= 1
if braceCount <= 0:
break
elif braceCount == 1:
argStart = True
elif argStart:
if braceCount == 2: # Wait for a } to get next arg.
argStart = False
if self.classifier.is_identifier(tok, True):
if tok['text'] == "args" and braceCount == 1:
# We need to peek at the next token
tok2 = self.tokenizer.get_next_token()
if self.classifier.is_operator(tok2, "}"):
curr_node.add_arg(tok['text'], None, "varargs")
break
else:
self.tokenizer.put_back(tok2)
curr_node.add_arg(tok['text'])
tok_count += 1
if tok_count > tok_lim and tok['start_column'] < init_indentation:
break
self.tokenizer.stop_sig()
# XXX Check white-space in the sig
# We don't know we've hit the end of the sig until we hit
# that final "}", so we need to pull it out.
curr_node.signature = "%s {%s}" % (curr_node.name,
spaces_and_braces_re.sub('', self.tokenizer.get_sig()))
# Now get the body
tok = self.tokenizer.get_next_token()
if not self.classifier.is_operator(tok, "{"):
# Give up
self.tokenizer.put_back(tok)
return
braceCount = 1
self.parse_aux(curr_node, 1) # Count the brace we just saw.
# end parse_method
def parse_assignment(self, tok_text, start_line, isLocal=True):
# Don't bother trying to type it yet.
# Figure out whether we're in a proc or not
if isLocal:
collectionA = self.containers[VAR_KIND_LOCAL]
else:
collectionA = self.containers[VAR_KIND_GLOBAL]
if len(collectionA) == 0 or collectionA[-1] is None:
return
possibleType = self._finishVarAssignment(
collectionA, tok_text, start_line)
update_collection(collectionA[-1], tok_text, start_line, possibleType)
def _finishVarAssignment(self, collectionA, var_name, start_line):
# XXX Add type info
return None
def parse_aux(self, curr_node, braceCount=0):
init_indentation = curr_node.indentation
tok_count = 0
tok_lim = 1000
cmdStart = True
curr_globals = {}
while 1:
tok = self.tokenizer.get_next_token()
if tok['style'] == shared_lexer.EOF_STYLE:
break
# style, text, start_column, start_line, end_column, end_line = tok
style, text = tok['style'], tok['text']
if style == self.classifier.style_word and \
(cmdStart or tok['start_column'] == self.tokenizer.get_curr_indentation()):
cmdStart = False
if text in ["namespace", "proc"]:
curr_indent = self.tokenizer.get_curr_indentation()
if text == "namespace":
tok1 = self.tokenizer.get_next_token()
if not (self.classifier.is_identifier(tok1, True) and tok1['text'] == "eval"):
continue
node_class, node_parser = self.get_parsing_objects(text)
if node_class is None:
sys.stderr.write(
"Couldn't get parsing objects for type %s\n" % text)
break
# Get the comments before further parsing.
comment_lines = remove_hashes(
self.tokenizer.curr_comment())
nm_token = self.get_fully_qualified_name()
fqname = nm_token[0]
if not fqname:
break
# Handle only local names for now
if fqname.startswith("::") and text == "namespace":
fqname = fqname[2:]
new_node = node_class(fqname, tok['start_line'])
new_node.doc_lines = comment_lines
new_node.indentation = curr_indent
self.block_stack.append(new_node)
curr_node.append_node(new_node)
# Push new containers on the symbol table
self.containers[VAR_KIND_LOCAL].append(new_node.local_vars)
node_parser(new_node) # Has self bound to it
self.block_stack.pop()
self.containers[VAR_KIND_LOCAL].pop()
# Clear any comment that's hanging around
self.tokenizer.clear_comments()
elif text == "package":
tok1 = self.tokenizer.get_next_token()
if self.classifier.is_identifier(tok1, True):
if tok1['text'] == "require":
tok2 = self.tokenizer.get_next_token()
if self.classifier.is_identifier(tok2, True) and tok2['text'] != "Tcl":
curr_node.imports.append(Name_LineNum(
tok2['text'], tok['start_line']))
elif text == "global":
# XXX: all tokens following 'global' should be declared
# vars
tok = self.tokenizer.get_next_token()
if self.classifier.is_identifier(tok, True):
curr_globals[tok['text']] = None
elif text == "set":
# XXX: Needs to handle lappend, append, incr, variable
# XXX: possibly dict set, array set, upvar, lassign,
# XXX: foreach, regsub (non-inline)
tok = self.tokenizer.get_next_token()
if self.classifier.is_identifier(tok, True):
if tok['text'] in curr_globals:
pass
else:
self.parse_assignment(tok['text'], tok[
'start_line'], isinstance(curr_node, MethodNode))
elif text == "lassign":
tok = self.tokenizer.get_next_token()
if self.classifier.is_operator(tok, "{"):
start_line = tok['start_line']
isLocal = isinstance(curr_node, MethodNode)
if isLocal:
collectionA = self.containers[VAR_KIND_LOCAL]
else:
collectionA = self.containers[VAR_KIND_GLOBAL]
vars = self._parse_name_list()
for v in vars:
update_collection(collectionA[-1], v, start_line)
elif self.classifier.is_any_operator(tok):
cmdStart = False
if text == "{":
braceCount += 1
elif text == "}":
braceCount -= 1
if braceCount <= 0:
break
elif text in (";", "["):
cmdStart = True
elif text == "\\":
# Skip the next token, whatever it is - bug 74850
tok = self.tokenizer.get_next_token()
else:
cmdStart = False
# Sanity check to make sure we haven't gone too far.
tok_count += 1
if tok_count > tok_lim and tok['start_column'] < init_indentation:
break
# end while
curr_node.set_line_end_num(self.tokenizer.curr_line_no())
return tok['style'] != shared_lexer.EOF_STYLE
# end parse_aux()
# end class Parser
if __name__ == "__main__":
if len(sys.argv) == 1:
sample_code = tcl_lexer.provide_sample_code()
fs = None
elif sys.argv[1] == "-":
fs = sys.stdin
closefs = False
else:
fs = open(sys.argv[1], "r")
closefs = True
if fs is not None:
sample_code = shared_lexer.read_and_detab(fs, closefs)
# fs comes back closed
tokenizer = tcl_lexer.TclLexer(sample_code)
parser = Parser(tokenizer, "Tcl")
tree = parser.parse()
print "Analyze the parse tree"
tree.dump()
|
|
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
from collections import namedtuple
from flask import g
from flask.ext.login import current_user
from .user_permissions import UserPermissions
from ggrc.models import get_model
Permission = namedtuple('Permission', 'action resource_type context_id')
_contributing_resource_types = {}
# Return the list of resource types that share the same context space.
# This is needed because permissions may be granted for, e.g., "Contract",
# while the join restriction only knows the base type, e.g., "Directive".
def get_contributing_resource_types(resource_type):
resource_types = _contributing_resource_types.get(resource_type, None)
if resource_types is None:
resource_types = [resource_type]
resource_model = get_model(resource_type)
if resource_model:
resource_manager = resource_model._sa_class_manager
resource_types.extend(
manager.class_.__name__ for manager in
resource_manager.subclass_managers(True))
_contributing_resource_types[resource_type] = resource_types
return resource_types
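# A minimal usage sketch (the subclass names below are made up purely to
# illustrate the expansion; real names come from ggrc.models):
#
#   >>> get_contributing_resource_types("Directive")
#   ['Directive', 'Contract', 'Policy']
#
# The result is memoized in _contributing_resource_types, so repeated lookups
# for the same type return the cached list without walking the SQLAlchemy
# subclass managers again.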
class DefaultUserPermissionsProvider(object):
def __init__(self, settings):
pass
def permissions_for(self, user):
return DefaultUserPermissions()
def resolve_permission_variable(value):
if type(value) is str or type(value) is unicode:
if value.startswith('$'):
if value == '$current_user':
return current_user
raise Exception(
                'The permission condition variable {0} is not defined!'.format(value))
else:
return value
else:
return value
def ContainsCondition(instance, value, list_property):
value = resolve_permission_variable(value)
list_value = getattr(instance, list_property)
return value in list_value
def IsCondition(instance, value, property_name):
value = resolve_permission_variable(value)
property_value = getattr(instance, property_name)
return value == property_value
def InCondition(instance, value, property_name):
value = resolve_permission_variable(value)
property_value = getattr(instance, property_name)
return property_value in value
"""
All functions with a signature
..
func(instance, **kwargs)
"""
_CONDITIONS_MAP = {
'contains': ContainsCondition,
'is': IsCondition,
'in': InCondition,
}
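# A sketch of how one condition entry is dispatched (illustrative data only;
# 'owners' is a hypothetical list property, and the real call site is
# DefaultUserPermissions._is_allowed_for below):
#
#   condition = {'condition': 'contains',
#                'terms': {'value': '$current_user', 'list_property': 'owners'}}
#   func = _CONDITIONS_MAP[condition['condition']]  # ContainsCondition
#   func(instance, **condition['terms'])            # current_user in instance.owners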
class DefaultUserPermissions(UserPermissions):
# super user, context_id 0 indicates all contexts
ADMIN_PERMISSION = Permission(
'__GGRC_ADMIN__',
'__GGRC_ALL__',
0,
)
def _admin_permission_for_context(self, context_id):
return Permission(
self.ADMIN_PERMISSION.action,
self.ADMIN_PERMISSION.resource_type,
context_id)
def _permission_match(self, permission, permissions):
return \
permission.context_id in \
permissions\
.get(permission.action, {})\
.get(permission.resource_type, {})\
.get('contexts', [])\
or permission.context_id in \
permissions\
.get(permission.action, {})\
.get(self.ADMIN_PERMISSION.resource_type, {})\
.get('contexts', [])
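    # The chain of .get() calls above implies a permissions dict shaped
    # roughly as follows (a sketch inferred from this class, not a schema
    # defined in this file):
    #
    #   {action: {resource_type: {'contexts': [context_id, ...],
    #                             'conditions': {context_id: [condition, ...]}}}}
    #
    # A permission matches when its context_id appears either under its own
    # resource_type or under the '__GGRC_ALL__' catch-all resource type.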
def _permissions(self):
return getattr(g, '_request_permissions', {})
def _is_allowed(self, permission):
permissions = self._permissions()
if self._permission_match(permission, permissions):
return True
if self._permission_match(self.ADMIN_PERMISSION, permissions):
return True
return self._permission_match(
self._admin_permission_for_context(permission.context_id),
permissions)
def _is_allowed_for(self, instance, action):
# Check for admin permission
if self._permission_match(self.ADMIN_PERMISSION, self._permissions()):
return True
conditions = self._permissions()\
.setdefault(action, {})\
.setdefault(instance._inflector.model_singular, {})\
.setdefault('conditions', {})\
.setdefault(instance.context_id, [])
#FIXME Check for basic resource permission
#Check any conditions applied per resource
if not conditions:
return True
for condition in conditions:
func = _CONDITIONS_MAP[str(condition['condition'])]
terms = condition.setdefault('terms', {})
if func(instance, **terms):
return True
return False
def is_allowed_create(self, resource_type, context_id):
"""Whether or not the user is allowed to create a resource of the specified
type in the context."""
return self._is_allowed(Permission('create', resource_type, context_id))
def is_allowed_read(self, resource_type, context_id):
"""Whether or not the user is allowed to read a resource of the specified
type in the context."""
return self._is_allowed(Permission('read', resource_type, context_id))
def is_allowed_read_for(self, instance):
return self._is_allowed_for(instance, 'read')
def is_allowed_update(self, resource_type, context_id):
"""Whether or not the user is allowed to update a resource of the specified
type in the context."""
return self._is_allowed(Permission('update', resource_type, context_id))
def is_allowed_update_for(self, instance):
return self._is_allowed_for(instance, 'update')
def is_allowed_delete(self, resource_type, context_id):
"""Whether or not the user is allowed to delete a resource of the specified
type in the context."""
return self._is_allowed(Permission('delete', resource_type, context_id))
def is_allowed_delete_for(self, instance):
return self._is_allowed_for(instance, 'delete')
def _get_contexts_for(self, action, resource_type):
# FIXME: (Security) When applicable, we should explicitly assert that no
# permissions are expected (e.g. that every user has ADMIN_PERMISSION).
permissions = self._permissions()
if self._permission_match(self.ADMIN_PERMISSION, permissions):
return None
# Get the list of contexts for a given resource type and any
# superclasses
resource_types = get_contributing_resource_types(resource_type)
ret = []
for resource_type in resource_types:
ret.extend(permissions\
.get(action, {})\
.get(resource_type, {})\
.get('contexts', []))
# Extend with the list of all contexts for which the user is an ADMIN
admin_list = list(
permissions.get(self.ADMIN_PERMISSION.action, {})\
.get(self.ADMIN_PERMISSION.resource_type, {})\
.get('contexts', ()))
ret.extend(admin_list)
return ret
def create_contexts_for(self, resource_type):
"""All contexts in which the user has create permission."""
return self._get_contexts_for('create', resource_type)
def read_contexts_for(self, resource_type):
"""All contexts in which the user has read permission."""
return self._get_contexts_for('read', resource_type)
def update_contexts_for(self, resource_type):
"""All contexts in which the user has update permission."""
return self._get_contexts_for('update', resource_type)
def delete_contexts_for(self, resource_type):
"""All contexts in which the user has delete permission."""
return self._get_contexts_for('delete', resource_type)
def is_allowed_view_object_page_for(self, instance):
return self._is_allowed(
Permission(
'view_object_page',
instance.__class__.__name__,
instance.context_id
)
)
def is_admin(self):
"""Whether the user has ADMIN permissions."""
return self._is_allowed(self.ADMIN_PERMISSION)
|
|
# -*- coding: utf-8 -*-
#
# Copyright 2017 dpa-infocom GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import asynctest
import os
from livebridge.controldata import ControlData
from livebridge.controldata.base import BaseControl
from livebridge.controldata.controlfile import ControlFile
from livebridge.controldata.storage import StorageControl
class BaseControlTest(asynctest.TestCase):
def setUp(self):
self.base = BaseControl()
async def test_check_control_change(self):
with self.assertRaises(NotImplementedError):
await self.base.check_control_change()
async def test_load(self):
with self.assertRaises(NotImplementedError):
await self.base.load("path")
async def test_close(self):
        assert await self.base.close() is None
class ControlDataTests(asynctest.TestCase):
def setUp(self):
self.config = {
"access_key": "foo",
"secret_key": "baz",
"region": "eu-central-1",
"sqs_s3_queue": "http://foo-queue",
}
self.control = ControlData(self.config)
async def test_set_client(self):
await self.control._set_client(path="file")
assert type(self.control.control_client) == ControlFile
await self.control._set_client(path="sql")
assert type(self.control.control_client) == StorageControl
async def test_check_control_change(self):
control_client = asynctest.MagicMock()
control_client.check_control_change = asynctest.CoroutineMock(return_value=True)
self.control.control_client = control_client
res = await self.control.check_control_change(control_path="/foo")
assert res is True
assert control_client.check_control_change.call_count == 1
assert control_client.check_control_change.call_args == asynctest.call(control_path="/foo")
async def test_iter_bridges(self):
file_path = os.path.join(os.path.dirname(__file__), "files", "control.yaml")
await self.control.load(file_path)
bridges = self.control.list_bridges()
assert len(bridges) == 2
for b in bridges:
assert "source_id" in b
assert "targets" in b
async def test_load_dynamo(self):
self.control.CONTROL_DATA_CLIENTS["dynamodb"].load = \
asynctest.CoroutineMock(return_value={"auth": {}, "bridges": []})
await self.control.load("dynamodb")
assert self.control.control_data == {'auth': {}, 'bridges': []}
async def test_load_auth_resolved(self):
file_path = os.path.join(os.path.dirname(__file__), "files", "control.yaml")
await self.control.load(file_path, resolve_auth=True)
control = self.control.control_data
assert control["auth"]["dev"]["api_key"] == "F00Baz"
assert control["auth"]["live"]["api_key"] == "Foobar"
assert control["bridges"][0]["source_id"] == "abcdefg"
assert control["bridges"][0]["endpoint"] == "https://example.com/api/"
assert control["bridges"][0]["type"] == "liveblog"
assert control["bridges"][0]["targets"][0]["event_id"] == "123456"
assert control["bridges"][0]["targets"][0]["type"] == "scribble"
assert control["bridges"][0]["targets"][0]["auth"] == control["auth"]["dev"]
assert control["bridges"][0]["targets"][1]["event_id"] == "654321"
assert control["bridges"][0]["targets"][1]["type"] == "another"
assert control["bridges"][0]["targets"][1]["auth"] == control["auth"]["live"]
assert control["bridges"][1]["source_id"] == 54321
assert control["bridges"][1]["endpoint"] == "https://foo.org/api/"
assert control["bridges"][1]["type"] == "liveblog"
assert control["bridges"][1]["auth"]["token"] == "token-str"
assert control["bridges"][1]["targets"][0]["event_id"] == "123456"
assert control["bridges"][1]["targets"][0]["type"] == "scribble"
assert control["bridges"][1]["targets"][0]["auth"] == control["auth"]["dev"]
async def test_load_auth_resolved_failed(self):
file_path = os.path.join(os.path.dirname(__file__), "files", "control-lookup-error.yaml")
with self.assertRaises(LookupError):
await self.control.load(file_path, resolve_auth=True)
async def test_load_auth_env_auth_resolved(self):
os.environ["LB_FOO_USER"] = "foo-user"
os.environ["LB_BAR_TOKEN"] = "bar.token"
file_path = os.path.join(os.path.dirname(__file__), "files", "control-env-notation.yaml")
await self.control.load(file_path, resolve_auth=True)
control = self.control.control_data
assert control["auth"]["foo"]["user"] == os.environ["LB_FOO_USER"]
assert control["auth"]["foo"]["pwd"] == "env.LB_NOT_DEFINED"
assert control["auth"]["bar"]["token"] == os.environ["LB_BAR_TOKEN"]
assert control["auth"]["bar"]["key"] == "env.NO_VALID_NAME"
# check nested auth isn't resolved
assert control["auth"]["foo"]["nested"] == {"foo": "env.LB_FOO_USER"}
@asynctest.fail_on(unused_loop=False)
def test_remove_doubles(self):
control = {'bridges': [
{
'endpoint': 'https://example.com/api/',
'type': 'liveblog',
'targets': [
{'event_id': '123456', 'type': 'scribble', 'auth': 'dev'},
{'event_id': '654321', 'type': 'another', 'auth': 'live'},
],
'source_id': 'abcdefg'
}, {
'source_id': "abcdef",
'endpoint': 'https://another.org/api/',
'type': 'foo',
'targets': [
{'target_id': '123456', 'type': 'baz', 'auth': 'dev'},
],
}, {
'source_id': 54321,
'endpoint': 'https://foo.org/api/',
'type': 'liveblog',
'targets': [
{'event_id': '123456', 'type': 'scribble', 'auth': 'dev'}],
'auth': 'slack'
}, {
'endpoint': 'https://example.com/api/',
'type': 'liveblog',
'targets': [
{'event_id': '1122233', 'type': 'scribble', 'auth': 'dev'},
{'event_id': '123456', 'type': 'scribble', 'auth': 'dev'},
{'event_id': '654321', 'type': 'another', 'auth': 'live'},
],
'source_id': 'abcdefg'
}, {
'source_id': 54321,
'endpoint': 'https://foo.org/api/',
'type': 'liveblog',
'targets': [
{'event_id': '123456', 'type': 'scribble', 'auth': 'dev'},
],
'auth': 'slack'
}
],
'auth': {
'dev': {'api_key': 'F00Baz', 'user': 'dev', 'password': 'pwd'},
'slack': {'token': 'token-str'},
'live': {'api_key': 'Foobar', 'user': 'prod', 'password': 'pwd2'}
}
}
cleared = self.control._remove_doubles(control)
assert len(cleared["bridges"]) == 3
assert len(cleared["bridges"][0]["targets"]) == 3
assert len(cleared["bridges"][1]["targets"]) == 1
assert len(cleared["bridges"][2]["targets"]) == 1
self.assertIn(control["bridges"][0]["targets"][0], cleared["bridges"][0]["targets"])
self.assertIn(control["bridges"][0]["targets"][1], cleared["bridges"][0]["targets"])
self.assertIn(control["bridges"][3]["targets"][0], cleared["bridges"][0]["targets"])
self.assertIn(control["bridges"][3]["targets"][1], cleared["bridges"][0]["targets"])
self.assertIn(control["bridges"][3]["targets"][1], cleared["bridges"][0]["targets"])
self.assertIn(control["bridges"][1]["targets"][0], cleared["bridges"][1]["targets"])
self.assertIn(control["bridges"][2]["targets"][0], cleared["bridges"][2]["targets"])
self.assertIn(control["bridges"][4]["targets"][0], cleared["bridges"][2]["targets"])
# test empty targets
control["bridges"][4]["targets"] = []
control["bridges"][2]["targets"] = []
cleared = self.control._remove_doubles(control)
assert len(cleared["bridges"]) == 3
assert len(cleared["bridges"][0]["targets"]) == 3
assert len(cleared["bridges"][1]["targets"]) == 1
assert len(cleared["bridges"][2]["targets"]) == 0
async def test_load_detect_changes(self):
file_path = os.path.join(os.path.dirname(__file__), "files", "control-changes-old.yaml")
await self.control.load(file_path, resolve_auth=True)
assert len(self.control.new_bridges) > 0
assert len(self.control.removed_bridges) == 0
file_path = os.path.join(os.path.dirname(__file__), "files", "control-changes-new.yaml")
await self.control.load(file_path, resolve_auth=True)
assert len(self.control.new_bridges) == 1
assert len(self.control.removed_bridges) == 1
assert self.control.removed_bridges == self.control.list_removed_bridges()
async def test_load_control_doc_sorted(self):
sorted_doc = {
"auth":{},
"bridges":[
{"a":"b", "c":"d", "e":"f", "g":"h", "targets":[]},
{"i":"j", "k":"l", "m":"n", "o":"p", "targets":[]}
]
}
doc_data = {
"auth":{},
"bridges":[
{"g":"h", "c":"d", "e":"f", "a":"b", "targets":[]},
{"targets":[], "m":"n", "i":"j", "o":"p", "k":"l"}
]
}
self.control.control_client = asynctest.MagicMock(spec=ControlFile)
self.control.control_client.load = asynctest.CoroutineMock(return_value=doc_data)
data = await self.control.load_control_doc("/tmp/foo")
assert self.control.control_client.load.call_count == 1
assert data == sorted_doc
async def test_save(self):
self.control.control_client = asynctest.MagicMock(spec=ControlFile)
self.control.control_client.save.return_value = True
path = "/tmp/foo"
data = {"foo": "baz"}
res = await self.control.save(path, data)
assert res is True
assert self.control.control_client.save.call_count == 1
@asynctest.fail_on(unused_loop=False)
def test_remove_inactives(self):
doc1 = {
"auth":{"foo":{"user":"foo", "pwd": "baz"}},
"bridges": [
{"label": "One", "active": False},
{"label": "Two", "active": True},
{"label": "Three"},
]}
res = self.control._remove_inactives(doc1)
assert res == {
"auth":{"foo":{"user":"foo", "pwd": "baz"}},
"bridges": [
{"label": "Two", "active": True},
{"label": "Three"},
]}
@asynctest.fail_on(unused_loop=False)
def test_list_new_bridges(self):
self.control.new_bridges = ["foo", "bar"]
assert self.control.list_new_bridges() == ["foo", "bar"]
@asynctest.fail_on(unused_loop=False)
def test_list_removed_bridges(self):
self.control.removed_bridges = ["foo", "baz"]
assert self.control.list_removed_bridges() == ["foo", "baz"]
async def test_auto_update(self):
self.control.control_client = ControlFile()
assert self.control.control_client.auto_update is True
file_path = os.path.join(os.path.dirname(__file__), "files", "control.yaml")
await self.control.control_client.load(file_path)
assert self.control.control_client.auto_update is False
assert self.control.is_auto_update() is False
await self.control._set_client(path="sql")
assert self.control.is_auto_update() is True
|
|
"""Predict Stellar Mass Function including scatter in the
Stellar-Halo-Mass relation. Plot results in file 'HaloMassFunctions.pdf'.
"""
#-----------------------------------------------------------------------------
# Kenza Arraki (https://github.com/karraki), September 2014
#-----------------------------------------------------------------------------
import os.path
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
import hmf
import dhmf
import ihmf
import shm
# Create array of input halo masses
Mhx = 10.**np.arange(8.,11.6,0.01)
# Start plots
pp = PdfPages('HaloMassFunctions.pdf')
# 1. Plot LG mass functions
fig, ax = plt.subplots()
plt.plot(Mhx, hmf.brook(Mhx),'r-',zorder=-1)
plt.plot(Mhx, hmf.garrisonkimmel(Mhx,0.7e12,1.2e12),'g-',zorder=-2)
plt.plot(Mhx, hmf.garrisonkimmel(Mhx,1.2e12,2.2e12),'b--',zorder=-3)
ax.semilogx()
ax.semilogy()
plt.xlim(1.e8,1.e12)
plt.ylim(ymin=1)
plt.xlabel(r'M$_\mathregular{vir}$ (M$_{\odot})$')
plt.ylabel('N(>M)')
plt.title('Halo Mass Functions')
plt.savefig(pp, format='pdf', bbox_inches='tight')
# 2. Plot differential mass functions
fig, ax = plt.subplots()
plt.plot(Mhx, hmf.brook(Mhx),'c-',zorder=-1)
Mhm = ihmf.brook(np.arange(1e3)+1.)
NMhm = np.zeros_like(Mhm)
for i in range(len(Mhm)):
NMhm[i] = len(Mhm[:i])
plt.plot(Mhm, NMhm,'r.')
ax.semilogx()
ax.semilogy()
plt.xlim(1.e8,1.e12)
plt.ylim(1,3e3)
plt.xlabel(r'M$_\mathregular{vir}$ (M$_{\odot})$')
plt.ylabel('N(>M)')
plt.plot(hmf.brook(Mhx), Mhx,'r-',zorder=-1)
plt.title('Differential Mass Functions')
plt.savefig(pp, format='pdf', bbox_inches='tight')
# 3. Plot SHM relation
Mst_garrisonkimmel = shm.garrisonkimmel(Mhm)
Mst_brook = shm.brook(Mhm)
fig, ax = plt.subplots()
plt.plot(Mhx, shm.garrisonkimmel(Mhx),'r-.',zorder=-2)
plt.plot(Mhx, shm.brook(Mhx),'c--',zorder=-1)
plt.plot(Mhm, Mst_garrisonkimmel,'k.')
plt.plot(Mhm, Mst_brook,'k.')
ax.semilogx()
ax.semilogy()
plt.xlim(1e8,1e12)
plt.ylim(1e1,1e12)
plt.xlabel(r'M$_\mathregular{vir}$ (M$_{\odot})$')
plt.ylabel(r'M$_{*}$ (M$_{\odot})$')
plt.title('Stellar Halo Mass Relations')
plt.savefig(pp, format='pdf', bbox_inches='tight')
# Add scatter to halo mass values
Mhm_new = np.zeros_like(Mhm)
for i in range(len(Mhm_new)):
    Mhm_new[i] = np.random.normal(Mhm[i], 1.5*Mhm[i])
Mhm_new[Mhm_new < 0.] = 0.
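# With sigma = 1.5*Mhm, a draw is negative whenever the standard normal
# deviate falls below -Mhm/(1.5*Mhm) = -2/3, so roughly a quarter of the
# samples (Phi(-2/3) ~ 0.25) are clipped to zero here.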
Mst_garrisonkimmel_new = shm.garrisonkimmel(Mhm_new)
Mst_brook_new = shm.brook(Mhm_new)
# Write out file with the scatter values
with open('estimated_mstar.txt','w') as f:
f.write('# orig_Mh scatter_Mh orig_garrisonkimmel_M*'+
' scatter_garrisonkimmel_M* orig_brook_M* scatter_brook_M*\n')
for i in range(len(Mhm_new)):
f.write('{:e} '.format(Mhm[i])+
'{:e} '.format(Mhm_new[i])+
'{:e} '.format(Mst_garrisonkimmel[i])+
'{:e} '.format(Mst_garrisonkimmel_new[i])+
'{:e} '.format(Mst_brook[i])+
'{:e} '.format(Mst_brook_new[i])+'\n')
# 4. Plot stellar halo mass function with scatter
fig, ax = plt.subplots()
plt.plot(Mhm, Mst_garrisonkimmel_new, 'r.', zorder=-1)
plt.plot(Mhm, Mst_brook_new, 'c.', zorder=-1)
plt.plot(Mhm, Mst_garrisonkimmel, 'k-')
plt.plot(Mhm, Mst_brook, 'k-')
ax.semilogx()
ax.semilogy()
plt.xlim(1.e8,1.e12)
plt.ylim(1.e1,1.e12)
plt.xlabel(r'M$_\mathregular{vir}$ (M$_{\odot})$')
plt.ylabel(r'M$_{*}$ (M$_{\odot})$')
plt.title('Stellar Halo Mass Function with Scatter')
plt.savefig(pp, format='pdf', bbox_inches='tight')
# 5. Plot stellar halo mass function with scatter zoomed in
fig, ax = plt.subplots()
plt.plot(Mhm, Mst_garrisonkimmel_new, 'r.', zorder=1, mfc='none')
plt.plot(Mhm, Mst_brook_new, 'c.', zorder=-1, mfc='none')
plt.plot(Mhm, Mst_garrisonkimmel, 'k-', zorder=2, mfc='none')
plt.plot(Mhm, Mst_brook, 'k-', zorder=2, mfc='none')
ax.semilogx()
ax.semilogy()
plt.xlim(1.5e8,3.e9)
plt.ylim(1.e-2,3.e5)
plt.xlabel(r'M$_\mathregular{vir}$ (M$_{\odot})$')
plt.ylabel(r'M$_{*}$ (M$_{\odot})$')
plt.title('Stellar Halo Mass Function with Scatter, Zoomed In')
plt.savefig(pp, format='pdf', bbox_inches='tight')
# 6. Plot halo masses to check scatter
fig, ax = plt.subplots()
plt.plot([1.e1,1.e12],[1.e1,1.e12],'k-',zorder=10)
plt.plot(Mhm, Mhm_new, 'g.')
ax.semilogx()
ax.semilogy()
plt.xlim(1.e8,1.e12)
plt.ylim(1.e8,1.e12)
plt.xlabel(r'M$_\mathregular{vir}$ (M$_{\odot})$')
plt.ylabel(r'M$_\mathregular{vir, scatter}$ (M$_{\odot})$')
plt.title('Halo Mass Versus Self with Scatter')
plt.savefig(pp, format='pdf', bbox_inches='tight')
# 7. Plot stellar masses to check scatter
fig, ax = plt.subplots()
plt.plot([1.e1,1.e12],[1.e1,1.e12],'k-',zorder=10)
plt.plot(Mst_garrisonkimmel, Mst_garrisonkimmel_new, 'r.')
plt.plot(Mst_brook, Mst_brook_new, 'c.')
ax.semilogx()
ax.semilogy()
plt.xlim(1.e1,1.e12)
plt.ylim(1.e1,1.e12)
plt.xlabel(r'M$_\mathregular{*}$ (M$_{\odot})$')
plt.ylabel(r'M$_\mathregular{*, scatter}$ (M$_{\odot})$')
plt.title('Stellar Mass Versus Self with Scatter')
plt.savefig(pp, format='pdf', bbox_inches='tight')
# 8. Plot Stellar Mass Function predicted with scatter
fig, ax = plt.subplots()
# Read in observational data
if os.path.isfile('mcconnachie.dat'):
f = 'mcconnachie.dat'
dtypes = ['S4','S15','S9','f','f','f','f','f','f','f']
grp = np.loadtxt(f,comments='#',dtype=dtypes[0],usecols=[0])
name = np.loadtxt(f,comments='#',dtype=dtypes[1],usecols=[1])
mtype = np.loadtxt(f,comments='#',dtype=dtypes[2],usecols=[2])
dmw = np.loadtxt(f,comments='#',dtype=dtypes[3],usecols=[3])
dm31 = np.loadtxt(f,comments='#',dtype=dtypes[4],usecols=[4])
dlg = np.loadtxt(f,comments='#',dtype=dtypes[5],usecols=[5])
r12 = np.loadtxt(f,comments='#',dtype=dtypes[6],usecols=[6])
obs_mst0 = np.loadtxt(f,comments='#',dtype=dtypes[7],usecols=[7])*1.e6
obs_mhi = np.loadtxt(f,comments='#',dtype=dtypes[8],usecols=[8])*1.e6
obs_mdyn = np.loadtxt(f,comments='#',dtype=dtypes[9],usecols=[9])*1.e6
obs_mst = obs_mst0[dlg < 1800.]
    obs_mst = obs_mst[~np.isnan(obs_mst)]
nobs_mst = np.zeros(len(obs_mst))
sort_ind = sorted(range(len(obs_mst)), key=obs_mst.__getitem__)
sort_obs_mst = obs_mst[sort_ind]
for i in range(len(obs_mst)):
nobs_mst[i] = len(sort_obs_mst[i:])
plt.plot(sort_obs_mst, nobs_mst, 'g:')
NMhm = np.zeros_like(Mhm)
sort_ind = sorted(range(len(Mhm)), key=Mhm.__getitem__)
sort_Mhm = Mhm[sort_ind]
for i in range(len(Mhm)):
NMhm[i] = len(sort_Mhm[i:])
plt.plot(sort_Mhm,NMhm,'k--')
NMhm_new = np.zeros_like(Mhm_new)
sort_ind = sorted(range(len(Mhm_new)), key=Mhm_new.__getitem__)
sort_Mhm_new = Mhm_new[sort_ind]
for i in range(len(Mhm_new)):
NMhm_new[i] = len(sort_Mhm_new[i:])
plt.plot(sort_Mhm_new,NMhm_new,'k-')
NMst_garrisonkimmel = np.zeros_like(Mst_garrisonkimmel)
sort_ind = sorted(range(len(Mst_garrisonkimmel)),
key=Mst_garrisonkimmel.__getitem__)
sort_Mst_garrisonkimmel = Mst_garrisonkimmel[sort_ind]
for i in range(len(Mst_garrisonkimmel)):
NMst_garrisonkimmel[i] = len(sort_Mst_garrisonkimmel[i:])
plt.plot(sort_Mst_garrisonkimmel,NMst_garrisonkimmel,'r--')
NMst_brook = np.zeros_like(Mst_brook)
sort_ind = sorted(range(len(Mst_brook)), key=Mst_brook.__getitem__)
sort_Mst_brook = Mst_brook[sort_ind]
for i in range(len(Mst_brook)):
NMst_brook[i] = len(sort_Mst_brook[i:])
plt.plot(sort_Mst_brook,NMst_brook,'c--')
NMst_garrisonkimmel_new = np.zeros_like(Mst_garrisonkimmel_new)
sort_ind = sorted(range(len(Mst_garrisonkimmel_new)),
key=Mst_garrisonkimmel_new.__getitem__)
sort_Mst_garrisonkimmel_new = Mst_garrisonkimmel_new[sort_ind]
for i in range(len(Mst_garrisonkimmel_new)):
NMst_garrisonkimmel_new[i] = len(sort_Mst_garrisonkimmel_new[i:])
plt.plot(sort_Mst_garrisonkimmel_new,NMst_garrisonkimmel_new,'r-')
NMst_brook_new = np.zeros_like(Mst_brook_new)
sort_ind = sorted(range(len(Mst_brook_new)),
key=Mst_brook_new.__getitem__)
sort_Mst_brook_new = Mst_brook_new[sort_ind]
for i in range(len(Mst_brook_new)):
NMst_brook_new[i] = len(sort_Mst_brook_new[i:])
plt.plot(sort_Mst_brook_new,NMst_brook_new,'c-')
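# Note: each sort-and-count loop above builds N(>M) as len(sorted[i:]), which
# for an ascending-sorted array is just len(arr) - i. An equivalent vectorized
# sketch (not used here) would be:
#
#   sort_Mst_brook_new = np.sort(Mst_brook_new)
#   NMst_brook_new = np.arange(len(sort_Mst_brook_new), 0, -1)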
plt.scatter(1.e3, 82., marker='*', c='b', s=100, edgecolor='none')
plt.scatter(1.e3, 160., marker='*', c='gray', s=100, edgecolor='none')
plt.scatter(1.e3, 240., marker='*', c='gray', s=100, edgecolor='none')
#plt.scatter(1.e3, 82., marker='>', c='b', s=100, edgecolor='none')
plt.plot([3e6,3e6],[1,1e3],'k:')
ax.semilogx()
ax.semilogy()
plt.xlim(1.e1,1.e11)
plt.ylim(1,1e3)
plt.xlabel(r'M$_{*}$ (M$_{\odot})$')
plt.ylabel('N(>M)')
plt.title('Stellar Mass Function with Scatter')
plt.savefig(pp, format='pdf', bbox_inches='tight')
pp.close()
|
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Command-line interface to the Heat API.
"""
import argparse
import httplib2
import logging
import sys
from keystoneclient.v2_0 import client as ksclient
from heatclient import client as heatclient
from heatclient.common import utils
from heatclient import exc
logger = logging.getLogger(__name__)
class HeatShell(object):
def get_base_parser(self):
parser = argparse.ArgumentParser(
prog='heat',
description=__doc__.strip(),
epilog='See "heat help COMMAND" '
'for help on a specific command.',
add_help=False,
formatter_class=HelpFormatter,
)
# Global arguments
parser.add_argument('-h', '--help',
action='store_true',
help=argparse.SUPPRESS)
parser.add_argument('-d', '--debug',
default=bool(utils.env('HEATCLIENT_DEBUG')),
action='store_true',
help='Defaults to env[HEATCLIENT_DEBUG]')
parser.add_argument('-v', '--verbose',
default=False, action="store_true",
help="Print more verbose output")
parser.add_argument('-k', '--insecure',
default=False,
action='store_true',
help="Explicitly allow the client to perform"
"\"insecure\" SSL (https) requests. The server's "
"certificate will not be verified against any "
"certificate authorities. "
"This option should be used with caution.")
parser.add_argument('--cert-file',
help='Path of certificate file to use in SSL '
                                 'connection. This file can optionally be prepended '
'with the private key.')
parser.add_argument('--key-file',
                            help='Path of client key to use in SSL connection. '
'This option is not necessary if your key is'
' prepended to your cert file.')
parser.add_argument('--ca-file',
help='Path of CA SSL certificate(s) used to verify'
' the remote server\'s certificate. Without this'
' option the client looks'
' for the default system CA certificates.')
parser.add_argument('--timeout',
default=600,
help='Number of seconds to wait for a response')
parser.add_argument('--os-username',
default=utils.env('OS_USERNAME'),
help='Defaults to env[OS_USERNAME]')
parser.add_argument('--os_username',
help=argparse.SUPPRESS)
parser.add_argument('--os-password',
default=utils.env('OS_PASSWORD'),
help='Defaults to env[OS_PASSWORD]')
parser.add_argument('--os_password',
help=argparse.SUPPRESS)
parser.add_argument('--os-tenant-id',
default=utils.env('OS_TENANT_ID'),
help='Defaults to env[OS_TENANT_ID]')
parser.add_argument('--os_tenant_id',
help=argparse.SUPPRESS)
parser.add_argument('--os-tenant-name',
default=utils.env('OS_TENANT_NAME'),
help='Defaults to env[OS_TENANT_NAME]')
parser.add_argument('--os_tenant_name',
help=argparse.SUPPRESS)
parser.add_argument('--os-auth-url',
default=utils.env('OS_AUTH_URL'),
help='Defaults to env[OS_AUTH_URL]')
parser.add_argument('--os_auth_url',
help=argparse.SUPPRESS)
parser.add_argument('--os-region-name',
default=utils.env('OS_REGION_NAME'),
help='Defaults to env[OS_REGION_NAME]')
parser.add_argument('--os_region_name',
help=argparse.SUPPRESS)
parser.add_argument('--os-auth-token',
default=utils.env('OS_AUTH_TOKEN'),
help='Defaults to env[OS_AUTH_TOKEN]')
parser.add_argument('--os_auth_token',
help=argparse.SUPPRESS)
parser.add_argument('--os-no-client-auth',
default=utils.env('OS_NO_CLIENT_AUTH'),
action='store_true',
help="Do not contact keystone for a token.\
Defaults to env[OS_NO_CLIENT_AUTH]")
parser.add_argument('--heat-url',
default=utils.env('HEAT_URL'),
help='Defaults to env[HEAT_URL]')
parser.add_argument('--heat_url',
help=argparse.SUPPRESS)
parser.add_argument('--heat-api-version',
default=utils.env('HEAT_API_VERSION', default='1'),
help='Defaults to env[HEAT_API_VERSION] or 1')
parser.add_argument('--heat_api_version',
help=argparse.SUPPRESS)
parser.add_argument('--os-service-type',
default=utils.env('OS_SERVICE_TYPE'),
help='Defaults to env[OS_SERVICE_TYPE]')
parser.add_argument('--os_service_type',
help=argparse.SUPPRESS)
parser.add_argument('--os-endpoint-type',
default=utils.env('OS_ENDPOINT_TYPE'),
help='Defaults to env[OS_ENDPOINT_TYPE]')
parser.add_argument('--os_endpoint_type',
help=argparse.SUPPRESS)
parser.add_argument('-t', '--token-only',
default=bool(False),
action='store_true',
help=argparse.SUPPRESS)
return parser
def get_subcommand_parser(self, version):
parser = self.get_base_parser()
self.subcommands = {}
subparsers = parser.add_subparsers(metavar='<subcommand>')
submodule = utils.import_versioned_module(version, 'shell')
self._find_actions(subparsers, submodule)
self._find_actions(subparsers, self)
return parser
def _find_actions(self, subparsers, actions_module):
for attr in (a for a in dir(actions_module) if a.startswith('do_')):
            # I prefer commands to be hyphen-separated instead of underscore-separated.
command = attr[3:].replace('_', '-')
callback = getattr(actions_module, attr)
desc = callback.__doc__ or ''
help = desc.strip().split('\n')[0]
arguments = getattr(callback, 'arguments', [])
subparser = subparsers.add_parser(command,
help=help,
description=desc,
add_help=False,
formatter_class=HelpFormatter)
subparser.add_argument('-h', '--help',
action='help',
help=argparse.SUPPRESS)
self.subcommands[command] = subparser
for (args, kwargs) in arguments:
subparser.add_argument(*args, **kwargs)
subparser.set_defaults(func=callback)
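    # A sketch of the do_* convention this method relies on (hypothetical
    # subcommand; the real ones live in the versioned shell module):
    #
    #   @utils.arg('id', metavar='<NAME or ID>', help='Name or ID of stack')
    #   def do_stack_show(hc, args):
    #       """Describe the stack."""
    #       ...
    #
    # would be exposed as "heat stack-show", with the first docstring line
    # used as its one-line help text and the @utils.arg entries turned into
    # subparser arguments.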
def _get_ksclient(self, **kwargs):
"""Get an endpoint and auth token from Keystone.
:param username: name of user
:param password: user's password
:param tenant_id: unique identifier of tenant
:param tenant_name: name of tenant
:param auth_url: endpoint to authenticate against
"""
return ksclient.Client(username=kwargs.get('username'),
password=kwargs.get('password'),
tenant_id=kwargs.get('tenant_id'),
tenant_name=kwargs.get('tenant_name'),
auth_url=kwargs.get('auth_url'),
insecure=kwargs.get('insecure'))
def _get_endpoint(self, client, **kwargs):
"""Get an endpoint using the provided keystone client."""
return client.service_catalog.url_for(
service_type=kwargs.get('service_type') or 'orchestration',
endpoint_type=kwargs.get('endpoint_type') or 'publicURL')
def _setup_debugging(self, debug):
if debug:
logging.basicConfig(
format="%(levelname)s (%(module)s:%(lineno)d) %(message)s",
level=logging.DEBUG)
httplib2.debuglevel = 1
def _setup_verbose(self, verbose):
if verbose:
exc.verbose = 1
def main(self, argv):
# Parse args once to find version
parser = self.get_base_parser()
(options, args) = parser.parse_known_args(argv)
self._setup_debugging(options.debug)
self._setup_verbose(options.verbose)
# build available subcommands based on version
api_version = options.heat_api_version
subcommand_parser = self.get_subcommand_parser(api_version)
self.parser = subcommand_parser
# Handle top-level --help/-h before attempting to parse
# a command off the command line
if options.help or not argv:
self.do_help(options)
return 0
# Parse args again and call whatever callback was selected
args = subcommand_parser.parse_args(argv)
# Short-circuit and deal with help command right away.
if args.func == self.do_help:
self.do_help(args)
return 0
if not args.os_username:
raise exc.CommandError("You must provide a username via"
" either --os-username or env[OS_USERNAME]")
if not args.os_password:
raise exc.CommandError("You must provide a password via"
" either --os-password or env[OS_PASSWORD]")
if not (args.os_tenant_id or args.os_tenant_name):
raise exc.CommandError("You must provide a tenant_id via"
" either --os-tenant-id or via "
"env[OS_TENANT_ID]")
if not args.os_auth_url:
raise exc.CommandError("You must provide an auth url via"
" either --os-auth-url or via "
"env[OS_AUTH_URL]")
if args.os_no_client_auth and not args.heat_url:
raise exc.CommandError("If you specify --os-no-client-auth"
" you must also specify a Heat API URL "
"via either --heat-url or "
"env[HEAT_URL]")
kwargs = {
'username': args.os_username,
'password': args.os_password,
'tenant_id': args.os_tenant_id,
'tenant_name': args.os_tenant_name,
'auth_url': args.os_auth_url,
'service_type': args.os_service_type,
'endpoint_type': args.os_endpoint_type,
'insecure': args.insecure
}
endpoint = args.heat_url
if not args.os_no_client_auth:
_ksclient = self._get_ksclient(**kwargs)
token = args.os_auth_token or _ksclient.auth_token
kwargs = {
'token': token,
'insecure': args.insecure,
'timeout': args.timeout,
'ca_file': args.ca_file,
'cert_file': args.cert_file,
'key_file': args.key_file,
'username': args.os_username,
'password': args.os_password
}
if not endpoint:
endpoint = self._get_endpoint(_ksclient, **kwargs)
client = heatclient.Client(api_version, endpoint, **kwargs)
try:
args.func(client, args)
except exc.Unauthorized:
raise exc.CommandError("Invalid OpenStack Identity credentials.")
@utils.arg('command', metavar='<subcommand>', nargs='?',
help='Display help for <subcommand>')
def do_help(self, args):
"""Display help about this program or one of its subcommands."""
if getattr(args, 'command', None):
if args.command in self.subcommands:
self.subcommands[args.command].print_help()
else:
raise exc.CommandError("'%s' is not a valid subcommand" %
args.command)
else:
self.parser.print_help()
class HelpFormatter(argparse.HelpFormatter):
def start_section(self, heading):
# Title-case the headings
heading = '%s%s' % (heading[0].upper(), heading[1:])
super(HelpFormatter, self).start_section(heading)
def main():
try:
HeatShell().main(sys.argv[1:])
except Exception as e:
print >> sys.stderr, e
sys.exit(1)
if __name__ == "__main__":
main()
|
|
# -*- coding: utf-8 -*-
from sqlalchemy import event
from slugify import slugify
from flask import current_app as app
from flask import url_for
from flask_editablesite.database import (
Column,
db,
Model,
SurrogatePK,
Slugged,
TimeStamped,
Confirmable,
update_slug_before_save,
update_timestamps_before_insert,
update_timestamps_before_update,
update_confirmedat_before_save,
)
class ShortTextContentBlock(SurrogatePK, Slugged, TimeStamped,
Confirmable, Model):
__tablename__ = 'short_text_content_block'
content = Column(db.String(255), nullable=False, default='')
__table_args__ = (
db.UniqueConstraint('slug', name='_stcb_slug_uc'),
db.Index('_stcb_slug_active_ix', 'slug', 'active'))
def __repr__(self):
return self.title
@classmethod
def default_content(cls):
ret = {}
title = 'Site welcome prefix'
slug = slugify(title, to_lower=True)
ret[slug] = cls(title=title, slug=slug,
content='Welcome to', active=True)
title = 'Site byline'
slug = slugify(title, to_lower=True)
ret[slug] = cls(title=title, slug=slug, content=(
'A template for building a small marketing web site '
'in Flask where all content is live editable.'),
active=True)
title = 'Site byline link title'
slug = slugify(title, to_lower=True)
ret[slug] = cls(title=title, slug=slug,
content='Learn more', active=True)
title = 'Site byline link URL'
slug = slugify(title, to_lower=True)
ret[slug] = cls(
title=title, slug=slug,
content='https://github.com/Jaza/flask-editablesite',
active=True)
title = 'About title'
slug = slugify(title, to_lower=True)
ret[slug] = cls(title=title, slug=slug,
content='About', active=True)
title = 'Gallery title'
slug = slugify(title, to_lower=True)
ret[slug] = cls(title=title, slug=slug,
content='Gallery', active=True)
title = 'Events title'
slug = slugify(title, to_lower=True)
ret[slug] = cls(title=title, slug=slug,
content='Events', active=True)
title = 'Contact title'
slug = slugify(title, to_lower=True)
ret[slug] = cls(title=title, slug=slug,
content='Contact', active=True)
return ret
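# A usage sketch for default_content() (hypothetical caller; the seeding code
# is assumed to live elsewhere in the app, and Model.save()/.query are
# assumed from the shared Flask-SQLAlchemy mixins):
#
#   for slug, block in ShortTextContentBlock.default_content().items():
#       if not ShortTextContentBlock.query.filter_by(slug=slug).first():
#           block.save()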
event.listen(ShortTextContentBlock, 'before_insert',
update_timestamps_before_insert)
event.listen(ShortTextContentBlock, 'before_update',
update_timestamps_before_update)
event.listen(ShortTextContentBlock, 'before_insert',
update_confirmedat_before_save)
event.listen(ShortTextContentBlock, 'before_update',
update_confirmedat_before_save)
event.listen(ShortTextContentBlock, 'before_insert',
update_slug_before_save)
event.listen(ShortTextContentBlock, 'before_update',
update_slug_before_save)
class RichTextContentBlock(SurrogatePK, Slugged, TimeStamped,
Confirmable, Model):
__tablename__ = 'rich_text_content_block'
content = Column(db.Text(), nullable=False, default='')
__table_args__ = (
db.UniqueConstraint('slug', name='_rtcb_slug_uc'),
db.Index('_rtcb_slug_active_ix', 'slug', 'active'))
def __repr__(self):
return self.title
@classmethod
def default_content(cls):
ret = {}
title = 'About text (left column)'
slug = slugify(title, to_lower=True)
ret[slug] = cls(title=title, slug=slug, content=(
'<p>The aim of this app is to demonstrate that, with the '
'help of modern JS libraries, and with some well-'
'thought-out server-side snippets, it\'s now perfectly '
'possible to "bake in" live in-place editing for '
'virtually every content element in a typical '
'brochureware site.</p>'),
active=True)
title = 'About text (right column)'
slug = slugify(title, to_lower=True)
ret[slug] = cls(title=title, slug=slug, content=(
'<p>This app is not a CMS. On the contrary, think of it '
'as a proof-of-concept alternative to a CMS. An '
'alternative where there\'s no "admin area", there\'s '
'no "editing mode", and there\'s no "preview '
'button".</p>'),
active=True)
title = 'About text (below columns)'
slug = slugify(title, to_lower=True)
ret[slug] = cls(
title=title, slug=slug,
content="<p>There's only direct manipulation.</p>",
active=True)
return ret
event.listen(RichTextContentBlock, 'before_insert',
update_timestamps_before_insert)
event.listen(RichTextContentBlock, 'before_update',
update_timestamps_before_update)
event.listen(RichTextContentBlock, 'before_insert',
update_confirmedat_before_save)
event.listen(RichTextContentBlock, 'before_update',
update_confirmedat_before_save)
event.listen(RichTextContentBlock, 'before_insert',
update_slug_before_save)
event.listen(RichTextContentBlock, 'before_update',
update_slug_before_save)
class ImageContentBlock(SurrogatePK, Slugged, TimeStamped,
Confirmable, Model):
__tablename__ = 'image_content_block'
image = Column(db.String(255), nullable=False, default='')
__table_args__ = (
db.UniqueConstraint('slug', name='_icb_slug_uc'),
db.Index('_icb_slug_active_ix', 'slug', 'active'))
def __repr__(self):
return self.title
@property
def image_path(self):
return (
self.image
and '%s%s' % (
app.config['UPLOADS_RELATIVE_PATH'], self.image)
or None)
@property
def image_url(self):
if not self.image:
return None
return url_for('static', filename=self.image_path,
_external=True)
@property
def image_or_placeholder(self):
return (
self.image
or app.config['EDITABLE_PLACEHOLDER_IMAGE_RELATIVE_PATH'])
@classmethod
def default_content(cls):
ret = {}
title = 'Site logo'
slug = slugify(title, to_lower=True)
ret[slug] = cls(
title=title, slug=slug,
image=app.config[
'EDITABLE_PLACEHOLDER_IMAGE_RELATIVE_PATH'],
active=True)
return ret
event.listen(ImageContentBlock, 'before_insert',
update_timestamps_before_insert)
event.listen(ImageContentBlock, 'before_update',
update_timestamps_before_update)
event.listen(ImageContentBlock, 'before_insert',
update_confirmedat_before_save)
event.listen(ImageContentBlock, 'before_update',
update_confirmedat_before_save)
event.listen(ImageContentBlock, 'before_insert',
update_slug_before_save)
event.listen(ImageContentBlock, 'before_update',
update_slug_before_save)
|
|
"""An implementation of the Zephyr Abstract Syntax Definition Language.
See http://asdl.sourceforge.net/ and
http://www.cs.princeton.edu/~danwang/Papers/dsl97/dsl97-abstract.html.
Only supports top level module decl, not view. I'm guessing that view
is intended to support the browser and I'm not interested in the
browser.
Changes for Python: Add support for module versions
"""
import os
import traceback
import spark
class Token(object):
# spark seems to dispatch in the parser based on a token's
# type attribute
def __init__(self, type, lineno):
self.type = type
self.lineno = lineno
def __str__(self):
return self.type
def __repr__(self):
return str(self)
class Id(Token):
def __init__(self, value, lineno):
self.type = 'Id'
self.value = value
self.lineno = lineno
def __str__(self):
return self.value
class String(Token):
def __init__(self, value, lineno):
self.type = 'String'
self.value = value
self.lineno = lineno
class ASDLSyntaxError(Exception):
def __init__(self, lineno, token=None, msg=None):
self.lineno = lineno
self.token = token
self.msg = msg
def __str__(self):
if self.msg is None:
return "Error at '%s', line %d" % (self.token, self.lineno)
else:
return "%s, line %d" % (self.msg, self.lineno)
class ASDLScanner(spark.GenericScanner, object):
def tokenize(self, input):
self.rv = []
self.lineno = 1
super(ASDLScanner, self).tokenize(input)
return self.rv
def t_id(self, s):
r"[\w\.]+"
# XXX doesn't distinguish upper vs. lower, which is
# significant for ASDL.
self.rv.append(Id(s, self.lineno))
def t_string(self, s):
r'"[^"]*"'
self.rv.append(String(s, self.lineno))
def t_xxx(self, s): # not sure what this production means
r"<="
self.rv.append(Token(s, self.lineno))
def t_punctuation(self, s):
r"[\{\}\*\=\|\(\)\,\?\:]"
self.rv.append(Token(s, self.lineno))
def t_comment(self, s):
r"\-\-[^\n]*"
pass
def t_newline(self, s):
r"\n"
self.lineno += 1
def t_whitespace(self, s):
r"[ \t]+"
pass
def t_default(self, s):
r" . +"
raise ValueError, "unmatched input: %s" % `s`
class ASDLParser(spark.GenericParser, object):
def __init__(self):
super(ASDLParser, self).__init__("module")
def typestring(self, tok):
return tok.type
def error(self, tok):
raise ASDLSyntaxError(tok.lineno, tok)
def p_module_0(self, (module, name, version, _0, _1)):
" module ::= Id Id version { } "
if module.value != "module":
raise ASDLSyntaxError(module.lineno,
msg="expected 'module', found %s" % module)
return Module(name, None, version)
def p_module(self, (module, name, version, _0, definitions, _1)):
" module ::= Id Id version { definitions } "
if module.value != "module":
raise ASDLSyntaxError(module.lineno,
msg="expected 'module', found %s" % module)
return Module(name, definitions, version)
def p_version(self, (version, V)):
"version ::= Id String"
if version.value != "version":
raise ASDLSyntaxError(version.lineno,
msg="expected 'version', found %" % version)
return V
def p_definition_0(self, (definition,)):
" definitions ::= definition "
return definition
def p_definition_1(self, (definitions, definition)):
" definitions ::= definition definitions "
return definitions + definition
def p_definition(self, (id, _, type)):
" definition ::= Id = type "
return [Type(id, type)]
def p_type_0(self, (product,)):
" type ::= product "
return product
def p_type_1(self, (sum,)):
" type ::= sum "
return Sum(sum)
def p_type_2(self, (sum, id, _0, attributes, _1)):
" type ::= sum Id ( fields ) "
if id.value != "attributes":
raise ASDLSyntaxError(id.lineno,
msg="expected attributes, found %s" % id)
if attributes:
attributes.reverse()
return Sum(sum, attributes)
def p_product(self, (_0, fields, _1)):
" product ::= ( fields ) "
# XXX can't I just construct things in the right order?
fields.reverse()
return Product(fields)
def p_sum_0(self, (constructor,)):
" sum ::= constructor "
return [constructor]
def p_sum_1(self, (constructor, _, sum)):
" sum ::= constructor | sum "
return [constructor] + sum
def p_sum_2(self, (constructor, _, sum)):
" sum ::= constructor | sum "
return [constructor] + sum
def p_constructor_0(self, (id,)):
" constructor ::= Id "
return Constructor(id)
def p_constructor_1(self, (id, _0, fields, _1)):
" constructor ::= Id ( fields ) "
# XXX can't I just construct things in the right order?
fields.reverse()
return Constructor(id, fields)
def p_fields_0(self, (field,)):
" fields ::= field "
return [field]
def p_fields_1(self, (field, _, fields)):
" fields ::= field , fields "
return fields + [field]
def p_field_0(self, (type,)):
" field ::= Id "
return Field(type)
def p_field_1(self, (type, name)):
" field ::= Id Id "
return Field(type, name)
def p_field_2(self, (type, _, name)):
" field ::= Id * Id "
return Field(type, name, seq=True)
def p_field_3(self, (type, _, name)):
" field ::= Id ? Id "
return Field(type, name, opt=True)
def p_field_4(self, (type, _)):
" field ::= Id * "
return Field(type, seq=True)
def p_field_5(self, (type, _)):
" field ::= Id ? "
return Field(type, opt=True)
builtin_types = ("identifier", "string", "int", "bool", "object")
# below is a collection of classes to capture the AST of an AST :-)
# not sure if any of the methods are useful yet, but I'm adding them
# piecemeal as they seem helpful
class AST(object):
pass # a marker class
class Module(AST):
def __init__(self, name, dfns, version):
self.name = name
self.dfns = dfns
self.version = version
self.types = {} # maps type name to value (from dfns)
for type in dfns:
self.types[type.name.value] = type.value
def __repr__(self):
return "Module(%s, %s)" % (self.name, self.dfns)
class Type(AST):
def __init__(self, name, value):
self.name = name
self.value = value
def __repr__(self):
return "Type(%s, %s)" % (self.name, self.value)
class Constructor(AST):
def __init__(self, name, fields=None):
self.name = name
self.fields = fields or []
def __repr__(self):
return "Constructor(%s, %s)" % (self.name, self.fields)
class Field(AST):
def __init__(self, type, name=None, seq=False, opt=False):
self.type = type
self.name = name
self.seq = seq
self.opt = opt
def __repr__(self):
if self.seq:
extra = ", seq=True"
elif self.opt:
extra = ", opt=True"
else:
extra = ""
if self.name is None:
return "Field(%s%s)" % (self.type, extra)
else:
return "Field(%s, %s%s)" % (self.type, self.name, extra)
class Sum(AST):
def __init__(self, types, attributes=None):
self.types = types
self.attributes = attributes or []
def __repr__(self):
if self.attributes is None:
return "Sum(%s)" % self.types
else:
return "Sum(%s, %s)" % (self.types, self.attributes)
class Product(AST):
def __init__(self, fields):
self.fields = fields
def __repr__(self):
return "Product(%s)" % self.fields
class VisitorBase(object):
def __init__(self, skip=False):
self.cache = {}
self.skip = skip
def visit(self, object, *args):
meth = self._dispatch(object)
if meth is None:
return
try:
meth(object, *args)
except Exception, err:
print "Error visiting", repr(object)
print err
traceback.print_exc()
# XXX hack
if hasattr(self, 'file'):
self.file.flush()
os._exit(1)
def _dispatch(self, object):
assert isinstance(object, AST), repr(object)
klass = object.__class__
meth = self.cache.get(klass)
if meth is None:
methname = "visit" + klass.__name__
if self.skip:
meth = getattr(self, methname, None)
else:
meth = getattr(self, methname)
self.cache[klass] = meth
return meth
class Check(VisitorBase):
def __init__(self):
super(Check, self).__init__(skip=True)
self.cons = {}
self.errors = 0
self.types = {}
def visitModule(self, mod):
for dfn in mod.dfns:
self.visit(dfn)
def visitType(self, type):
self.visit(type.value, str(type.name))
def visitSum(self, sum, name):
for t in sum.types:
self.visit(t, name)
def visitConstructor(self, cons, name):
key = str(cons.name)
conflict = self.cons.get(key)
if conflict is None:
self.cons[key] = name
else:
print "Redefinition of constructor %s" % key
print "Defined in %s and %s" % (conflict, name)
self.errors += 1
for f in cons.fields:
self.visit(f, key)
def visitField(self, field, name):
key = str(field.type)
l = self.types.setdefault(key, [])
l.append(name)
def visitProduct(self, prod, name):
for f in prod.fields:
self.visit(f, name)
def check(mod):
v = Check()
v.visit(mod)
for t in v.types:
        if t not in mod.types and t not in builtin_types:
v.errors += 1
uses = ", ".join(v.types[t])
print "Undefined type %s, used in %s" % (t, uses)
return not v.errors
def parse(file):
scanner = ASDLScanner()
parser = ASDLParser()
buf = open(file).read()
tokens = scanner.tokenize(buf)
try:
return parser.parse(tokens)
except ASDLSyntaxError, err:
print err
lines = buf.split("\n")
        print lines[err.lineno - 1]  # the lines list is 0-based; file line numbers start at 1
if __name__ == "__main__":
import glob
import sys
if len(sys.argv) > 1:
files = sys.argv[1:]
else:
testdir = "tests"
files = glob.glob(testdir + "/*.asdl")
for file in files:
print file
mod = parse(file)
print "module", mod.name
print len(mod.dfns), "definitions"
if not check(mod):
print "Check failed"
else:
for dfn in mod.dfns:
print dfn.type
|
|
import numpy as np
llf = np.array([-240.21658671417])
nobs = np.array([ 202])
k = np.array([ 4])
k_exog = np.array([ 1])
sigma = np.array([ .79473430527544])
chi2 = np.array([ 54633.096432541])
df_model = np.array([ 3])
k_ar = np.array([ 2])
k_ma = np.array([ 1])
params = np.array([ 1.1970355174119,
-.19724105359909,
-.91770441432171,
.63160261598163])
cov_params = np.array([ .00182362158934,
-.00163271308366,
-.00140915922719,
-.00044400541718,
-.00163271308366,
.00148731108625,
.00117094734518,
.00044939207189,
-.00140915922719,
.00117094734518,
.00181742086472,
.00032737864417,
-.00044400541718,
.00044939207189,
.00032737864417,
.00071193796554]).reshape(4,4)
xb = np.array([ 0,
0,
.04999904707074,
.06866559386253,
.02903163060546,
.07047952711582,
.03383458778262,
.08519082516432,
.06387747824192,
.0323903337121,
.0664371997118,
.056028008461,
.05634552612901,
.08741936087608,
.04945361241698,
.0881454795599,
.06608437746763,
.05997726693749,
.1058451384306,
.07246486097574,
.10775856673717,
.06419499218464,
.07649331539869,
.08432687073946,
.10236189514399,
.09031195193529,
.11902847886086,
.08933536708355,
.13242828845978,
.18790599703789,
.1410321444273,
.2076324224472,
.12561346590519,
.16128000617027,
.19236192107201,
.20115886628628,
.23716603219509,
.22255305945873,
.28473255038261,
.27441227436066,
.28466689586639,
.34994292259216,
.3424659371376,
.35532799363136,
.39506548643112,
.41180691123009,
.37130555510521,
.40151777863503,
.40951976180077,
.33306270837784,
.40587121248245,
.35764938592911,
.35284259915352,
.34843212366104,
.34438464045525,
.36860400438309,
.3990384042263,
.54691004753113,
.44432625174522,
.70020270347595,
.70163971185684,
.77033877372742,
.78572767972946,
.95923948287964,
.90811860561371,
.77250319719315,
.85019183158875,
.83438217639923,
.83959633111954,
.67678385972977,
.81331378221512,
.8202628493309,
.79870623350143,
.93831378221512,
.8281461596489,
.8256653547287,
.9071888923645,
.95076316595078,
1.0827594995499,
1.1249971389771,
1.1078934669495,
1.3271758556366,
1.4741442203522,
1.4939774274826,
1.7192279100418,
1.8355281352997,
1.5873348712921,
1.7079899311066,
1.8515517711639,
1.7368041276932,
1.8895095586777,
1.7913619279861,
1.5485136508942,
1.3914349079132,
1.8569093942642,
1.378589630127,
1.0909280776978,
1.2919955253601,
1.2874838113785,
1.2636196613312,
1.3255174160004,
1.2952193021774,
1.1754019260406,
1.2002106904984,
1.0717958211899,
1.2283787727356,
1.0664055347443,
1.0640426874161,
1.2097471952438,
.49885395169258,
.91795635223389,
.88015007972717,
1.0048481225967,
1.0485925674438,
1.0131514072418,
1.0480036735535,
1.0044000148773,
1.0596977472305,
1.0989319086075,
1.1431447267532,
1.1360602378845,
1.316884636879,
1.2248164415359,
1.0992801189423,
1.4178918600082,
1.2780615091324,
1.3436778783798,
1.727570772171,
1.376532793045,
1.1185711622238,
1.2548811435699,
1.2139776945114,
1.22409760952,
1.2136551141739,
1.2040791511536,
1.2232189178467,
1.1931306123734,
1.1573059558868,
1.0603547096252,
1.1422899961472,
1.0268490314484,
1.0556720495224,
1.1264756917953,
1.0764141082764,
1.0978548526764,
1.1536711454391,
1.025780916214,
1.034966468811,
1.1074740886688,
1.1707112789154,
1.0496238470078,
1.1209251880646,
1.1271858215332,
.93740028142929,
.90130144357681,
1.0357736349106,
.87323325872421,
.75861483812332,
.93606770038605,
.85732334852219,
.87216699123383,
.97779452800751,
.88410341739655,
1.0446182489395,
1.0177079439163,
1.144193649292,
1.2372444868088,
1.1155867576599,
1.2619564533234,
1.0462523698807,
1.0816910266876,
.85130125284195,
.76972281932831,
1.1335872411728,
.92024201154709,
1.0416384935379,
1.1102936267853,
.91037821769714,
.85678082704544,
1.022847533226,
1.0930491685867,
1.0342184305191,
1.2070096731186,
1.2472279071808,
1.0886085033417,
1.3604420423508,
1.1053978204727,
2.0939025878906,
1.0898643732071,
1.3238569498062,
1.5171576738358,
.77435439825058,
1.3360253572464,
1.5512014627457,
1.3569095134735,
1.4669530391693,
1.9312930107117,
1.52878677845,
2.3952746391296,
.80755305290222,
-.2365039139986,
.85178333520889,
1.1858888864517])
y = np.array([np.nan,
28.979999542236,
29.199998855591,
29.4186668396,
29.399032592773,
29.610481262207,
29.583833694458,
29.835191726685,
29.903877258301,
29.842390060425,
29.986436843872,
30.036027908325,
30.096345901489,
30.29741859436,
30.269453048706,
30.468145370483,
30.506084442139,
30.539976119995,
30.795845031738,
30.822463989258,
31.047760009766,
31.014196395874,
31.096494674683,
31.204328536987,
31.382362365723,
31.470310211182,
31.699028015137,
31.739334106445,
32.012428283691,
32.467903137207,
32.591033935547,
33.057632446289,
33.025615692139,
33.261280059814,
33.592365264893,
33.901161193848,
34.33716583252,
34.622554779053,
35.184734344482,
35.574413299561,
35.984668731689,
36.649940490723,
37.142463684082,
37.655326843262,
38.295066833496,
38.911808013916,
39.271308898926,
39.801517486572,
40.309520721436,
40.433059692383,
41.005870819092,
41.257652282715,
41.552845001221,
41.848430633545,
42.144382476807,
42.568603515625,
43.099040985107,
44.246910095215,
44.644325256348,
46.300201416016,
47.501640319824,
48.870338439941,
50.08572769165,
51.959239959717,
53.208118438721,
53.77250289917,
54.850193023682,
55.734382629395,
56.639595031738,
56.776782989502,
57.813312530518,
58.720264434814,
59.498706817627,
60.938312530518,
61.628147125244,
62.425662994385,
63.607189178467,
64.850761413574,
66.58275604248,
68.224998474121,
69.607894897461,
71.927177429199,
74.474143981934,
76.693977355957,
79.719230651855,
82.735527038574,
84.18733215332,
86.407989501953,
89.051551818848,
90.836799621582,
93.389511108398,
95.191360473633,
95.948516845703,
96.39143371582,
99.356910705566,
99.478584289551,
98.990928649902,
100.09199523926,
101.08748626709,
102.063621521,
103.42551422119,
104.59522247314,
105.27539825439,
106.30020904541,
106.77178955078,
108.2283782959,
108.76640319824,
109.5640411377,
111.10974884033,
109.19885253906,
110.41795349121,
111.08014678955,
112.40484619141,
113.74858856201,
114.81315612793,
116.04800415039,
117.00440216064,
118.25969696045,
119.59893035889,
121.04314422607,
122.33605957031,
124.41688537598,
125.72481536865,
126.49928283691,
128.91789245605,
130.17805480957,
131.84367370605,
135.12756347656,
136.07652282715,
136.21858215332,
137.45487976074,
138.41397094727,
139.52409362793,
140.61364746094,
141.70408630371,
142.92321777344,
143.99313354492,
144.9573059082,
145.56034851074,
146.74229431152,
147.32685852051,
148.25567626953,
149.52647399902,
150.47640991211,
151.59785461426,
152.95367431641,
153.62579345703,
154.53497314453,
155.80746459961,
157.27072143555,
158.04962158203,
159.32092285156,
160.52717590332,
160.83738708496,
161.30130004883,
162.53576660156,
162.87322998047,
162.95861816406,
164.13606262207,
164.75732421875,
165.57215881348,
166.8777923584,
167.58410644531,
169.14462280273,
170.31771850586,
172.04418945313,
173.93724060059,
175.01557922363,
176.86196899414,
177.44624328613,
178.48168945313,
178.4513092041,
178.4697265625,
180.43359375,
180.92024230957,
182.24163818359,
183.71029663086,
184.11038208008,
184.5567779541,
185.92283630371,
187.39305114746,
188.43421936035,
190.30702209473,
192.04722595215,
192.88861083984,
195.16044616699,
195.8053894043,
201.29389953613,
200.48985290527,
202.0238494873,
204.21714782715,
202.67434692383,
204.91003417969,
207.47120666504,
208.6949005127,
210.59994506836,
214.42628479004,
215.52578735352,
221.00527954102,
217.6965637207,
211.93748474121,
213.52278137207,
215.65487670898])
resid = np.array([np.nan,
.17000007629395,
.150001719594,
-.04866513609886,
.1409684419632,
-.06048120185733,
.16616617143154,
.00480932369828,
-.09387816488743,
.07761027663946,
-.00643773004413,
.00397336483002,
.1136526465416,
-.07741913199425,
.11054623872042,
-.02814410813153,
-.02608536928892,
.15002372860909,
-.04584567248821,
.11753567308187,
-.09775833785534,
.00580469891429,
.02350706420839,
.07567297667265,
-.00236342288554,
.10968881100416,
-.04902878031135,
.14066417515278,
.2675713300705,
-.01790400780737,
.25896555185318,
-.15762937068939,
.074383482337,
.13872304558754,
.10763731598854,
.19883884489536,
.06283701956272,
.27744692564011,
.11526516824961,
.12558923661709,
.3153315782547,
.15005706250668,
.1575340628624,
.24467428028584,
.20493300259113,
-.01180539745837,
.12869445979595,
.09848223626614,
-.20952282845974,
.166937276721,
-.1058681756258,
-.05765015259385,
-.05284334719181,
-.04843288660049,
.05561690032482,
.13139598071575,
.60096162557602,
-.04691004380584,
.95567148923874,
.49979802966118,
.5983595252037,
.42966198921204,
.91427308320999,
.34075975418091,
-.20811785757542,
.22749677300453,
.049809679389,
.06561553478241,
-.53959709405899,
.2232176810503,
.08668774366379,
-.02026358619332,
.50129300355911,
-.13831453025341,
-.02814690209925,
.27433693408966,
.29281187057495,
.64923530817032,
.51723897457123,
.27500438690186,
.99210506677628,
1.0728256702423,
.72585266828537,
1.3060256242752,
1.1807736158371,
-.13553117215633,
.51266354322433,
.79201000928879,
.0484497398138,
.66319739818573,
.0104919327423,
-.79136198759079,
-.94851511716843,
1.1085650920868,
-1.2569109201431,
-1.5785865783691,
-.19092650711536,
-.29199549555779,
-.2874838411808,
.03637577593327,
-.12551285326481,
-.49522390961647,
-.17540194094181,
-.60021221637726,
.22820720076561,
-.52838176488876,
-.26640248298645,
.33595886826515,
-2.4097516536713,
.30114910006523,
-.21795941889286,
.31985449790955,
.29514732956886,
.05141358822584,
.18684551119804,
-.04800364747643,
.19559693336487,
.24030530452728,
.30106961727142,
.15685074031353,
.76394122838974,
.08311692625284,
-.32481494545937,
1.0007183551788,
-.01789797656238,
.32194453477859,
1.5563160181046,
-.42756772041321,
-.97652357816696,
-.01858037337661,
-.25488117337227,
-.11397163569927,
-.12410674989223,
-.1136489585042,
-.00408222479746,
-.12321277707815,
-.19313062727451,
-.45730903744698,
.03965143114328,
-.44229298830032,
-.12685517966747,
.1443248540163,
-.12647566199303,
.02359197475016,
.20214818418026,
-.35366812348366,
-.12578700482845,
.16503044962883,
.29253509640694,
-.27071738243103,
.15037304162979,
.07907173037529,
-.6271858215332,
-.43740031123161,
.19870468974113,
-.53577369451523,
-.67323631048203,
.24138513207436,
-.23607075214386,
-.05732027813792,
.32782995700836,
-.17779149115086,
.51590573787689,
.15537866950035,
.5822828412056,
.65580940246582,
-.03724759072065,
.58442544937134,
-.46196871995926,
-.04625232890248,
-.88167881965637,
-.75131040811539,
.83028328418732,
-.43359026312828,
.2797549366951,
.35837066173553,
-.51030284166336,
-.41037824749947,
.34321609139442,
.37716165184975,
.00694172456861,
.66579383611679,
.49298724532127,
-.24722795188427,
.91139149665833,
-.46044808626175,
3.3946022987366,
-1.8939057588577,
.21013870835304,
.67614299058914,
-2.3171606063843,
.89965683221817,
1.0099676847458,
-.13320215046406,
.43808862566948,
1.8950464725494,
-.42929407954216,
3.0842199325562,
-4.1162676811218,
-5.5225644111633,
.73351317644119,
.94620555639267,
.73011147975922])
yr = np.array([np.nan,
.17000007629395,
.150001719594,
-.04866513609886,
.1409684419632,
-.06048120185733,
.16616617143154,
.00480932369828,
-.09387816488743,
.07761027663946,
-.00643773004413,
.00397336483002,
.1136526465416,
-.07741913199425,
.11054623872042,
-.02814410813153,
-.02608536928892,
.15002372860909,
-.04584567248821,
.11753567308187,
-.09775833785534,
.00580469891429,
.02350706420839,
.07567297667265,
-.00236342288554,
.10968881100416,
-.04902878031135,
.14066417515278,
.2675713300705,
-.01790400780737,
.25896555185318,
-.15762937068939,
.074383482337,
.13872304558754,
.10763731598854,
.19883884489536,
.06283701956272,
.27744692564011,
.11526516824961,
.12558923661709,
.3153315782547,
.15005706250668,
.1575340628624,
.24467428028584,
.20493300259113,
-.01180539745837,
.12869445979595,
.09848223626614,
-.20952282845974,
.166937276721,
-.1058681756258,
-.05765015259385,
-.05284334719181,
-.04843288660049,
.05561690032482,
.13139598071575,
.60096162557602,
-.04691004380584,
.95567148923874,
.49979802966118,
.5983595252037,
.42966198921204,
.91427308320999,
.34075975418091,
-.20811785757542,
.22749677300453,
.049809679389,
.06561553478241,
-.53959709405899,
.2232176810503,
.08668774366379,
-.02026358619332,
.50129300355911,
-.13831453025341,
-.02814690209925,
.27433693408966,
.29281187057495,
.64923530817032,
.51723897457123,
.27500438690186,
.99210506677628,
1.0728256702423,
.72585266828537,
1.3060256242752,
1.1807736158371,
-.13553117215633,
.51266354322433,
.79201000928879,
.0484497398138,
.66319739818573,
.0104919327423,
-.79136198759079,
-.94851511716843,
1.1085650920868,
-1.2569109201431,
-1.5785865783691,
-.19092650711536,
-.29199549555779,
-.2874838411808,
.03637577593327,
-.12551285326481,
-.49522390961647,
-.17540194094181,
-.60021221637726,
.22820720076561,
-.52838176488876,
-.26640248298645,
.33595886826515,
-2.4097516536713,
.30114910006523,
-.21795941889286,
.31985449790955,
.29514732956886,
.05141358822584,
.18684551119804,
-.04800364747643,
.19559693336487,
.24030530452728,
.30106961727142,
.15685074031353,
.76394122838974,
.08311692625284,
-.32481494545937,
1.0007183551788,
-.01789797656238,
.32194453477859,
1.5563160181046,
-.42756772041321,
-.97652357816696,
-.01858037337661,
-.25488117337227,
-.11397163569927,
-.12410674989223,
-.1136489585042,
-.00408222479746,
-.12321277707815,
-.19313062727451,
-.45730903744698,
.03965143114328,
-.44229298830032,
-.12685517966747,
.1443248540163,
-.12647566199303,
.02359197475016,
.20214818418026,
-.35366812348366,
-.12578700482845,
.16503044962883,
.29253509640694,
-.27071738243103,
.15037304162979,
.07907173037529,
-.6271858215332,
-.43740031123161,
.19870468974113,
-.53577369451523,
-.67323631048203,
.24138513207436,
-.23607075214386,
-.05732027813792,
.32782995700836,
-.17779149115086,
.51590573787689,
.15537866950035,
.5822828412056,
.65580940246582,
-.03724759072065,
.58442544937134,
-.46196871995926,
-.04625232890248,
-.88167881965637,
-.75131040811539,
.83028328418732,
-.43359026312828,
.2797549366951,
.35837066173553,
-.51030284166336,
-.41037824749947,
.34321609139442,
.37716165184975,
.00694172456861,
.66579383611679,
.49298724532127,
-.24722795188427,
.91139149665833,
-.46044808626175,
3.3946022987366,
-1.8939057588577,
.21013870835304,
.67614299058914,
-2.3171606063843,
.89965683221817,
1.0099676847458,
-.13320215046406,
.43808862566948,
1.8950464725494,
-.42929407954216,
3.0842199325562,
-4.1162676811218,
-5.5225644111633,
.73351317644119,
.94620555639267,
.73011147975922])
mse = np.array([ 1.1635265350342,
.70545583963394,
.63365471363068,
.633325278759,
.63304948806763,
.63281834125519,
.63262450695038,
.63246184587479,
.63232523202896,
.63221049308777,
.63211411237717,
.63203299045563,
.63196486234665,
.63190752267838,
.63185924291611,
.63181865215302,
.63178449869156,
.63175576925278,
.631731569767,
.63171118497849,
.63169401884079,
.63167959451675,
.63166743516922,
.63165718317032,
.63164860010147,
.63164132833481,
.6316351890564,
.63163006305695,
.63162571191788,
.63162207603455,
.63161903619766,
.63161641359329,
.63161426782608,
.63161242008209,
.63161087036133,
.63160955905914,
.63160848617554,
.63160753250122,
.63160675764084,
.63160610198975,
.63160556554794,
.63160508871078,
.63160473108292,
.63160437345505,
.63160407543182,
.63160383701324,
.63160365819931,
.63160347938538,
.63160336017609,
.6316032409668,
.63160312175751,
.63160306215286,
.63160300254822,
.63160294294357,
.63160288333893,
.63160282373428,
.63160282373428,
.63160276412964,
.63160276412964,
.63160270452499,
.63160270452499,
.63160270452499,
.63160270452499,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035,
.63160264492035])
stdp = np.array([ 0,
0,
.04999904707074,
.06866559386253,
.02903163060546,
.07047952711582,
.03383458778262,
.08519082516432,
.06387747824192,
.0323903337121,
.0664371997118,
.056028008461,
.05634552612901,
.08741936087608,
.04945361241698,
.0881454795599,
.06608437746763,
.05997726693749,
.1058451384306,
.07246486097574,
.10775856673717,
.06419499218464,
.07649331539869,
.08432687073946,
.10236189514399,
.09031195193529,
.11902847886086,
.08933536708355,
.13242828845978,
.18790599703789,
.1410321444273,
.2076324224472,
.12561346590519,
.16128000617027,
.19236192107201,
.20115886628628,
.23716603219509,
.22255305945873,
.28473255038261,
.27441227436066,
.28466689586639,
.34994292259216,
.3424659371376,
.35532799363136,
.39506548643112,
.41180691123009,
.37130555510521,
.40151777863503,
.40951976180077,
.33306270837784,
.40587121248245,
.35764938592911,
.35284259915352,
.34843212366104,
.34438464045525,
.36860400438309,
.3990384042263,
.54691004753113,
.44432625174522,
.70020270347595,
.70163971185684,
.77033877372742,
.78572767972946,
.95923948287964,
.90811860561371,
.77250319719315,
.85019183158875,
.83438217639923,
.83959633111954,
.67678385972977,
.81331378221512,
.8202628493309,
.79870623350143,
.93831378221512,
.8281461596489,
.8256653547287,
.9071888923645,
.95076316595078,
1.0827594995499,
1.1249971389771,
1.1078934669495,
1.3271758556366,
1.4741442203522,
1.4939774274826,
1.7192279100418,
1.8355281352997,
1.5873348712921,
1.7079899311066,
1.8515517711639,
1.7368041276932,
1.8895095586777,
1.7913619279861,
1.5485136508942,
1.3914349079132,
1.8569093942642,
1.378589630127,
1.0909280776978,
1.2919955253601,
1.2874838113785,
1.2636196613312,
1.3255174160004,
1.2952193021774,
1.1754019260406,
1.2002106904984,
1.0717958211899,
1.2283787727356,
1.0664055347443,
1.0640426874161,
1.2097471952438,
.49885395169258,
.91795635223389,
.88015007972717,
1.0048481225967,
1.0485925674438,
1.0131514072418,
1.0480036735535,
1.0044000148773,
1.0596977472305,
1.0989319086075,
1.1431447267532,
1.1360602378845,
1.316884636879,
1.2248164415359,
1.0992801189423,
1.4178918600082,
1.2780615091324,
1.3436778783798,
1.727570772171,
1.376532793045,
1.1185711622238,
1.2548811435699,
1.2139776945114,
1.22409760952,
1.2136551141739,
1.2040791511536,
1.2232189178467,
1.1931306123734,
1.1573059558868,
1.0603547096252,
1.1422899961472,
1.0268490314484,
1.0556720495224,
1.1264756917953,
1.0764141082764,
1.0978548526764,
1.1536711454391,
1.025780916214,
1.034966468811,
1.1074740886688,
1.1707112789154,
1.0496238470078,
1.1209251880646,
1.1271858215332,
.93740028142929,
.90130144357681,
1.0357736349106,
.87323325872421,
.75861483812332,
.93606770038605,
.85732334852219,
.87216699123383,
.97779452800751,
.88410341739655,
1.0446182489395,
1.0177079439163,
1.144193649292,
1.2372444868088,
1.1155867576599,
1.2619564533234,
1.0462523698807,
1.0816910266876,
.85130125284195,
.76972281932831,
1.1335872411728,
.92024201154709,
1.0416384935379,
1.1102936267853,
.91037821769714,
.85678082704544,
1.022847533226,
1.0930491685867,
1.0342184305191,
1.2070096731186,
1.2472279071808,
1.0886085033417,
1.3604420423508,
1.1053978204727,
2.0939025878906,
1.0898643732071,
1.3238569498062,
1.5171576738358,
.77435439825058,
1.3360253572464,
1.5512014627457,
1.3569095134735,
1.4669530391693,
1.9312930107117,
1.52878677845,
2.3952746391296,
.80755305290222,
-.2365039139986,
.85178333520889,
1.1858888864517])
icstats = np.array([ 202,
np.nan,
-240.21658671417,
4,
488.43317342834,
501.66624421795])
class Bunch(dict):
def __init__(self, **kw):
dict.__init__(self, kw)
self.__dict__ = self
results = Bunch(llf=llf, nobs=nobs, k=k, k_exog=k_exog, sigma=sigma, chi2=chi2, df_model=df_model, k_ar=k_ar, k_ma=k_ma, params=params, cov_params=cov_params, xb=xb, y=y, resid=resid, yr=yr, mse=mse, stdp=stdp, icstats=icstats, )
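# Illustrative usage sketch (not part of the original results file): the Bunch
# above exposes the reference arrays both as dict keys and as attributes, which
# is how a regression test would typically read them before comparing a freshly
# fitted ARMA(2,1) model against these stored values.
if __name__ == "__main__":
    # Attribute access and key access return the same underlying arrays.
    assert results.params is results['params']
    assert results.cov_params.shape == (4, 4)
    # In this particular results set the in-sample prediction (xb) and the
    # stdp column happen to hold identical stored values.
    np.testing.assert_allclose(results.xb, results.stdp)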
|
|
#!/usr/bin/python
# File: start_client.py ; This file is part of Twister.
# version: 2.011
# Copyright (C) 2012-2013 , Luxoft
# Authors:
# Adrian Toader <[email protected]>
# Andrei Costachi <[email protected]>
# Andrei Toma <[email protected]>
# Cristi Constantin <[email protected]>
# Daniel Cioata <[email protected]>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file will register ALL Execution Processes that are enabled,
# from file `twister/config/epname.ini` !
# To be able to start the packet sniffer, this must run as ROOT.
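# A minimal epname.ini sketch (illustrative values; the real file lives in
# `twister/config/epname.ini`): every section that defines CE_IP is treated as
# an EP name by the parsing code below, and the optional PACKETSNIFFERPLUGIN
# section enables the sniffer and selects its network interface.
#
#   [PACKETSNIFFERPLUGIN]
#   ENABLED = 1
#   EP_HOST = 127.0.0.1
#   ETH_INTERFACE = eth0
#
#   [EP-1001]
#   CE_IP = 127.0.0.1
#   CE_PORT = 8000
#   EP_HOST = 127.0.0.1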
import os, sys
import socket
socket.setdefaulttimeout(3)
import cherrypy
import xmlrpclib
import subprocess
import traceback
from time import sleep
from datetime import datetime
from cherrypy import _cptools
from ConfigParser import SafeConfigParser
from json import loads as jsonLoads, dumps as jsonDumps
from socket import gethostname, gethostbyaddr
from thread import start_new_thread
#
if not sys.version.startswith('2.7'):
print('Python version error! The client must run on Python 2.7!')
exit(1)
def userHome(user):
""" Return user home path for all kind of users """
return subprocess.check_output('echo ~' + user, shell=True).strip()
try:
username = os.getenv('USER')
if username=='root':
username = os.getenv('SUDO_USER')
except:
print('Cannot guess user name for this Execution Process! Exiting!')
exit(1)
# Twister path environment
os.environ['TWISTER_PATH'] = userHome(username).rstrip('/') +os.sep+ 'twister/'
#
def keepalive(service):
""" """
print('Keep-Alive process started...')
service.registerEPs()
while True:
for ce in service.proxyList:
try:
response = service.proxyList[ce].echo('ping')
except Exception as e:
print('EP warning: Central Engine is down ... [{}]'.format(e))
service.registerEPs(ce)
sleep(0.8)
#
class TwisterClientService(_cptools.XMLRPCController):
""" Twister Client XML-RPC Service. """
def __init__(self, username):
""" """
print('Twister Client Service init..')
self.username = username
self.hostname = gethostname().lower()
self.snifferEth = None
self.eps = {}
self.proxyList = {}
        # Close all sniffer and EP instances and parse EPs
pipe = subprocess.Popen('ps ax | grep start_packet_sniffer.py', shell=True, stdout=subprocess.PIPE)
for line in pipe.stdout.read().splitlines():
try:
os.kill(int(line.split()[0]), 9)
except Exception as e:
pass
del pipe
pipe = subprocess.Popen('ps ax | grep ExecutionProcess.py', shell=True, stdout=subprocess.PIPE)
for line in pipe.stdout.read().splitlines():
try:
os.kill(int(line.split()[0]), 9)
except Exception as e:
pass
del pipe
cfg = SafeConfigParser()
cfg.read(os.getenv('TWISTER_PATH') + '/config/epname.ini')
# sniffer config
if (cfg.has_option('PACKETSNIFFERPLUGIN', 'EP_HOST') and
cfg.get('PACKETSNIFFERPLUGIN', 'ENABLED') == '1'):
self.snifferEth = cfg.get('PACKETSNIFFERPLUGIN', 'ETH_INTERFACE')
else:
self.snifferEth = 'eth0'
allow_any_host = True
for ep in cfg.sections():
            # check if the config has an EP_HOST option that is
            # not commented out and contains an IP address
if cfg.has_option(ep, 'EP_HOST'):
host_value = (cfg.get(ep, 'EP_HOST'))
if host_value:
allow_any_host = False
# All sections that have an option CE_IP, are EP names
eps = []
for ep in cfg.sections():
if cfg.has_option(ep, 'CE_IP') and not allow_any_host:
try:
if self.hostname in gethostbyaddr(cfg.get(ep, 'EP_HOST'))[0].lower():
eps.append(ep)
except Exception as e:
pass
elif cfg.has_option(ep, 'CE_IP') and allow_any_host:
eps.append(ep)
print('Found `{}` EPs: `{}`.\n'.format(len(eps), ', '.join(eps)))
if not eps:
raise Exception('No EPS found!')
# Generate list of EPs and connections
for currentEP in eps:
newEP = {}
newEP['ce_ip'] = cfg.get(currentEP, 'CE_IP')
newEP['ce_port'] = cfg.get(currentEP, 'CE_PORT')
_proxy = '{ip}:{port}'.format(ip=newEP['ce_ip'], port=newEP['ce_port'])
if self.proxyList.has_key(_proxy):
# Re-use Central Engine connection
newEP['proxy'] = self.proxyList[_proxy]
else:
# Create a new Central Engine connection
newEP['proxy'] = \
xmlrpclib.ServerProxy('http://{}:EP@{}:{}/'.format(self.username, newEP['ce_ip'], newEP['ce_port']))
self.proxyList.update([(_proxy, newEP['proxy']), ])
newEP['exec_str'] = 'nohup {python} -u {twister_path}/client/executionprocess/ExecutionProcess.py '\
'{user} {ep} "{ip}:{port}" {sniff} > "{twister_path}/.twister_cache/{ep}_LIVE.log" &'.format(
python = sys.executable,
twister_path = os.getenv('TWISTER_PATH').rstrip('/'),
user = self.username,
ep = currentEP,
ip = newEP['ce_ip'],
port = newEP['ce_port'],
sniff = self.snifferEth,
)
newEP['pid'] = None
self.eps.update([(currentEP, newEP), ])
def registerEPs(self, ce_proxy=None):
""" Register EPs to Central Engines """
if ce_proxy:
print('Starting Client Service register on `{}`...'.format(ce_proxy))
else:
print('Starting Client Service register...')
# List of Central Engine connections
proxyEpsList = {}
for currentEP in self.eps:
_proxy = '{}:{}'.format(self.eps[currentEP]['ce_ip'], self.eps[currentEP]['ce_port'])
# If Central Engine proxy filter is specified, use it
if ce_proxy and ce_proxy != _proxy:
continue
if not proxyEpsList.has_key(_proxy):
proxyEpsList[_proxy] = [
ep for ep in self.eps if self.eps[ep]['ce_ip'] == self.eps[currentEP]['ce_ip'] and
self.eps[ep]['ce_port'] == self.eps[currentEP]['ce_port']
]
unregistered = True
# Try to register to Central Engine, forever
while unregistered:
for currentCE in proxyEpsList:
                try:
                    proxy = self.eps[proxyEpsList[currentCE][0]]['proxy']
                    __proxy = proxy._ServerProxy__host.split('@')[1].split(':')
                except Exception as e:
                    # Report `currentCE` because the proxy lookup itself may have failed,
                    # leaving `__proxy` unbound.
                    print('CE proxy error: `{}` on `{}`.'.format(e, currentCE))
                    continue
clientKey = ':{port}'.format(port=self.clientPort)
try:
userCeClientInfo = proxy.getUserVariable(self.username, 'clients')
if not userCeClientInfo:
userCeClientInfo = {}
else:
userCeClientInfo = jsonLoads(userCeClientInfo)
while True:
ceStatus = proxy.getExecStatusAll(self.username)
if ceStatus.startswith('invalid'):
break
elif ceStatus.startswith('stopped'):
# Reset user project
proxy.resetProject(self.username)
print('User project reset.')
break
else:
print('CE on `{}` is running with status `{}`.'.format(
proxy._ServerProxy__host.split('@')[1], ceStatus))
print('Waiting to stop ...')
sleep(2)
for (prxy, eps) in userCeClientInfo.items():
for ep in eps:
if ep in proxyEpsList[currentCE]:
print('Warning: epname {} already registered. Trying to stop..'.format(ep))
try:
p = xmlrpclib.ServerProxy('http://{}:{}/twisterclient/'.format(
prxy.split(':')[0], prxy.split(':')[1]))
try:
last_seen_alive = self.eps[ep]['proxy'].getEpVariable(
self.username, ep, 'last_seen_alive')
now_dtime = datetime.today()
if last_seen_alive:
diff = now_dtime - datetime.strptime(last_seen_alive,
'%Y-%m-%d %H:%M:%S')
if diff.seconds <= 2.4:
proxyEpsList[currentCE].pop(proxyEpsList[currentCE].index(ep))
print('Warning: epname {} is running. Will not register.'.format(ep))
else:
p.stopEP(ep)
userCeClientInfo[prxy].pop(userCeClientInfo[prxy].index(ep))
if not userCeClientInfo[prxy]:
userCeClientInfo.pop(prxy)
print('Warning: epname {} stoped. Will register.'.format(ep))
except Exception as e:
pass
except Exception as e:
pass
if not proxyEpsList[currentCE]:
continue
userCeClientInfo.update([(clientKey, proxyEpsList[currentCE]), ])
userCeClientInfo = jsonDumps(userCeClientInfo)
proxy.registerClient(self.username, userCeClientInfo)
unregistered = False
except Exception as e:
self.proxyList.pop(currentCE)
print('Error: {er}'.format(er=e))
if unregistered:
print('Error: Central Engine is down... will retry...')
sleep(2)
print('Client is now registered on CE.\n')
@cherrypy.expose
def startEP(self, epname, *args, **kwargs):
""" """
if not epname in self.eps.keys():
print('Error: Unknown EP name : `{}` !'.format(epname))
return False
sleep(2.4)
try:
proxy = self.eps[epname]['proxy']
last_seen_alive = proxy.getEpVariable(self.username, epname, 'last_seen_alive')
except:
print('Error: Cannot connect to Central Engine to check the EP!\n')
trace = traceback.format_exc()[34:].strip()
print(trace)
return False
now_dtime = datetime.today()
if last_seen_alive:
diff = now_dtime - datetime.strptime(last_seen_alive, '%Y-%m-%d %H:%M:%S')
if diff.seconds < 2.5:
print('Error: Process {} is already started for user {}! (ping={} sec)\n'.format(
epname, username, diff.seconds))
return False
if self.eps[epname]['pid']:
print('Error: Process {} is already started for user {}! (pid={})\n'.format(
epname, username, self.eps[epname]['pid']))
return False
print('Executing: {}'.format(self.eps[epname]['exec_str']))
self.eps[epname]['pid'] = subprocess.Popen(
self.eps[epname]['exec_str'], shell=True, preexec_fn=os.setsid)
print('EP `{}` for user `{}` launched in background!\n'.format(epname, self.username))
return True
@cherrypy.expose
def stopEP(self, epname, *args, **kwargs):
""" """
if not epname in self.eps.keys():
print('Error: Unknown EP name : `{}` !'.format(epname))
return False
if not self.eps[epname]['pid']:
print('Error: EP `{}` is not running !'.format(epname))
return False
sleep(2.4)
try:
os.killpg(self.eps[epname]['pid'].pid, 9)
self.eps[epname]['pid'] = None
except:
trace = traceback.format_exc()[34:].strip()
print(trace)
return False
print('Stopping EP `{}` !'.format(epname))
return True
@cherrypy.expose
def restartEP(self, epname, *args, **kwargs):
""" """
if not epname in self.eps.keys():
print('Error: Unknown EP name : `{}` !'.format(epname))
return False
if self.eps[epname]['pid']:
try:
os.killpg(self.eps[epname]['pid'].pid, 9)
self.eps[epname]['pid'] = None
print('Killing EP `{}` !'.format(epname))
except:
trace = traceback.format_exc()[34:].strip()
print(trace)
return False
print('Executing: {}'.format(self.eps[epname]['exec_str']))
self.eps[epname]['pid'] = subprocess.Popen(
self.eps[epname]['exec_str'], shell=True, preexec_fn=os.setsid)
print('Restarted EP `{}` for user `{}` !\n'.format(epname, self.username))
return True
#
if __name__ == "__main__":
# Run client service
service = TwisterClientService(username)
    # Find first free port in range ...
connectionEstablished = False
minport, maxport = 4444, 4488
clientPort = minport
while clientPort < maxport:
try:
socket.create_connection(('0.0.0.0', clientPort), 2)
print('Client warning, the port `{}` is taken!'.format(clientPort))
clientPort += 1
except Exception as e:
connectionEstablished = True
break
if not connectionEstablished:
        print('Could not find any free port in range {} - {} !'.format(minport, maxport))
exit(1)
print('Client will start on : `0.0.0.0:{}`.'.format(clientPort))
service.clientPort = clientPort
# Config
conf = {'global': {
'server.socket_host': '0.0.0.0',
'server.socket_port': clientPort,
'engine.autoreload.on': False,
'log.screen': False
}
}
start_new_thread(keepalive, (service, ))
# Start !
cherrypy.quickstart(service, '/twisterclient/', config=conf)
#
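# Illustrative only (host, port and EP name below are assumptions): once this
# service is running, a Central Engine -- or any XML-RPC client -- can drive the
# exposed startEP / stopEP / restartEP methods, for example:
#
#   import xmlrpclib
#   client = xmlrpclib.ServerProxy('http://127.0.0.1:4444/twisterclient/')
#   client.startEP('EP-1001')
#   client.restartEP('EP-1001')
#   client.stopEP('EP-1001')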
|
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Basic object model for an Impala cluster. Basic model is an "Impala Service" which
# represents a collection of ImpalaD processes and a State Store service. The Impala
# service is associated with an ImpalaCluster which has information on all the hosts
# / machines in the cluster along with the different services available (currently
# only Impala).
# To authenticate remote operation over SSH set the IMPALA_SSH_PRIVATE_KEY,
# IMPALA_SSH_PRIVATE_KEY_PASSWORD (if applicable), and IMPALA_SSH_USER environment
# variables. If not set the current user and default keys will be used.
#
# Dependencies:
# paramiko - Used to perform remote SSH commands. To install run 'easy_install paramiko'
# cm_api - Used to interact with cluster environment - Visit cloudera.github.com/cm_api/
# for installation details.
import cmd
import logging
import time
import os
import sys
import json
import paramiko
import urllib
from collections import defaultdict
from cm_api.api_client import ApiResource
from datetime import datetime
from optparse import OptionParser
from paramiko import PKey
logging.basicConfig(level=logging.ERROR, format='%(threadName)s: %(message)s')
LOG = logging.getLogger('impala_cluster')
LOG.setLevel(level=logging.DEBUG)
# Environment variables that control how to execute commands on remote machines
IMPALA_SSH_PRIVATE_KEY = os.environ.get('IMPALA_PRIVATE_KEY', None)
IMPALA_SSH_PRIVATE_KEY_PASSWORD = os.environ.get('IMPALA_PRIVATE_KEY_PASSWORD', str())
IMPALA_SSH_USER = os.environ.get('IMPALA_SSH_USER', 'impala')
# Represents a set of Impala services, processes, and machines they are running on
class ImpalaCluster(object):
def __init__(self, cm_host, cm_cluster_name, username, password):
self.cm_api = ApiResource(cm_host, username=username, password=password)
self.hosts = dict()
self.services = list()
self.cluster = self.cm_api.get_cluster(cm_cluster_name)
if self.cluster is None:
raise RuntimeError, 'Cluster name "%s" not found' % cm_cluster_name
self.__load_hosts()
self.__impala_service = ImpalaService(self)
def _get_all_services(self):
return self.cluster.get_all_services()
def get_impala_service(self):
return self.__impala_service
def __load_hosts(self):
self.hosts = dict()
# Search for all hosts that are in the target cluster.
    # There is no API that provides the list of hosts in a given cluster, so to find them
# we must loop through all the hosts and check the cluster name matches.
for host_info in self.cm_api.get_all_hosts():
# host_info doesn't include a link to the roleRef so need to do another lookup
# based on the hostId.
host = self.cm_api.get_host(host_info.hostId)
      for roleRef in host.roleRefs:
        if roleRef.get('clusterName') == self.cluster.name:
          self.hosts[host_info.hostId] = Host(host)
          break
# Base class for Cluster service objects
class ClusterService(object):
def __init__(self):
pass
def start(self):
raise NotImplementedError, 'This method is NYI'
def stop(self):
raise NotImplementedError, 'This method is NYI'
def restart(self):
raise NotImplementedError, 'This method is NYI'
# Represents an Impala service - a set of ImpalaD processes and a statestore.
class ImpalaService(ClusterService):
def __init__(self, cluster):
self.__parent_cluster = cluster
self.__state_store_process = None
self.__impalad_processes = list()
self.__impala_service = self.__get_impala_service_internal()
if self.__impala_service is None:
raise RuntimeError, 'No Impala service found on cluster'
# For each service, CM has a set of roles. A role is a lightweight object
# that provides a link between a physical host machine and a logical service.
# Here that information is used to determine where all the impala processes
# are actually located (what machines).
for role in self.__impala_service.get_all_roles():
if 'STATESTORE' in role.name:
self.__state_store_process = ImpalaStateStoreProcess(self,
self.__parent_cluster.hosts[role.hostRef.hostId], role)
elif 'IMPALAD' in role.name:
self.__impalad_processes.append(ImpaladProcess(
self.__parent_cluster.hosts[role.hostRef.hostId], role))
else:
raise RuntimeError, 'Unknown Impala role type'
def get_state_store_process(self):
""" Returns the state store process """
return self.__state_store_process
def get_impalad_process(self, hostname):
""" Returns the impalad process running on the specified hostname """
return first(self.__impalad_processes,
lambda impalad: impalad.hostname == hostname)
def get_all_impalad_processes(self):
return self.__impalad_processes
def __get_impala_service_internal(self):
return first(self.__parent_cluster._get_all_services(),
lambda service: 'impala' in service.name)
def set_process_auto_restart_config(self, value):
""" Sets the process_auto_restart configuration value.
If set, Impala processes will automatically restart if the process dies
"""
self.__update_configuration('process_auto_restart', str(value).lower())
def __update_configuration(self, name, value):
for role in self.__impala_service.get_all_roles():
role.update_config({name: value})
LOG.debug('Updated Config Value: %s/%s' % (role.name, role.get_config()))
def start(self):
""" Starts all roles/processes of the service """
LOG.debug("Starting ImpalaService")
self.__impala_service.start()
self.__wait_for_service_state('STARTED')
def restart(self):
""" Restarts all roles/processes of the service """
LOG.debug("Restarting ImpalaService")
self.__impala_service.restart()
self.__wait_for_service_state('STARTED')
def stop(self):
""" Stops all roles/processes of the service """
LOG.debug("Stopping ImpalaService")
self.__impala_service.stop()
self.__wait_for_service_state('STOPPED')
def get_health_summary(self):
return self.__get_impala_service_internal().healthSummary
def state(self):
"""
Gets the current state of the service (a string value).
Possible values are STOPPED, STOPPING, STARTED, STARTING, UNKNOWN
"""
return self.__get_impala_service_internal().serviceState
def __wait_for_service_state(self, desired_state, timeout=0):
""" Waits for the service to reach the specified state within the given time(secs) """
current_state = self.state()
start_time = datetime.now()
while current_state.upper() != desired_state.upper():
LOG.debug('Current Impala Service State: %s Waiting For: %s' % (current_state,
desired_state))
      # Sleep for a bit to give the service time to reach the target state.
time.sleep(1)
# Get the current service state.
current_state = self.state()
if timeout != 0 and (datetime.now() - start_time).seconds > timeout:
raise RuntimeError, 'Did not reach desired state within %d seconds.' % timeout
# Represents one host/machine in the cluster.
class Host(object):
def __init__(self, cm_host):
self.cm_host = cm_host
self.hostname = cm_host.hostname
self.ssh_client = paramiko.SSHClient()
self.ssh_client.load_system_host_keys()
self.ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
def exec_cmd(self, cmd):
""" Executes a command on the machine using SSH """
    rsa_key = None
    # TODO: Support other key types besides RSA
    if IMPALA_SSH_PRIVATE_KEY is not None:
      rsa_key = paramiko.RSAKey.from_private_key_file(filename=IMPALA_SSH_PRIVATE_KEY,
          password=IMPALA_SSH_PRIVATE_KEY_PASSWORD)
    self.ssh_client.connect(hostname=self.hostname, username=IMPALA_SSH_USER, pkey=rsa_key)
    LOG.debug('Executing on host: %s Command: "%s"' % (self.hostname, cmd))
    stdin, stdout, stderr = self.ssh_client.exec_command(cmd)
stdout_str = stdout.read()
stderr_str = stderr.read()
if stdout_str: LOG.debug(stdout_str.strip())
if stderr_str: LOG.debug(stderr_str.strip())
stdout.close()
stderr.close()
self.ssh_client.close()
return stdout_str, stderr_str
# Represents a single process running on a machine
class Process(object):
def __init__(self, host, process_name):
self.name = process_name
self.host = host
self.hostname = host.hostname
self.__pid = None
def kill(self):
""" Kill the process if it is running, if not running this will be a no-op """
pid = self.get_pid()
if pid is not None and pid > 0:
self.host.exec_cmd('sudo kill -9 %d' % self.get_pid())
else:
LOG.debug('Skipping kill of pid: %s on host: %s' % (pid, self.hostname))
def get_pid(self):
""" Returns the process' current pid """
stdout, stderr = self.host.exec_cmd('sudo /sbin/pidof %s' % self.name)
pids = [pid.strip() for pid in stdout.split()]
# Note: This is initialized to -2 instead of -1 because 'kill -1' kills all processes.
self.__pid = -2
if len(pids) > 1:
raise RuntimeError, 'Error - %d PIDs detected. Expected 1' % len(pids)
elif len(pids) == 1:
self.__pid = int(pids[0])
return self.__pid
def is_running(self):
return self.get_pid() > 0
# Represents a single Impala statestore process
class ImpalaStateStoreProcess(Process):
def __init__(self, parent_service, host, cm_role, metrics_port=9190):
Process.__init__(self, host, 'impala-statestore');
self.metrics_port = metrics_port
self.role = cm_role
def get_impala_backend(self, hostname):
"""Returns the impala backend on the specified host."""
return first(self.get_live_impala_backends(),
lambda backend: backend.split(':')[0] == hostname)
def get_live_impala_backends(self):
"""Returns a list of host:be_port strings of live impalad instances."""
metrics_page = urllib.urlopen("http://%s:%d/jsonmetrics" %\
        (self.hostname, int(self.metrics_port)))
return json.loads(metrics_page.read())['statestore.live.backends.list']
def __str__(self):
return 'Name: %s Host: %s' % (self.name, self.hostname)
# Represents a single Impalad process
class ImpaladProcess(Process):
def __init__(self, host, cm_role, be_port=22000, beeswax_port=21000):
Process.__init__(self, host, 'impalad');
self.role = cm_role
self.host = host
self.be_port = be_port
self.beeswax_port = beeswax_port
self.__pid = None
def get_pid(self):
try:
self.__pid = super(ImpaladProcess, self).get_pid()
except RuntimeError, e:
# There could be multiple ImpalaD instances running on the same
# machine (local testing case). Fall back to this method for getting the pid.
LOG.info('Multiple PIDs found for Impalad service. Attempting to get PID based on '\
      'the be_port: %s', e)
stdout, stderr = self.host.exec_cmd(
'lsof -i:%d | awk \'{print $2}\' | tail -n 1' % self.be_port)
self.__pid = int(stdout) if stdout else -1
return self.__pid
def __str__(self):
return 'Name: %s, Host: %s BE Port: %d Beeswax Port: %d PID: %s'\
% (self.name, self.hostname, self.be_port, self.beeswax_port, self.__pid)
def first(collection, match_function):
""" Returns the first item in the collection that satisfies the match function """
return next((item for item in collection if match_function(item)), None)
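# Illustrative usage sketch (the CM host, cluster name and credentials below are
# assumptions, not part of the original module): connect to Cloudera Manager,
# look up the Impala service and report the state of its processes.
if __name__ == '__main__':
  cluster = ImpalaCluster('cm-host.example.com', 'Cluster 1 - CDH4',
                          username='admin', password='admin')
  impala = cluster.get_impala_service()
  LOG.info('Impala service state: %s', impala.state())
  LOG.info('Health summary: %s', impala.get_health_summary())
  for impalad in impala.get_all_impalad_processes():
    LOG.info('%s (running: %s)', impalad, impalad.is_running())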
|