| code (stringlengths 2–1.05M) | repo_name (stringlengths 5–104) | path (stringlengths 4–251) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int32, 2–1.05M) |
|---|---|---|---|---|---|
#!/usr/bin/env python
# Hua Shao <[email protected]>
import sys
import re
import random
l1conf = []
def parseconfig(conf):
global l1conf
l1conf.extend([[],[],[]])
with open(conf, "r") as fp:
for line in fp:
if line.startswith("CONFIG_first_card_"):
kv = line.split("=")
l1conf[0].append((kv[0][len("CONFIG_first_card_"):], kv[1].strip("\"\'\r\n\t")))
elif line.startswith("CONFIG_second_card_"):
kv = line.split("=")
l1conf[1].append((kv[0][len("CONFIG_second_card_"):], kv[1].strip("\"\'\r\n\t")))
elif line.startswith("CONFIG_third_card_"):
kv = line.split("=")
l1conf[2].append((kv[0][len("CONFIG_third_card_"):], kv[1].strip("\"\'\r\n\t")))
else:
continue
def validate():
global l1conf
d1 = dict(l1conf[0]) if len(l1conf) > 0 else {}
d2 = dict(l1conf[1]) if len(l1conf) > 1 else {}
d3 = dict(l1conf[2]) if len(l1conf) > 2 else {}
# make sure no empty value
for dx in [d1,d2,d3]:
for k,v in dx.items():
assert v
# make sure these configs are unique
for name in ["main_ifname", "ext_ifname", "wds_ifname",
"apcli_name", "mesh_ifname", "nvram_zone",
"profile_path"]:
if1 = d1.get(name, random.random())
if2 = d2.get(name, random.random())
if3 = d3.get(name, random.random())
assert len(set([if1, if2, if3])) == 3, "duplication found in "+name
# main_ifname should end with "0"
if1 = [ x.strip() for x in d1.get("main_ifname","").split(";") if x]
if2 = [ x.strip() for x in d2.get("main_ifname","").split(";") if x]
if3 = [ x.strip() for x in d3.get("main_ifname","").split(";") if x]
for each in if1:
assert not each or each.endswith("0"), "1st main_ifname {0} does not end with 0".format(each)
for each in if2:
assert not each or each.endswith("0"), "2nd main_ifname {0} does not end with 0".format(each)
for each in if3:
assert not each or each.endswith("0"), "3rd main_ifname {0} does not end with 0".format(each)
# main_ifname should start with ext_ifname
if1ext = [ x.strip() for x in d1.get("ext_ifname","").split(";") if x]
if2ext = [ x.strip() for x in d2.get("ext_ifname","").split(";") if x]
if3ext = [ x.strip() for x in d3.get("ext_ifname","").split(";") if x]
assert len(if1) == len(if1ext), "number of 1st main_ifname entries does not equal number of 1st ext_ifname entries"
assert len(if2) == len(if2ext), "number of 2nd main_ifname entries does not equal number of 2nd ext_ifname entries"
assert len(if3) == len(if3ext), "number of 3rd main_ifname entries does not equal number of 3rd ext_ifname entries"
for i,each in enumerate(if1ext):
assert if1[i].startswith(each), "1st main_ifname {0} does not start with its ext_ifname {1}".format(if1[i], each)
for i,each in enumerate(if2ext):
assert if2[i].startswith(each), "2nd main_ifname {0} does not start with its ext_ifname {1}".format(if2[i], each)
for i,each in enumerate(if3ext):
assert if3[i].startswith(each), "3rd main_ifname {0} does not start with its ext_ifname {1}".format(if3[i], each)
# An assertion failure, or returning any non-true Python value, will terminate the build procedure.
# If you need more validations, feel free to add your code below.
return True
def genfile(dest):
global l1conf
with open(dest, "w") as fp:
print("Default")
fp.write("Default\n")
for i,lst in enumerate(l1conf):
for (k,v) in lst:
if k == "name":
line = "INDEX{0}={1}".format(i, v)
else:
line = "INDEX{0}_{1}={2}".format(i, k, v)
print(line)
fp.write(line+"\n")
fp.write("\n") # extra line-end to make drivers happy
if __name__ == "__main__":
if len(sys.argv) < 3:
print("arguments missing!")
print("usage: make-l1profile.py <.config> <l1profile.dat>!")
sys.exit(-1)
conf = sys.argv[1]
dest = sys.argv[2]
parseconfig(conf)
if validate():
genfile(dest)
else:
print("something is wrong with your l1profile configurations!")
sys.exit(-1)
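# Illustration (hypothetical, not part of the original script): a .config with
#   CONFIG_first_card_name="MT7615"
#   CONFIG_first_card_main_ifname="ra0"
#   CONFIG_first_card_ext_ifname="ra"
# passes validate() ("ra0" ends with "0" and starts with "ra") and makes
# genfile() emit an l1profile.dat along the lines of:
#   Default
#   INDEX0=MT7615
#   INDEX0_main_ifname=ra0
#   INDEX0_ext_ifname=ra
# The option values above are invented; only the "INDEX{i}" / "INDEX{i}_{key}"
# layout is fixed by genfile().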
| jchuang1977/openwrt | package/lean/mt/drivers/wifi-l1profile/make-l1profile.py | Python | gpl-2.0 | 3,812 |
#!/usr/bin/env python
from distutils.core import setup, Extension
poppler_install_path = '/usr'
import multivio
setup(
name='multivio',
version=multivio.__version__,
description='Multivio server.',
long_description='''Multivio is a project...''',
license=multivio.__license__,
url='http://www.multivio.org',
ext_modules=[Extension('multivio/poppler/_mypoppler', ['multivio/poppler/mypoppler.i'],
swig_opts=['-c++', '-modern', '-I%s/include' % poppler_install_path],
extra_compile_args=['-I%s/include/poppler' % poppler_install_path],
extra_link_args=['-lpoppler'])],
py_modules=['multivio.poppler.mypoppler'],
packages=[
'multivio'
],
scripts=[
'tools/multivio_server.py', 'tools/mvo_config_example.py'
],
keywords=['multivio'],
classifiers=[
'Development Status :: Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Internal',
],
install_requires=[
'PIL>=1.1.7'
]
)
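# Build sketch (an assumption, not part of the original file): with SWIG and
# the poppler headers installed under /usr, the extension above would be built
# with the stock distutils commands, e.g.:
#   python setup.py build_ext
#   python setup.py install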
| jma/multivio_server | setup.py | Python | gpl-2.0 | 1,080 |
import glob, imp
import events
class EventManager:
def __init__(self):
self.Chat_Message_Event = events.ChatMessageEventHandler()
self.Player_Join_Event = events.PlayerJoinEventHandler()
self.Player_Leave_Event = events.PlayerLeaveEventHandler()
self.Player_Move_Event = events.PlayerMoveEventHandler()
self.Command_Event = events.CommandEventHandler()
self.Packet_Recv_Event = events.PacketRecvEventHandler()
class PluginManager:
def __init__(self, server):
self.plugins = {}
self.server = server
def load_plugins(self):
for plugin in glob.glob("plugins/*_plugin.py"):
plugin_name = plugin[8:-10]
self.plugins[plugin_name] = imp.load_source(plugin_name, plugin)
getattr(self.plugins[plugin_name],plugin_name)(self.server)
| benbaptist/pymine2 | plugin.py | Python | gpl-2.0 | 849 |
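# Minimal plugin sketch for the pymine2 loader above (hypothetical file
# plugins/hello_plugin.py): load_plugins() derives the name "hello" from the
# filename (plugin[8:-10] strips "plugins/" and "_plugin.py") and then
# instantiates the class of the same name with the server:
#
# class hello:
#     def __init__(self, server):
#         self.server = server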
#!/usr/bin/env python
# Change this stuff:
name='Light Blue Fun Timexxxx'
author='Travis Wells'
shortname='litebluebg' # out file will be shortname with a 'vxp' extension
color=(155,204,224) # Set to the Red,Green,Blue of the color you want.
uniqueid=0 # Set to 0 if you don't have one.
#Don't change this stuff:
#RGB2VXP: Converts BMP files to VXP expansions
#Copyright (C) 2004-2015 Foone Turing
#
#This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
import sys
sys.path.append('code')
import zipfile
import pygame
from pygame.constants import *
import lib3dmm
from simpleMBMP import MBMP
from urllib import urlopen
from struct import pack
from idgenerator import GenerateID
import sockgui
from error import SaveError
from time import time
version='0.2'
def CreateVXPExpansionFromColor(name,author,outfile,shortname,color,uniqueid,progress):
created_files=[]
try:
if name=='':
raise SaveError('No name')
if author=='':
raise SaveError('No author')
if shortname=='':
raise SaveError('No shortname')
try:
r,g,b=color
if r<0 or r>255:
raise SaveError('R channel out of bounds!')
if g<0 or g>255:
raise SaveError('G channel out of bounds!')
if b<0 or b>255:
raise SaveError('B channel out of bounds!')
except ValueError:
raise SaveError('Bad color')
if outfile=='':
raise SaveError('No outfile')
def SaveCFG(outzip):
cfg='Name=%s\nAuthor=%s\nOriginal Author=%s\nType=Portable\nContent=Backgrounds\nDate=%i\nGenerator=rgb2vxp %s\n' % (name,author,author,int(time()),version)
outzip.writestr(shortname+'.cfg',cfg)
progress()
def Save3CN(outzip):
bkgd=lib3dmm.Quad('BKGD',uniqueid,2)
bkgd.setData('\x01\x00\x03\x03\x0A\x5C\xF8\x77')
bkgd.setString(name)
bds=lib3dmm.Quad('BDS ',uniqueid)
bds.setData('\x01\x00\x03\x03\x00\x00\x01\x00\x01\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x44\x4E\x53\x4D\xFF\x4F\x00\x00')
cam=lib3dmm.Quad('CAM ',uniqueid)
cam.setData('\x01\x00\x03\x03\x00\x00\x01\x00\x00\x00\x88\x13\xbd\x16\x5f\x4e\x4a\xb7\x5a\x00\x00\x00\x00\x00\x23\x13\x11\x00\x27\x18\x00\x00\x00\x00\x00\x00\x2c\x01\xff\xff\x94\xfd\xff\xff\xf4\xff\x00\x00\xc5\xff\xff\xff\xcc\xfe\x00\x00\x6e\x02\x00\x00\x27\x18\x00\x00\x6c\x28\xa9\x00\xcf\xda\x15\x00\x94\xa8\x17\x00\xc8\xa0\x38\x00\x00\x00\x00\x00\xfb\xdb\x1f\x00')
mbmp=lib3dmm.Quad('MBMP',uniqueid)
mbmp.setDataFromFile('code/templates/rgbtemplate.MBMP')
zbmp=lib3dmm.Quad('ZBMP',uniqueid,4) # compressed
zbmp.setDataFromFile('code/templates/rgbtemplate.ZBMP')
gllt=lib3dmm.Quad('GLLT',uniqueid)
gllt.setData('\x01\x00\x03\x03\x38\x00\x00\x00\x02\x00\x00\x00\x24\xce\x00\x00\x00\x00\x00\x00\xbd\x97\x00\x00\xc9\x44\x00\x00\x26\xe4\x00\x00\x8c\xa2\xff\xff\xc3\x78\xff\xff\x0e\x74\x00\x00\xba\xb7\x00\x00\x1a\xa2\x38\x00\x33\xf2\x9a\x00\x06\x34\x5a\x00\x00\x00\x01\x00\x01\x00\x00\x00\xdb\x11\xff\xff\x00\x00\x00\x00\x27\xa2\xff\xff\x3a\xe0\xff\xff\xda\xf0\x00\x00\xa0\x50\x00\x00\x4d\x58\x00\x00\xac\x56\x00\x00\xef\x1f\xff\xff\x19\x21\x65\x02\xf2\x30\x71\x01\x44\x8b\xaa\xfb\x00\x00\x01\x00\x01\x00\x00\x00')
glcr=lib3dmm.Quad('GLCR',uniqueid)
glcrdata=str(open('code/templates/rgb_template.GLCR','rb').read())
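# Splice the requested color into the palette: pack('<3B', b, g, r) writes the
# three palette bytes in BGR order at offset 772 of the template GLCR data
# (the offset is taken as-is from the template; assumed to be the palette slot
# used for the background).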
glcrdata=glcrdata[0:772]+pack('<3B',b,g,r)+glcrdata[772+3:]
glcr.setData(glcrdata)
bkgd.addReference(bds,0)
bkgd.addReference(cam,0)
bkgd.addReference(glcr,0)
bkgd.addReference(gllt,0)
cam.addReference(mbmp,0)
cam.addReference(zbmp,0)
vxp3cn=lib3dmm.c3dmmFileOut()
vxp3cn.addQuad(bkgd)
vxp3cn.addQuad(bds)
vxp3cn.addQuad(cam)
vxp3cn.addQuad(mbmp)
vxp3cn.addQuad(zbmp)
vxp3cn.addQuad(glcr)
vxp3cn.addQuad(gllt)
progress()
outzip.writestr(shortname+'.3cn',vxp3cn.getData())
progress()
def CreateMBMP():
surf=pygame.Surface((128,72),SWSURFACE,palette_surf)
surf.fill(30)
surf.set_palette(palette_surf.get_palette())
font=sockgui.Font('code/font.png')
font.draw(surf,(2,2),'Custom color')
font.draw(surf,(2,2+8), 'Red: %i (%2.0f%%)' % (r,(r/255.0)*100))
font.draw(surf,(2,2+16),'Green: %i (%2.0f%%)' % (g,(g/255.0)*100))
font.draw(surf,(2,2+24),'Blue: %i (%2.0f%%)' % (b,(b/255.0)*100))
#stuff here.
return surf
def Save3TH(outzip):
bkth=lib3dmm.Quad('BKTH',uniqueid,mode=2)
bkth.setData(pack('<4B 4s L',1,0,3,3,'BKGD'[::-1],uniqueid))
cath=lib3dmm.Quad('CATH',uniqueid)
cath.setData(pack('<4B 4s L',1,0,3,3,'CAM '[::-1],0))
gokd=lib3dmm.Quad('GOKD',uniqueid)
gokd.setData('\x01\x00\x03\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFF\x61\xC3\x00\x00\xFF\xFF\xFF\xFF')
mbmp=lib3dmm.Quad('MBMP',uniqueid)
mbmpdata=MBMP()
mbmpdata.loadFromSurface(minisurf)
mbmp.setData(mbmpdata.getData())
bkth.addReference(cath,0)
bkth.addReference(gokd,0)
cath.addReference(gokd,0)
gokd.addReference(mbmp,65536)
vxp3th=lib3dmm.c3dmmFileOut()
vxp3th.addQuad(bkth)
vxp3th.addQuad(cath)
vxp3th.addQuad(gokd)
vxp3th.addQuad(mbmp)
progress()
outzip.writestr(shortname+'.3th',vxp3th.getData())
progress()
pygame.init()
palette_surf=pygame.image.load('code/palette.bmp')
if uniqueid is None or uniqueid==0:
uniqueid=GenerateID()
if uniqueid==0:
raise SaveError("Couldn't get ID (or id==0)")
minisurf=CreateMBMP()
progress()
created_files.append(outfile)
outvxp=zipfile.ZipFile(outfile,'w',zipfile.ZIP_DEFLATED)
SaveCFG(outvxp)
Save3CN(outvxp)
Save3TH(outvxp)
outvxp.close()
progress()
created_files=[] # Clear file list, so they won't be nuked
return True
finally:
try:
files=[]
dirs=[]
for file in created_files:
if os.path.isdir(file):
dirs.append(file)
else:
files.append(file)
for file in files:
try:
os.unlink(file)
except:
pass
for dirfile in dirs:
try:
os.rmdir(dirfile)
except OSError:
pass
except OSError:
pass
def NullProgress():
pass
if __name__=='__main__':
outfile=shortname+'.vxp'
CreateVXPExpansionFromColor(name,author,outfile,shortname,color,uniqueid,NullProgress)
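# Usage sketch (mirrors the __main__ block above; color and callback are
# hypothetical). Any zero-argument callable works as the progress hook:
#   steps = []
#   CreateVXPExpansionFromColor('My Color', 'Me', 'mycolor.vxp', 'mycolor',
#                               (12, 34, 56), 0, lambda: steps.append(1))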
| foone/7gen | bin/rgb2vxp.py | Python | gpl-2.0 | 6,990 |
#
# livecd.py: An anaconda backend to do an install from a live CD image
#
# The basic idea is that with a live CD, we already have an install
# and should be able to just copy those bits over to the disk. So we dd
# the image, move things to the "right" filesystem as needed, and then
# resize the rootfs to the size of its container.
#
# Copyright (C) 2007 Red Hat, Inc. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author(s): Jeremy Katz <[email protected]>
#
import os, sys
import stat
import shutil
import time
import subprocess
import storage
import selinux
from flags import flags
from constants import *
import gettext
_ = lambda x: gettext.ldgettext("anaconda", x)
import backend
import isys
import iutil
import packages
import logging
log = logging.getLogger("anaconda")
class Error(EnvironmentError):
pass
def copytree(src, dst, symlinks=False, preserveOwner=False,
preserveSelinux=False):
def tryChown(src, dest):
try:
os.chown(dest, os.stat(src)[stat.ST_UID], os.stat(src)[stat.ST_GID])
except OverflowError:
log.error("Could not set owner and group on file %s" % dest)
def trySetfilecon(src, dest):
try:
selinux.lsetfilecon(dest, selinux.lgetfilecon(src)[1])
except:
log.error("Could not set selinux context on file %s" % dest)
# copy of shutil.copytree which doesn't require dst to not exist
# and which also has options to preserve the owner and selinux contexts
names = os.listdir(src)
if not os.path.isdir(dst):
os.makedirs(dst)
errors = []
for name in names:
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
if preserveSelinux:
trySetfilecon(srcname, dstname)
elif os.path.isdir(srcname):
copytree(srcname, dstname, symlinks, preserveOwner, preserveSelinux)
else:
shutil.copyfile(srcname, dstname)
if preserveOwner:
tryChown(srcname, dstname)
if preserveSelinux:
trySetfilecon(srcname, dstname)
shutil.copystat(srcname, dstname)
except (IOError, os.error), why:
errors.append((srcname, dstname, str(why)))
# catch the Error from the recursive copytree so that we can
# continue with other files
except Error, err:
errors.extend(err.args[0])
try:
if preserveOwner:
tryChown(src, dst)
if preserveSelinux:
trySetfilecon(src, dst)
shutil.copystat(src, dst)
except OSError as e:
errors.append((src, dst, e.strerror))  # append one 3-tuple per failure
if errors:
raise Error, errors
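# Usage sketch for the helper above (paths are hypothetical): copy a tree,
# keeping symlinks, ownership and SELinux contexts; per-file failures are
# collected and raised as a single Error at the end:
#   copytree("/mnt/source/etc", "/mnt/sysimage/etc",
#            symlinks=True, preserveOwner=True, preserveSelinux=True)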
class LiveCDCopyBackend(backend.AnacondaBackend):
def __init__(self, anaconda):
backend.AnacondaBackend.__init__(self, anaconda)
flags.livecdInstall = True
self.supportsUpgrades = False
self.supportsPackageSelection = False
self.skipFormatRoot = True
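# methodstr is assumed to look like "livecd://dev/sr0"; slicing off the
# first 8 characters ("livecd:/") leaves the block device path.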
self.osimg = anaconda.methodstr[8:]
if not stat.S_ISBLK(os.stat(self.osimg)[stat.ST_MODE]):
anaconda.intf.messageWindow(_("Unable to find image"),
_("The given location isn't a valid %s "
"live CD to use as an installation source.")
%(productName,), type = "custom",
custom_icon="error",
custom_buttons=[_("Exit installer")])
sys.exit(0)
self.rootFsType = isys.readFSType(self.osimg)
def _getLiveBlockDevice(self):
return os.path.normpath(self.osimg)
def _getLiveSize(self):
def parseField(output, field):
for line in output.split("\n"):
if line.startswith(field + ":"):
return line[len(field) + 1:].strip()
raise KeyError("Failed to find field '%s' in output" % field)
output = subprocess.Popen(['/sbin/dumpe2fs', '-h', self.osimg],
stdout=subprocess.PIPE,
stderr=open('/dev/null', 'w')
).communicate()[0]
blkcnt = int(parseField(output, "Block count"))
blksize = int(parseField(output, "Block size"))
return blkcnt * blksize
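# For reference (hedged example, not captured output): dumpe2fs -h prints
# lines such as "Block count: 1048576" and "Block size: 4096";
# parseField() extracts both, and their product (1048576 * 4096 =
# 4294967296 bytes, i.e. 4 GiB) is the size returned here.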
def _getLiveSizeMB(self):
return self._getLiveSize() / 1048576
def _unmountNonFstabDirs(self, anaconda):
# unmount things that aren't listed in /etc/fstab. *sigh*
dirs = []
if flags.selinux:
dirs.append("/selinux")
for dir in dirs:
try:
isys.umount("%s/%s" %(anaconda.rootPath,dir), removeDir = False)
except Exception, e:
log.error("unable to unmount %s: %s" %(dir, e))
def postAction(self, anaconda):
self._unmountNonFstabDirs(anaconda)
try:
anaconda.id.storage.umountFilesystems(swapoff = False)
os.rmdir(anaconda.rootPath)
except Exception, e:
log.error("Unable to unmount filesystems: %s" % e)
def doPreInstall(self, anaconda):
if anaconda.dir == DISPATCH_BACK:
self._unmountNonFstabDirs(anaconda)
return
anaconda.id.storage.umountFilesystems(swapoff = False)
def doInstall(self, anaconda):
log.info("Preparing to install packages")
progress = anaconda.id.instProgress
progress.set_label(_("Copying live image to hard drive."))
progress.processEvents()
osimg = self._getLiveBlockDevice() # the real image
osfd = os.open(osimg, os.O_RDONLY)
rootDevice = anaconda.id.storage.rootDevice
rootDevice.setup()
rootfd = os.open(rootDevice.path, os.O_WRONLY)
readamt = 1024 * 1024 * 8 # 8 megs at a time
size = self._getLiveSize()
copied = 0
while copied < size:
try:
buf = os.read(osfd, readamt)
written = os.write(rootfd, buf)
except:
rc = anaconda.intf.messageWindow(_("Error"),
_("There was an error installing the live image to "
"your hard drive. This could be due to bad media. "
"Please verify your installation media.\n\nIf you "
"exit, your system will be left in an inconsistent "
"state that will require reinstallation."),
type="custom", custom_icon="error",
custom_buttons=[_("_Exit installer"), _("_Retry")])
if rc == 0:
sys.exit(0)
else:
os.lseek(osfd, 0, 0)
os.lseek(rootfd, 0, 0)
copied = 0
continue
if (written < readamt) and (written < len(buf)):
raise RuntimeError, "error copying filesystem!"
copied += written
progress.set_fraction(pct = copied / float(size))
progress.processEvents()
os.close(osfd)
os.close(rootfd)
anaconda.id.instProgress = None
def _doFilesystemMangling(self, anaconda):
log.info("doing post-install fs mangling")
wait = anaconda.intf.waitWindow(_("Post-Installation"),
_("Performing post-installation filesystem changes. This may take several minutes."))
# resize rootfs first, since it is 100% full due to genMinInstDelta
self._resizeRootfs(anaconda, wait)
# remount filesystems
anaconda.id.storage.mountFilesystems()
# restore the label of / to what we think it is
rootDevice = anaconda.id.storage.rootDevice
rootDevice.setup()
# ensure we have a random UUID on the rootfs
# FIXME: this should be abstracted per filesystem type
iutil.execWithRedirect("tune2fs",
["-U",
"random",
rootDevice.path],
stdout="/dev/tty5",
stderr="/dev/tty5")
# and now set the uuid in the storage layer
rootDevice.updateSysfsPath()
iutil.notify_kernel("/sys%s" %rootDevice.sysfsPath)
storage.udev.udev_settle()
rootDevice.updateSysfsPath()
info = storage.udev.udev_get_block_device(rootDevice.sysfsPath)
rootDevice.format.uuid = storage.udev.udev_device_get_uuid(info)
log.info("reset the rootdev (%s) to have a uuid of %s" %(rootDevice.sysfsPath, rootDevice.format.uuid))
# for any filesystem that's _not_ on the root, we need to handle
# moving the bits from the livecd -> the real filesystems.
# this is pretty distasteful, but should work with things like
# having a separate /usr/local
def _setupFilesystems(mounts, chroot="", teardown=False):
""" Setup or teardown all filesystems except for "/" """
mountpoints = sorted(mounts.keys(),
reverse=teardown is True)
if teardown:
method = "teardown"
kwargs = {}
else:
method = "setup"
kwargs = {"chroot": chroot}
mountpoints.remove("/")
for mountpoint in mountpoints:
device = mounts[mountpoint]
getattr(device.format, method)(**kwargs)
# Start by sorting the mountpoints in decreasing-depth order.
mountpoints = sorted(anaconda.id.storage.mountpoints.keys(),
reverse=True)
# We don't want to copy the root filesystem.
mountpoints.remove("/")
stats = {} # mountpoint: posix.stat_result
# unmount the filesystems, except for /
_setupFilesystems(anaconda.id.storage.mountpoints, teardown=True)
# mount all of the filesystems under /mnt so we can copy in content
_setupFilesystems(anaconda.id.storage.mountpoints,
chroot=anaconda.rootPath + "/mnt")
# And now let's do the real copies
for tocopy in mountpoints:
device = anaconda.id.storage.mountpoints[tocopy]
# FIXME: all calls to wait.refresh() are kind of a hack... we
# should do better about not doing blocking things in the
# main thread. but threading anaconda is a job for another
# time.
wait.refresh()
if not os.path.exists("%s/%s" % (anaconda.rootPath, tocopy)):
# the directory does not exist in the live image, so there's
# nothing to move
continue
copytree("%s/%s" % (anaconda.rootPath, tocopy),
"%s/mnt/%s" % (anaconda.rootPath, tocopy),
True, True, flags.selinux)
wait.refresh()
shutil.rmtree("%s/%s" % (anaconda.rootPath, tocopy))
wait.refresh()
# now unmount each fs, collect stat info for the mountpoint, then
# remove the entire tree containing the mountpoint
for tocopy in mountpoints:
device = anaconda.id.storage.mountpoints[tocopy]
device.format.teardown()
if not os.path.exists("%s/%s" % (anaconda.rootPath, tocopy)):
continue
try:
stats[tocopy]= os.stat("%s/mnt/%s" % (anaconda.rootPath,
tocopy))
except Exception as e:
log.info("failed to get stat info for mountpoint %s: %s"
% (tocopy, e))
shutil.rmtree("%s/mnt/%s" % (anaconda.rootPath,
tocopy.split("/")[1]))
wait.refresh()
# now mount all of the filesystems so that post-install writes end
# up where they're supposed to end up
_setupFilesystems(anaconda.id.storage.mountpoints,
chroot=anaconda.rootPath)
# restore stat info for each mountpoint
for mountpoint in reversed(mountpoints):
if mountpoint not in stats:
# there's no info to restore since the mountpoint did not
# exist in the live image
continue
dest = "%s/%s" % (anaconda.rootPath, mountpoint)
st = stats[mountpoint]
# restore the correct stat info for this mountpoint
os.utime(dest, (st.st_atime, st.st_mtime))
os.chown(dest, st.st_uid, st.st_gid)
os.chmod(dest, stat.S_IMODE(st.st_mode))
# ensure that non-fstab filesystems are mounted in the chroot
if flags.selinux:
try:
isys.mount("/selinux", anaconda.rootPath + "/selinux", "selinuxfs")
except Exception, e:
log.error("error mounting selinuxfs: %s" %(e,))
wait.pop()
def _resizeRootfs(self, anaconda, win = None):
log.info("going to do resize")
rootDevice = anaconda.id.storage.rootDevice
# FIXME: we'd like to have progress here to give an idea of
# how long it will take. or at least, to give an indefinite
# progress window. but, not for this time
cmd = ["resize2fs", rootDevice.path, "-p"]
out = open("/dev/tty5", "w")
proc = subprocess.Popen(cmd, stdout=out, stderr=out)
rc = proc.poll()
while rc is None:
win and win.refresh()
time.sleep(0.5)
rc = proc.poll()
if rc:
log.error("error running resize2fs; leaving filesystem as is")
return
# we should also do a fsck afterwards
cmd = ["e2fsck", "-f", "-y", rootDevice.path]
out = open("/dev/tty5", "w")
proc = subprocess.Popen(cmd, stdout=out, stderr=out)
rc = proc.poll()
while rc is None:
win and win.refresh()
time.sleep(0.5)
rc = proc.poll()
def doPostInstall(self, anaconda):
import rpm
self._doFilesystemMangling(anaconda)
# setup /etc/rpm/ for the post-install environment
iutil.writeRpmPlatform(anaconda.rootPath)
storage.writeEscrowPackets(anaconda)
packages.rpmSetupGraphicalSystem(anaconda)
# now write out the "real" fstab and mtab
anaconda.id.storage.write(anaconda.rootPath)
f = open(anaconda.rootPath + "/etc/mtab", "w+")
f.write(anaconda.id.storage.mtab)
f.close()
# copy over the modprobe.conf
if os.path.exists("/etc/modprobe.conf"):
shutil.copyfile("/etc/modprobe.conf",
anaconda.rootPath + "/etc/modprobe.conf")
# set the same keyboard the user selected in the keyboard dialog:
anaconda.id.keyboard.write(anaconda.rootPath)
# rebuild the initrd(s)
vers = self.kernelVersionList(anaconda.rootPath)
for (n, arch, tag) in vers:
packages.recreateInitrd(n, anaconda.rootPath)
def writeConfiguration(self):
pass
def kernelVersionList(self, rootPath = "/"):
return packages.rpmKernelVersionList(rootPath)
def getMinimumSizeMB(self, part):
if part == "/":
return self._getLiveSizeMB()
return 0
def doBackendSetup(self, anaconda):
# ensure there's enough space on the rootfs
# FIXME: really, this should be in the general sanity checking, but
# trying to weave that in is a little tricky at present.
ossize = self._getLiveSizeMB()
slash = anaconda.id.storage.rootDevice
if slash.size < ossize:
rc = anaconda.intf.messageWindow(_("Error"),
_("The root filesystem you created is "
"not large enough for this live "
"image (%.2f MB required).") % ossize,
type = "custom",
custom_icon = "error",
custom_buttons=[_("_Back"),
_("_Exit installer")])
if rc == 0:
return DISPATCH_BACK
else:
sys.exit(1)
# package/group selection doesn't apply for this backend
def groupExists(self, group):
pass
def selectGroup(self, group, *args):
pass
def deselectGroup(self, group, *args):
pass
def selectPackage(self, pkg, *args):
pass
def deselectPackage(self, pkg, *args):
pass
def packageExists(self, pkg):
return True
def getDefaultGroups(self, anaconda):
return []
def writePackagesKS(self, f, anaconda):
pass
| icomfort/anaconda | livecd.py | Python | gpl-2.0 | 17,872 |
#----------------------------------------------------------------------
# This file was generated by D:\personal\src\airs\gui\images\make_images.py
#
from wx import ImageFromStream, BitmapFromImage, EmptyIcon
import cStringIO, zlib
def getData():
return zlib.decompress(
'x\xda\x01}\x08\x82\xf7\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00 \x00\
\x00\x00 \x08\x02\x00\x00\x00\xfc\x18\xed\xa3\x00\x00\x00\x03sBIT\x08\x08\
\x08\xdb\xe1O\xe0\x00\x00\x085IDATH\x89\xb5\x96Yl]\xc5\x19\xc7\xbfof\xcev7__\
\xdb\xb1\xe3$\xc4v\x12\x9b$\x84%A\xa4@ )\x15 RA\xd5REm"\xb5Om\xc53\x0f\x11\
\x94\x87\xb6\xf0\x84\xd4V\x15\xea\x13\x15*U[$JS!\x9a\xb0\x95%\nMBq\x12\x02Y\
\x1c\'\xd8\x8e\xe3\xf5\xda\xbe\x8b\xcf\xbd\xe7\xdesf\xe6\xfb\xfa`7A4]\x1e\
\xda\xd1\xd1\x91\xce<\xfc\x7f\xe7\x9b\xf9\x96?\xc0\xffy!\x000\xf3\x17v\x97\
\xbe\xe9z\x9b\x02@\xfc\xf7\xea\x88\xea\x9f\xa5kI\xc2\x9c\xd4)\xed\x00\x8d\
\x85r\xaaX\x8e\xc1I\xb4\r\xa4\xe9(\xb4\xb4\xca\xb85\x9b\xce\xa6\xd9\xb1\xc6W\
\x12\x18\x01\x11\x99\x01\xaf\xfe\xc0\x7f\x8a\xa0X\t\xcb\xc6\xadYw\xa6\xbc\
\xf8\xe9\xe5R\x02\xc1\xba<\x80R\x1a\x9dOG\xcb@IK6\xeb+\x1e\xe8l\xd9\xde\xeb\
\xe7}\xe9C\xa3\x8eA\x80\x86\x99\xa5t\xbe\x10\x01\x02\x00\xd1\xb5\xc3\xb0D\
\xe35\xfc\xdb\xf0|kZ\xdd\xb9\xb1\xe038@\x04\x82\x050\xd3T\xb9Q\xc8\xa7\xafT\
\xcc\xd9+Q\xb9iJss\xdb\x06Vn\xe9\xc9\xad\x101\n\x05\x00\x88\xf2\xfa\x00d\x00\
\xb4\x9a`\xce\xcawOM\xed\xd9\xde\xed0\x18\x06\t\x80\x88\x04$P\xb0\xd1\xf3,3\
\x0c \x12\x17=\x16xv\xbc\x8aY\xf1\xeaqs\xc7f\xf7+k\x95C\x1e\x83AD`\x04!\x18@\
|>\x02\x06\x9a\xab\xd3\xc2b\xb8\xae\xab \xd8\xd4\x1b:\x93\t\x80A\x81%\x14\
\x86 \xd1\xb1\xf2\xfc\xf1\xe9\xb9\xbe\x95\x1d\x00\x86\x19\x08P\xc7\xb2\x04\
\x8d\xc1s\x10j\xf1\xe8m\x148\x1e\x00 \xa0@\xbc\x06\x88u\x82\x001\xa8j%\\\xd9\
\xe67\xb5\xf2\x1c\xa8[\xb107\x1b\xe4:2\x8a\x84\xc0\x18\x05H\xa8W\x9aN\xe08 \
\xf3>W\x1a\xdc\xeaK\xb6u\x13\xbb\x91\xd0\xdaM\x1d<\xbe\xb0g{[\x8a\xaa\x84\
\x1eJ\x85B!\xa2X:)!\xd5\xec|\xa9\xb3-\xa7\xc1\xf1\x15\x1a"\x8b\xa0\xbc\xcc\
\x9bGO\x96\x12;\x1f\xd1l\x84?~\xf6\xc5\x8b\xd33\x89\xc1\xa1+\x13c\x97\'\xdf;\
r\xd8\xa2Y(i\xed\xc8<\xb2[\xaa>\xba5\xfd\xc6\xe0\xa4\x15\x19@F& ^\xce"m(\xd6\
\x16\\\x91B\x11\x1b=\x1f&\x85 x\xeb\xd4\xd9b\x83\x8e|2\x02dX\xfag\x86\xce\
\xa6|\xaf\xd9\x8c\x84\x90\xdf~\xe4\xa1\xae\\:\x9d\xcco\x1a\xe8\x93\xd9\x15\
\x19[\t\x90\x1d?%P\x08!\xa7\xcb\xd1\xea\xb6\x14\xb2E\x90(\xe52@ \x83@dH\x08\
\xc2\x84N\x8e\x17\xdf\x1a,\xbe}\xe4\xf5X\'l\x9b&n:\xae\xab\x04Z\xc3\x04\xe0\
\x05\xbeR\xce\x96\xb5=Q#i\xf3y\xff\xde\x072]\xab|N\\\xa5\x12\x92\x19\xa9\xb5\
%\x04p\x1co\xb9\xd0\x88\x08\x15Z\xc3\x86axf\xb1\xcc\x99_\xfd\xf9\xf4\xf0\xf8\
9\xa1\x1c\x1b\xd7\x95\x9bJg\xf2V\x1b\xcfu\xc2\xb0B:J\x12L\xa2\xc5\xc1\xa1z\
\x90\xcbl\xdc\xf5\xd5\x8bU\xc7\\\x1e|h\xc7V&]\xad\'NK\xe0\xa9\xc4\xb2$\xfeG\
\xa1\x11\x91\x052V\x87\xec\x1d\x19n\xfe\xe6\x8d\x0f\xe7\xc2\xe2\x9e/\xef|\
\xef\xf8`M\xd7\xd6toj\xef\xee\xbb\x7fSP\xab,\xfc\xee\xd0\x07\x1a\xe5\xe8\xa5\
c\xf7=\xf0\xdd\xfa\xc4\xc7G\xce\x8d\xa7m\x1d<\xef\xf7O?\xd1S\xd0\xd6\x08$\
\xfa\xac\x94\x0c\xb49(\x1d\x04r\x1cG\x02\xc0\x93O\xfd\x10\xc9V\xac\xf7\xdc\
\xebcc\x95\xc4\x94G\xbf\xf3\xe0\xdd\xeb\xba:\x07G\x16Vu\xdfz\xcbM\xbdI\xadt_\
\x7fvkWj\xc7Mk+"7<:s\xe9\xe2\'/>\xf5\x83\xae\xaeU\xc7\xce\x0eY\xabO\\\x9a\
\xb1\x99\x8e\xac\n\xda\xf3\x8e\x16\x9e\xe3HG\x01\xb1y\xe6\xe9g\x04\x00\x80P%\
\xe3\xbd\xf2q\xb5\x86\xb9Zq\xe4\xa7\xdf\x7fx\xf7M\xad\'Gf7\xdd\xd8\xd7\xdb\
\xed\xe7e3Ld\x06J\x11.\xb6\xe7\xd4\xbd\xfd\xed\xdf\xfc\xc6\xbe\xde\x1b\xd6\
\xcf/\x9a\xafmY\xf1\xeb\x9f<\xbevU\xef\x95\xd1\x0b/\xbf\xfe\xce\x9f>\x1a9qn$\
\'\xec\xfb\x83\x17\x88\x91\x85\xb7\xdc\x9b\x8c\xc1\xf1\x90\xa7jN\x80\xd1\x8f\
\xf6\xdd\xe3\t\r\xe8L,\x9a/\xad\xf6\xfbs4\xb9`_}\xf9\xd9o=\xf9|bZ\xa4\xad\
\xe5\x82\xd4\x89\x13\x7f\xdd|\xf3\xbd\xc3\xc5(P\xb8>\x17\xef\xdf\xf3\x90\xdb\
\xde33=v\xf0\xed\x83\xc7\xc6\x9aC\x0b\xd5;\xb7\x0e,\xc6<1\x1f.\x03\xa6\xc3\
\xc6\xe0Hxk\x07=\xbe{MZZ\xe58,\x84\xc7\x8dl\xce\xef\xca\xc6\x0b\xd1\xa25\xf5\
\xcb\xb3\xc5\x9d\xdf\xdb\xff\xc6P\xf5\x97/\x1d\x8c\x9a\xb2oM>\x8cb@!\xd9\xdb\
\xb6.\xf5\xd8\xee]\xd9\x8e\xeez\x14\xbe\xff\xe9\xe9\xb7\x8f\x9e.\xc5fdb\xba\
\xa7=\x00\x00\x05\x00\x1f\x9c\xab\xec\xdd\xde\x99R\x82L\x13\xa52,\x909Q\x99\
\xf1\xf1\xf9\x1d\x03\x9d7d\xe7\x10eb-4\xea\xbf8\xf0\xd1\xe6\x9b\xb7\xedlo\
\xcd\n\x9d\x90\x95\x82C+}0w\xf6w\x1e8\x9e\xbfq\xe76\xdd(\x03\xb6f9Y\xdd\xd3\
\x19&\xbc\x0c\xb8e\xad\xef\xb9B\x11\x1b\xd7g\x006\x86Xn^\xe1\xb5z\x8b\xc5R\
\xf1\xaeu\xc1\xcf\xa4/H\x83l\xdd\xb5}{gg\xc7\x86U\xa9\x16\x9fkUPJ\xb6\xa8D\
\x90\xd7\xb7\xc2\xee\xde\xb1sxN\x14\x1b\xa2pC\xef\xa1\x0f\xa7|\x0c\xef\xda\
\xbaq9M\r\xd9k\x1d\x96\x98\x99I\xaacWL)lni\xd5\xd9|\xcbl\xa8\x9f\xf8\xf9\xf3\
\xb7\xdf\xb5\xf3\xc1\xbb\xb7\xccLU\x04\xe9\xe3\x97\xaa\xfb\x1f^\x9f\x92$\xc0\
0\x03\x00^\x98\x8de&\xfd\xce\xa9\xca\xcc\xe8\xe0\xdeGv\x16\x027\x0c\xcb\x03]\
\x05\x00\x00\xcbt\xf5aKl\x89\xc8\x84\xc4O\x1c\x9a\xab3OW\xa2D\'\x91\xb1\x93\
\xd5\xc6\x0b\x87G\x1b\xf5\xe8\xc0\xf9\xc6c/\x9c\xd4\xd6\x92\xd5D\x86\xc8Zk\
\xe7k\x91\x8d\xa3\x99Z\xa3\x94\xf0\xe9\xc9\xda\xd0\xd4\xa2\xa5x\xf9\x92??\
\xd1\xac\x00\x12\xc8\xcciS\xcb\xb8\xf8\xeepR\'GK\xa7\xdc\x04\xa5\x9c\x1d\xeb\
\x0b\x15\xe9\x0e\x0eW\xf7\xee\xeaC\x14\xb8<\xa7\x99\x99s\xa9\x00P\x15\\\xe1S\
\xbc\xb1\xc3\xe9k\xf3\xad\x85k\x00c\x8c\xd6Z\x1b39]\xbc0Y\xadjS\xb3\xe9}\xb7\
\xb7\x1d:\x1fvfT14Q-\xce\xf9qG\xbb\x13\x01NN]\xb9\xb5\xcbK\x8cI\xac\xa8\'\
\xb6\x99\x18\x00hh\x18[h&\xd2\xb5\xd2\x8d\x12\xab\x01\xa2\xd8\\k\x15\x8c\x00\
\x0cL\xc4\x8cu)^{\xf7\xfc\xea\xee\xfc\x1d\xeb;\x87\xa6\xea\xaf|R\xbf{c\xfb==\
\xea\xe3\xcf\xaaSq0\xf4\xd9\xe8\xfe\xaf\xf7\xbb\xd6"\x82\x00\x04\x92M\x11\
\xcf\x97\xe4\xe1\xf3\x13\xab\x0b\xa9\xdb\x06\xdaQ\x1bO\xb1T\x92\x88\\\xe5!\
\x00011\x01\x80% \xab\x19\xac\xa6\xd4\x99bu<t\xd7\xb5\xca\xf5yH;\x96UP\x11\
\xa0\x00|h:\x89\xdfd\n\x1b\xb1\xf0\x82\xc8r9a\xc50=\xb5\xb8mC^\x99f\xe0KA\
\x84(\x84\x10J)\xb1\xd4\xf1\x84\x10\x88\xa8$\xba\xae\xeb\xbaA\xda\xa3\xf5\
\xdd-w\xac\xf5\xcf]\x1c}\xe1\xf0\x85K\xb3\xf5\xa3CW\x86\xce\x17\x0f\xfc\xe5\
\x0c\x93\x7f\xa9\xdcx\xee\xa57\xdf9=V\xd1\xf1\x1f\x8fNv\xa4\xc5o\xff\xf0Z\
\x0e\xe6L\xbd\x92\t\x84\x00\x92\x8e\x04\xc1\x96\xcdr\x1d\\u\x00W\x9d\x8c%H#7\
L\xb4v\xcdJ\x9cK\xda:\n\xb3\xb5\xf2\xe13\xa3\xab\x0b~\xa4yt\xaa\xa42\xad=]\
\x1d&q\xf7\xed\xe8j\x87\xf8\xf1=\xf7\xb9\xd9\xac\x0b$\x04Yk\x97\x86\xfc\x92\
\xe0\x17\x8d\xd7\xd2\x92\x12}\xa25y\xc7\x97rCWN\x87\x95\x89j\xbc\xbd\x7fUog\
\xcaSX\x9d\x9fx\xf4\xfem+\xb2\xce\xa9\x0b\xd3\xbd\xfd]\xc4\xb2\x90u\x98\x9b\
\x8e\x14\x89\xb6B\x88\xa5\xb4\\\x02\\\xdf:\x02\x03\x01$\x96\x13c\x08(\x90\
\xa4I\x18\xe19\n\x88A\x01\x08\xa2JX\x1f\x9b)\xdf\xb2a\x85@G\x01\xd4\x8d\xf6\
\\W\xb2]\x12D\x81\xcc\xecH\xef_\x00`\xb9\x00\x97\x1d\rJf\x16B\x10\x91\x14(\
\x90\x98\x91Y\xd4\xa2F\xc6\x97K\xf7\xb7$\xc2\xcc\xb8l%\x18\x00\x04\xba\xff\
\x0e@\xc0\x9f\x07(\xb0\x88\x08\xc0\x80\x0c,\x08\xd0\x10\xb8\xe2\x9a\xee\xd2\
\x1b\x00\x00\xf9*\xe0\xba\xca\xff\xcb\xf5wR\xce\xc6"sn\x01\xbe\x00\x00\x00\
\x00IEND\xaeB`\x82u\x8c&\x9d' )
def getBitmap():
return BitmapFromImage(getImage())
def getImage():
stream = cStringIO.StringIO(getData())
return ImageFromStream(stream)
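# Typical use (assumption: a wx application object must exist before bitmaps
# can be created):
#   app = wx.App()
#   bmp = getBitmap()  # inflates the embedded PNG and decodes it to a bitmap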
| jorgb/airs | gui/images/icon_about.py | Python | gpl-2.0 | 7,076 |
# -*- coding: utf-8 -*-
"""
-------------------------------------
N A C S P Y T H O N S C R I P T
-------------------------------------
NACS version: 2.0.2745 - pre3
NACS architecture: CENTOS 5.11 (X86_64)
File generated at Tue Jan 20 16:55:05 2015
On host 'lse86' by 'cae42'
"""
from __future__ import division
try:
from nacs.scripting import *
except:
raise Exception("File is only executable in the NACS python interpreter!")
# =================
# NACS SIMULATION
# =================
simulation = NacsSimulation()
simulation.setGrid(u'project3.nmf', 'plane')
simulation.addOutput(Output.Nacs())
text = Output.Text()
simulation.addOutput(text)
simulation.addOutput(Output.GiD())
# =====================
# MATERIAL DEFINITION
# =====================
copper = Material('Copper')
copper.density(8940.0)
copper.lossTangensDelta([1000],[0.002])
copper.stiffness.isotropic.byENu(1.15e+11, 0.35)
steel = Material('Steel')
steel.density(6280)
steel.lossTangensDelta([1000],[0.0003])
steel.stiffness.isotropic.byENu(1.95e+11, 0.28)
silicon = Material('Silicon')
silicon.density(2300.0)
silicon.stiffness.isotropic.byENu(67500000000.0, 0.1)
simulation.setMat('exc_f_r', copper)
simulation.setMat('rec_f_r', copper)
simulation.setMat('sen_coat_r', steel)
simulation.setMat('silicon_r', silicon)
# ===============
# ANALYSIS STEP
# ===============
trans1 = Analysis.Transient()
trans1.set(3.6417e-11, None, 300, False)
mech1 = Physic.Mechanic('planeStrain')
mech1.addRegions(['exc_f_r', 'sen_coat_r', 'silicon_r', 'rec_f_r'])
mech1.addBc(mech1.BC.Force.expr('exc_f_r', 'y', "-1000*sinBurst(1.3730e+09, 5, 1, 1, t)"))
mech1.addBc(mech1.BC.Fix('outerbounds_bot', ['x', 'y']))
mech1.addResult(mech1.Result.Displacement(['exc_f_r', 'rec_f_r', 'sen_coat_r', 'silicon_r']))
mech1.addResult(mech1.Result.Displacement(['observer_point_1', 'observer_point_2', 'observer_point_3', 'observer_point_4', 'observer_point_e4'], 'amplPhase', 'mesh', [text]))
trans1.addPhysic(mech1)
simulation.addAnalysis(trans1)
| cosailer/caeproject | simulation_result/3/project3.py | Python | gpl-2.0 | 2,019 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Smile (<http://www.smile.fr>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import ir_translation
import update
| 3dfxsoftware/cbss-addons | smile_base/__init__.py | Python | gpl-2.0 | 1,012 |
#!/usr/bin/python
import unittest, tempfile, os, glob, logging
try:
import autotest.common as common
except ImportError:
import common
from autotest.client.shared import xml_utils, ElementTree
class xml_test_data(unittest.TestCase):
def get_tmp_files(self, prefix, sufix):
path_string = os.path.join('/tmp', "%s*%s" % (prefix, sufix))
return glob.glob(path_string)
def setUp(self):
# Previous testing may have failed / left behind extra files
for filename in self.get_tmp_files(xml_utils.TMPPFX, xml_utils.TMPSFX):
os.unlink(filename)
for filename in self.get_tmp_files(xml_utils.TMPPFX,
xml_utils.TMPSFX + xml_utils.EXSFX):
os.unlink(filename)
# Compacted to save excess scrolling
self.TEXT_REPLACE_KEY="TEST_XML_TEXT_REPLACE"
self.XMLSTR="""<?xml version='1.0' encoding='UTF-8'?><capabilities><host>
<uuid>4d515db1-9adc-477d-8195-f817681e72e6</uuid><cpu><arch>x86_64</arch>
<model>Westmere</model><vendor>Intel</vendor><topology sockets='1'
cores='2' threads='2'/><feature name='rdtscp'/><feature name='x2apic'/>
<feature name='xtpr'/><feature name='tm2'/><feature name='est'/>
<feature name='vmx'/><feature name='ds_cpl'/><feature name='monitor'/>
<feature name='pbe'/><feature name='tm'/><feature name='ht'/><feature
name='ss'/><feature name='acpi'/><feature name='ds'/><feature
name='vme'/></cpu><migration_features><live/><uri_transports>
<uri_transport>tcp</uri_transport></uri_transports>
</migration_features><topology><cells num='1'><cell id='0'><cpus
num='4'><cpu id='0'/><cpu id='1'/><cpu id='2'/><cpu id='3'/></cpus>
</cell></cells></topology><secmodel><model>selinux</model><doi>0</doi>
</secmodel></host><guest><os_type>hvm</os_type><arch name='i686'>
<wordsize>32</wordsize><emulator>$TEST_XML_TEXT_REPLACE</emulator>
<machine>rhel6.2.0</machine><machine canonical='rhel6.2.0'>pc</machine>
<machine>rhel6.1.0</machine><machine>rhel6.0.0</machine><machine>
rhel5.5.0</machine><machine>rhel5.4.4</machine><machine>rhel5.4.0
</machine><domain type='qemu'></domain><domain type='kvm'><emulator>
/usr/libexec/qemu-kvm</emulator></domain></arch><features><cpuselection
/><deviceboot/><pae/><nonpae/><acpi default='on' toggle='yes'/><apic
default='on' toggle='no'/></features></guest></capabilities>"""
(fd, self.XMLFILE) = tempfile.mkstemp(suffix=xml_utils.TMPSFX,
prefix=xml_utils.TMPPFX)
os.write(fd, self.XMLSTR)
os.close(fd)
self.canonicalize_test_xml()
def tearDown(self):
os.unlink(self.XMLFILE)
leftovers = self.get_tmp_files(xml_utils.TMPPFX, xml_utils.TMPSFX)
if len(leftovers) > 0:
self.fail('Leftover files: %s' % str(leftovers))
def canonicalize_test_xml(self):
et = ElementTree.parse(self.XMLFILE)
et.write(self.XMLFILE, encoding="UTF-8")
f = file(self.XMLFILE)
self.XMLSTR = f.read()
f.close()
class test_ElementTree(xml_test_data):
def test_bundled_elementtree(self):
self.assertEqual(xml_utils.ElementTree.VERSION, ElementTree.VERSION)
class test_TempXMLFile(xml_test_data):
def test_prefix_sufix(self):
filename = os.path.basename(self.XMLFILE)
self.assert_(filename.startswith(xml_utils.TMPPFX))
self.assert_(filename.endswith(xml_utils.TMPSFX))
def test_test_TempXMLFile_canread(self):
tmpf = xml_utils.TempXMLFile()
tmpf.write(self.XMLSTR)
tmpf.seek(0)
stuff = tmpf.read()
self.assertEqual(stuff, self.XMLSTR)
del tmpf
def test_TempXMLFile_implicit(self):
def out_of_scope_tempxmlfile():
tmpf = xml_utils.TempXMLFile()
return tmpf.name
self.assertRaises(OSError, os.stat, out_of_scope_tempxmlfile())
def test_TempXMLFile_explicit(self):
tmpf = xml_utils.TempXMLFile()
tmpf_name = tmpf.name
# Assert this does NOT raise an exception
os.stat(tmpf_name)
del tmpf
self.assertRaises(OSError, os.stat, tmpf_name)
class test_XMLBackup(xml_test_data):
class_to_test = xml_utils.XMLBackup
def is_same_contents(self, filename, other=None):
try:
f = file(filename, "rb")
s = f.read()
except (IOError, OSError):
logging.warning("File %s does not exist" % filename)
return False
if other is None:
return s == self.XMLSTR
else:
other_f = file(other, "rb")
other_s = other_f.read()
return s == other_s
def test_backup_filename(self):
xmlbackup = self.class_to_test(self.XMLFILE)
self.assertEqual(xmlbackup.sourcefilename, self.XMLFILE)
def test_backup_file(self):
xmlbackup = self.class_to_test(self.XMLFILE)
self.assertTrue(self.is_same_contents(xmlbackup.name))
def test_rebackup_file(self):
xmlbackup = self.class_to_test(self.XMLFILE)
oops = file(xmlbackup.name, "wb")
oops.write("foobar")
oops.close()
self.assertFalse(self.is_same_contents(xmlbackup.name))
xmlbackup.backup()
self.assertTrue(self.is_same_contents(xmlbackup.name))
def test_restore_file(self):
xmlbackup = self.class_to_test(self.XMLFILE)
# nuke source
os.unlink(xmlbackup.sourcefilename)
xmlbackup.restore()
self.assertTrue(self.is_same_contents(xmlbackup.name))
def test_remove_backup_file(self):
xmlbackup = self.class_to_test(self.XMLFILE)
filename = xmlbackup.name
os.unlink(filename)
del xmlbackup
self.assertRaises(OSError, os.unlink, filename)
def test_TempXMLBackup_implicit(self):
def out_of_scope_xmlbackup():
tmpf = self.class_to_test(self.XMLFILE)
return tmpf.name
filename = out_of_scope_xmlbackup()
self.assertRaises(OSError, os.unlink, filename)
def test_TempXMLBackup_exception_exit(self):
tmpf = self.class_to_test(self.XMLFILE)
filename = tmpf.name
# simulate exception exit DOES NOT DELETE
tmpf.__exit__(Exception, "foo", "bar")
self.assertTrue(self.is_same_contents(filename + xml_utils.EXSFX))
os.unlink(filename + xml_utils.EXSFX)
def test_TempXMLBackup_unexception_exit(self):
tmpf = self.class_to_test(self.XMLFILE)
filename = tmpf.name
# simulate normal exit DOES DELETE
tmpf.__exit__(None, None, None)
self.assertRaises(OSError, os.unlink, filename)
class test_XMLTreeFile(test_XMLBackup):
class_to_test = xml_utils.XMLTreeFile
def test_sourcebackupfile_closed_file(self):
xml = self.class_to_test(self.XMLFILE)
self.assertRaises(ValueError, xml.sourcebackupfile.write, 'foobar')
def test_sourcebackupfile_closed_string(self):
xml = self.class_to_test(self.XMLSTR)
self.assertRaises(ValueError, xml.sourcebackupfile.write, 'foobar')
def test_init_str(self):
xml = self.class_to_test(self.XMLSTR)
self.assert_(xml.sourcefilename is not None)
self.assertEqual(xml.sourcebackupfile.name,
xml.sourcefilename)
def test_init_xml(self):
xml = self.class_to_test(self.XMLFILE)
self.assert_(xml.sourcefilename is not None)
self.assertEqual(xml.sourcebackupfile.name,
xml.sourcefilename)
def test_restore_from_string(self):
xmlbackup = self.class_to_test(self.XMLSTR)
os.unlink(xmlbackup.sourcefilename)
xmlbackup.restore()
self.assertTrue(self.is_same_contents(xmlbackup.sourcefilename))
def test_restore_from_file(self):
xmlbackup = self.class_to_test(self.XMLFILE)
os.unlink(xmlbackup.sourcefilename)
xmlbackup.restore()
self.assertTrue(self.is_same_contents(xmlbackup.name))
def test_backup_backup_and_remove(self):
tmpf = self.class_to_test(self.XMLFILE)
tmps = self.class_to_test(self.XMLSTR)
bu_tmpf = tmpf.backup_copy()
bu_tmps = tmps.backup_copy()
self.assertTrue(self.is_same_contents(bu_tmpf.name, tmpf.name))
self.assertTrue(self.is_same_contents(bu_tmps.name, tmps.name))
tmpf.remove_by_xpath('guest/arch/wordsize')
tmps.find('guest/arch/wordsize').text = 'FOOBAR'
tmpf.write()
tmps.write()
self.assertFalse(self.is_same_contents(bu_tmpf.name, tmpf.name))
self.assertFalse(self.is_same_contents(bu_tmps.name, tmps.name))
self.assertTrue(self.is_same_contents(bu_tmpf.name, bu_tmps.name))
self.assertFalse(self.is_same_contents(tmpf.name, tmps.name))
del bu_tmpf
del bu_tmps
def test_write_default(self):
xmlbackup = self.class_to_test(self.XMLFILE)
wordsize = xmlbackup.find('guest/arch/wordsize')
self.assertTrue(wordsize is not None)
self.assertEqual(int(wordsize.text), 32)
wordsize.text = str(64)
xmlbackup.write()
self.assertFalse(self.is_same_contents(xmlbackup.name))
def test_write_other(self):
xmlbackup = self.class_to_test(self.XMLFILE)
otherfile = xml_utils.TempXMLFile()
xmlbackup.write(otherfile)
otherfile.close()
self.assertTrue(self.is_same_contents(otherfile.name))
def test_write_other_changed(self):
xmlbackup = self.class_to_test(self.XMLSTR)
otherfile = xml_utils.TempXMLFile()
wordsize = xmlbackup.find('guest/arch/wordsize')
wordsize.text = str(64)
xmlbackup.write(otherfile)
otherfile.close()
xmlbackup.write(self.XMLFILE)
xmlbackup.close()
self.canonicalize_test_xml()
self.assertTrue(self.is_same_contents(otherfile.name))
def test_read_other_changed(self):
xmlbackup = self.class_to_test(self.XMLSTR)
wordsize = xmlbackup.find('guest/arch/wordsize')
wordsize.text = str(64)
otherfile = xml_utils.TempXMLFile()
xmlbackup.write(otherfile)
otherfile.close()
xmlbackup.backup()
self.assertTrue(self.is_same_contents(xmlbackup.name))
xmlbackup.read(otherfile.name)
self.assertFalse(self.is_same_contents(otherfile.name))
xmlbackup.write(self.XMLFILE)
self.assertFalse(self.is_same_contents(otherfile.name))
self.canonicalize_test_xml()
self.assertTrue(self.is_same_contents(otherfile.name))
class test_templatized_xml(xml_test_data):
def setUp(self):
self.MAPPING = {"foo":"bar", "bar":"baz", "baz":"foo"}
self.FULLREPLACE = """<$foo $bar="$baz">${baz}${foo}${bar}</$foo>"""
self.RESULTCHECK = """<bar baz="foo">foobarbaz</bar>"""
super(test_templatized_xml, self).setUp()
def test_sub(self):
sub = xml_utils.Sub(**self.MAPPING)
self.assertEqual(sub.substitute(self.FULLREPLACE), self.RESULTCHECK)
def test_MappingTreeBuilder_standalone(self):
txtb = xml_utils.TemplateXMLTreeBuilder(**self.MAPPING)
txtb.feed(self.FULLREPLACE)
et = txtb.close()
result = ElementTree.tostring(et)
self.assertEqual(result, self.RESULTCHECK)
def test_TemplateXMLTreeBuilder_nosub(self):
txtb = xml_utils.TemplateXMLTreeBuilder()
# ElementTree pukes on identifiers starting with $
txtb.feed(self.RESULTCHECK)
et = txtb.close()
result = ElementTree.tostring(et)
self.assertEqual(result, self.RESULTCHECK)
def test_TemplateXML(self):
tx = xml_utils.TemplateXML(self.FULLREPLACE, **self.MAPPING)
et = ElementTree.ElementTree(None, tx.name)
check = ElementTree.tostring(et.getroot())
self.assertEqual(check, self.RESULTCHECK)
def test_restore_fails(self):
testmapping = {self.TEXT_REPLACE_KEY:"foobar"}
xmlbackup = xml_utils.TemplateXML(self.XMLFILE, **testmapping)
# Unless the backup was initialized from a string (into a temp file)
# assume the source is read-only and should be protected.
self.assertRaises(IOError, xmlbackup.restore)
if __name__ == "__main__":
unittest.main()
| ColinIanKing/autotest | client/shared/xml_utils_unittest.py | Python | gpl-2.0 | 12,510 |
## Copyright (C) 2007 Sadique Puthen <[email protected]>
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
class IPSec(Plugin):
"""ipsec related information
"""
plugin_name = "ipsec"
packages = ('ipsec-tools',)
class RedHatIpsec(IPSec, RedHatPlugin):
"""ipsec related information for Red Hat distributions
"""
files = ('/etc/racoon/racoon.conf',)
def setup(self):
self.add_copy_spec("/etc/racoon")
class DebianIPSec(IPSec, DebianPlugin, UbuntuPlugin):
"""ipsec related information for Debian distributions
"""
files = ('/etc/ipsec-tools.conf',)
def setup(self):
self.add_copy_specs([
"/etc/ipsec-tools.conf",
"/etc/ipsec-tools.d",
"/etc/default/setkey"
])
# vim: et ts=4 sw=4
| portante/sosreport | sos/plugins/ipsec.py | Python | gpl-2.0 | 1,514 |
from mock import patch
import networkx as nx
from nav.models.manage import SwPortVlan, Vlan
from nav.netmap import topology
from nav.topology import vlan
from .topology_testcase import TopologyTestCase
class TopologyLayer2TestCase(TopologyTestCase):
def setUp(self):
super(TopologyLayer2TestCase, self).setUp()
self.model_id = 1
self.nav_graph = nx.MultiDiGraph()
self.a = a = self._netbox_factory('a')
self.b = b = self._netbox_factory('b')
self.c = c = self._netbox_factory('c')
self.d = d = self._netbox_factory('d')
self.a1 = a1 = self._interface_factory('a1', a)
self.a2 = a2 = self._interface_factory('a2', a)
self.a3 = a3 = self._interface_factory('a3', a)
self.b1 = b1 = self._interface_factory('b1', b)
self.b2 = b2 = self._interface_factory('b2', b)
self.c3 = c3 = self._interface_factory('c3', c)
self.c4 = c4 = self._interface_factory('c4', c)
self.d4 = d4 = self._interface_factory('d4', d)
self._add_edge(self.nav_graph, a1.netbox, a1, b1.netbox, b1)
self._add_edge(self.nav_graph, b1.netbox, b1, a1.netbox, a1)
self._add_edge(self.nav_graph, a2.netbox, a2, b2.netbox, b2)
self._add_edge(self.nav_graph, b2.netbox, b2, a2.netbox, a2)
self._add_edge(self.nav_graph, a3.netbox, a3, c3.netbox, c3)
self._add_edge(self.nav_graph, d4.netbox, d4, c4.netbox, c4)
self.vlan__a1_b1 = a_vlan_between_a1_and_b1 = SwPortVlan(
id=self._next_id(), interface=self.a1, vlan=Vlan(id=201, vlan=2))
self.vlans = patch.object(topology, '_get_vlans_map_layer2',
return_value=(
{
self.a1: [a_vlan_between_a1_and_b1],
self.b1: [a_vlan_between_a1_and_b1],
self.a2: [],
self.b2: [],
self.a3: [],
self.c3: []
},
{
self.a: {201: a_vlan_between_a1_and_b1},
self.b: {201: a_vlan_between_a1_and_b1},
self.c: {}
}))
self.vlans.start()
self.build_l2 = patch.object(vlan, 'build_layer2_graph', return_value=self.nav_graph)
self.build_l2.start()
bar = vlan.build_layer2_graph()
#foo = topology._get_vlans_map_layer2(bar)
vlan_by_interfaces, vlan_by_netbox = topology._get_vlans_map_layer2(self.nav_graph)
self.netmap_graph = topology.build_netmap_layer2_graph(
vlan.build_layer2_graph(),
vlan_by_interfaces,
vlan_by_netbox,
None)
def tearDown(self):
self.vlans.stop()
self.build_l2.stop()
def test_noop_layer2_testcase_setup(self):
self.assertTrue(True)
def _add_edge(self, g, node_a, interface_a, node_b, interface_b):
interface_a.to_interface = interface_b
g.add_edge(node_a, node_b, key=interface_a)
| UNINETT/nav | tests/unittests/netmap/topology_layer2_testcase.py | Python | gpl-2.0 | 3,029 |
from __future__ import unicode_literals
"""
AllSkyMap is a subclass of Basemap, specialized for handling common plotting
tasks for celestial data.
It is essentially equivalent to using Basemap with full-sphere projections
(e.g., 'hammer' or 'moll') and the `celestial` keyword set to `True`, but
it adds a few new methods:
* label_meridians for, well, labeling meridians with their longitude values;
* geodesic, a replacement for Basemap.drawgreatcircle, that can correctly
handle geodesics that cross the limb of the map, and providing the user
easy control over clipping (which affects thick lines at or near the limb);
* tissot, which overrides Basemap.tissot, correctly handling geodesics that
cross the limb of the map.
Created Jan 2011 by Tom Loredo, based on Jeff Whitaker's code in Basemap's
__init__.py module.
"""
from numpy import *
import numpy as np  # angle_symbol() below refers to the np.* namespace
import matplotlib.pyplot as pl
from matplotlib.pyplot import *
from mpl_toolkits.basemap import Basemap
import pyproj
from pyproj import Geod
__all__ = ['AllSkyMap']
def angle_symbol(angle, round_to=1.0):
"""
Return a string representing an angle, rounded and with a degree symbol.
This is adapted from code in mpl's projections.geo module.
"""
value = np.round(angle / round_to) * round_to
if pl.rcParams['text.usetex'] and not pl.rcParams['text.latex.unicode']:
return r'$%0.0f^\circ$' % value
else:
return '%0.0f\N{DEGREE SIGN}' % value
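# Example: angle_symbol(123.4, round_to=5.) rounds 123.4 to 125 and returns it
# with a trailing degree sign (or the LaTeX form '$125^\circ$' when
# text.usetex is enabled without unicode).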
class AllSkyMap(Basemap):
"""
AllSkyMap is a subclass of Basemap, specialized for handling common plotting
tasks for celestial data.
It is essentially equivalent to using Basemap with full-sphere projections
(e.g., 'hammer' or 'moll') and the `celestial` keyword set to `True`, but
it adds a few new methods:
* label_meridians for, well, labeling meridians with their longitude values;
* geodesic, a replacement for Basemap.drawgreatcircle, that can correctly
handle geodesics that cross the limb of the map, and providing the user
easy control over clipping (which affects thick lines at or near the
limb);
* tissot, which overrides Basemap.tissot, correctly handling geodesics that
cross the limb of the map.
"""
# Longitudes corresponding to east and west edges, reflecting the
# convention that 180 deg is the eastern edge, according to basemap's
# underlying projections:
east_lon = 180.
west_lon = 180.+1.e-10
def __init__(self,
projection='hammer',
lat_0=0., lon_0=0.,
suppress_ticks=True,
boundinglat=None,
fix_aspect=True,
anchor=str('C'),
ax=None):
if projection != 'hammer' and projection !='moll':
raise ValueError('Only hammer and moll projections supported!')
# Use Basemap's init, enforcing the values of many parameters that
# aren't used or whose Basemap defaults would not be altered for all-sky
# celestial maps.
Basemap.__init__(self, llcrnrlon=None, llcrnrlat=None,
urcrnrlon=None, urcrnrlat=None,
llcrnrx=None, llcrnry=None,
urcrnrx=None, urcrnry=None,
width=None, height=None,
projection=projection, resolution=None,
area_thresh=None, rsphere=1.,
lat_ts=None,
lat_1=None, lat_2=None,
lat_0=lat_0, lon_0=lon_0,
suppress_ticks=suppress_ticks,
satellite_height=1.,
boundinglat=None,
fix_aspect=True,
anchor=anchor,
celestial=True,
ax=ax)
# Keep a local ref to lon_0 for hemisphere checking.
self._lon_0 = self.projparams['lon_0']
self._limb = None
def drawmapboundary(self,color='k',linewidth=1.0,fill_color=None,\
zorder=None,ax=None):
"""
draw boundary around map projection region, optionally
filling interior of region.
.. tabularcolumns:: |l|L|
============== ====================================================
Keyword Description
============== ====================================================
linewidth line width for boundary (default 1.)
color color of boundary line (default black)
fill_color fill the map region background with this
color (default is no fill or fill with axis
background color).
zorder sets the zorder for filling map background
(default 0).
ax axes instance to use
(default None, use default axes instance).
============== ====================================================
returns matplotlib.collections.PatchCollection representing map boundary.
"""
# Just call the base class version, but keep a copy of the limb
# polygon for clipping.
self._limb = Basemap.drawmapboundary(self, color=color,
linewidth=linewidth, fill_color=fill_color, zorder=zorder, ax=ax)
return self._limb
def label_meridians(self, lons, fontsize=10, valign='bottom', vnudge=0,
halign='center', hnudge=0, color='black'):
"""
Label meridians with their longitude values in degrees.
This labels meridians with negative longitude l with the value 360-l;
for maps in celestial orientation, this means meridians to the right
of the central meridian are labeled from 360 to 180 (left to right).
`vnudge` and `hnudge` specify amounts in degrees to nudge the labels
from their default placements, vertically and horizontally. These
values obey the map orientation, so to nudge to the right, use a
negative `hnudge` value.
"""
# Run through (lon, lat) pairs, with lat=0 in each pair.
lats = len(lons)*[0.]
for lon,lat in zip(lons, lats):
x, y = self(lon+hnudge, lat+vnudge)
if lon < 0:
lon_lbl = 360 + lon
else:
lon_lbl = lon
pl.text(x, y, angle_symbol(lon_lbl), fontsize=fontsize,
verticalalignment=valign,
horizontalalignment=halign,color=color)
def east_hem(self, lon):
"""
Return True if lon is in the eastern hemisphere of the map wrt lon_0.
"""
if (lon-self._lon_0) % 360. <= self.east_lon:
return True
else:
return False
def geodesic(self, lon1, lat1, lon2, lat2, del_s=.01, clip=True, **kwargs):
"""
Plot a geodesic curve from (lon1, lat1) to (lon2, lat2), with
points separated by arc length del_s. Return a list of Line2D
instances for the curves comprising the geodesic. If the geodesic does
not cross the map limb, there will be only a single curve; if it
crosses the limb, there will be two curves.
"""
# TODO: Perhaps return a single Line2D instance when there is only a
# single segment, and a list of segments only when there are two segs?
# TODO: Check the units of del_s.
# This is based on Basemap.drawgreatcircle (which draws an *arc* of a
# great circle), but addresses a limitation of that method, supporting
# geodesics that cross the map boundary by breaking them into two
# segments, one in the eastern hemisphere and the other in the western.
gc = pyproj.Geod(a=self.rmajor,b=self.rminor)
az12,az21,dist = gc.inv(lon1,lat1,lon2,lat2)
npoints = int((dist+0.5*del_s)/del_s)  # round to the nearest whole step
# Calculate lon & lat for points on the arc.
lonlats = gc.npts(lon1,lat1,lon2,lat2,npoints)
lons = [lon1]; lats = [lat1]
for lon, lat in lonlats:
lons.append(lon)
lats.append(lat)
lons.append(lon2); lats.append(lat2)
# Break the arc into segments as needed, when there is a longitudinal
# hemisphere crossing.
segs = []
seg_lons, seg_lats = [lon1], [lat1]
cur_hem = self.east_hem(lon1)
for lon, lat in zip(lons[1:], lats[1:]):
if self.east_hem(lon) == cur_hem:
seg_lons.append(lon)
seg_lats.append(lat)
else:
                # We should interpolate a new point at the boundary, but in
                # the meantime just rely on the step size being small.
segs.append( (seg_lons, seg_lats) )
seg_lons, seg_lats = [lon], [lat]
cur_hem = not cur_hem
segs.append( (seg_lons, seg_lats) )
# Plot each segment; return a list of the mpl lines.
lines = []
for lons, lats in segs:
x, y = self(lons, lats)
if clip and self._limb:
line = plot(x, y, clip_path=self._limb, **kwargs)[0]
else:
line = plot(x, y, **kwargs)[0]
lines.append(line)
# If there are multiple segments and no color args, reconcile the
# colors, which mpl will have autoset to different values.
# *** Does this screw up mpl's color set sequence for later lines?
        if 'c' not in kwargs and 'color' not in kwargs:
if len(lines) > 1:
c1 = lines[0].get_color()
for line in lines[1:]:
line.set_color(c1)
return lines
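    # Illustrative call (hypothetical map instance `m`): a limb-crossing
    # geodesic comes back as two Line2D segments, otherwise just one:
    #
    #     lines = m.geodesic(120, 30, 240, 60, c='r', lw=3)  # len(lines) is 1 or 2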
def tissot(self,lon_0,lat_0,radius_deg,npts,ax=None,**kwargs):
"""
Draw a polygon centered at ``lon_0,lat_0``. The polygon
approximates a circle on the surface of the earth with radius
``radius_deg`` degrees latitude along longitude ``lon_0``,
made up of ``npts`` vertices.
The polygon represents a Tissot's indicatrix
(http://en.wikipedia.org/wiki/Tissot's_Indicatrix),
which when drawn on a map shows the distortion inherent in the map
projection. Tissots can be used to display azimuthally symmetric
directional uncertainties ("error circles").
Extra keyword ``ax`` can be used to override the default axis instance.
Other \**kwargs passed on to matplotlib.patches.Polygon.
returns a list of matplotlib.patches.Polygon objects, with two polygons
when the tissot crosses the limb, and just one polygon otherwise.
"""
# TODO: Just return the polygon (not a list) when there is only one
# polygon? Or stick with the list for consistency?
# This is based on Basemap.tissot, but addresses a limitation of that
# method by handling tissots that cross the limb of the map by finding
# separate polygons in the eastern and western hemispheres comprising
# the tissot.
        ax = ax or self._check_ax()
g = pyproj.Geod(a=self.rmajor,b=self.rminor)
az12,az21,dist = g.inv(lon_0,lat_0,lon_0,lat_0+radius_deg)
start_hem = self.east_hem(lon_0)
segs1 = [self(lon_0,lat_0+radius_deg)]
over, segs2 = [], []
delaz = 360./npts
az = az12
last_lon = lon_0
# Note adjacent and opposite edge longitudes, in case the tissot
# runs over the edge.
if start_hem: # eastern case
adj_lon = self.east_lon
opp_lon = self.west_lon
else:
adj_lon = self.west_lon
opp_lon = self.east_lon
for n in range(npts):
az = az+delaz
# skip segments along equator (Geod can't handle equatorial arcs)
if np.allclose(0.,lat_0) and (np.allclose(90.,az) or np.allclose(270.,az)):
continue
else:
lon, lat, az21 = g.fwd(lon_0, lat_0, az, dist)
# If in the starting hemisphere, add to 1st polygon seg list.
if self.east_hem(lon) == start_hem:
x, y = self(lon, lat)
# Add segment if it is in the map projection region.
if x < 1.e20 and y < 1.e20:
segs1.append( (x, y) )
last_lon = lon
# Otherwise, we cross hemispheres.
else:
# Trace the edge of each hemisphere.
x, y = self(adj_lon, lat)
if x < 1.e20 and y < 1.e20:
segs1.append( (x, y) )
# We presume if adj projection is okay, opposite is.
segs2.append( self(opp_lon, lat) )
# Also store the overlap in the opposite hemisphere.
x, y = self(lon, lat)
if x < 1.e20 and y < 1.e20:
over.append( (x, y) )
last_lon = lon
poly1 = Polygon(segs1, **kwargs)
ax.add_patch(poly1)
if segs2:
over.reverse()
segs2.extend(over)
poly2 = Polygon(segs2, **kwargs)
ax.add_patch(poly2)
return [poly1, poly2]
else:
return [poly1]
if __name__ == '__main__':
# Note that Hammer & Mollweide projections enforce a 2:1 aspect ratio.
# Use figure size good for a 2:1 plot.
fig = figure(figsize=(12,6))
# Set up the projection and draw a grid.
map = AllSkyMap(projection='hammer')
# Save the bounding limb to use as a clip path later.
limb = map.drawmapboundary(fill_color='white')
map.drawparallels(np.arange(-75,76,15), linewidth=0.5, dashes=[1,2],
labels=[1,0,0,0], fontsize=9)
map.drawmeridians(np.arange(-150,151,30), linewidth=0.5, dashes=[1,2])
# Label a subset of meridians.
lons = np.arange(-150,151,30)
map.label_meridians(lons, fontsize=9, vnudge=1,
halign='left', hnudge=-1) # hnudge<0 shifts to right
# x, y limits are [0, 4*rt2], [0, 2*rt2].
rt2 = sqrt(2)
# Draw a slanted green line crossing the map limb.
line = plot([rt2,0], [rt2,2*rt2], 'g-')
# Draw a slanted magenta line crossing the map limb but clipped.
line = plot([rt2+.1,0+.1], [rt2,2*rt2], 'm-', clip_path=limb)
# Draw some geodesics.
# First a transparent thick blue geodesic crossing the limb but not clipped,
# overlayed by a thinner red geodesic that is clipped (by default), to
# illustrate the effect of clipping.
lines = map.geodesic(120, 30, 240, 60, clip=False, c='b', lw=7, alpha=.5)
lines = map.geodesic(240, 60, 120, 30, c='r', lw=3, alpha=.5)
# Next two large limb-crossing geodesics with the same path, but rendered
# in opposite directions, one transparent blue, the other transparent
# yellow. They should be right on top of each other, giving a greenish
# brown hue.
lines = map.geodesic(240, -60, 120, 30, c='b', lw=2, alpha=.5)
lines = map.geodesic(120, 30, 240, -60, c='y', lw=2, alpha=.5)
# What happens if a geodesic is given coordinates spanning more than
# a single rotation? Not sure what to expect, but it shoots off the
# map (clipped here). Perhaps we should ensure lons are in [0, 360].
#lines = map.geodesic(120, 20, 240+360, 50, del_s=.2, c='g')
# Two tissots fully within the limb.
poly = map.tissot(60, -15, 10, 100)
poly = map.tissot(280, 60, 10, 100)
#poly = map.tissot(90, -85, 10, 100)
# Limb-spanning tissots in each quadrant.
# lower left:
poly = map.tissot(170, -60, 15, 100)
# upper left:
poly = map.tissot(175, 70, 15, 100)
# upper right (note negative longitude):
poly = map.tissot(-175, 30, 15, 100, color='r', alpha=.6)
# lower right:
poly = map.tissot(185, -40, 10, 100)
# Plot the tissot centers as "+" symbols. Note the top left symbol
# would cross the limb without the clip_path argument; this might be
# desired to enhance visibility.
lons = [170, 175, -175, 185]
lats = [-60, 70, 30, -40]
x, y = map(lons, lats)
map.scatter(x, y, s=40, marker='+', linewidths=1, edgecolors='g',
facecolors='none', clip_path=limb, zorder=10) # hi zorder -> top
title('AllSkyMap demo: Clipped lines, markers, geodesics, tissots')
show()
| bosscha/alma-calibrator | notebooks/selecting_source/allskymap.py | Python | gpl-2.0 | 16,580 |
"""
##############################################################################
##
##
## @name : StatsDateLib.py
##
## @license : MetPX Copyright (C) 2004-2006 Environment Canada
## MetPX comes with ABSOLUTELY NO WARRANTY; For details type see the file
## named COPYING in the root of the source directory tree.
##
## @author : Nicholas Lemay
##
## @since : 29-05-2006 , last updated on 08-04-2008
##
##
##  @summary: Contains many useful date manipulation methods which are
##            to be used throughout the stats library.
##
##############################################################################
"""
import time, sys, os
sys.path.insert(1, os.path.dirname( os.path.abspath(__file__) ) + '/../../')
from pxStats.lib.StatsPaths import StatsPaths
from pxStats.lib.LanguageTools import LanguageTools
CURRENT_MODULE_ABS_PATH = os.path.abspath(__file__).replace( ".pyc", ".py" )
"""
- Small function that adds pxLib to sys path.
"""
STATSPATHS = StatsPaths( )
STATSPATHS.setPaths( LanguageTools.getMainApplicationLanguage() )
sys.path.append( STATSPATHS.PXLIB )
"""
Globals
"""
MINUTE = 60
HOUR = 60 * MINUTE
DAY = 24 * HOUR
MINUTES_PER_DAY = 24*60
class StatsDateLib:
global _
_ = LanguageTools.getTranslatorForModule( CURRENT_MODULE_ABS_PATH )
#Constants can be removed once we add methods to the datelibrary and include it
MINUTE = 60
HOUR = 60 * MINUTE
DAY = 24 * HOUR
MINUTES_PER_DAY = 24*60
LIST_OF_MONTHS_3LETTER_FORMAT = [ _("Jan"), _("Feb"), _("Mar"), _("Apr"), _("May"), _("Jun"), _("Jul"), _("Aug"), _("Sep"), _("Oct"), _("Nov"), _("Dec") ]
LIST_OF_MONTHS=[ _("January"), _("February"), _("March"), _("April"), _("May"), _("June"), _("July"), _("August"), _("September"), _("October"), _("November"), _("December") ]
def setLanguage( language ):
"""
@summary : sets specified language as the
language used for translations
throughout the entire class.
"""
if language in LanguageTools.getSupportedLanguages() :
global _
_ = LanguageTools.getTranslatorForModule( CURRENT_MODULE_ABS_PATH, language )
setLanguage = staticmethod( setLanguage )
def addMonthsToIsoDate( isodate, monthstoAdd ):
"""
@summary : Add a certain number of months to a date.
@param isodate: Date in iso format to which to add months.
@param monthstoAdd: Number of months to add.( 0 or bigger)
@return : The resulting date. Will return the date received as a parameter
if error occurs
"""
monthsWith30Days = [4,6,9,11]
validDate = True
resultingDate = isodate
try :
StatsDateLib.getSecondsSinceEpoch( isodate )
except:
validDate = False
if validDate == True :
dayFromDate = int(isodate.split( "-" )[2].split( " " )[0])
monthFromDate = int(isodate.split( "-" )[1])
yearFromDate = int(isodate.split( "-" )[0])
hourFromDate = isodate.split( " " )[1]
yearsToAdd , resultingMonth = divmod( ( monthFromDate + monthstoAdd ), 12 )
if resultingMonth == 0:
resultingMonth = 12
yearsToAdd = yearsToAdd -1
resultingYear = yearFromDate + yearsToAdd
if resultingMonth in monthsWith30Days and dayFromDate == 31 :
resultingDay = 30
elif resultingMonth == 2 and (dayFromDate == 30 or dayFromDate == 31):
if ( ( resultingYear%4 == 0 and resultingYear%100 !=0 ) or resultingYear%400 == 0 ):
resultingDay = 29
else:
resultingDay = 28
else:
resultingDay = dayFromDate
if len(str(resultingDay)) < 2:
resultingDay = '0' + str(resultingDay)
if len(str(resultingMonth)) < 2:
resultingMonth = '0' + str(resultingMonth)
resultingDate = str( resultingYear ) + '-' + str( resultingMonth ) + '-' + str( resultingDay ) + ' ' + str( hourFromDate )
return resultingDate
addMonthsToIsoDate = staticmethod( addMonthsToIsoDate )
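    # Example values (taken from the self-tests at the bottom of this module):
    #     addMonthsToIsoDate("2007-10-15 12:00:00", 1)  -> "2007-11-15 12:00:00"
    #     addMonthsToIsoDate("2007-10-15 12:00:00", 15) -> "2009-01-15 12:00:00"
    #     addMonthsToIsoDate("2008-01-31 12:00:00", 1)  -> "2008-02-29 12:00:00"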
def getCurrentTimeInIsoformat():
"""
@summary : Returns current system time in iso format.
@return : Returns current system time in iso format.
"""
currentTimeInEpochFormat = time.time()
return StatsDateLib.getIsoFromEpoch( currentTimeInEpochFormat )
getCurrentTimeInIsoformat = staticmethod( getCurrentTimeInIsoformat )
def isValidIsoDate( isoDate ):
"""
@summary : Verifies whether or not the received
date is a valid iso format date.
@return : Returns whether or not the received
date is a valid iso format date.
"""
isValid = True
try:
StatsDateLib.getSecondsSinceEpoch( isoDate )
except:
isValid = False
return isValid
isValidIsoDate = staticmethod(isValidIsoDate)
def getYearMonthDayInStrfTime( timeInEpochFormat ):
"""
@summary : Return the year month day in strftime
based on an epoch date.
@param timeInEpochFormat : Time, in seconds since epoch format
from which you want to get the year month day.
@return : a three item tuple containing the following :
- year
- month
- day
"""
global _
months = { "January": _("January"), "February": _("February"), "March":_("March"), "April":_("April"),\
"May":_("May"), "June":_("June"), "July":_("July"), "August":_("August"), "September":_("September"),\
"October":_("October"), "November":_("November"), "December":_("December") }
year = time.strftime( '%Y', time.gmtime(timeInEpochFormat) )
month = time.strftime( '%B', time.gmtime(timeInEpochFormat) )
day = time.strftime( '%d', time.gmtime(timeInEpochFormat) )
month = months[month]
return year, month, day
getYearMonthDayInStrfTime = staticmethod(getYearMonthDayInStrfTime)
def getDayOfTheWeek( timeInEpochFormat ):
"""
        @summary : Return the day of the week based on an epoch date.
@Note : The returned day of the week will be written in the language
that has currently been set.
@param : Time, in seconds since epoch format
from which you want to get the day of the week.
"""
global _
days = { "Mon": _("Mon"), "Tue": _("Tue"), "Wed": _("Wed"), "Thu": _("Thu"),\
"Fri": _("Fri"),"Sat": _("Sat"),"Sun": _("Sun"), "Monday": _("Monday"),\
"Tuesday": _("Tuesday"), "Wednesday": _("Wednesday"), "Thursday": _("Thursday"),\
"Friday": _("Friday"),"Saturday": _("Saturday"),"Sunday":_("Sunday") }
day = time.strftime( "%a", time.gmtime( timeInEpochFormat ) )
day = days[day]
return day
getDayOfTheWeek = staticmethod( getDayOfTheWeek )
def getStartEndFromPreviousDay( currentTime, nbDays = 1 ):
"""
Returns the start and end time of
the day prior to the currentTime.
currentTime must be in iso format.
start and end are returned in iso format.
"""
end = StatsDateLib.getIsoTodaysMidnight( currentTime )
yesterday = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( currentTime ) - (24*60*60) )
start = StatsDateLib.getIsoTodaysMidnight( yesterday )
return start, end
getStartEndFromPreviousDay = staticmethod( getStartEndFromPreviousDay )
def getStartEndFromPreviousWeek( currentTime, nbWeeks = 1 ):
"""
Returns the start and end time of
the week prior to the currentTime.
currentTime must be in iso format.
start and end are returned in iso format.
"""
currentTimeInSecs = StatsDateLib.getSecondsSinceEpoch( currentTime )
weekDay = int(time.strftime( "%w", time.gmtime( currentTimeInSecs ) ))
endInSecs = currentTimeInSecs - ( weekDay*24*60*60 )
startInSecs = endInSecs - ( 7*24*60*60 )
start = StatsDateLib.getIsoTodaysMidnight( StatsDateLib.getIsoFromEpoch( startInSecs ) )
end = StatsDateLib.getIsoTodaysMidnight( StatsDateLib.getIsoFromEpoch( endInSecs ) )
return start, end
getStartEndFromPreviousWeek = staticmethod( getStartEndFromPreviousWeek )
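    # Illustrative result (assuming a UTC system clock): for a Wednesday
    # timestamp such as '2008-04-09 13:00:00', %w gives weekDay == 3, so
    #     start == '2008-03-30 00:00:00' and end == '2008-04-06 00:00:00',
    # i.e. the previous Sunday-to-Sunday week, at midnight boundaries.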
def getStartEndFromPreviousMonth( currentTime ):
"""
Returns the start and end time of
the month prior to the currentTime.
currentTime must be in iso format.
start and end are returned in iso format.
"""
        date = currentTime.split()[0]
        splitDate = date.split("-")
        end = splitDate[0] + "-" + splitDate[1] + "-" + "01 00:00:00"
if int( splitDate[1] ) != 1 :
month = int( splitDate[1] ) - 1
if month < 10 :
month = "0" + str( month )
splitDate[1] = month
else:
year = int( splitDate[0] ) - 1
splitDate[0] = str(year)
splitDate[1] = "01"
firstDayOfPreviousMonth = str( splitDate[0] ) + "-" + str( splitDate[1] ) + "-01"
start = firstDayOfPreviousMonth + " 00:00:00"
return start, end
getStartEndFromPreviousMonth = staticmethod( getStartEndFromPreviousMonth )
def getStartEndFromPreviousYear( currentTime ):
"""
Returns the start and end time of
the day prior to the currentTime.
currentTime must be in iso format.
start and end are returned in iso format.
"""
year = currentTime.split("-")[0]
year = str( int(year)-1 )
start = year + "-01-01 00:00:00"
year = currentTime.split("-")[0]
end = year + "-01-01 00:00:00"
return start, end
getStartEndFromPreviousYear = staticmethod( getStartEndFromPreviousYear )
def getStartEndFromCurrentDay( currentTime ):
"""
Returns the start and end time of
the current day.
currentTime must be in iso format.
start and end are returned in iso format.
"""
start = StatsDateLib.getIsoTodaysMidnight( currentTime )
tomorrow = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( currentTime ) + 24*60*60 )
end = StatsDateLib.getIsoTodaysMidnight( tomorrow )
return start, end
getStartEndFromCurrentDay = staticmethod( getStartEndFromCurrentDay )
def getStartEndFromCurrentWeek( currentTime ):
"""
Returns the start and end time of
the currentweek.
currentTime must be in iso format.
start and end are returned in iso format.
"""
currentTimeInSecs = StatsDateLib.getSecondsSinceEpoch( currentTime )
weekDay = int(time.strftime( "%w", time.gmtime( currentTimeInSecs ) ))
endInSecs = currentTimeInSecs + ( ( 7 - weekDay)*24*60*60 )
end = StatsDateLib.getIsoTodaysMidnight( StatsDateLib.getIsoFromEpoch( endInSecs ) )
startInSecs = currentTimeInSecs - ( weekDay*24*60*60 )
start = StatsDateLib.getIsoTodaysMidnight( StatsDateLib.getIsoFromEpoch( startInSecs ) )
return start, end
getStartEndFromCurrentWeek = staticmethod( getStartEndFromCurrentWeek )
def getStartEndFromCurrentMonth( currentTime ):
"""
Returns the start and end time of
the currentDay.
currentTime must be in iso format.
start and end are returned in iso format.
"""
splitTime = currentTime.split()
date = splitTime[0]
splitDate = date.split( "-" )
start = splitDate[0] + "-" + splitDate[1] + "-01 00:00:00"
if int( splitDate[1] ) != 12 :
month = int( splitDate[1] ) + 1
if month < 10:
month = "0" + str( month )
splitDate[1] = month
else:
year = int( splitDate[0] ) + 1
splitDate[0] = str(year)
splitDate[1] = "01"
firstDayOfMonth = str( splitDate[0] ) + "-" + str( splitDate[1] ) + "-01"
end = firstDayOfMonth + " 00:00:00"
return start, end
getStartEndFromCurrentMonth = staticmethod( getStartEndFromCurrentMonth )
def getStartEndFromCurrentYear( currentTime ):
"""
Returns the start and end time of
the currentDay.
currentTime must be in iso format.
start and end are returned in iso format.
"""
year = currentTime.split("-")[0]
start = year + "-01-01 00:00:00"
year = currentTime.split("-")[0]
year = str( int(year)+1 )
end = year + "-01-01 00:00:00"
return start, end
getStartEndFromCurrentYear = staticmethod( getStartEndFromCurrentYear )
def getHoursFromIso( iso = '2005-08-30 20:06:59' ):
"""
Returns the hours field from a iso format date.
"""
iso = iso.split(" ")[1]
hours, minutes, seconds = iso.split(':')
return hours
getHoursFromIso = staticmethod( getHoursFromIso )
def getMinutesFromIso( iso = '2005-08-30 20:06:59' ):
"""
Returns the minute field from a iso format date.
"""
hours, minutes, seconds = iso.split(':')
return minutes
getMinutesFromIso = staticmethod( getMinutesFromIso )
def rewindXDays( date = '2005-08-30 20:06:59' , x = 0 ):
"""
Takes an iso format date and substract the number
of days specified by x.
"""
seconds = StatsDateLib.getSecondsSinceEpoch( date )
seconds = seconds - ( x * 24*60*60 )
rewindedDate = StatsDateLib.getIsoFromEpoch( seconds )
return rewindedDate
rewindXDays = staticmethod( rewindXDays )
def getNumberOfDaysBetween( date1 = '2005-08-30 20:06:59', date2 = '2005-08-30 20:06:59' ):
"""
Takes two iso format dates and returns the number of days between them
"""
seconds1 = StatsDateLib.getSecondsSinceEpoch( date1 ) - StatsDateLib.getSecondsSinceStartOfDay( date1 )
seconds2 = StatsDateLib.getSecondsSinceEpoch( date2 ) - StatsDateLib.getSecondsSinceStartOfDay( date2 )
numberOfDays = abs( float( (seconds1-seconds2) /( 24*60*60 ) ) )
numberOfDays = int( numberOfDays )
return numberOfDays
getNumberOfDaysBetween = staticmethod( getNumberOfDaysBetween )
def areDifferentDays( date1 = '2005-08-30 20:06:59', date2 = '2005-08-30 20:06:59' ):
"""
Takes two iso format dates and returns whether or not both date are on different days.
"""
day1 = date1.split( " " )[0]
day2 = date2.split( " " )[0]
return day1 != day2
areDifferentDays = staticmethod( areDifferentDays )
def getSecondsSinceEpoch(date='2005-08-30 20:06:59', format='%Y-%m-%d %H:%M:%S'):
        try:
            timeStruct = time.strptime(date, format)
        except:
            print "date tried : %s" %date
            raise
        return time.mktime(timeStruct)
getSecondsSinceEpoch = staticmethod( getSecondsSinceEpoch )
def getIsoLastMinuteOfDay( iso = '2005-08-30 20:06:59' ):
"""
Takes an iso format date like 2005-08-30 20:06:59.
Replaces hour, minutes and seconds by last minute of day.
Returns 2005-08-30 23:59:59.
"""
iso = iso.split( " " )
iso = iso[0]
iso = iso + " 23:59:59"
return iso
getIsoLastMinuteOfDay = staticmethod( getIsoLastMinuteOfDay )
def getIsoTodaysMidnight( iso ):
"""
Takes an iso format date like 2005-08-30 20:06:59.
Replaces hour, minutes and seconds by 00.
Returns 2005-08-30 00:00:00.
"""
iso = iso.split( " " )
iso = iso[0]
iso = iso + " 00:00:00"
return iso
getIsoTodaysMidnight = staticmethod( getIsoTodaysMidnight )
def getIsoWithRoundedHours( iso ):
"""
Takes an iso format date like 2005-08-30 20:06:59.
Replaces minutes and seconds by 00.
Returns 2005-08-30 20:00:00.
"""
iso = iso.split( ":" )
iso = iso[0]
iso = iso + ":00:00"
return iso
getIsoWithRoundedHours = staticmethod( getIsoWithRoundedHours )
def getIsoWithRoundedSeconds( iso ):
"""
        Takes an iso format date like 2005-08-30 20:06:59.
        Replaces the seconds by 00, thus returning
        2005-08-30 20:06:00.
"""
#print "iso before modif : %s" %iso
iso = iso.split( ":" )
iso = iso[0] + ":" + iso[1] + ":00"
return iso
getIsoWithRoundedSeconds = staticmethod( getIsoWithRoundedSeconds )
def getSeconds(string):
# Should be used with string of following format: hh:mm:ss
hours, minutes, seconds = string.split(':')
return int(hours) * HOUR + int(minutes) * MINUTE + int(seconds)
getSeconds = staticmethod( getSeconds )
def getHoursSinceStartOfDay( date='2005-08-30 20:06:59' ):
"""
This method takes an iso style date and returns the number
of hours that have passed since 00:00:00 of the same day.
"""
try:
splitDate = date.split( " " )
splitDate = splitDate[1]
splitDate = splitDate.split( ":" )
hoursSinceStartOfDay = int( splitDate[0] )
return hoursSinceStartOfDay
except:
print "Cannot convert %s in getMinutesSinceStartOfDay. " %date
sys.exit()
getHoursSinceStartOfDay = staticmethod(getHoursSinceStartOfDay)
def isoDateDashed( date = "20060613162653" ):
"""
This method takes in parameter a non dashed iso date and
returns the date dashed and the time with : as seperator.
"""
        dashedDate = "%s-%s-%s %s:%s:%s" % ( date[0:4], date[4:6], date[6:8],
                                             date[8:10], date[10:12], date[12:14] )
return dashedDate
isoDateDashed = staticmethod( isoDateDashed )
def getMinutesSinceStartOfDay( date='2005-08-30 20:06:59' ):
"""
This method receives an iso date as parameter and returns the number of minutes
wich have passed since the start of that day.
"""
try:
splitDate = date.split( " " )
splitDate = splitDate[1]
splitDate = splitDate.split( ":" )
minutesSinceStartOfDay = int( splitDate[0] ) * 60 + int( splitDate[1] )
return minutesSinceStartOfDay
except:
print "Cannot convert %s in getMinutesSinceStartOfDay. " %date
sys.exit()
getMinutesSinceStartOfDay = staticmethod( getMinutesSinceStartOfDay )
def getSecondsSinceStartOfDay( date='2005-08-30 20:06:59' ):
"""
This method receives an iso date as parameter and returns the number of seconds
wich have passed since the start of that day.
"""
try:
splitDate = date.split( " " )
splitDate = splitDate[1]
splitDate = splitDate.split( ":" )
            secondsSinceStartOfDay = ( int( splitDate[0] ) * 60 * 60 ) + ( int( splitDate[1] ) * 60 ) + int( splitDate[2] )
            return secondsSinceStartOfDay
        except:
            print "Cannot convert %s in getSecondsSinceStartOfDay. " %date
sys.exit()
getSecondsSinceStartOfDay = staticmethod( getSecondsSinceStartOfDay )
def getNumericMonthFromString( month ) :
"""
        This method takes a three-letter month abbreviation (e.g. 'Mar') and
        returns its two-digit numeric form. Returns '00' if month is unknown.
"""
        months = { 'Jan': '01', 'Feb': '02', 'Mar': '03', 'Apr': '04',
                   'May': '05', 'Jun': '06', 'Jul': '07', 'Aug': '08',
                   'Sep': '09', 'Oct': '10', 'Nov': '11', 'Dec': '12' }
        return months.get( month, '00' )
getNumericMonthFromString = staticmethod( getNumericMonthFromString )
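    # e.g. getNumericMonthFromString('Mar') -> '03',
    #      getNumericMonthFromString('???') -> '00'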
def getIsoFromEpoch( seconds ):
"""
        Take a number of seconds built with getSecondsSinceEpoch
        and return a date in the format of '2005-08-30 20:06:59'
        (the inverse of time.ctime()'s 'Thu May 18 13:00:00 2006' form).
"""
timeString = time.ctime( seconds )
        timeString = timeString.replace( "  ", " " ) # in special cases there may be two spaces
splitTimeString = timeString.split( " " )
if int(splitTimeString[2]) < 10 :
splitTimeString[2] = "0" + splitTimeString[2]
originalDate = splitTimeString[4] + '-' + StatsDateLib.getNumericMonthFromString ( splitTimeString[1] ) + '-' + splitTimeString[2] + ' ' + splitTimeString[3]
return originalDate
getIsoFromEpoch = staticmethod ( getIsoFromEpoch )
def getOriginalHour( seconds ):
"""
        Take a number of seconds built with getSecondsSinceEpoch
        and return only the hour field of the corresponding time,
        e.g. '13' for Thu May 18 13:00:00 2006.
"""
timeString = time.ctime( seconds )
splitTimeString = timeString.split( " " )
originalHour = splitTimeString[3]
originalHour = originalHour.split( ":" )
originalHour = originalHour[0]
return originalHour
getOriginalHour = staticmethod ( getOriginalHour )
def getSeparators( width=DAY, interval = 20*MINUTE ):
separators = []
for value in range( interval, width+interval, interval ):
separators.append( value )
return separators
getSeparators = staticmethod( getSeparators )
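    # Worked example: getSeparators(width=DAY, interval=6*HOUR)
    # -> [21600, 43200, 64800, 86400], i.e. seconds offsets within the span.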
def getSeparatorsWithStartTime( startTime = "2006-06-06 00:00:00", width=DAY, interval=60*MINUTE ):
"""
This method works exactly like getSeparators but it uses a start time to set
the separators
"""
separators = []
startTime = StatsDateLib.getSecondsSinceEpoch(startTime)
if interval <= width :
for value in range( int(interval+startTime), int( width+interval+startTime ), int( interval ) ):
separators.append( StatsDateLib.getIsoFromEpoch(value) )
        if separators and StatsDateLib.getSecondsSinceEpoch( separators[-1] ) > width+startTime :
            separators[-1] = StatsDateLib.getIsoFromEpoch( width+startTime )
return separators
getSeparatorsWithStartTime = staticmethod( getSeparatorsWithStartTime )
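    # Illustrative call (assuming a UTC system clock): hourly separators
    # over one day,
    #     getSeparatorsWithStartTime("2006-06-06 00:00:00", DAY, 60*MINUTE)
    # yields 24 iso dates, "2006-06-06 01:00:00" through "2006-06-07 00:00:00".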
def getStartEndInIsoFormat( timeOfTheCall, span, spanType = "", fixedCurrent = False, fixedPrevious = False ):
"""
@summary : Calculates the start and end of a timespan based on specified parameters.
@param timeOfTheCall: Time at which these graphics were requested. In format.
@param spanOfTheGraphics: Span in hours of the graphics.
@param graphicType : daily | weekly | monthly | yearly
@param fixedCurrent: Whether to use the fixedCurrent day, week month or year.
@param fixedPrevious: Whether to use the fixedPrevious day week month or year.
"""
global _
#TODO :fixStartEnd method???
if fixedPrevious :
if spanType == _("daily") :
start, end = StatsDateLib.getStartEndFromPreviousDay( timeOfTheCall )
elif spanType == _("weekly"):
start, end = StatsDateLib.getStartEndFromPreviousWeek( timeOfTheCall )
elif spanType == _("monthly"):
start, end = StatsDateLib.getStartEndFromPreviousMonth( timeOfTheCall )
elif spanType == _("yearly") :
start, end = StatsDateLib.getStartEndFromPreviousYear( timeOfTheCall )
elif fixedCurrent:
if spanType == _("daily") :
start, end = StatsDateLib.getStartEndFromCurrentDay( timeOfTheCall )
elif spanType ==_("weekly"):
start, end = StatsDateLib.getStartEndFromCurrentWeek( timeOfTheCall )
elif spanType == _("monthly"):
start, end = StatsDateLib.getStartEndFromCurrentMonth( timeOfTheCall )
elif spanType == _("yearly"):
start, end = StatsDateLib.getStartEndFromCurrentYear( timeOfTheCall )
else:
if spanType == _("daily") :
start = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( timeOfTheCall ) - StatsDateLib.DAY )
elif spanType == _("weekly"):
start = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( timeOfTheCall ) - ( 7 * StatsDateLib.DAY ) )
elif spanType == _("monthly"):
start = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( timeOfTheCall ) - ( 30 * StatsDateLib.DAY ) )
elif spanType == _("yearly") :
start = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( timeOfTheCall ) - ( 365 * StatsDateLib.DAY ) )
else:
start = StatsDateLib.getIsoFromEpoch( StatsDateLib.getSecondsSinceEpoch( timeOfTheCall ) - span*60*60 )
end = timeOfTheCall
return start, end
getStartEndInIsoFormat = staticmethod( getStartEndInIsoFormat )
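    # Illustrative call (assuming a UTC system clock and the English span
    # names):
    #     getStartEndInIsoFormat('2008-04-08 13:00:00', 24, 'daily',
    #                            fixedPrevious=True)
    # -> ('2008-04-07 00:00:00', '2008-04-08 00:00:00')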
if __name__ == "__main__":
print ""
print ""
print "getIsoFromEpoch test #1 : "
print ""
print "StatsDateLib.getIsoFromEpoch(0) : "
print "Expected result : %s " %("1970-01-01 00:00:00")
print "Obtained result : %s " %StatsDateLib.getIsoFromEpoch(0)
if not StatsDateLib.getIsoFromEpoch(0) == "1970-01-01 00:00:00" : raise AssertionError("getIsoFromEpoch test #1 is broken.")
print ""
print ""
print "getNumberOfDaysBetween test #1 : "
print ""
print "StatsDateLib.getNumberOfDaysBetween( '2005-08-31 00:00:01','2005-08-30 23:59:59' ) : "
print "Expected result : %s " %("1")
print "Obtained result : %s " %StatsDateLib.getNumberOfDaysBetween( '2005-08-31 00:00:01','2005-08-30 23:59:59' )
if not StatsDateLib.getNumberOfDaysBetween( '2005-08-31 00:00:01','2005-08-30 23:59:59' ) == 1 : raise AssertionError("getNumberOfDaysBetween test #1 is broken.")
print ""
print ""
print "addMonthsToIsoDate test #1(basic test) : "
print ""
print """StatsDateLib.addMonthsToIsoDate( "2007-10-15 12:00:00", 1) : """
print "Expected result : %s " %("2007-11-15 12:00:00")
print "Obtained result : %s " %StatsDateLib.addMonthsToIsoDate( "2007-10-15 12:00:00", 1)
if not StatsDateLib.addMonthsToIsoDate( "2007-10-15 12:00:00", 1) == "2007-11-15 12:00:00" : raise AssertionError("addMonthsToIsoDate test #1 is broken.")
print ""
print ""
print "addMonthsToIsoDate test #2(test year increment): "
print ""
print """StatsDateLib.addMonthsToIsoDate( "2007-10-15 12:00:00", 15) : """
print "Expected result : %s " %("2009-01-15 12:00:00")
print "Obtained result : %s " %StatsDateLib.addMonthsToIsoDate( "2007-10-15 12:00:00", 15)
if not StatsDateLib.addMonthsToIsoDate( "2007-10-15 12:00:00", 15) == "2009-01-15 12:00:00" : raise AssertionError("addMonthsToIsoDate test #2 is broken.")
print ""
print ""
print "addMonthsToIsoDate test #3 (test day number too high in bissextile year): "
print ""
print """StatsDateLib.addMonthsToIsoDate( "2008-01-31 12:00:00", 1) : """
print "Expected result : %s " %("2008-02-29 12:00:00")
print "Obtained result : %s " %StatsDateLib.addMonthsToIsoDate( "2008-01-31 12:00:00", 1)
if not StatsDateLib.addMonthsToIsoDate( "2008-01-31 12:00:00", 1) == "2008-02-29 12:00:00" : raise AssertionError("addMonthsToIsoDate test #3 is broken.")
| khosrow/metpx | pxStats/lib/StatsDateLib.py | Python | gpl-2.0 | 32,661 |
import logging
import re
from streamlink.plugin import Plugin
from streamlink.plugin.api import useragents
from streamlink.plugin.api import validate
from streamlink.stream import HLSStream
from streamlink.utils import parse_json
log = logging.getLogger(__name__)
class Mitele(Plugin):
_url_re = re.compile(r"https?://(?:www\.)?mitele\.es/directo/(?P<channel>[\w-]+)")
pdata_url = "https://indalo.mediaset.es/mmc-player/api/mmc/v1/{channel}/live/html5.json"
gate_url = "https://gatekeeper.mediaset.es"
error_schema = validate.Schema({
"code": validate.any(validate.text, int),
"message": validate.text,
})
pdata_schema = validate.Schema(validate.transform(parse_json), validate.any(
validate.all(
{
"locations": [{
"gcp": validate.text,
"ogn": validate.any(None, validate.text),
}],
},
validate.get("locations"),
validate.get(0),
),
error_schema,
))
gate_schema = validate.Schema(
validate.transform(parse_json),
validate.any(
{
"mimeType": validate.text,
"stream": validate.url(),
},
error_schema,
)
)
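    # Illustrative payloads accepted by the schemas above (field values are
    # hypothetical; only the keys and types matter):
    #     pdata: {"locations": [{"gcp": "...", "ogn": null}]}
    #     gate:  {"mimeType": "application/vnd.apple.mpegurl",
    #             "stream": "https://example.com/playlist.m3u8"}
    #     error: {"code": 4038, "message": "..."}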
def __init__(self, url):
super(Mitele, self).__init__(url)
self.session.http.headers.update({
"User-Agent": useragents.FIREFOX,
"Referer": self.url
})
@classmethod
def can_handle_url(cls, url):
return cls._url_re.match(url) is not None
def _get_streams(self):
channel = self._url_re.match(self.url).group("channel")
pdata = self.session.http.get(self.pdata_url.format(channel=channel),
acceptable_status=(200, 403, 404),
schema=self.pdata_schema)
log.trace("{0!r}".format(pdata))
if pdata.get("code"):
log.error("{0} - {1}".format(pdata["code"], pdata["message"]))
return
gdata = self.session.http.post(self.gate_url,
acceptable_status=(200, 403, 404),
data=pdata,
schema=self.gate_schema)
log.trace("{0!r}".format(gdata))
if gdata.get("code"):
log.error("{0} - {1}".format(gdata["code"], gdata["message"]))
return
log.debug("Stream: {0} ({1})".format(gdata["stream"], gdata.get("mimeType", "n/a")))
for s in HLSStream.parse_variant_playlist(self.session,
gdata["stream"],
name_fmt="{pixels}_{bitrate}").items():
yield s
__plugin__ = Mitele
| repotvsupertuga/tvsupertuga.repository | script.module.streamlink.base/resources/lib/streamlink/plugins/mitele.py | Python | gpl-2.0 | 2,844 |
#
# Copyright (C) 2008, 2013 Red Hat, Inc.
# Copyright (C) 2008 Cole Robinson <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
#
# pylint: disable=E0611
from gi.repository import Gtk
from gi.repository import Gdk
# pylint: enable=E0611
import logging
from virtManager.baseclass import vmmGObjectUI
from virtManager.asyncjob import vmmAsyncJob
from virtManager import uiutil
from virtinst import StoragePool
PAGE_NAME = 0
PAGE_FORMAT = 1
class vmmCreatePool(vmmGObjectUI):
def __init__(self, conn):
vmmGObjectUI.__init__(self, "createpool.ui", "vmm-create-pool")
self.conn = conn
self._pool = None
self.builder.connect_signals({
"on_pool_forward_clicked" : self.forward,
"on_pool_back_clicked" : self.back,
"on_pool_cancel_clicked" : self.close,
"on_vmm_create_pool_delete_event" : self.close,
"on_pool_finish_clicked" : self.forward,
"on_pool_pages_change_page" : self.page_changed,
"on_pool_source_button_clicked" : self.browse_source_path,
"on_pool_target_button_clicked" : self.browse_target_path,
"on_pool_name_activate": self.forward,
"on_pool_hostname_activate" : self.hostname_changed,
"on_pool_iqn_chk_toggled": self.iqn_toggled,
})
self.bind_escape_key_close()
self.set_initial_state()
self.set_page(PAGE_NAME)
def show(self, parent):
logging.debug("Showing new pool wizard")
self.reset_state()
self.topwin.set_transient_for(parent)
self.topwin.present()
def close(self, ignore1=None, ignore2=None):
logging.debug("Closing new pool wizard")
self.topwin.hide()
return 1
def _cleanup(self):
self.conn = None
self._pool = None
def set_initial_state(self):
self.widget("pool-pages").set_show_tabs(False)
blue = Gdk.Color.parse("#0072A8")[1]
self.widget("header").modify_bg(Gtk.StateType.NORMAL, blue)
type_list = self.widget("pool-type")
type_model = Gtk.ListStore(str, str)
type_list.set_model(type_model)
uiutil.set_combo_text_column(type_list, 1)
format_list = self.widget("pool-format")
format_model = Gtk.ListStore(str, str)
format_list.set_model(format_model)
uiutil.set_combo_text_column(format_list, 1)
# Target path combo box entry
target_list = self.widget("pool-target-path")
# target_path, Label, pool class instance
target_model = Gtk.ListStore(str, str, object)
target_model.set_sort_column_id(0, Gtk.SortType.ASCENDING)
target_list.set_model(target_model)
target_list.set_entry_text_column(0)
# Source path combo box entry
source_list = self.widget("pool-source-path")
# source_path, Label, pool class instance
source_model = Gtk.ListStore(str, str, object)
source_model.set_sort_column_id(0, Gtk.SortType.ASCENDING)
source_list.set_model(source_model)
source_list.set_entry_text_column(0)
self.populate_pool_type()
def reset_state(self):
self.widget("pool-pages").set_current_page(0)
self.widget("pool-forward").show()
self.widget("pool-finish").hide()
self.widget("pool-back").set_sensitive(False)
self.widget("pool-name").set_text("")
self.widget("pool-name").grab_focus()
self.widget("pool-type").set_active(0)
self.widget("pool-target-path").get_child().set_text("")
self.widget("pool-source-path").get_child().set_text("")
self.widget("pool-hostname").set_text("")
self.widget("pool-iqn-chk").set_active(False)
self.widget("pool-iqn-chk").toggled()
self.widget("pool-iqn").set_text("")
self.widget("pool-format").set_active(-1)
self.widget("pool-build").set_sensitive(True)
self.widget("pool-build").set_active(False)
self.widget("pool-details-grid").set_visible(False)
def hostname_changed(self, ignore):
# If a hostname was entered, try to lookup valid pool sources.
self.populate_pool_sources()
def iqn_toggled(self, src):
self.widget("pool-iqn").set_sensitive(src.get_active())
def populate_pool_type(self):
model = self.widget("pool-type").get_model()
model.clear()
types = StoragePool.get_pool_types()
types.sort()
for typ in types:
model.append([typ, "%s: %s" %
(typ, StoragePool.get_pool_type_desc(typ))])
def populate_pool_format(self, formats):
model = self.widget("pool-format").get_model()
model.clear()
for f in formats:
model.append([f, f])
def populate_pool_sources(self):
source_list = self.widget("pool-source-path")
source_model = source_list.get_model()
source_model.clear()
target_list = self.widget("pool-target-path")
target_model = target_list.get_model()
target_model.clear()
use_list = source_list
use_model = source_model
entry_list = []
if self._pool.type == StoragePool.TYPE_SCSI:
entry_list = self.list_scsi_adapters()
use_list = source_list
use_model = source_model
elif self._pool.type == StoragePool.TYPE_LOGICAL:
pool_list = self.list_pool_sources()
entry_list = [[p.target_path, p.target_path, p]
for p in pool_list]
use_list = target_list
use_model = target_model
elif self._pool.type == StoragePool.TYPE_DISK:
entry_list = self.list_disk_devs()
use_list = source_list
use_model = source_model
elif self._pool.type == StoragePool.TYPE_NETFS:
host = self.get_config_host()
if host:
pool_list = self.list_pool_sources(host=host)
entry_list = [[p.source_path, p.source_path, p]
for p in pool_list]
use_list = source_list
use_model = source_model
for e in entry_list:
use_model.append(e)
if entry_list:
use_list.set_active(0)
def list_scsi_adapters(self):
scsi_hosts = self.conn.get_nodedevs("scsi_host")
host_list = [dev.host for dev in scsi_hosts]
clean_list = []
for h in host_list:
name = "host%s" % h
tmppool = self._make_stub_pool()
tmppool.source_path = name
entry = [name, name, tmppool]
if name not in [l[0] for l in clean_list]:
clean_list.append(entry)
return clean_list
def list_disk_devs(self):
devs = self.conn.get_nodedevs("storage")
devlist = []
for dev in devs:
if dev.drive_type != "disk" or not dev.block:
continue
devlist.append(dev.block)
devlist.sort()
clean_list = []
for dev in devlist:
tmppool = self._make_stub_pool()
tmppool.source_path = dev
entry = [dev, dev, tmppool]
if dev not in [l[0] for l in clean_list]:
clean_list.append(entry)
return clean_list
def list_pool_sources(self, host=None):
pool_type = self._pool.type
plist = []
try:
plist = StoragePool.pool_list_from_sources(
self.conn.get_backend(),
pool_type,
host=host)
except Exception:
logging.exception("Pool enumeration failed")
return plist
def show_options_by_pool(self):
def show_row(base, do_show):
widget = self.widget(base + "-label")
uiutil.set_grid_row_visible(widget, do_show)
src = self._pool.supports_property("source_path")
src_b = src and not self.conn.is_remote()
src_name = self._pool.type == StoragePool.TYPE_GLUSTER
tgt = self._pool.supports_property("target_path")
tgt_b = tgt and not self.conn.is_remote()
host = self._pool.supports_property("host")
fmt = self._pool.supports_property("formats")
iqn = self._pool.supports_property("iqn")
builddef, buildsens = self.get_build_default()
        # Source path browsing is meaningless for net pools
if self._pool.type in [StoragePool.TYPE_NETFS,
StoragePool.TYPE_ISCSI,
StoragePool.TYPE_SCSI]:
src_b = False
show_row("pool-target", tgt)
show_row("pool-source", src)
show_row("pool-hostname", host)
show_row("pool-format", fmt)
show_row("pool-build", buildsens)
show_row("pool-iqn", iqn)
show_row("pool-source-name", src_name)
if tgt:
self.widget("pool-target-path").get_child().set_text(
self._pool.target_path)
self.widget("pool-target-button").set_sensitive(tgt_b)
self.widget("pool-source-button").set_sensitive(src_b)
self.widget("pool-build").set_active(builddef)
if src_name:
self.widget("pool-source-name").get_child().set_text(
self._pool.source_name)
self.widget("pool-format").set_active(-1)
if fmt:
self.populate_pool_format(self._pool.list_formats("formats"))
self.widget("pool-format").set_active(0)
self.populate_pool_sources()
def get_config_type(self):
return uiutil.get_list_selection(self.widget("pool-type"), 0)
def get_config_name(self):
return self.widget("pool-name").get_text()
def get_config_target_path(self):
src = self.widget("pool-target-path")
if not src.get_sensitive():
return None
ret = uiutil.get_list_selection(src, 1)
if ret is not None:
return ret
return src.get_child().get_text()
def get_config_source_path(self):
src = self.widget("pool-source-path")
if not src.get_sensitive():
return None
ret = uiutil.get_list_selection(src, 1)
if ret is not None:
return ret
return src.get_child().get_text().strip()
def get_config_host(self):
host = self.widget("pool-hostname")
if host.get_sensitive():
return host.get_text().strip()
return None
def get_config_source_name(self):
name = self.widget("pool-source-name")
if name.get_sensitive():
return name.get_text().strip()
return None
def get_config_format(self):
return uiutil.get_list_selection(self.widget("pool-format"), 0)
def get_config_iqn(self):
iqn = self.widget("pool-iqn")
if iqn.get_sensitive() and iqn.get_visible():
return iqn.get_text().strip()
return None
def get_build_default(self):
""" Return (default value, whether build option can be changed)"""
if not self._pool:
return (False, False)
if self._pool.type in [StoragePool.TYPE_DIR,
StoragePool.TYPE_FS,
StoragePool.TYPE_NETFS]:
# Building for these simply entails creating a directory
return (True, False)
elif self._pool.type in [StoragePool.TYPE_LOGICAL,
StoragePool.TYPE_DISK]:
            # Building these pool types is a dangerous operation (it can
            # format the source device), so default to off but allow it.
return (False, True)
else:
return (False, False)
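    # Summary of the mapping implemented above:
    #     dir/fs/netfs  -> (True, False):  build on, not user-changeable
    #     logical/disk  -> (False, True):  build off, user may opt in
    #     anything else -> (False, False)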
def browse_source_path(self, ignore1=None):
source = self._browse_file(_("Choose source path"),
startfolder="/dev", foldermode=False)
if source:
self.widget("pool-source-path").get_child().set_text(source)
def browse_target_path(self, ignore1=None):
target = self._browse_file(_("Choose target directory"),
startfolder="/var/lib/libvirt",
foldermode=True)
if target:
self.widget("pool-target-path").get_child().set_text(target)
def forward(self, ignore=None):
notebook = self.widget("pool-pages")
try:
if self.validate(notebook.get_current_page()) is not True:
return
if notebook.get_current_page() == PAGE_FORMAT:
self.finish()
else:
notebook.next_page()
except Exception, e:
self.err.show_err(_("Uncaught error validating input: %s") % str(e))
return
def back(self, ignore=None):
self.widget("pool-pages").prev_page()
def _finish_cb(self, error, details):
self.topwin.set_sensitive(True)
self.topwin.get_window().set_cursor(
Gdk.Cursor.new(Gdk.CursorType.TOP_LEFT_ARROW))
if error:
error = _("Error creating pool: %s") % error
self.err.show_err(error,
details=details)
else:
self.conn.schedule_priority_tick(pollpool=True)
self.close()
def finish(self):
self.topwin.set_sensitive(False)
self.topwin.get_window().set_cursor(
Gdk.Cursor.new(Gdk.CursorType.WATCH))
build = self.widget("pool-build").get_active()
progWin = vmmAsyncJob(self._async_pool_create, [build],
self._finish_cb, [],
_("Creating storage pool..."),
_("Creating the storage pool may take a "
"while..."),
self.topwin)
progWin.run()
def _async_pool_create(self, asyncjob, build):
meter = asyncjob.get_meter()
logging.debug("Starting backround pool creation.")
poolobj = self._pool.install(create=True, meter=meter, build=build)
poolobj.setAutostart(True)
logging.debug("Pool creation succeeded")
def set_page(self, page_number):
# Update page number
page_lbl = ("<span color='#59B0E2'>%s</span>" %
_("Step %(current_page)d of %(max_page)d") %
{'current_page': page_number + 1,
'max_page': PAGE_FORMAT + 1})
self.widget("header-pagenum").set_markup(page_lbl)
isfirst = (page_number == PAGE_NAME)
islast = (page_number == PAGE_FORMAT)
self.widget("pool-back").set_sensitive(not isfirst)
self.widget("pool-finish").set_visible(islast)
self.widget("pool-forward").set_visible(not islast)
self.widget(islast and "pool-finish" or "pool-forward").grab_focus()
self.widget("pool-details-grid").set_visible(islast)
if islast:
self.show_options_by_pool()
def page_changed(self, notebook_ignore, page_ignore, page_number):
self.set_page(page_number)
def get_pool_to_validate(self):
"""
Return a pool instance to use for parameter assignment validation.
For most pools this will be the one we built after step 1, but for
pools we find via FindPoolSources, this will be different
"""
source_list = self.widget("pool-source-path")
target_list = self.widget("pool-target-path")
pool = uiutil.get_list_selection(source_list, 2)
if pool is None:
pool = uiutil.get_list_selection(target_list, 2)
return pool
def _make_stub_pool(self):
pool = StoragePool(self.conn.get_backend())
pool.type = self.get_config_type()
return pool
def _validate_page_name(self, usepool=None):
try:
if usepool:
self._pool = usepool
else:
self._pool = self._make_stub_pool()
self._pool.name = self.get_config_name()
except ValueError, e:
return self.err.val_err(_("Pool Parameter Error"), e)
return True
def _validate_page_format(self):
target = self.get_config_target_path()
host = self.get_config_host()
source = self.get_config_source_path()
fmt = self.get_config_format()
iqn = self.get_config_iqn()
source_name = self.get_config_source_name()
if not self._validate_page_name(self.get_pool_to_validate()):
return
try:
self._pool.target_path = target
if host:
self._pool.host = host
if source:
self._pool.source_path = source
if fmt:
self._pool.format = fmt
if iqn:
self._pool.iqn = iqn
if source_name:
self._pool.source_name = source_name
self._pool.validate()
except ValueError, e:
return self.err.val_err(_("Pool Parameter Error"), e)
buildval = self.widget("pool-build").get_active()
buildsen = (self.widget("pool-build").get_sensitive() and
self.widget("pool-build").get_visible())
if buildsen and buildval:
ret = self.err.yes_no(_("Building a pool of this type will "
"format the source device. Are you "
"sure you want to 'build' this pool?"))
if not ret:
return ret
return True
def validate(self, page):
if page == PAGE_NAME:
return self._validate_page_name()
elif page == PAGE_FORMAT:
return self._validate_page_format()
def _browse_file(self, dialog_name, startfolder=None, foldermode=False):
mode = Gtk.FileChooserAction.OPEN
if foldermode:
mode = Gtk.FileChooserAction.SELECT_FOLDER
return self.err.browse_local(self.conn, dialog_name,
dialog_type=mode, start_folder=startfolder)
| aurex-linux/virt-manager | virtManager/createpool.py | Python | gpl-2.0 | 18,923 |
from django.conf.urls import patterns, include, url
urlpatterns = patterns('jumpserver.views',
# Examples:
url(r'^$', 'index', name='index'),
# url(r'^api/user/$', 'api_user'),
url(r'^skin_config/$', 'skin_config', name='skin_config'),
url(r'^login/$', 'Login', name='login'),
url(r'^logout/$', 'Logout', name='logout'),
url(r'^exec_cmd/$', 'exec_cmd', name='exec_cmd'),
url(r'^file/upload/$', 'upload', name='file_upload'),
url(r'^file/download/$', 'download', name='file_download'),
url(r'^setting', 'setting', name='setting'),
url(r'^terminal/$', 'web_terminal', name='terminal'),
url(r'^mylog/$', 'mylog', name='mylog'),
url(r'^juser/', include('juser.urls')),
url(r'^jasset/', include('jasset.urls')),
url(r'^jlog/', include('jlog.urls')),
url(r'^jperm/', include('jperm.urls')),
url(r'^dbtool/', include('dbtool.urls')),
url(r'^cachemanage/', include('cachemanage.urls')),
)
| xskh2007/zjump | jumpserver/urls.py | Python | gpl-2.0 | 955 |
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# test_crafting.py
import os
import sys
import unittest
root_folder = os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + os.sep + ".." )
pth = root_folder #+ os.sep + 'worldbuild'
sys.path.append(pth)
from worldbuild.crafting import craft as mod_craft
class TestTemplate(unittest.TestCase):
def setUp(self):
unittest.TestCase.setUp(self)
def tearDown(self):
unittest.TestCase.tearDown(self)
def test_01_recipe(self):
res = mod_craft.Recipe('1', 'new recipe','20','mix')
#print(res)
self.assertEqual(str(res),'new recipe')
def test_02_dataset_recipe(self):
recipes = mod_craft.DataSet(mod_craft.Recipe, mod_craft.get_fullname('recipes.csv'))
self.assertTrue(len(recipes.object_list) > 18)
tot_time_to_build = 0
for recipe in recipes.object_list:
#print(recipe)
tot_time_to_build += int(recipe.base_time_to_build)
#print('total time to build all recipes = ' + str(tot_time_to_build))
self.assertEqual(str(recipes.object_list[0]), 'Torch')
self.assertEqual(str(recipes.object_list[1]), 'Wooden Plank')
self.assertTrue(tot_time_to_build > 10)
if __name__ == '__main__':
unittest.main() | acutesoftware/worldbuild | tests/test_crafting.py | Python | gpl-2.0 | 1,300 |
from modules.Utils import runCommand
import re
import sys
def parseOptions(command):
so, se, rc = runCommand("gofed %s --help" % command)
if rc != 0:
return []
options = []
option_f = False
for line in so.split("\n"):
if line == "Options:":
option_f = True
continue
if option_f == True:
if line == "":
break
# line must start with two spaces and minus
if len(line) < 3:
continue
if line[:3] != " -":
continue
line = line.strip()
			parts = line.split(',')
if parts == []:
continue
# do we have both short and long options?
opts = map(lambda i: i.strip().split(' ')[0].split('=')[0], parts)
for opt in opts:
options.append(opt)
return sorted(options)
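# Sketch of the "gofed <command> --help" output this parser assumes (option
# lines start with two spaces and a dash, under an "Options:" header):
#
#   Options:
#     -h, --help            show this help message and exit
#     -v, --verbose         verbose output
#
# parseOptions(command) would then return ['--help', '--verbose', '-h', '-v'].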
if __name__ == "__main__":
	if len(sys.argv) != 2:
		print ""
		sys.exit(1)
	command = sys.argv[1]
options = parseOptions(command)
if options == []:
print command + ":"
else:
print command + ":" + " ".join(options)
| fridex/gofed | parseOptions.py | Python | gpl-2.0 | 955 |
'''
Created on 24 Feb 2015
@author: oche
'''
from __future__ import unicode_literals
from __future__ import division
import argparse
import os
import sys
import time
import re
import logging
import json
import numpy
from plotter import makeSubPlot
from os.path import expanduser
from util import validURLMatch, validYoutubeURLMatch
from decimal import *
getcontext().prec = 3
try:
from pymediainfo import MediaInfo
except:
from util.pymediainfo import MediaInfo
# add youtube-dl to the python path
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)) , "youtube-dl"))
from util import cleanResults
from util import getMean
from youtube_dl import YoutubeDL
import database.vEQ_database as DB
import processmonitor.processMonitor as procmon
from powermonitor.voltcraftmeter import VoltcraftMeter
# TODO: Set logging level from argument
def makeDefaultDBFolder():
home = expanduser("~")
print home
video_download_folder = os.path.join(home, "vEQ-benchmark")
if not os.path.exists(video_download_folder):
os.makedirs(video_download_folder)
return video_download_folder
vlc_verbosity = -1
default_youtube_quality= 'bestvideo'
benchmark_duration = 20  # or -1 for length of video
meter = None
default_folder= makeDefaultDBFolder()
default_database = os.path.join( default_folder, "vEQ_db.sqlite")
logging.getLogger().setLevel(logging.DEBUG)
def main(argv=None):
parser = argparse.ArgumentParser(description="vEQ-benchmark: A Benchmarking and Measurement Tool for Video")
parser.add_argument("video" , metavar="VIDEO", help="A local file or URL(Youtube, Vimeo etc.) for the video to be benchmarked")
parser.add_argument("-y", "--youtube-format", metavar="format", dest="youtube_quality", default=default_youtube_quality, help="For Youtube videos, a value that corressponds to the quality level see youtube-dl for details")
parser.add_argument("-m", "--power-meter", metavar="meter", dest="meter", default='voltcraft', help="The meter to use for power measurement TODO: Expand this")
parser.add_argument("-d", "--duration", metavar="Duration", dest="benchmark_duration", default=60, type=int, help="The length of time in seconds for the benchmark to run.")
parser.add_argument("-D", "--Database-location", dest="db_loc", metavar ="location for database file or \'memory\'", help = "A absolute location for storing the database file ")
parser.add_argument("-P", "--plot", dest="to_plot", action='store_true', help="Flag to set if this session should be plotted")
parser.add_argument("-S", "--show", dest="to_show", action='store_true', help="Flag to set if the plot of this should be displayed on the screen after a session is completed")
parser.add_argument("-p", "--player", metavar="player", dest="system_player", default="libvlc", help="The Player to use to playback video - default is VLC MediaPlayer")
parser.add_argument("--hwdecode", dest="hw_decode", action='store_true', help="VLC Specific, turn hardware decoding on")
# TODO: implement dynamic power metering VoltcraftMeter
args = parser.parse_args()
video = args.video
benchmark_duration = args.benchmark_duration
youtube_quality =args.youtube_quality
db_loc = args.db_loc
to_show = args.to_show
to_plot = args.to_plot
m = args.meter
system_player = args.system_player
hw_decode = args.hw_decode
video_title = None
video_data = None
video_codec = None
video_height = None
video_width = None
file_size = None
video_url = None
online_video = False
if db_loc is None:
db_loc = default_database
logging.info("Started VEQ_Benchmark")
#TODO: Extract this from here
implementedPowerMeters = {
"voltcraft": VoltcraftMeter()
}
meter = implementedPowerMeters.get(m,None)
# can inject dependency here i.e power meter or smc or bios or batterty
# meter_type = parser.parse_args().meter
# meter = Meter(meter_type)
if meter is None:
logging.warning("No power monitoring device found")
elif meter.initDevice() is None:
meter = None
logging.warning("No power monitoring device found")
vEQdb = DB.vEQ_database(db_loc)
start_time = time.time()
cpu = procmon.get_processor()
os_info = procmon.get_os()
gpu = procmon.get_gpu()
specs =procmon.get_specs()
values = [start_time,os_info,cpu, gpu,specs]
sys_info_index = vEQdb.insertIntoSysInfoTable(values)
if not validURLMatch(video) and not (os.access(video, os.R_OK)):
print('Error: %s file not readable' % video)
logging.error('Error: %s file not readable' % video)
sys.exit(1)
try:
if not validURLMatch(video):
logging.debug("Found regular video - using MediaInfo to extract details")
video_url = video
video_info = MediaInfo.parse(video)
video_data = video_info.to_json()
for track in video_info.tracks:
if track.track_type == 'Video':
video_title = track.title
video_codec = track.codec
video_height = track.height
video_width = track.width
elif validURLMatch(video):
online_video = True
logging.debug("Found online video: Using youtube-dl to get information")
if validYoutubeURLMatch(video):
logging.debug("Found YouTube video: Using Youtube-dl to get information")
youtube_dl_opts = {
'format' : youtube_quality,
'quiet' : True
}
with YoutubeDL(youtube_dl_opts) as ydl:
try:
def getInfoDictValue(value, infodict):
try:
return infodict.get(value,"N,A")
except:
string = "Couldn't retrieve value " + str(value) +" from YoutubeDL"
logging.error(string)
sys.stderr.write(string)
if value == 'url':
sys.exit(1)
return "N/A"
info_dict = ydl.extract_info(video, download=False)
video = getInfoDictValue("url", info_dict)
video_title = info_dict.get('title',"None")
video_data = str(json.dumps(info_dict))
video_codec = info_dict['format']
video_height = info_dict['height']
video_width = info_dict['width']
file_size = info_dict.get('filesize', "None")
video_url = video
except:
error = sys.exc_info()
logging.error("Unexpected error while retrieve details using Youtube-DL: " + str(error))
video_codec, video_height, video_width = "Null",-1,-1
except:
error = sys.exc_info()
logging.error("Could not retrive video format information: " + str(error))
video_data = str(error)
video_codec, video_height, video_width = "Null",-1,-1
"""
values = [timestamp INT, name TEXT, specs TEXT, codec TEXT, width TEXT, height TEXT ]
"""
video_values = [start_time,video,video_data,video_codec,video_width,video_height]
video_index = vEQdb.insertIntoVideoInfoTable(video_values)
#==========================================VLC VIDEO SPECIFIC ===============
if system_player == "libvlc":
from videoInput.veqplayback import VLCPlayback
vlc_args = "--video-title-show --video-title-timeout 10 --sub-source marq --sub-filter marq " + "--verbose " + str(vlc_verbosity)
if hw_decode:
vlc_args = vlc_args + "--avcodec-hw=any"
vEQPlayback = VLCPlayback(video,vEQdb,vlc_args,meter)
logging.debug("Starting Playback with VLC")
vEQPlayback.startPlayback(benchmark_duration)
else:
# use subprocess to start video player and montioring!
# GenericPlaybackObject.startPlayback(benchmarkduration)
from videoInput.genericPlayback import GenericPlayback
generic_command = "/usr/bin/omxplayer"
generic_command = '/usr/bin/vlc-wrapper --avcodec-hw=any'
generic_command = 'start chrome'
workload = "../gopro.mp4" # pass this from cmd line or something
genericPlayback = GenericPlayback(workload=video,db=vEQdb,cmd=generic_command,meter=meter)
genericPlayback.startPlayback(benchmark_duration)
end_time = time.time()
total_duration = end_time - start_time
powers = vEQdb.getValuesFromPowerTable(start_time, end_time)
cpus = vEQdb.getCPUValuesFromPSTable(start_time, end_time)
memorys = vEQdb.getMemValuesFromPSTable(start_time, end_time)
reads = vEQdb.getValuesFromPSTable("io_bytesread", start_time, end_time)
writes = vEQdb.getValuesFromPSTable("io_byteswrite", start_time, end_time)
net_r = vEQdb.getValuesFromPSTable("net_recv", start_time, end_time)
def getDataRateFromArray(arry):
data_volume = 0
try:
data_volume = arry[-1] - arry[0]
except IndexError:
logging.error("Something went wrong with collecting data from array: " + str(arry.__namespace))
return data_volume
data_transferred = getDataRateFromArray(net_r)
data_read_from_io = getDataRateFromArray(reads)
data_writes_from_io = getDataRateFromArray(writes)
'''
http://stackoverflow.com/questions/4029436/subtracting-the-current-and-previous-item-in-a-list
'''
bitrate = [y - x for x,y in zip(net_r,net_r[1:])]
io_readrate = [y - x for x,y in zip(reads,reads[1:])]
io_writerate = [y - x for x,y in zip(writes,writes[1:])]
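    # Illustrative note (not in the original source): the pairwise-difference
    # idiom above turns cumulative counters into per-sample rates, e.g.
    #   net_r = [0, 120, 300]  ->  bitrate = [120, 180]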
p = numpy.array(powers)
c = numpy.array(cpus)
m = numpy.array(memorys)
# get rid of zeros and negatives
p = p[p>0]
c = c[c>0]
m = m[m>0]
mean_power = getMean(p)
mean_cpu = getMean(c)
mean_memory = getMean(m)
mean_gpu = -1
    # TODO: Implement GPU monitoring
mean_bandwidth = str(Decimal(data_transferred * 8) / Decimal(1000000* total_duration))
mean_io_read = str(Decimal(data_read_from_io * 8) / Decimal(1048576 * total_duration))
mean_io_write = str(Decimal(data_writes_from_io * 8) / Decimal(1048576 * total_duration))
video_values = [start_time,video,video_data,video_codec,video_width,video_height]
summary_keys = ("video_name" , "video_url", "video_codec", "video_height", "video_width", "mean_power", "mean_cpu", "mean_memory", "mean_gpu" , "mean_bandwidth" ,"data_transferred", "file_size", "sys_info_FK", "video_info_FK")
summary_values = (video_title, video_url , video_codec, video_height, video_width, mean_power, mean_cpu,
mean_memory, mean_gpu , mean_bandwidth ,data_transferred, file_size, sys_info_index, video_index)
summary_dict = dict(zip(summary_keys, summary_values))
# print summary_dict
vEQdb.insertIntoVEQSummaryTable(summary_values)
# write this to a summary file json and a database
print video_title
try:
        video_title = re.sub(r"[^\w\s]", '', video_title)
except:
video_title = video
print "============================================="
print "vEQ-Summary"
print "============================================="
print "Video Name: " + str(video_title)
if online_video:
print "Video URL: " + video
print "Benchmark Duration: " + str(end_time - start_time) + "secs"
print "Video Codec: " + str(video_codec)
print "Width: " + str(video_width)
print "Height: " + str(video_height)
print "Mean Power: " + str(mean_power) + "W"
print "Mean CPU Usage: " + str(mean_cpu) + "%"
print "Mean Memory Usage: " + str(mean_memory) + "%"
print "Video Filesize " + "Not Implemented (TODO)"
if online_video:
print "Mean Bandwidth: "+ mean_bandwidth + "Mbps"
print "Video Data Transferred: " + str(float( data_transferred / (1024**2))) + " MB"
print data_read_from_io
print "Video Data read from I/O: " + str(float( data_read_from_io / (1024**2))) + " MB"
print "Video Data written to I/O: " + str(float( data_writes_from_io / (1024**2))) + " MB"
print "============================================="
print "System Information"
print "============================================="
print "O/S: " + os_info
print "CPU Name: " + cpu
print "GPU Name: " + gpu
print "Memory Info: " + "Not Yet Implemented"
print "Disk Info: " + "Not Yet Implemented"
print "Active NIC Info: " + "Not Yet Implemented"
print "============================================="
# to_plot = True
# to_show = False
# TODO implemtent GPU monitoring
# gpus=None
# plot_title = str(video_codec) + "- (" + str(video_title) + ")"
# if True:
# # if to_plot:
# makeSubPlot(start_time=start_time, figure_title=plot_title, cpus=cpus, memorys=memorys, bitrate=bitrate, powers=powers, gpus=gpus, to_show=to_show)
# to_plot = False
to_show = True
    # TODO: implement GPU monitoring
gpus=None
plot_title = str(video_codec) + "- (" + str(video_title) + ")"
if to_plot:
makeSubPlot(start_time=start_time, figure_title=plot_title, cpus=c, memorys=m, bitrate=bitrate, powers=powers, gpus=gpus, to_show=to_show)
if __name__ == '__main__':
    main()
| oche-jay/vEQ-benchmark | vEQ_benchmark.py | Python | gpl-2.0 | 13,919 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2007-2009,2011 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""This module represents the Greek language.
.. seealso:: http://en.wikipedia.org/wiki/Greek_language
"""
from __future__ import unicode_literals
import re
from translate.lang import common
from translate.misc.dictutils import ordereddict
class el(common.Common):
"""This class represents Greek."""
# Greek uses ; as question mark and the middot instead
sentenceend = ".!;…"
sentencere = re.compile(r"""
(?s) # make . also match newlines
.*? # anything, but match non-greedy
    [%s]            # the punctuation for sentence ending
    \s+             # the spacing after the punctuation
(?=[^a-zά-ώ\d]) # lookahead that next part starts with caps
""" % sentenceend, re.VERBOSE | re.UNICODE)
puncdict = ordereddict([
(";", "·"),
("?", ";"),
])
# Valid latin characters for use as accelerators
valid_latin_accel = ("abcdefghijklmnopqrstuvwxyz"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"1234567890")
# Valid greek characters for use as accelerators (accented characters
# and "ς" omitted)
valid_greek_accel = ("αβγδεζηθικλμνξοπρστυφχψω"
"ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩ")
# Valid accelerators
validaccel = "".join([valid_latin_accel, valid_greek_accel])
| claudep/translate | translate/lang/el.py | Python | gpl-2.0 | 2,156 |
# -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import re
from lxml.cssselect import CSSSelector
from zope.testbrowser.browser import Browser
from splinter.element_list import ElementList
from splinter.exceptions import ElementDoesNotExist
from splinter.driver import DriverAPI, ElementAPI
from splinter.cookie_manager import CookieManagerAPI
import mimetypes
import lxml.html
import mechanize
import time
class CookieManager(CookieManagerAPI):
def __init__(self, browser_cookies):
self._cookies = browser_cookies
def add(self, cookies):
if isinstance(cookies, list):
for cookie in cookies:
for key, value in cookie.items():
self._cookies[key] = value
return
for key, value in cookies.items():
self._cookies[key] = value
def delete(self, *cookies):
if cookies:
for cookie in cookies:
try:
del self._cookies[cookie]
except KeyError:
pass
else:
self._cookies.clearAll()
def all(self, verbose=False):
cookies = {}
for key, value in self._cookies.items():
cookies[key] = value
return cookies
def __getitem__(self, item):
return self._cookies[item]
def __eq__(self, other_object):
if isinstance(other_object, dict):
return dict(self._cookies) == other_object
class ZopeTestBrowser(DriverAPI):
driver_name = "zope.testbrowser"
def __init__(self, user_agent=None, wait_time=2):
self.wait_time = wait_time
mech_browser = self._get_mech_browser(user_agent)
self._browser = Browser(mech_browser=mech_browser)
self._cookie_manager = CookieManager(self._browser.cookies)
self._last_urls = []
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
pass
def visit(self, url):
self._browser.open(url)
def back(self):
self._last_urls.insert(0, self.url)
self._browser.goBack()
def forward(self):
try:
self.visit(self._last_urls.pop())
except IndexError:
pass
def reload(self):
self._browser.reload()
def quit(self):
pass
@property
def htmltree(self):
return lxml.html.fromstring(self.html.decode('utf-8'))
@property
def title(self):
return self._browser.title
@property
def html(self):
return self._browser.contents
@property
def url(self):
return self._browser.url
def find_option_by_value(self, value):
html = self.htmltree
element = html.xpath('//option[@value="%s"]' % value)[0]
control = self._browser.getControl(element.text)
return ElementList([ZopeTestBrowserOptionElement(control, self)], find_by="value", query=value)
def find_option_by_text(self, text):
html = self.htmltree
element = html.xpath('//option[normalize-space(text())="%s"]' % text)[0]
control = self._browser.getControl(element.text)
return ElementList([ZopeTestBrowserOptionElement(control, self)], find_by="text", query=text)
def find_by_css(self, selector):
xpath = CSSSelector(selector).path
return self.find_by_xpath(xpath, original_find="css", original_selector=selector)
def find_by_xpath(self, xpath, original_find=None, original_selector=None):
html = self.htmltree
elements = []
for xpath_element in html.xpath(xpath):
if self._element_is_link(xpath_element):
return self._find_links_by_xpath(xpath)
elif self._element_is_control(xpath_element):
return self.find_by_name(xpath_element.name)
else:
elements.append(xpath_element)
find_by = original_find or "xpath"
query = original_selector or xpath
return ElementList([ZopeTestBrowserElement(element, self) for element in elements], find_by=find_by, query=query)
def find_by_tag(self, tag):
return self.find_by_xpath('//%s' % tag, original_find="tag", original_selector=tag)
def find_by_value(self, value):
return self.find_by_xpath('//*[@value="%s"]' % value, original_find="value", original_selector=value)
def find_by_id(self, id_value):
return self.find_by_xpath('//*[@id="%s"][1]' % id_value, original_find="id", original_selector=id_value)
def find_by_name(self, name):
elements = []
index = 0
while True:
try:
control = self._browser.getControl(name=name, index=index)
elements.append(control)
index += 1
except LookupError:
break
return ElementList([ZopeTestBrowserControlElement(element, self) for element in elements], find_by="name", query=name)
def find_link_by_text(self, text):
return self._find_links_by_xpath("//a[text()='%s']" % text)
def find_link_by_href(self, href):
return self._find_links_by_xpath("//a[@href='%s']" % href)
def find_link_by_partial_href(self, partial_href):
return self._find_links_by_xpath("//a[contains(@href, '%s')]" % partial_href)
def find_link_by_partial_text(self, partial_text):
return self._find_links_by_xpath("//a[contains(normalize-space(.), '%s')]" % partial_text)
def fill(self, name, value):
self.find_by_name(name=name).first._control.value = value
def fill_form(self, field_values):
for name, value in field_values.items():
element = self.find_by_name(name)
control = element.first._control
if control.type == 'checkbox':
if value:
control.value = control.options
else:
control.value = []
elif control.type == 'radio':
control.value = [option for option in control.options if option == value]
elif control.type == 'select':
control.value = [value]
else:
# text, textarea, password, tel
control.value = value
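    # Usage sketch (hypothetical field names): fill_form() maps plain Python
    # values onto control semantics, e.g.
    #   browser.fill_form({'name': 'Alice', 'newsletter': True,
    #                      'gender': 'female', 'country': 'br'})
    # fills the text input, ticks the checkbox and picks the matching radio
    # and select options.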
def choose(self, name, value):
control = self._browser.getControl(name=name)
control.value = [option for option in control.options if option == value]
def check(self, name):
control = self._browser.getControl(name=name)
control.value = control.options
def uncheck(self, name):
control = self._browser.getControl(name=name)
control.value = []
def attach_file(self, name, file_path):
filename = file_path.split('/')[-1]
control = self._browser.getControl(name=name)
content_type, _ = mimetypes.guess_type(file_path)
control.add_file(open(file_path), content_type, filename)
def _find_links_by_xpath(self, xpath):
html = self.htmltree
links = html.xpath(xpath)
return ElementList([ZopeTestBrowserLinkElement(link, self) for link in links], find_by="xpath", query=xpath)
def select(self, name, value):
self.find_by_name(name).first._control.value = [value]
def is_text_present(self, text, wait_time=None):
wait_time = wait_time or self.wait_time
end_time = time.time() + wait_time
while time.time() < end_time:
if self._is_text_present(text):
return True
return False
def _is_text_present(self, text):
try:
body = self.find_by_tag('body').first
return text in body.text
except ElementDoesNotExist:
# This exception will be thrown if the body tag isn't present
# This has occasionally been observed. Assume that the
# page isn't fully loaded yet
return False
def is_text_not_present(self, text, wait_time=None):
wait_time = wait_time or self.wait_time
end_time = time.time() + wait_time
while time.time() < end_time:
if not self._is_text_present(text):
return True
return False
def _element_is_link(self, element):
return element.tag == 'a'
def _element_is_control(self, element):
return hasattr(element, 'type')
def _get_mech_browser(self, user_agent):
mech_browser = mechanize.Browser()
if user_agent is not None:
mech_browser.addheaders = [("User-agent", user_agent), ]
return mech_browser
@property
def cookies(self):
return self._cookie_manager
re_extract_inner_html = re.compile(r'^<[^<>]+>(.*)</[^<>]+>$')
class ZopeTestBrowserElement(ElementAPI):
def __init__(self, element, parent):
self._element = element
self.parent = parent
def __getitem__(self, attr):
return self._element.attrib[attr]
def find_by_css(self, selector):
elements = self._element.cssselect(selector)
return ElementList([self.__class__(element, self) for element in elements])
def find_by_xpath(self, selector):
elements = self._element.xpath(selector)
return ElementList([self.__class__(element, self) for element in elements])
def find_by_name(self, name):
elements = self._element.cssselect('[name="%s"]' % name)
return ElementList([self.__class__(element, self) for element in elements])
def find_by_tag(self, name):
elements = self._element.cssselect(name)
return ElementList([self.__class__(element, self) for element in elements])
def find_by_value(self, value):
elements = self._element.cssselect('[value="%s"]' % value)
return ElementList([self.__class__(element, self) for element in elements])
def find_by_id(self, id):
elements = self._element.cssselect('#%s' % id)
return ElementList([self.__class__(element, self) for element in elements])
@property
def value(self):
return self._element.text_content()
@property
def text(self):
return self.value
@property
def outer_html(self):
return lxml.html.tostring(self._element, encoding='unicode').strip()
@property
def html(self):
return re_extract_inner_html.match(self.outer_html).group(1)
def has_class(self, class_name):
return len(self._element.find_class(class_name)) > 0
class ZopeTestBrowserLinkElement(ZopeTestBrowserElement):
def __init__(self, element, parent):
super(ZopeTestBrowserLinkElement, self).__init__(element, parent)
self._browser = parent._browser
def __getitem__(self, attr):
return super(ZopeTestBrowserLinkElement, self).__getitem__(attr)
def click(self):
return self._browser.open(self["href"])
class ZopeTestBrowserControlElement(ZopeTestBrowserElement):
def __init__(self, control, parent):
self._control = control
self.parent = parent
def __getitem__(self, attr):
return self._control.mech_control.attrs[attr]
@property
def value(self):
return self._control.value
@property
def checked(self):
return bool(self._control.value)
def click(self):
return self._control.click()
def fill(self, value):
self._control.value = value
def select(self, value):
self._control.value = [value]
class ZopeTestBrowserOptionElement(ZopeTestBrowserElement):
def __init__(self, control, parent):
self._control = control
self.parent = parent
def __getitem__(self, attr):
return self._control.mech_item.attrs[attr]
@property
def text(self):
return self._control.mech_item.get_labels()[0]._text
@property
def value(self):
return self._control.optionValue
@property
def selected(self):
return self._control.mech_item._selected
| devenbansod/SWD-Query | splinter/driver/zopetestbrowser.py | Python | gpl-2.0 | 12,097 |
from __future__ import print_function, division, absolute_import
# Copyright (c) 2016 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
import logging
import sys
import six
import decorator
import dbus.service
import json
import re
from rhsmlib.dbus import exceptions
log = logging.getLogger(__name__)
__all__ = [
'dbus_handle_exceptions',
'dbus_service_method',
'dbus_service_signal'
]
@decorator.decorator
def dbus_handle_exceptions(func, *args, **kwargs):
"""Decorator to handle exceptions, log them, and wrap them if necessary"""
try:
ret = func(*args, **kwargs)
return ret
except Exception as err:
log.exception(err)
trace = sys.exc_info()[2]
severity = "error"
# Remove "HTTP error (...): " string from the messages:
pattern = '^HTTP error \x28.*\x29: '
err_msg = re.sub(pattern, '', str(err))
# Modify severity of some exception here
if "Ignoring request to auto-attach. It is disabled for org" in err_msg:
severity = "warning"
if hasattr(err, 'severity'):
severity = err.severity
# Raise exception string as JSON string. Thus it can be parsed and printed properly.
error_msg = json.dumps(
{
"exception": type(err).__name__,
"severity": severity,
"message": err_msg
}
)
six.reraise(exceptions.RHSM1DBusException, exceptions.RHSM1DBusException(error_msg), trace)
def dbus_service_method(*args, **kwargs):
# Tell python-dbus that "sender" will be the keyword to use for the sender unless otherwise
# defined.
kwargs.setdefault("sender_keyword", "sender")
return dbus.service.method(*args, **kwargs)
def dbus_service_signal(*args, **kwargs):
"""
Decorator used for signal
:param args:
:param kwargs:
:return:
"""
return dbus.service.signal(*args, **kwargs)
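# Minimal demonstration sketch (an assumption, not part of the original
# module): any callable wrapped with dbus_handle_exceptions re-raises
# failures as a RHSM1DBusException whose message is a JSON document.
if __name__ == '__main__':
    @dbus_handle_exceptions
    def _fail():
        raise ValueError("HTTP error (404): not found")

    try:
        _fail()
    except exceptions.RHSM1DBusException as e:
        # The "HTTP error (...): " prefix is stripped, so the payload is
        # {"exception": "ValueError", "severity": "error", "message": "not found"}
        print(e)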
| Lorquas/subscription-manager | src/rhsmlib/dbus/util.py | Python | gpl-2.0 | 2,488 |
from unittest import TestCase
class TestLoadUser(TestCase):
def test_find_user(self):
from backend import load_user
user = load_user('Neill', 'password')
self.assertIsNotNone(user)
self.assertEqual(user.password, "Password")
user = load_user("Tony")
self.assertIsNone(user)
| neillc/memberdb-ng | backend/tests/test_load_user.py | Python | gpl-2.0 | 330 |
# -*- coding: utf-8 -*-
# Copyright (c) 2008 - 2009 Lukas Hetzenecker <[email protected]>
from PyQt4.QtCore import *
from PyQt4.QtGui import *
# Matplotlib
try:
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4 import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
except ImportError:
USE_MATPLOTLIB = False
else:
    USE_MATPLOTLIB = True
if USE_MATPLOTLIB:
class ContactCanvas(FigureCanvas):
def __init__(self, parent=None, width = 10, height = 3, dpi = 100, sharex = None, sharey = None):
self.fig = Figure(figsize = (width, height), dpi=dpi, facecolor = '#FFFFFF')
self.ax = self.fig.add_subplot(111, sharex = sharex, sharey = sharey)
FigureCanvas.__init__(self, self.fig)
FigureCanvas.setSizePolicy(self,
QSizePolicy.Expanding,
QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
def format_labels(self):
labels_x = self.ax.get_xticklabels()
labels_y = self.ax.get_yticklabels()
for xlabel in labels_x:
xlabel.set_fontsize(8)
for ylabel in labels_y:
ylabel.set_fontsize(8)
ylabel.set_color('b')
else:
class ContactCanvas(QLabel):
def __init__(self, parent=None):
super(ContactCanvas, self).__init__(parent)
self.setText(self.tr("Matplotlib not found - Please install it."))
| ypid/series60-remote | pc/widget/ContactCanvas.py | Python | gpl-2.0 | 1,696 |
from routersploit.modules.creds.cameras.arecont.ssh_default_creds import Exploit
def test_check_success(target):
""" Test scenario - testing against SSH server """
exploit = Exploit()
assert exploit.target == ""
assert exploit.port == 22
assert exploit.threads == 1
assert exploit.defaults == ["admin:", ":"]
assert exploit.stop_on_success is True
assert exploit.verbosity is True
exploit.target = target.host
exploit.port = target.port
assert exploit.check() is False
assert exploit.check_default() is None
assert exploit.run() is None
| dasseclab/dasseclab | clones/routersploit/tests/creds/cameras/arecont/test_ssh_default_creds.py | Python | gpl-2.0 | 594 |
from random import shuffle
import pytest
from utils import testgen
from utils.version import current_version
from cfme.web_ui import paginator, summary_title
from cfme.containers.pod import Pod, paged_tbl as pod_paged_tbl
from cfme.containers.provider import ContainersProvider, paged_tbl as provider_paged_tbl,\
navigate_and_get_rows
from cfme.containers.service import Service, paged_tbl as service_paged_tbl
from cfme.containers.node import Node, list_tbl as node_paged_tbl
from cfme.containers.replicator import Replicator, paged_tbl as replicator_paged_tbl
from cfme.containers.image import Image, paged_tbl as image_paged_tbl
from cfme.containers.project import Project, paged_tbl as project_paged_tbl
from cfme.containers.template import Template, paged_tbl as template_paged_tbl
from cfme.containers.container import Container, paged_tbl as container_paged_tbl
from cfme.containers.image_registry import ImageRegistry, paged_tbl as image_registry_paged_tbl
pytestmark = [
pytest.mark.usefixtures('setup_provider'),
pytest.mark.tier(1)]
pytest_generate_tests = testgen.generate([ContainersProvider], scope='function')
class DataSet(object):
def __init__(self, obj, paged_tbl, polarion_id):
self.obj = obj
self.paged_tbl = paged_tbl
pytest.mark.polarion(polarion_id)(self)
TEST_OBJECTS = [
DataSet(ContainersProvider, provider_paged_tbl, 'CMP-9851'),
DataSet(Container, container_paged_tbl, 'CMP-9947'),
DataSet(Pod, pod_paged_tbl, 'CMP-9929'),
DataSet(Service, service_paged_tbl, 'CMP-10564'),
DataSet(Node, node_paged_tbl, 'CMP-9962'),
DataSet(Replicator, replicator_paged_tbl, 'CMP-10565'),
DataSet(Image, image_paged_tbl, 'CMP-9980'),
DataSet(ImageRegistry, image_registry_paged_tbl, 'CMP-9994'),
DataSet(Project, project_paged_tbl, 'CMP-9868'),
DataSet(Template, template_paged_tbl, 'CMP-10319')
]
def check_relationships(instance):
"""Check the relationships linking & data integrity"""
sum_values = instance.summary.relationships.items().values()
shuffle(sum_values)
for attr in sum_values:
if attr.clickable:
break
else:
return # No clickable object but we still want to pass
link_value = attr.value
attr.click()
if type(link_value) is int:
rec_total = paginator.rec_total()
if rec_total != link_value:
raise Exception('Difference between the value({}) in the relationships table in {}'
'to number of records ({}) in the target'
'page'.format(link_value, instance.name, rec_total))
else:
assert '(Summary)' in summary_title()
@pytest.mark.parametrize('data_set', TEST_OBJECTS, ids=[obj.obj for obj in TEST_OBJECTS])
def test_relationships_tables(provider, data_set):
"""This test verifies the integrity of the Relationships table.
clicking on each field in the Relationships table takes the user
to either Summary page where we verify that the field that appears
in the Relationships table also appears in the Properties table,
or to the page where the number of rows is equal to the number
that is displayed in the Relationships table.
"""
if current_version() < "5.7" and data_set.obj == Template:
        pytest.skip('Templates do not exist in CFME versions older than 5.7. Skipping...')
rows = navigate_and_get_rows(provider, data_set.obj, data_set.paged_tbl, 1)
if not rows:
pytest.skip('No objects to test for relationships for {}'.format(data_set.obj.__name__))
row = rows[-1]
if data_set.obj is Container:
instance = data_set.obj(row.name.text, row.pod_name.text)
elif data_set.obj is ImageRegistry:
instance = data_set.obj(row.host.text, provider)
elif data_set.obj is Image:
instance = data_set.obj(row.name.text, row.tag.text, provider)
else:
instance = data_set.obj(row.name.text, provider)
check_relationships(instance)
| rananda/cfme_tests | cfme/tests/containers/test_relationships.py | Python | gpl-2.0 | 4,007 |
import json
import re
from trac.admin import IAdminCommandProvider
from trac.attachment import Attachment, IAttachmentChangeListener
from trac.core import Component, implements
from trac.versioncontrol import (
RepositoryManager, NoSuchChangeset, IRepositoryChangeListener)
from trac.web.api import HTTPNotFound, IRequestHandler, ITemplateStreamFilter
from genshi.builder import tag
from genshi.filters import Transformer
from code_comments.api import ICodeCommentChangeListener
from code_comments.comments import Comments
class Subscription(object):
"""
Representation of a code comment subscription.
"""
id = 0
user = ''
type = ''
path = ''
rev = ''
repos = ''
notify = True
def __init__(self, env, data=None):
if isinstance(data, dict):
self.__dict__ = data
self.env = env
def __str__(self):
"""
Returns a user friendly string representation.
"""
template = "{0} for {1} {2}"
if self.type == "changeset":
_identifier = self.rev
elif self.type == "browser":
_identifier = "{0} @ {1}".format(self.path, self.rev)
else:
_identifier = self.path
return template.format(self.user, self.type, _identifier)
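    # Illustrative renderings (hypothetical values):
    #   "alice for changeset 1a2b3c"
    #   "bob for browser /trunk/setup.py @ 42"
    #   "carol for attachment /ticket/7/patch.diff"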
@classmethod
def select(cls, env, args={}, notify=None):
"""
Retrieve existing subscription(s).
"""
select = 'SELECT * FROM code_comments_subscriptions'
if notify:
args['notify'] = bool(notify)
if len(args) > 0:
select += ' WHERE '
criteria = []
for key, value in args.iteritems():
template = '{0}={1}'
if isinstance(value, basestring):
template = '{0}=\'{1}\''
if (isinstance(value, tuple) or isinstance(value, list)):
template = '{0} IN (\'{1}\')'
value = '\',\''.join(value)
if isinstance(value, bool):
value = int(value)
criteria.append(template.format(key, value))
select += ' AND '.join(criteria)
cursor = env.get_read_db().cursor()
cursor.execute(select)
for row in cursor:
yield cls._from_row(env, row)
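    # Illustrative sketch (hypothetical arguments): a call such as
    #   Subscription.select(env, {'type': 'browser', 'rev': '42'}, notify=True)
    # executes roughly
    #   SELECT * FROM code_comments_subscriptions
    #   WHERE type='browser' AND rev='42' AND notify=1
    # with the clause order following dict iteration order.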
def insert(self, db=None):
"""
Insert a new subscription. Returns bool to indicate success.
"""
if self.id > 0:
# Already has an id, don't insert
return False
else:
with self.env.db_transaction as db:
cursor = db.cursor()
insert = ("INSERT INTO code_comments_subscriptions "
"(user, type, path, repos, rev, notify) "
"VALUES (%s, %s, %s, %s, %s, %s)")
values = (self.user, self.type, self.path, self.repos,
self.rev, self.notify)
cursor.execute(insert, values)
self.id = db.get_last_id(cursor, 'code_comments_subscriptions')
return True
def update(self, db=None):
"""
Update an existing subscription. Returns bool to indicate success.
"""
if self.id == 0:
# Doesn't have a valid id, don't update
return False
else:
with self.env.db_transaction as db:
cursor = db.cursor()
update = ("UPDATE code_comments_subscriptions SET "
"user=%s, type=%s, path=%s, repos=%s, rev=%s, "
"notify=%s WHERE id=%s")
values = (self.user, self.type, self.path, self.repos,
self.rev, self.notify, self.id)
try:
cursor.execute(update, values)
except db.IntegrityError:
self.env.log.warning("Subscription update failed.")
return False
return True
def delete(self):
"""
Delete an existing subscription.
"""
if self.id > 0:
with self.env.db_transaction as db:
cursor = db.cursor()
delete = ("DELETE FROM code_comments_subscriptions WHERE "
"id=%s")
cursor.execute(delete, (self.id,))
@classmethod
def _from_row(cls, env, row):
"""
Creates a subscription from a list (representing a database row).
"""
try:
subscription = cls(env)
subscription.id = int(row[0])
subscription.user = row[1]
subscription.type = row[2]
subscription.path = row[3]
subscription.repos = row[4]
subscription.rev = row[5]
subscription.notify = bool(row[6])
return subscription
except IndexError:
# Invalid row
return None
@classmethod
def _from_dict(cls, env, dict_, create=True):
"""
Retrieves or (optionally) creates a subscription from a dict.
"""
subscription = None
# Look for existing subscriptions
args = {
'user': dict_['user'],
'type': dict_['type'],
'path': dict_['path'],
'repos': dict_['repos'],
'rev': dict_['rev'],
}
subscriptions = cls.select(env, args)
# Only return the first one
for _subscription in subscriptions:
if subscription is None:
subscription = _subscription
env.log.info('Subscription found: [%d] %s',
subscription.id, subscription)
else:
# The unique constraint on the table should prevent this ever
# occurring
env.log.warning('Multiple subscriptions found: [%d] %s',
subscription.id, subscription)
# (Optionally) create a new subscription if we didn't find one
if subscription is None and create:
subscription = cls(env, dict_)
subscription.insert()
env.log.info('Subscription created: [%d] %s',
subscription.id, subscription)
return subscription
@classmethod
def from_attachment(cls, env, attachment, user=None, notify=True):
"""
Creates a subscription from an Attachment object.
"""
_path = "/{0}/{1}/{2}".format(attachment.parent_realm,
attachment.parent_id,
attachment.filename)
sub = {
'user': user or attachment.author,
'type': 'attachment',
'path': _path,
'repos': '',
'rev': '',
'notify': notify,
}
return cls._from_dict(env, sub)
@classmethod
def from_changeset(cls, env, changeset, user=None, notify=True):
"""
Creates a subscription from a Changeset object.
"""
sub = {
'user': user or changeset.author,
'type': 'changeset',
'path': '',
'repos': changeset.repos.reponame,
'rev': changeset.rev,
'notify': notify,
}
return cls._from_dict(env, sub)
@classmethod
def from_comment(cls, env, comment, user=None, notify=True):
"""
Creates a subscription from a Comment object.
"""
sub = {
'user': user or comment.author,
'type': comment.type,
'notify': notify,
}
# Munge attachments
if comment.type == 'attachment':
sub['path'] = comment.path.split(':')[1]
sub['repos'] = ''
sub['rev'] = ''
# Munge changesets and browser
if comment.type in ('changeset', 'browser'):
if comment.type == 'browser':
sub['path'] = comment.path
else:
sub['path'] = ''
repo = RepositoryManager(env).get_repository(None)
try:
sub['repos'] = repo.reponame
try:
_cs = repo.get_changeset(comment.revision)
sub['rev'] = _cs.rev
except NoSuchChangeset:
# Invalid changeset
return None
finally:
repo.close()
return cls._from_dict(env, sub)
@classmethod
def for_attachment(cls, env, attachment, path=None, notify=None):
"""
Returns all subscriptions for an attachment. The path can be
overridden.
"""
path_template = "/{0}/{1}/{2}"
_path = path or path_template.format(attachment.parent_realm,
attachment.parent_id,
attachment.filename)
args = {
'type': 'attachment',
'path': _path,
}
return cls.select(env, args, notify)
@classmethod
def for_changeset(cls, env, changeset, notify=None):
"""
Returns all subscriptions for an changeset.
"""
args = {
'type': 'changeset',
'repos': changeset.repos.reponame,
'rev': changeset.rev,
}
return cls.select(env, args, notify)
@classmethod
def for_comment(cls, env, comment, notify=None):
"""
Return all subscriptions for a comment.
"""
args = {}
if comment.type == 'attachment':
args['type'] = comment.type
args['path'] = comment.path.split(':')[1]
if comment.type == 'changeset':
args['type'] = comment.type
args['rev'] = str(comment.revision)
if comment.type == 'browser':
args['type'] = ('browser', 'changeset')
args['path'] = (comment.path, '')
args['rev'] = str(comment.revision)
return cls.select(env, args, notify)
@classmethod
def for_request(cls, env, req, create=False):
"""
Return a **single** subscription for a HTTP request.
"""
reponame = req.args.get('reponame')
rm = RepositoryManager(env)
repos = rm.get_repository(reponame)
path = req.args.get('path') or ''
rev = req.args.get('rev') or repos.youngest_rev
dict_ = {
'user': req.authname,
'type': req.args.get('realm'),
'path': '',
'rev': '',
'repos': '',
}
if dict_['type'] == 'attachment':
dict_['path'] = path
if dict_['type'] == 'changeset':
dict_['rev'] = path[1:]
dict_['repos'] = repos.reponame
if dict_['type'] == 'browser':
if len(path) == 0:
dict_['path'] = '/'
else:
dict_['path'] = path[1:]
dict_['rev'] = rev
dict_['repos'] = repos.reponame
return cls._from_dict(env, dict_, create=create)
class SubscriptionJSONEncoder(json.JSONEncoder):
"""
JSON Encoder for a Subscription object.
"""
def default(self, o):
data = o.__dict__.copy()
del data['env']
return data
class SubscriptionAdmin(Component):
"""
trac-admin command provider for subscription administration.
"""
implements(IAdminCommandProvider)
# IAdminCommandProvider methods
def get_admin_commands(self):
yield ('subscription seed', '',
"""Seeds subscriptions for existing attachments, changesets,
and comments.
""",
None, self._do_seed)
def _do_seed(self):
# Create a subscription for all existing attachments
cursor = self.env.get_read_db().cursor()
cursor.execute("SELECT DISTINCT type, id FROM attachment")
rows = cursor.fetchall()
for row in rows:
for attachment in Attachment.select(self.env, row[0], row[1]):
Subscription.from_attachment(self.env, attachment)
# Create a subscription for all existing revisions
rm = RepositoryManager(self.env)
repos = rm.get_real_repositories()
for repo in repos:
_rev = repo.get_oldest_rev()
while _rev:
try:
_cs = repo.get_changeset(_rev)
Subscription.from_changeset(self.env, _cs)
except NoSuchChangeset:
pass
_rev = repo.next_rev(_rev)
# Create a subscription for all existing comments
comments = Comments(None, self.env).all()
for comment in comments:
Subscription.from_comment(self.env, comment)
class SubscriptionListeners(Component):
"""
Automatically creates subscriptions for attachments, changesets, and
comments.
"""
implements(IAttachmentChangeListener, IRepositoryChangeListener,
ICodeCommentChangeListener)
# IAttachmentChangeListener methods
def attachment_added(self, attachment):
Subscription.from_attachment(self.env, attachment)
def attachment_deleted(self, attachment):
for subscription in Subscription.for_attachment(self.env, attachment):
subscription.delete()
def attachment_reparented(self, attachment, old_parent_realm,
old_parent_id):
path_template = "/{0}/{1}/{2}"
old_path = path_template.format(old_parent_realm,
old_parent_id,
attachment.filename)
new_path = path_template.format(attachment.parent_realm,
attachment.parent_id,
attachment.filename)
for subscription in Subscription.for_attachment(self.env, attachment,
old_path):
subscription.path = new_path
subscription.update()
# IRepositoryChangeListener methods
def changeset_added(self, repos, changeset):
Subscription.from_changeset(self.env, changeset)
def changeset_modified(self, repos, changeset, old_changeset):
if changeset.author != old_changeset.author:
# Create a new author subscription
Subscription.from_changeset(self.env, changeset)
# ICodeCommentChangeListener methods
def comment_created(self, comment):
Subscription.from_comment(self.env, comment)
class SubscriptionModule(Component):
implements(IRequestHandler, ITemplateStreamFilter)
# IRequestHandler methods
def match_request(self, req):
match = re.match(r'\/subscription\/(\w+)(\/?.*)$', req.path_info)
if match:
if match.group(1):
req.args['realm'] = match.group(1)
if match.group(2):
req.args['path'] = match.group(2)
return True
def process_request(self, req):
if req.method == 'POST':
return self._do_POST(req)
elif req.method == 'PUT':
return self._do_PUT(req)
return self._do_GET(req)
# ITemplateStreamFilter methods
def filter_stream(self, req, method, filename, stream, data):
if re.match(r'^/(changeset|browser|attachment).*', req.path_info):
filter = Transformer('//h1')
stream |= filter.before(self._subscription_button(req.path_info))
return stream
# Internal methods
def _do_GET(self, req):
subscription = Subscription.for_request(self.env, req)
if subscription is None:
req.send('', 'application/json', 204)
req.send(json.dumps(subscription, cls=SubscriptionJSONEncoder),
'application/json')
def _do_POST(self, req):
subscription = Subscription.for_request(self.env, req, create=True)
status = 201
req.send(json.dumps(subscription, cls=SubscriptionJSONEncoder),
'application/json', status)
def _do_PUT(self, req):
subscription = Subscription.for_request(self.env, req)
if subscription is None:
raise HTTPNotFound('Subscription to /%s%s for %s not found',
req.args.get('realm'), req.args.get('path'),
req.authname)
content = req.read()
if len(content) > 0:
data = json.loads(content)
subscription.notify = data['notify']
subscription.update()
req.send(json.dumps(subscription, cls=SubscriptionJSONEncoder),
'application/json')
def _subscription_button(self, path):
"""
Generates a (disabled) button to connect JavaScript to.
"""
return tag.button('Subscribe', id_='subscribe', disabled=True,
title=('Code comment subscriptions require '
'JavaScript to be enabled'),
data_base_url=self.env.project_url or self.env.abs_href(),
data_path=path)
| SpamExperts/trac-code-comments-plugin | code_comments/subscription.py | Python | gpl-2.0 | 17,278 |
from pyramid.httpexceptions import (
HTTPException,
HTTPFound,
HTTPNotFound,
HTTPBadRequest,
HTTPConflict,
)
from pyramid.security import Authenticated
from pyramid.view import view_config
from perpetualfailure.db import session
from perpetualfailure.knowledgebase.models import (
KB_Article,
KB_ArticleRevision,
)
import copy
import difflib
import logging
log = logging.getLogger(__name__)
def traverse(path_, page=None, parents=None):
path = path_
# We're rolling out blank, let's start from the KB root (index)
if not page:
path = copy.copy(path_)
node = path.pop(0)
page = session.query(KB_Article).filter(KB_Article.parent == None, KB_Article.name == node).first()
if not parents:
parents = []
# Remove empty elements from the path
# Lets us do stuff like /kb////channels//// == /kb/channels
while (path and not path[0]):
path.pop(0)
    # The path list is empty; we've reached the article we wanted (bottom level)
if not path:
return (page, parents)
# Search for the current path node in the names of this page's children
node = path.pop(0)
results = [article for article in page.children if article.name == node]
if not results:
# No results found
return (None, parents)
# Node found; update page variable and check for more.
parents.append(page)
page = results[0]
return traverse(path, page, parents)
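# Illustrative sketch (hypothetical article names): for a stored tree
# index -> guides -> setup, traverse(["index", "guides", "setup"]) returns
# (setup_article, [index_article, guides_article]). Empty path segments are
# dropped first, so "/kb////guides" resolves the same as "/kb/guides".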
@view_config(
route_name='knowledgebase.article.view',
renderer='knowledgebase/article/view.mako',
)
def viewArticle(request):
path = request.matchdict['path']
# Check whether we're trying to load the index or not
if not path or path == "/":
path = [""]
else:
path = path.split("/")
# The index should always be at the first index in the path
path[0] = "index"
    # Find the article by traversing the article tree down to the article
    # we want.
(article, parents) = traverse(path)
if not article:
# Much cri :@(
return HTTPNotFound()
# RIP
revision_count = session.execute("select count(id) from knowledgebase_article_revision where article_id = %i;" % article.id).fetchall()[0][0]
# Feed the allmighty Mako
return {"article": article, "parents": parents, "revisions": revision_count}
@view_config(
route_name='knowledgebase.article.create',
renderer='knowledgebase/article/edit.mako',
# TODO: Add a factory and use the "create" permission.
permission=Authenticated,
)
def createArticle(request):
article = KB_Article()
# Construct a list from the path given in the route URL
path = request.matchdict['path'].split("/")
path = [node for node in path if node]
path.insert(0, "index")
if len(path) > 1:
parent = traverse(path[:-1])[0]
if not parent:
return HTTPNotFound()
if traverse(path)[0]:
return HTTPConflict()
# Validate data and if appropriate update and redirect.
r = articleUpdate(request, article, path)
if isinstance(r, HTTPException): return r
return {"article": article}
@view_config(
route_name='knowledgebase.article.edit',
renderer='knowledgebase/article/edit.mako',
# TODO: Add a factory and use the "edit" permission.
permission=Authenticated,
)
def editArticle(request):
# Construct a list from the path given in the route URL
path = request.matchdict['path'].split("/")
path = [node for node in path if node]
path.insert(0, "index")
article = traverse(path)[0]
if not article:
return HTTPNotFound()
# Validate data and if appropriate update and redirect.
r = articleUpdate(request, article, path)
if isinstance(r, HTTPException): return r
return {"article": article}
def articleUpdate(request, article, path, is_new=False):
if not request.method == "POST":
return None
for key in ['title', 'content']:
if key not in request.POST:
return HTTPBadRequest()
article.title = request.POST['title']
article.name = path[-1]
article.content = request.POST['content']
# Update the parent of this object
if len(path) > 1:
article.parent = traverse(path[:-1])[0]
elif article.parent:
# This is a root article but it's got a parent, remove the parent
# from this article object.
article.parent = None
curr_rev = KB_ArticleRevision(article)
prev_rev = article.revision
if prev_rev:
prev_rev.children.append(curr_rev)
session.add(prev_rev)
session.add(curr_rev)
article.revision = curr_rev
session.add(article)
return HTTPFound(location=request.route_path('knowledgebase.article.view', path=request.matchdict['path']))
@view_config(
route_name='knowledgebase.revision.compare',
renderer='knowledgebase/revision/compare.mako',
)
def compareRevisions(request):
base = getRevisionFromMatchdict(request, "base")
head = getRevisionFromMatchdict(request, "head")
baseText = ""
if base:
baseText = base.content.split("\n")
headText = ""
if head:
headText = head.content.split("\n")
baseFile = "article/%s/revision/%s" % (base.article.id, base.id)
headFile = "article/%s/revision/%s" % (head.article.id, head.id)
diff = "\n".join(list(difflib.unified_diff(baseText, headText, baseFile, headFile)))
return {"raw_diff": diff, "base": base, "head": head, "baseFile": baseFile,
"headFile": headFile}
def getRevisionFromMatchdict(request, key):
id = request.matchdict[key]
revision = session.query(KB_ArticleRevision) \
.filter(KB_ArticleRevision.id == id).first()
return revision
| ChatNode/PerpetualFailure | perpetualfailure/knowledgebase/views.py | Python | gpl-2.0 | 5,808 |
# DFF -- An Open Source Digital Forensics Framework
# Copyright (C) 2009-2010 ArxSys
# This program is free software, distributed under the terms of
# the GNU General Public License Version 2. See the LICENSE file
# at the top of the source tree.
#
# See http://www.digital-forensic.org for more information about this
# project. Please do not directly contact any of the maintainers of
# DFF for assistance; the project provides a web site, mailing lists
# and IRC channels for your use.
#
# Author(s):
# Jeremy Mounier <[email protected]>
#
#import sys
#Is digit
#import re
import os
from PyQt4.QtCore import Qt, SIGNAL
from PyQt4.QtGui import QScrollBar, QAbstractSlider
class hexScrollBar(QScrollBar):
def __init__(self, whex):
QScrollBar.__init__(self)
self.init(whex)
self.initCallBacks()
# self.setValues()
def init(self, whex):
self.whex = whex
self.heditor = self.whex.heditor
self.filesize = self.heditor.filesize
self.min = 0
self.single = 1
#Initialized in Whex with LFMOD
self.page = self.heditor.pageSize
self.max = 0
#Long File Mode
# self.lfmod = False
###### LFMOD ######
###################
# self.maxint = 2147483647
# self.lines = self.filesize / self.heditor.bytesPerLine
# self.restlines = self.filesize % 16
# if self.isInt(self.lines):
# self.max = self.lines - 1
# self.page = self.heditor.pageSize / 16
# else:
# self.lfmod = True
# self.max = self.maxint - 1
# self.page = self.heditor.pageSize
####################
####################
def initCallBacks(self):
self.connect(self, SIGNAL("sliderMoved(int)"), self.moved)
self.connect(self, SIGNAL("actionTriggered(int)"), self.triggered)
def setValues(self):
self.setMinimum(self.min)
self.setMaximum(self.max)
self.setSingleStep(self.single)
self.setPageStep(self.page)
self.setRange(self.min, self.max)
# def isLFMOD(self):
# return self.lfmod
# def isInt(self, val):
# try:
# res = int(val)
# if res < 2147483647:
# return True
# else:
# return False
# except ValueError, TypeError:
# return False
# else:
# return False
# LFMOD #
# def valueToOffset(self, value):
# return ((self.filesize * value) / self.maxint)
# def offsetToValue(self, offset):
# if self.isLFMOD():
# return ((self.maxint * offset) / self.filesize)
# else:
# return (offset / self.heditor.bytesPerLine)
########################################
# Navigation Operations #
########################################
def triggered(self, action):
if action == QAbstractSlider.SliderSingleStepAdd:
self.whex.view.move(self.singleStep(), 1)
elif action == QAbstractSlider.SliderSingleStepSub:
self.whex.view.move(self.singleStep(), 0)
elif action == QAbstractSlider.SliderPageStepSub:
self.whex.view.move(self.pageStep(), 0)
elif action == QAbstractSlider.SliderPageStepAdd:
self.whex.view.move(self.pageStep(), 1)
# def oldtriggered(self, action):
# offset = self.heditor.currentOffset
#######################
# LINES #
#######################
#LINE DOWN
# if action == QAbstractSlider.SliderSingleStepAdd:
# if offset + 16 <= (self.filesize - 5 * 16):
# self.heditor.readOffset(offset + 16)
#Update value
# if self.isLFMOD():
# self.setValue(self.offsetToValue(offset + 16))
# else:
# self.setValue(self.value() + 1)
#LINE UP
# elif action == QAbstractSlider.SliderSingleStepSub:
# if offset - 16 >= 0:
# self.heditor.readOffset(offset - 16)
# #Update value
# if self.isLFMOD():
# self.setValue(self.offsetToValue(offset - 16))
# else:
# self.setValue(self.value() - 1)
#######################
# PAGES #
#######################
#PAGE UP
# elif action == QAbstractSlider.SliderPageStepSub:
# if offset - (self.page * 16) >= 0:
# self.heditor.readOffset(offset - (self.page * 16))
# #Update value
# if self.isLFMOD():
# self.setValue(self.offsetToValue(offset - (self.page * 16)))
# else:
# self.setValue(self.value() - self.page)
#PAGE DOWN
# elif action == QAbstractSlider.SliderPageStepAdd:
# if offset + (self.page * 16) <= self.filesize - (5* 16):
# self.heditor.readOffset(offset + (self.page * 16))
# #Update value
# if self.isLFMOD():
# self.setValue(self.offsetToValue(offset + (self.page * 16)))
# else:
# self.setValue(self.value() + self.page)
def moved(self, value):
if self.whex.isLFMOD():
if value <= self.max:
offset = (self.filesize * value) / self.whex.maxint
self.heditor.readOffset(offset)
else:
if value <= self.max:
if value == self.max:
offset = self.filesize - (5 * self.heditor.bytesPerLine)
else:
offset = value * self.heditor.bytesPerLine
self.heditor.readOffset(offset)
| elthariel/dff | modules/viewer/hexedit/scrollbar.py | Python | gpl-2.0 | 5,795 |
import omf.cosim
glw = omf.cosim.GridLabWorld('6267', 'localhost', 'GC-solarAdd.glm', '2000-01-01 0:00:00')
glw.start()
print (glw.readClock())
# Changing solar gen status.
print (glw.read('test_solar', 'generator_status'))
glw.write('test_solar','generator_status', 'OFFLINE')
print ('Switched off solar')
print (glw.read('test_solar', 'generator_status'))
# Changing reactive power output.
print (glw.read('test_solar_inverter', 'Q_Out'))
glw.write('test_solar_inverter','Q_Out', '1000')
print ('Change Q_Out')
print (glw.read('test_solar_inverter', 'Q_Out'))
#glw.waitUntil('2000-01-01 0:30:00')
#print ('Stepped ahead 12 hours')
print (glw.readClock())
glw.resume()
print (glw.readClock())
glw.shutdown()
| dpinney/omf | omf/scratch/MPUPV/solarSwitch.py | Python | gpl-2.0 | 709 |
# Mustafa Hussain
# Digital Image Processing with Dr. Anas Salah Eddin
# FL Poly, Spring 2015
#
# Homework 3: Spatial Filtering
#
# USAGE NOTES:
#
# Written in Python 2.7
#
# Please ensure that the script is run from the same directory as the
# images directory!
import cv2
import copy
#import matplotlib.pyplot as plt
import numpy
import math
#from skimage import exposure
INPUT_DIRECTORY = 'input/'
OUTPUT_DIRECTORY = 'output/'
IMAGE_FILE_EXTENSION = '.JPG'
MAX_INTENSITY = 255 # 8-bit images
def laplacianFilter(image):
"""Approximates the second derivative, bringing out edges.
Referencing below zero wraps around, so top and left sides will be sharpened.
We are not bothering with the right and bottom edges, because referencing
above the image size results in a boundary error.
"""
width, height = image.shape
filteredImage = copy.deepcopy(image)
originalImage = copy.deepcopy(image)
# Avoid right, bottom edges.
for i in range(width - 1):
for j in range(height - 1):
# Mask from homepages.inf.ed.ac.uk/rbf/HIPR2/log.htm
total = 0.0
total += -1 * float(image[i][j + 1])
total += -1 * float(image[i - 1][j])
total += 4 * float(image[i][j])
total += -1 * float(image[i + 1][j])
total += -1 * float(image[i][j - 1])
filteredImage[i][j] = total / 9.0
filteredImage = (filteredImage / numpy.max(filteredImage)) * MAX_INTENSITY
return filteredImage
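# The weights applied in the loop above correspond to the 4-neighbour
# Laplacian kernel from the HIPR2 page cited there:
#      0 -1  0
#     -1  4 -1
#      0 -1  0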
def saveImage(image, filename):
"""Saves the image in the output directory with the filename given.
"""
cv2.imwrite(OUTPUT_DIRECTORY + filename + IMAGE_FILE_EXTENSION, image)
def openImage(fileName):
"""Opens the image in the input directory with the filename given.
"""
return cv2.imread(INPUT_DIRECTORY + fileName + IMAGE_FILE_EXTENSION, 0)
# Input images
inputForSharpening = 'testImage1'
# Import image.
imageForSharpening = openImage(inputForSharpening)
print("Laplacian Filter...")
filtered = laplacianFilter(imageForSharpening)
saveImage(filtered, inputForSharpening + 'Laplace')
print("Done.")
| hmustafamail/digitalimageprocessing | HW 5 - Frequency Filtering/Spatial Filtering/spatialFiltering.py | Python | gpl-2.0 | 2,087 |
# yum-rhn-plugin - RHN support for yum
#
# Copyright (C) 2006 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import sys
# Adjust path so we can see the src modules running from branch as well
# as test dir:
sys.path.insert(0, './')
sys.path.insert(0, '../')
sys.path.insert(0, '../../')
| colloquium/spacewalk | client/rhel/yum-rhn-plugin/test/settestpath.py | Python | gpl-2.0 | 992 |
#
# Script to build RPM's using latest LIS code, it will build the RPM's and copy it to LISISO folder
# currently we have three source trees: one for RHEL 5, one for RHEL 6 and one for RHEL 7
#
#
import os
import sys
import shutil
import subprocess
homedir = os.getcwd()
directory = "lis-next"
if os.path.exists(directory):
shutil.rmtree(directory)
def run(cmd):
output = subprocess.call(cmd,shell=True)
return output
# Clean up LISISO directory
#print "Cleaning up LISISO direcroty"
#run("./cleanupISODir.sh")
def buildrhel5():
print "Cleaning up LISISO direcroty"
# run("./cleanupISODir5.sh")
os.makedirs(directory)
run("git clone https://github.com/LIS/lis-next")
os.chdir(directory+"/hv-rhel5.x/")
run("git checkout 4.1.3")
run("tar -cvzf lis-next-rh5.tar.gz hv")
shutil.copy("lis-next-rh5.tar.gz" , homedir+"/rh5/SOURCES/")
os.chdir(homedir)
shutil.rmtree(directory)
found = False
rhel5buildpath = homedir+"/rh5/pbuild"
pbuildconfigfile = homedir+"/rh5/pbuild/.pbuild"
pbuildMakefile = homedir+"/rh5/pbuild/Makefile"
shutil.copy(pbuildconfigfile , "/root/")
with open("/root/.pbuild", "a") as file:
file.write("logdir: "+homedir+"/BuilgLogDir/rh5")
with open(pbuildMakefile, "r") as myfile:
for line in myfile :
if "homedir=" in line:
found = True
if not found:
with open(pbuildMakefile, "a") as myfile:
myfile.write("homedir="+homedir)
    # Change directory to buildpath, before building.
os.chdir(rhel5buildpath)
# Now clean the destination VM's .
clean = run("make clean")
if clean :
print "make clean failed"
sys.exit(1)
send = run("make send")
if send :
print "make send failed"
sys.exit(1)
make = run("make")
if make :
print "make failed"
sys.exit(1)
os.remove("/root/.pbuild")
os.chdir(homedir)
# run("./copyallrpmsrhel5.sh")
def buildrhel6():
print "Cleaning up LISISO direcroty"
# run("./cleanupISODir6.sh")
os.makedirs(directory)
run("git clone https://github.com/LIS/lis-next")
os.chdir(directory+"/hv-rhel6.x/")
run("git checkout 4.1.3")
run("tar -cvzf lis-next-rh6.tar.gz hv")
shutil.copy("lis-next-rh6.tar.gz" , homedir+"/rh6/SOURCES/")
os.chdir(homedir)
shutil.rmtree(directory)
found = False
rhel6buildpath=homedir+"/rh6/pbuild"
pbuildconfigfile=homedir+"/rh6/pbuild/.pbuild"
shutil.copy(pbuildconfigfile , "/root/")
with open("/root/.pbuild", "a") as file:
file.write("logdir: "+homedir+"/BuilgLogDir/rh6")
pbuildMakefile = homedir+"/rh6/pbuild/Makefile"
with open(pbuildMakefile, "r") as myfile:
for line in myfile :
if "homedir=" in line:
found = True
if not found:
with open(pbuildMakefile, "a") as myfile:
myfile.write("homedir="+homedir)
    # Change directory to buildpath, before building.
os.chdir(rhel6buildpath)
# Now clean the destination VM's .
clean = run("make clean")
if clean :
print "make clean failed"
sys.exit(1)
send = run("make send")
if send :
print "make send failed"
sys.exit(1)
make = run("make")
if make :
print "make failed"
sys.exit(1)
os.remove("/root/.pbuild")
os.chdir(homedir)
#run("./copyallrpmsrhel6.sh")
def buildrhel7():
print "Cleaning up LISISO direcroty"
# run("./cleanupISODir7.sh")
os.makedirs(directory)
run("git clone https://github.com/LIS/lis-next")
os.chdir(directory+"/hv-rhel7.x/")
run("git checkout 4.1.3")
run("tar -cvzf lis-next-rh7.tar.gz hv")
shutil.copy("lis-next-rh7.tar.gz" , homedir+"/rh7/SOURCES/")
os.chdir(homedir)
shutil.rmtree(directory)
found = False
rhel7buildpath = homedir+"/rh7/pbuild"
pbuildconfigfile = homedir+"/rh7/pbuild/.pbuild"
shutil.copy(pbuildconfigfile , "/root/")
with open("/root/.pbuild", "a") as file:
file.write("logdir: "+homedir+"/BuilgLogDir/rh7")
pbuildMakefile = homedir+"/rh7/pbuild/Makefile"
with open(pbuildMakefile, "r") as myfile:
for line in myfile :
if "homedir=" in line:
found = True
if not found:
with open(pbuildMakefile, "a") as myfile:
myfile.write("homedir="+homedir)
    # Change directory to buildpath, before building.
os.chdir(rhel7buildpath)
# Now clean the destination VM's .
clean = run("make clean")
if clean :
print "make clean failed"
sys.exit(1)
send = run("make send")
if send :
print "make send failed"
sys.exit(1)
make = run("make")
if make :
print "make failed"
sys.exit(1)
os.remove("/root/.pbuild")
os.chdir(homedir)
#run("./copyallrpmsrhel7.sh")
### Main entry for script.###
def main(argv):
for arg in sys.argv:
if arg == "rh5":
print "RHEL5 Build initializing...."
buildrhel5()
elif arg == "rh6":
print "RHEL6 Build initializing...."
buildrhel6()
elif arg == "rh7":
print "RHEL7 Build initializing...."
buildrhel7()
elif arg == "all":
print "RHEL5 , RHEL6 and RHEL 7 Build initializing...."
buildrhel5()
buildrhel6()
buildrhel7()
elif len(sys.argv) == 1:
print "USAGE : createrpms.py <rh5 , rh6 or all>"
sys.exit(2)
# Tar the LISISO directory .
#run("tar -cvzf lis-rpms-autogen.tar.gz LISISO")
if __name__ == "__main__":
main(sys.argv[1:])
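# Usage examples (assuming the rh5/rh6/rh7 build trees described above):
#   python createrpms.py rh7 # build only the RHEL 7 RPMs
#   python createrpms.py all # build RHEL 5, 6 and 7 RPMs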
| alexngmsft/lis-next | rpmbuild/createrpms.py | Python | gpl-2.0 | 5,034 |
#!/bin/env python
from CoaSim import *
cm = CustomMarker(0.1)
assert cm.position == 0.1
class MyMarker(CustomMarker):
def __init__(self,pos):
CustomMarker.__init__(self,pos)
def defaultValue(self):
return 1
def mutate(self, parentAllele, edgeLength):
return parentAllele+1
mm = MyMarker(0.5)
assert mm.position == 0.5
# regression test...
assert simulate([mm],5,seed=1).sequences == [[4], [5], [3], [5], [2]]
class NonMarker(object):
pass
try:
simulate([NonMarker()],2)
assert False
except TypeError, e:
assert str(e) == 'arg #1 contains a non-marker'
class Uninitialized(Marker): pass
try:
simulate([Uninitialized()],2)
assert False
except ValueError, e:
assert str(e) == 'arg #1 contains an un-initialized marker'
class Uninitialized(CustomMarker):
def __init__(self): pass
try:
simulate([Uninitialized()],2)
assert False
except ValueError, e:
assert str(e) == 'arg #1 contains an un-initialized marker'
class MissingDefaultValue(CustomMarker):
def __init__(self):
CustomMarker.__init__(self,0.2)
try:
simulate([MissingDefaultValue()],2)
assert False
except AttributeError, e:
assert str(e) == 'defaultValue'
class IncorrectDefaultValue(CustomMarker):
def __init__(self):
CustomMarker.__init__(self,0.2)
def defaultValue(self, x):
return 3
try:
simulate([IncorrectDefaultValue()],2)
assert False
except TypeError, e:
pass
class IncorrectDefaultValue(CustomMarker):
def __init__(self):
CustomMarker.__init__(self,0.2)
def defaultValue(self):
return None
try:
simulate([IncorrectDefaultValue()],2)
assert False
except TypeError, e:
assert str(e) == 'defaultValue() must return an integer.'
class MissingMutate(CustomMarker):
def __init__(self):
CustomMarker.__init__(self,0.2)
def defaultValue(self): return 0
try:
simulate([MissingMutate()],2)
assert False
except AttributeError, e:
assert str(e) == 'mutate'
class IncorrectMutate(CustomMarker):
def __init__(self):
CustomMarker.__init__(self,0.2)
def defaultValue(self): return 0
def mutate(self): return 0
try:
simulate([IncorrectMutate()],2)
assert False
except TypeError, e:
pass
class IncorrectMutate(CustomMarker):
def __init__(self):
CustomMarker.__init__(self,0.2)
def defaultValue(self): return 0
def mutate(self,parentAllele,edgeLength): return ""
try:
simulate([IncorrectMutate()],2)
assert False
except TypeError, e:
pass
| mailund/CoaSim | Python/customMarkerTest.py | Python | gpl-2.0 | 2,572 |
# Copyright 1999-2009 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# Don't use the unicode-wrapped os and shutil modules here since
# the whole _selinux module itself will be wrapped.
import os
import shutil
import portage
from portage import _encodings
from portage import _unicode_decode
from portage import _unicode_encode
from portage.localization import _
import selinux
from selinux import is_selinux_enabled
def copyfile(src, dest):
src = _unicode_encode(src, encoding=_encodings['fs'], errors='strict')
dest = _unicode_encode(dest, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(src)
if rc < 0:
src = _unicode_decode(src, encoding=_encodings['fs'], errors='replace')
raise OSError(_("copyfile: Failed getting context of \"%s\".") % src)
setfscreate(ctx)
try:
shutil.copyfile(src, dest)
finally:
setfscreate()
def getcontext():
(rc, ctx) = selinux.getcon()
if rc < 0:
raise OSError(_("getcontext: Failed getting current process context."))
return ctx
def mkdir(target, refdir):
target = _unicode_encode(target, encoding=_encodings['fs'], errors='strict')
refdir = _unicode_encode(refdir, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.getfilecon(refdir)
if rc < 0:
refdir = _unicode_decode(refdir, encoding=_encodings['fs'],
errors='replace')
raise OSError(
_("mkdir: Failed getting context of reference directory \"%s\".") \
% refdir)
setfscreate(ctx)
try:
os.mkdir(target)
finally:
setfscreate()
def rename(src, dest):
src = _unicode_encode(src, encoding=_encodings['fs'], errors='strict')
dest = _unicode_encode(dest, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(src)
if rc < 0:
src = _unicode_decode(src, encoding=_encodings['fs'], errors='replace')
raise OSError(_("rename: Failed getting context of \"%s\".") % src)
setfscreate(ctx)
try:
os.rename(src,dest)
finally:
setfscreate()
def settype(newtype):
ret = getcontext().split(":")
ret[2] = newtype
return ":".join(ret)
def setexec(ctx="\n"):
ctx = _unicode_encode(ctx, encoding=_encodings['content'], errors='strict')
if selinux.setexeccon(ctx) < 0:
ctx = _unicode_decode(ctx, encoding=_encodings['content'],
errors='replace')
if selinux.security_getenforce() == 1:
raise OSError(_("Failed setting exec() context \"%s\".") % ctx)
else:
portage.writemsg("!!! " + \
_("Failed setting exec() context \"%s\".") % ctx, \
noiselevel=-1)
def setfscreate(ctx="\n"):
ctx = _unicode_encode(ctx,
encoding=_encodings['content'], errors='strict')
if selinux.setfscreatecon(ctx) < 0:
ctx = _unicode_decode(ctx,
encoding=_encodings['content'], errors='replace')
raise OSError(
_("setfscreate: Failed setting fs create context \"%s\".") % ctx)
def spawn_wrapper(spawn_func, selinux_type):
selinux_type = _unicode_encode(selinux_type,
encoding=_encodings['content'], errors='strict')
def wrapper_func(*args, **kwargs):
con = settype(selinux_type)
setexec(con)
try:
return spawn_func(*args, **kwargs)
finally:
setexec()
return wrapper_func
def symlink(target, link, reflnk):
target = _unicode_encode(target, encoding=_encodings['fs'], errors='strict')
link = _unicode_encode(link, encoding=_encodings['fs'], errors='strict')
reflnk = _unicode_encode(reflnk, encoding=_encodings['fs'], errors='strict')
(rc, ctx) = selinux.lgetfilecon(reflnk)
if rc < 0:
reflnk = _unicode_decode(reflnk, encoding=_encodings['fs'],
errors='replace')
raise OSError(
_("symlink: Failed getting context of reference symlink \"%s\".") \
% reflnk)
setfscreate(ctx)
try:
os.symlink(target, link)
finally:
setfscreate()
| fastinetserver/portage-idfetch | pym/portage/_selinux.py | Python | gpl-2.0 | 3,723 |
# Copyright 2004-2012 Tom Rothamel <[email protected]>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import renpy.display
from renpy.display.render import render, Render, Matrix2D
# This file contains displayables that are image-like, because they take
# up a rectangular area of the screen, and do not respond to input.
class Solid(renpy.display.core.Displayable):
"""
:doc: disp_imagelike
A displayable that fills the area its assigned with `color`.
::
image white = Solid("#fff")
"""
def __init__(self, color, **properties):
super(Solid, self).__init__(**properties)
if color is not None:
self.color = renpy.easy.color(color)
else:
self.color = None
def visit(self):
return [ ]
def render(self, width, height, st, at):
color = self.color or self.style.color
rv = Render(width, height)
if color is None or width <= 0 or height <= 0:
return rv
SIZE = 10
if width < SIZE or height < SIZE:
tex = renpy.display.draw.solid_texture(width, height, color)
else:
tex = renpy.display.draw.solid_texture(SIZE, SIZE, color)
rv.forward = Matrix2D(1.0 * SIZE / width, 0, 0, 1.0 * SIZE / height)
rv.reverse = Matrix2D(1.0 * width / SIZE, 0, 0, 1.0 * height / SIZE)
rv.blit(tex, (0, 0))
return rv
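    # Rendering sketch: for a 300x200 solid, render() uploads only a
    # 10x10 texture and sets reverse = Matrix2D(300/10, 0, 0, 200/10),
    # so the blit stretches the small texture over the full area at a
    # constant texture-memory cost.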
class Frame(renpy.display.core.Displayable):
"""
:doc: disp_imagelike
    :args: (image, left, top, right=None, bottom=None, tile=False, **properties)
A displayable that resizes an image to fill the available area,
    while preserving the width and height of its borders. It is often
    used as the background of a window or button.
.. figure:: frame_example.png
Using a frame to resize an image to double its size.
`image`
An image manipulator that will be resized by this frame.
`left`
The size of the border on the left side.
`top`
The size of the border on the top.
`right`
The size of the border on the right side. If None, defaults
to `left`.
`bottom`
        The size of the border on the bottom. If None, defaults to `top`.
`tile`
If true, tiling is used to resize sections of the image,
rather than scaling.
::
# Resize the background of the text window if it's too small.
init python:
style.window.background = Frame("frame.png", 10, 10)
"""
__version__ = 1
def after_upgrade(self, version):
if version < 2:
self.left = self.xborder
self.right = self.xborder
self.top = self.yborder
self.bottom = self.yborder
def __init__(self, image, left, top, right=None, bottom=None, bilinear=True, tile=False, **properties):
super(Frame, self).__init__(**properties)
self.image = renpy.easy.displayable(image)
self.tile = tile
if right is None:
right = left
if bottom is None:
bottom = top
self.left = left
self.top = top
self.right = right
self.bottom = bottom
def render(self, width, height, st, at):
crend = render(self.image, width, height, st, at)
sw, sh = crend.get_size()
sw = int(sw)
sh = int(sh)
dw = int(width)
dh = int(height)
bw = self.left + self.right
bh = self.top + self.bottom
xborder = min(bw, sw - 2, dw)
if xborder:
left = self.left * xborder / bw
right = self.right * xborder / bw
else:
left = 0
right = 0
yborder = min(bh, sh - 2, dh)
if yborder:
top = self.top * yborder / bh
bottom = self.bottom * yborder / bh
else:
top = 0
bottom = 0
if renpy.display.draw.info["renderer"] == "sw":
return self.sw_render(crend, dw, dh, left, top, right, bottom)
def draw(x0, x1, y0, y1):
# Compute the coordinates of the left, right, top, and
# bottom sides of the region, for both the source and
# destination surfaces.
# left side.
if x0 >= 0:
dx0 = x0
sx0 = x0
else:
dx0 = dw + x0
sx0 = sw + x0
# right side.
if x1 > 0:
dx1 = x1
sx1 = x1
else:
dx1 = dw + x1
sx1 = sw + x1
# top side.
if y0 >= 0:
dy0 = y0
sy0 = y0
else:
dy0 = dh + y0
sy0 = sh + y0
# bottom side
if y1 > 0:
dy1 = y1
sy1 = y1
else:
dy1 = dh + y1
sy1 = sh + y1
# Quick exit.
if sx0 == sx1 or sy0 == sy1:
return
# Compute sizes.
csw = sx1 - sx0
csh = sy1 - sy0
cdw = dx1 - dx0
cdh = dy1 - dy0
if csw <= 0 or csh <= 0 or cdh <= 0 or cdw <= 0:
return
# Get a subsurface.
cr = crend.subsurface((sx0, sy0, csw, csh))
# Scale or tile if we have to.
if csw != cdw or csh != cdh:
if self.tile:
newcr = Render(cdw, cdh)
newcr.clipping = True
for x in xrange(0, cdw, csw):
for y in xrange(0, cdh, csh):
newcr.blit(cr, (x, y))
cr = newcr
else:
newcr = Render(cdw, cdh)
newcr.forward = Matrix2D(1.0 * csw / cdw, 0, 0, 1.0 * csh / cdh)
newcr.reverse = Matrix2D(1.0 * cdw / csw, 0, 0, 1.0 * cdh / csh)
newcr.blit(cr, (0, 0))
cr = newcr
# Blit.
rv.blit(cr, (dx0, dy0))
return
rv = Render(dw, dh)
self.draw_pattern(draw, left, top, right, bottom)
return rv
def draw_pattern(self, draw, left, top, right, bottom):
# Top row.
if top:
if left:
draw(0, left, 0, top)
draw(left, -right, 0, top)
if right:
draw(-right, 0, 0, top)
# Middle row.
if left:
draw(0, left, top, -bottom)
draw(left, -right, top, -bottom)
if right:
draw(-right, 0, top, -bottom)
# Bottom row.
if bottom:
if left:
draw(0, left, -bottom, 0)
draw(left, -right, -bottom, 0)
if right:
draw(-right, 0, -bottom, 0)
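    # Sign convention sketch: in draw(x0, x1, y0, y1), a non-negative
    # bound is measured from the left/top edge and a negative one from
    # the right/bottom edge. So draw(left, -right, 0, top) is the
    # stretchable strip along the top border between the two fixed
    # corners, and draw(-right, 0, -bottom, 0) is the bottom-right
    # corner itself.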
def sw_render(self, crend, dw, dh, left, top, right, bottom):
source = crend.render_to_texture(True)
sw, sh = source.get_size()
dest = renpy.display.swdraw.surface(dw, dh, True)
rv = dest
def draw(x0, x1, y0, y1):
# Compute the coordinates of the left, right, top, and
# bottom sides of the region, for both the source and
# destination surfaces.
# left side.
if x0 >= 0:
dx0 = x0
sx0 = x0
else:
dx0 = dw + x0
sx0 = sw + x0
# right side.
if x1 > 0:
dx1 = x1
sx1 = x1
else:
dx1 = dw + x1
sx1 = sw + x1
# top side.
if y0 >= 0:
dy0 = y0
sy0 = y0
else:
dy0 = dh + y0
sy0 = sh + y0
# bottom side
if y1 > 0:
dy1 = y1
sy1 = y1
else:
dy1 = dh + y1
sy1 = sh + y1
# Quick exit.
if sx0 == sx1 or sy0 == sy1 or dx1 <= dx0 or dy1 <= dy0:
return
# Compute sizes.
srcsize = (sx1 - sx0, sy1 - sy0)
dstsize = (int(dx1 - dx0), int(dy1 - dy0))
# Get a subsurface.
surf = source.subsurface((sx0, sy0, srcsize[0], srcsize[1]))
# Scale or tile if we have to.
if dstsize != srcsize:
if self.tile:
tilew, tileh = srcsize
dstw, dsth = dstsize
surf2 = renpy.display.pgrender.surface_unscaled(dstsize, surf)
for y in range(0, dsth, tileh):
for x in range(0, dstw, tilew):
surf2.blit(surf, (x, y))
surf = surf2
else:
surf2 = renpy.display.scale.real_transform_scale(surf, dstsize)
surf = surf2
# Blit.
dest.blit(surf, (dx0, dy0))
self.draw_pattern(draw, left, top, right, bottom)
rrv = renpy.display.render.Render(dw, dh)
rrv.blit(rv, (0, 0))
rrv.depends_on(crend)
# And, finish up.
return rrv
def visit(self):
return [ self.image ]
| MSEMJEJME/Get-Dumped | renpy/display/imagelike.py | Python | gpl-2.0 | 10,715 |
from main import KeyboardHandler
import threading
import thread
import pyatspi
def parse(s):
"""parse a string like control+f into (modifier, key).
Unknown modifiers will return ValueError."""
m = 0
lst = s.split('+')
if not len(lst):
return (0, s)
# Are these right?
d = {
"shift": 1 << pyatspi.MODIFIER_SHIFT,
"control": 1 << pyatspi.MODIFIER_CONTROL,
"alt": 1 << pyatspi.MODIFIER_ALT,
"win": 1 << pyatspi.MODIFIER_META3,
}
for item in lst:
if item in d:
m |= d[item]
lst.remove(item)
# end if
if len(lst) > 1: # more than one key, parse error
raise ValueError('unknown modifier %s' % lst[0])
return (m, lst[0].lower())
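# Parsing sketch: parse("control+f") returns
# (1 << pyatspi.MODIFIER_CONTROL, "f"), parse("f") returns (0, "f"),
# and parse("bogus+f") raises ValueError because "bogus" is not a
# known modifier.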
class AtspiThread(threading.Thread):
def run(self):
pyatspi.Registry.registerKeystrokeListener(handler, kind=(
pyatspi.KEY_PRESSED_EVENT,), mask=pyatspi.allModifiers())
pyatspi.Registry.start()
# the keys we registered
keys = {}
def handler(e):
m, k = e.modifiers, e.event_string.lower()
# not sure why we can't catch control+f. Try to fix it.
    if (not e.is_text) and 97 <= e.id <= 126:
k = chr(e.id)
if (m, k) not in keys:
return False
thread.start_new(keys[(m, k)], ())
return True # don't pass it on
class LinuxKeyboardHandler(KeyboardHandler):
def __init__(self, *args, **kwargs):
KeyboardHandler.__init__(self, *args, **kwargs)
t = AtspiThread()
t.start()
def register_key(self, key, function):
"""key will be a string, such as control+shift+f.
We need to convert that, using parse_key,
into modifier and key to put into our dictionary."""
# register key so we know if we have it on event receive.
t = parse(key)
keys[t] = function
# if we got this far, the key is valid.
KeyboardHandler.register_key(self, key, function)
def unregister_key(self, key, function):
KeyboardHandler.unregister_key(self, key, function)
del keys[parse(key)]
| Oliver2213/Queriet | queriet/keyboard_handler/linux.py | Python | gpl-2.0 | 2,059 |
#!/usr/bin/env python
#
# Plugins.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
from .Plugin import Plugin
class Plugins(list):
def __init__(self):
list.__init__(self)
@property
def length(self):
return len(self)
def __getattr__(self, key):
return self.namedItem(key)
def __getitem__(self, key):
try:
key = int(key)
return self.item(key)
except:
return self.namedItem(key)
def item(self, index):
if index >= self.length:
return Plugin()
return list.__getitem__(self, index)
def namedItem(self, name):
index = 0
while index < self.length:
p = self.item(index)
if p['name'].startswith(name):
return p
index += 1
print 'PLUGIN NOT FOUND:', name
return Plugin()
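    # Lookup sketch (plugin name assumed): for a plugin whose 'name' is
    # "Shockwave Flash", plugins[0], plugins["Shock"] and
    # plugins.Shockwave all resolve to it -- integer keys go through
    # item(), everything else falls back to namedItem()'s prefix match.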
def refresh(self, reloadDocuments = False):
pass
| max0d41/ThugBrowser | src/DOM/Plugins.py | Python | gpl-2.0 | 1,544 |
import xml.etree.cElementTree
from os import environ, unlink, symlink, path
from Tools.Directories import SCOPE_SKIN, resolveFilename
import time
from Tools.StbHardware import setRTCoffset
class Timezones:
def __init__(self):
self.timezones = []
self.readTimezonesFromFile()
def readTimezonesFromFile(self):
try:
root = xml.etree.cElementTree.parse(resolveFilename(SCOPE_SKIN, 'timezone.xml')).getroot()
for zone in root.findall("zone"):
self.timezones.append((zone.get('name',""), zone.get('zone',"")))
except:
pass
if len(self.timezones) == 0:
self.timezones = [("UTC", "UTC")]
def activateTimezone(self, index):
if len(self.timezones) <= index:
return
environ['TZ'] = self.timezones[index][1]
try:
unlink("/etc/localtime")
except OSError:
pass
try:
symlink("/usr/share/zoneinfo/%s" %(self.timezones[index][1]), "/etc/localtime")
except OSError:
pass
try:
time.tzset()
except:
from enigma import e_tzset
e_tzset()
if path.exists("/proc/stb/fp/rtc_offset"):
setRTCoffset()
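	# Activation sketch: picking index i re-points /etc/localtime at
	# /usr/share/zoneinfo/<zone> and re-exports TZ, after which
	# time.tzset() (or the e_tzset fallback) makes localtime() follow
	# the chosen zone.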
def getTimezoneList(self):
return [ str(x[0]) for x in self.timezones ]
def getDefaultTimezone(self):
# TODO return something more useful - depending on country-settings?
t = "(GMT+01:00) Amsterdam, Berlin, Bern, Rome, Vienna"
for (a,b) in self.timezones:
if a == t:
return a
return self.timezones[0][0]
timezones = Timezones()
| OpenSPA/dvbapp | lib/python/Components/Timezones.py | Python | gpl-2.0 | 1,411 |
#!/usr/bin/env python3
import math
import random
import mapp
import geom
class Robot:
def __init__(self, mapp, num_particles):
"""
Initialize the robot with a map.
        Inputs:
            mapp: a Map object on which the robot will move.
            num_particles: the number of particles used for the Monte
                Carlo localization.
        """
self.d_sigma = 0.05 # Uncertainty for distances.
self.a_sigma = 0.05 # Uncertainty for angles.
self.size = 0.2 # Size of the robot in meters.
self.ang = 0
self.coor = (0, 0)
self.alp_slow = 0.1
self.alp_fast = 0.5
self.w_slow = 0.1
self.w_fast = 0.1
self.w_random = 0
self.alp_dist = 0.3
self.w_dist = 10
self.num_particles = num_particles
self.particles = []
self.mapp = mapp
# Draw num_particles random particles inside the map.
for i in range(self.num_particles):
self.particles.append((self.random_particle(), 0))
def random_particle(self):
close = True
while close:
x = random.random() * self.mapp.width
y = random.random() * self.mapp.height
close = self.mapp.closest_wall((x, y)) < self.size
ang = random.random() * 2*math.pi
return (ang, (x, y))
def put(self, ang, coor):
"""
Put the robot on a place on the map.
        Inputs:
            ang: The orientation of the robot in radians.
            coor: The (x, y) coordinates of the robot in meters.
        """
self.ang = ang
self.coor = coor
def intersects(self, position, wall):
"""
Checks if the wall intersects the robot at a given position.
Inputs:
            position: A tuple with the robot coordinates: (x, y).
wall: A tuple with the wall's begin and end points:
((x1, y1), (x2, y2))
Output:
True if the wall intersects the robot, False otherwise.
"""
return geom.dist_point_line(position, wall) < self.size
def motion_model(self, u, state=None, exact=False):
"""
Calculate the next state for a given state and control.
Inputs:
u: A tuple of the form (angle, distance) describing the
desired movement.
state: A tuple of the form (angle, (x_coordinate,
y_coordinate)) describing the current state.
Output:
A tuple of the form (angle, (x, y)).
"""
# If no state is given, use the current state of the robot.
if state is None:
ang = self.ang
coor = self.coor
else:
ang = state[0]
coor = state[1]
# Calculate the angle and distance under which to move.
if exact:
ang += u[0]
dist = u[1]
else:
ang += random.gauss(u[0], self.a_sigma)
dist = random.gauss(u[1], u[1] * self.d_sigma)
while ang > 2*math.pi:
ang -= 2*math.pi
while ang < -2*math.pi:
ang += 2*math.pi
# Calculate a step size of at most 0.1, so that the destination
# will be exactly reached.
steps = int(math.ceil(dist / 0.1))
x_step = dist / steps * math.cos(ang)
y_step = dist / steps * math.sin(ang)
# Take small steps until the destination is reached, or the
# robot collides with a wall.
step = 0
intersect = False
while step < steps and not intersect:
# Calculate the position after an incremented number of
# steps.
step += 1
position = (
coor[0] + step * x_step,
coor[1] + step * y_step
)
# Check if the robot collides with any of the walls. If so,
# make sure we exit the while-loop.
for wall in self.mapp.walls:
if self.intersects(position, wall):
intersect = True
step -= 1
break
# Calculate the final position of the robot and return this.
x = coor[0] + step * x_step
y = coor[1] + step * y_step
return (intersect, (ang, (x, y)))
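    # Illustrative call (values assumed): from state (0.0, (1.0, 1.0))
    # with control u = (math.pi/2, 1.0) and exact=True, a collision-free
    # move returns (False, (math.pi/2, (1.0, 2.0))) up to floating-point
    # error: the robot turns 90 degrees and advances one meter along y.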
def move(self, ang, dist, exact=False):
"""
Move the robot according to the motion model and update the
particles.
Inputs:
ang: The angle over which to rotate the robot.
dist: The distance over which to move the robot.
Output:
True if the particles approximate the robot pose good
enough.
"""
u = (ang, dist)
# Move the robot.
_, new_state = self.motion_model(u, exact=exact)
self.ang, self.coor = new_state
self.measurement = self.measure()
# Initialize the temporary particle list with a dummy particle.
# Elements are of the form ((ang, (x, y)), weight)
temp = [((0, (0, 0)), 0)]
for particle in self.particles:
_, new_part = self.motion_model(u, particle[0])
weight = self.measurement_model(new_part, particle[1])
temp.append((new_part, temp[-1][1] + weight, weight))
# Remove the dummy particle and empty the particle list.
temp.pop(0)
self.particles = []
rand_particles = []
total_weight = temp[-1][1]
self.set_weights(temp)
# Add num_particles new particles to the list, according to the
# cumulative distribution stored in temp[i][1].
for i in range(self.num_particles):
if random.random() < self.w_random:
rand_particles.append((self.random_particle(), 0))
else:
selector = random.random() * total_weight
# Find the largest temporary particle whose cumulative
# weight is smaller than the random selector.
k = 0
while temp[k][1] < selector:
k += 1
self.particles.append((temp[k][0], temp[k][2]))
# See if the non-random particles are close enough yet.
self.w_dist += self.alp_dist * (self.particles_distance() - self.w_dist)
self.particles.extend(rand_particles)
return self.w_dist < 0.5
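    # Resampling sketch (weights assumed): with three particles of
    # weights 0.2, 0.3 and 0.5, temp carries cumulative weights
    # [0.2, 0.5, 1.0]; a selector drawn uniformly from [0, 1.0) selects
    # the first entry whose cumulative weight reaches it, so the last
    # particle is redrawn with probability 0.5.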
def particles_distance(self):
"""
Calculate the average distance of the best portion of the
particles to the actual robot position.
"""
avg_num = len(self.particles)//3
        distances = [geom.dist_points(self.coor, p[0][1]) for p in self.particles]
return sum(sorted(distances)[:avg_num])/avg_num
def print(self):
"""
Print info on the location of the robot.
"""
print('angle: ' + str(round(self.ang, 2)) +
', coordinates: ('+str(round(self.coor[0], 2)) +
', ' + str(round(self.coor[1], 2)) + ')')
def draw(self):
"""
Draw a map with the robot and current particles on it.
Output:
An Image file, from the PIL module.
"""
return self.mapp.draw(
robot=self.coor,
particles=[p[0] for p in self.particles]
)
class Robot1(Robot):
half_measures = 25 # Half of the number of measurements (the total
# number must be even to simplify calculations.)
min_range = -10 # The minimal and
max_range = 10 # maximal measuring distance.
hit_sigma = 0.3 # See Thrun p. 172.
    def __init__(self, mapp, num_particles):
        self.measurement = []
        super().__init__(mapp, num_particles)
def set_weights(self, particles):
"""
Update the moving averages used to determine the number of
random particles that will be drawn.
Inputs:
particles: A list with the temporary particles:
(coordinate, cumulative weight, weight).
"""
w_max = max([p[2]**(1/len(self.measurement)) for p in particles])
self.w_slow += self.alp_slow * (w_max - self.w_slow)
self.w_fast += self.alp_fast * (w_max - self.w_fast)
self.w_random = 1 - 2*self.w_fast
def measure(self, state=None, exact=False):
"""
Do a range scan around a location on the map.
Inputs:
state: A tuple of the form (angle, (x, y)) describing the
robot location.
exact: A boolean describing wether or not to incorporate
noise in the measurements.
Output:
An array with at most half_measures*2 measurements.
Measurements are of the form (relative angle, distance) and
incorporate noise.
"""
# If no state is given, use the current state of the robot.
if state is None:
ang = self.ang
coor = self.coor
else:
ang = state[0]
coor = state[1]
measurement = []
# Do range_resolution measurements angles with uniform
# differences.
for i in range(self.half_measures):
theta = math.pi * i / self.half_measures
if exact:
real_angle = ang + theta
else:
real_angle = random.gauss(ang + theta, self.a_sigma)
beam = (
coor, (
coor[0] + math.cos(real_angle),
coor[1] + math.sin(real_angle)
)
)
# Loop through all the walls, and see if the beam hits them.
# Do this in both positive and negative direction, so that
# at the end of the loop we have the distances to the
# closest wall on either side of the robot.
pos_dist = self.max_range
neg_dist = -self.max_range
for wall in self.mapp.walls:
# Find the parameters for which the beam and the wall
# intersect.
t1, t2 = geom.intersect_lines(beam, wall)
# If t2 lies between 0 and 1, the beam hits the wall
# at a distance equal to t1.
if t2 >= 0 and t2 <= 1:
if t1 > 0 and t1 < pos_dist:
pos_dist = t1
elif t1 < 0 and t1 > neg_dist:
neg_dist = t1
# Add a noised version of both measurements to the list if
# they are valid.
if not exact:
pos_dist += random.gauss(0, self.d_sigma * pos_dist)
neg_dist += random.gauss(0, self.d_sigma * neg_dist)
measurement.append((
theta,
min(self.max_range, pos_dist)
))
measurement.append((
theta - math.pi,
min(self.max_range, -neg_dist)
))
return measurement
def measurement_model(self, particle, old_weight):
"""
Calculate the probability of a measurement at a location of the
robot.
Inputs:
particle: A tuple (angle, (x, y)).
old_weight: the old weight of the particle.
Output:
The probability of the measurement.
"""
ang, coor = particle
new_weight = 1
sqrt2pi = math.sqrt(2*math.pi) # Repeatedly used constant
        # Calculate the probability of each measurement and multiply
        # them into new_weight.
for meas in self.measurement:
if meas[1] != self.max_range:
x = coor[0] + meas[1] * math.cos(ang + meas[0])
y = coor[1] + meas[1] * math.sin(ang + meas[0])
d = self.mapp.closest_wall((x, y))
# Multiply the total measurement probability by the
# probability of this measurement, using a Gauss function
# with mean 0 and std dev hit_sigma.
w = math.exp(-d**2 / (2*self.hit_sigma**2)) / (self.hit_sigma*sqrt2pi) + 0.01
new_weight *= w
return new_weight
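    # Weighting sketch: with hit_sigma = 0.3, a beam endpoint that lands
    # exactly on a wall (d = 0) contributes roughly
    # 1/(0.3*sqrt(2*pi)) + 0.01 ~= 1.34 per measurement, while an
    # endpoint a full meter off any wall contributes little beyond the
    # 0.01 floor that keeps the product from collapsing to zero.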
def autonome_move(self):
"""
Find out an optimal direction to move in, and perform the move.
Output:
True if the particles approximate the robot pose good
enough.
"""
        # Only use the five particles with the highest weight.
particles = sorted(self.particles, key=lambda p: p[1], reverse=True)
particles = [p[0] for p in particles[:5]]
measurements = []
for p in particles:
state = (0, p[1])
measurements.append(self.measure(state=state, exact=True))
# Create a root state with empty angles list and usability
# factor 0. States always contain
# - A list with angles in which to rotate and move in order
# to reach the state.
# - The usability factor of the state.
# - A list of particles that must be used to find the best
# directions. Particles are of the form (angle, (x, y)).
states = [([], 0, particles)]
depth = 5
# Go depth steps deep to find the best direction under which to
# move the robot.
for i in range(depth):
# Only preserve the 3 best states to speed up calculations.
states = states[:2]
new_states = []
# Calculate all the children states for the preserved
# states and add them to a new list of states. Sort the list
# based on the usability factor.
for state in states:
new_states.extend(self.new_states(state, measurements))
states = sorted(new_states, key=lambda s: s[1], reverse=True)
# Take the best angle (the one with the highest factor) from the
# list and perform the actual move.
angle = states[0][0][0]
return self.move(angle, 1)
def new_states(self, state, measurements):
"""
This function is used by self.autonome_move(). Given a set of
particles, determine what the best directions are in order to
find the robot pose.
Inputs:
state: A tuple describing the state. See
self.autonome_move() for a full explanation.
measurements: A list with the measurements of the root
particles from self.autonome_move().
Output:
A list of new state similar to the input.
"""
angles = [i/5 * math.pi for i in range(-2, 3)]
particles = state[2]
new_states = []
# Loop through the list of angles that must be examined.
for angle in angles:
u = (angle, 1)
new_particles = []
# Calculate the next pose for all particles. Measure at the
# new pose and calculate the difference of this measurement
# with the measurement of the corresponding root particle.
# A higher difference is better.
factor = 0
for i in range(len(particles)):
_, new_part = self.motion_model(u, particles[i], exact=True)
new_particles.append(new_part)
new_state = (0, new_part[1])
measurement = self.measure(state=new_state, exact=True)
avg_diff = 0
for m in range(2*self.half_measures):
meas = measurements[i][m][1]
avg_diff += abs(meas - measurement[m][1])
avg_diff /= 2*self.half_measures
factor += avg_diff / len(particles)
# Add a state to the list of new states.
new_states.append((
state[0]+[angle],
state[1]+factor,
new_particles
))
return new_states
class Robot2(Robot):
    def __init__(self, mapp, num_particles):
        self.measurement = 0
        super().__init__(mapp, num_particles)
def set_weights(self, particles):
"""
Update the moving averages used to determine the number of
random particles that will be drawn.
Inputs:
particles: A list with the temporary particles:
(coordinate, cumulative weight, weight).
"""
w_avg = sum([p[2] for p in particles]) / self.num_particles
self.w_slow += self.alp_slow * (w_avg - self.w_slow)
self.w_fast += self.alp_fast * (w_avg - self.w_fast)
self.w_random = 1 - 4*self.w_fast
def measure(self, state=None):
"""
Measure the colour of the floor under the robot.
Inputs:
state: The location of the robot as a tuple (angle, (x, y)).
"""
# If no state is given, use the current state of the robot.
if state is None:
coor = self.coor
else:
coor = state[1]
return self.mapp.get_coordinate(coor)
def measurement_model(self, particle, old_weight):
"""
Calculate the probability of a measurement at a location of the
robot.
Inputs:
particle: A tuple (angle, (x, y)).
old_weight: the old weight of the particle.
Output:
The probability of the measurement.
"""
if self.mapp.get_coordinate(particle[1]) == self.measurement:
new_weight = 1
else:
new_weight = 0
return 0.1*old_weight + 0.9*new_weight
def autonome_move(self):
"""
Find out an optimal direction to move in, and perform the move.
Output:
True if the particles approximate the robot pose good
enough.
"""
# Only use the 20% of the particles with the highest weight.
particles = sorted(self.particles, key=lambda p: p[1], reverse=True)
particles = [p[0] for p in particles[:self.num_particles//5]]
# Create a root state with empty angles list and usability
# factor 0. States always contain
# - A list with previously found angles.
# - The usability factor of the state.
# - A list of particles that must be used to find the best
# directions. Particles are of the form (angle, (x, y)).
states = [([], 0, particles)]
depth = 4
# Go depth steps deep to find the best direction under which to
# move the robot.
for i in range(depth):
# Only preserve the 3 best states to speed up calculations.
states = states[:3]
new_states = []
# Calculate all the children states for the preserved
# states and add them to a new list of states. Sort the list
# based on the usability factor.
for state in states:
new_states.extend(self.new_states(state))
states = sorted(new_states, key=lambda s: s[1])
        # Take the best angle (the one with the lowest factor) from the
        # list and perform the actual move.
angle = states[0][0][0]
return self.move(angle, 1)
def new_states(self, state):
"""
This function is used by self.autonome_move(). Given a set of
particles, determine what the best directions are in order to
find the robot pose.
Inputs:
state: A tuple describing the state. See
self.autonome_move() for a full explanation.
Output:
A list of new state similar to the input.
"""
        angles = [i/5 * math.pi for i in range(-2, 3)]
particles = state[2]
new_states = []
# Loop through the list of angles that must be examined.
for angle in angles:
u = (angle, 1)
new_particles = []
# Calculate the next pose for all particles, and measure at
# the same time.
count = {}
for p in particles:
_, new_part = self.motion_model(u, p, exact=True)
new_particles.append(new_part)
meas = self.measure(state=new_part)
if not meas in count:
count[meas] = 1
else:
count[meas] += 1
# Calculate the usability factor. This is the sum of the
# squares of the frequencies of the floor colours measured
# by the different particles. Lower is better.
factor = sum([c**2 for c in count.values()])
# Add a state to the list of new states.
new_states.append((
state[0]+[angle],
state[1]+factor,
new_particles
))
return new_states
| Lebuin/project-robotica | robot.py | Python | gpl-2.0 | 21,805 |
#!/usr/bin/python
#
# This file is part of the Lampadas Documentation System.
#
# Copyright (c) 2000, 2001, 2002 David Merrill <[email protected]>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from Globals import *
from globals import *
from Config import config
from HTML import page_factory
from Tables import tables
from Sessions import sessions
from URLParse import URI
from Log import log
from mod_python import apache
import os
from CoreDM import dms
def document(req,
title='',
short_title='',
pub_status_code='',
type_code='',
topic_code='',
username='',
maintained='',
maintainer_wanted='',
lang='',
review_status_code='',
tech_review_status_code='',
pub_date='',
last_update='',
tickle_date='',
isbn='',
encoding='',
rating='',
format_code='',
dtd_code='',
license_code='',
copyright_holder='',
sk_seriesid='',
abstract='',
short_desc='',
collection_code='',
columns={},
layout='compact'
):
"""
Returns the results of a document search.
"""
# Read session state
sessions.get_session(req)
uri = URI(req.uri)
page = dms.page.get_by_id('doctable')
# serve search results by manually replacing the
# doctable here instead of during the regular call.
# It's a bit ugly, but works.
# We store and restore the contents to avoid doing
# a copy.deepcopy() which I haven't tested but imagine to
# be rather expensive. -- DCM
save_page = page.page[uri.lang]
table = tables.doctable(uri,
title = title,
short_title = short_title,
pub_status_code = pub_status_code,
type_code = type_code,
topic_code = topic_code,
username = username,
maintained = maintained,
maintainer_wanted = maintainer_wanted,
lang = lang,
review_status_code = review_status_code,
tech_review_status_code = tech_review_status_code,
pub_date = pub_date,
last_update = last_update,
tickle_date = tickle_date,
isbn = isbn,
encoding = encoding,
rating = rating,
format_code = format_code,
dtd_code = dtd_code,
license_code = license_code,
copyright_holder = copyright_holder,
sk_seriesid = sk_seriesid,
abstract = abstract,
short_desc = short_desc,
collection_code = collection_code,
layout = layout,
show_search = 1)
page.page[uri.lang] = page.page[uri.lang].replace('|tabdocs|', table)
uri = URI('doctable' + referer_lang_ext(req))
uri.base = '../../'
html = page_factory.build_page(page, uri)
# Restore the original page
page.page[uri.lang] = save_page
return html
| tLDP/lampadas | pylib/data/search.py | Python | gpl-2.0 | 4,556 |
#!/usr/bin/env python3
"""
Sandbox for "swell-foop" command
"""
import glob
import os
import signal
import sys
import network_mod
import subtask_mod
class Main:
"""
Main class
"""
def __init__(self) -> None:
try:
self.config()
sys.exit(self.run())
except (EOFError, KeyboardInterrupt):
sys.exit(114)
except SystemExit as exception:
sys.exit(exception)
@staticmethod
def config() -> None:
"""
Configure program
"""
if hasattr(signal, 'SIGPIPE'):
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
if os.name == 'nt':
argv = []
for arg in sys.argv:
files = glob.glob(arg) # Fixes Windows globbing bug
if files:
argv.extend(files)
else:
argv.append(arg)
sys.argv = argv
@staticmethod
def run() -> int:
"""
Start program
"""
command = network_mod.Sandbox(
'/usr/games/swell-foop',
args=sys.argv[1:],
errors='stop'
)
# Start slow for very large history (.local/share/swell-foop/)
if not os.path.isfile(command.get_file() + '.py'):
configs = [
'/dev/dri',
f'/run/user/{os.getuid()}/dconf',
os.path.join(os.getenv('HOME', '/'), '.config/dconf/user'),
]
command.sandbox(configs)
subtask_mod.Background(command.get_cmdline()).run()
return 0
if __name__ == '__main__':
if '--pydoc' in sys.argv:
help(__name__)
else:
Main()
| drtuxwang/system-config | bin/swell_foop_.py | Python | gpl-2.0 | 1,717 |
import logging
from autotest.client.shared import error
from virttest import aexpect, utils_misc
@error.context_aware
def run_autotest_regression(test, params, env):
"""
Autotest regression test:
Use Virtual Machines to test autotest.
    1) Clone the given guest OS (only Linux) image twice.
    2) Boot 2 VMs (autotest_server_vm and autotest_client_vm)
    3) Install the autotest server in the server vm
    4) Run the unittests
    5) Run the pylint checker
    6) Run a simple client sleeptest
    7) Run a simple server sleeptest
    8) Register the client vm in the autotest server
    9) Schedule a simple job sleeptest in the client. Wait for client reboot.
    10) If any of these steps have failed, fail the test and report the error
@param test: virt test object
@param params: Dictionary with the test parameters
@param env: Dictionary with test environment.
"""
step_failures = []
autotest_repo = params['autotest_repo']
autotest_branch = params['autotest_branch']
autotest_commit = params['autotest_commit']
password = params['password']
autotest_install_timeout = int(params.get('autotest_install_timeout', 1800))
unittests_run_timeout = int(params.get('unittests_run_timeout', 1800))
pylint_run_timeout = int(params.get('pylint_run_timeout', 1800))
vm_names = params["vms"].split()
server_name = vm_names[0]
client_name = vm_names[1]
vm_server = env.get_vm(server_name)
vm_server.verify_alive()
vm_client = env.get_vm(client_name)
vm_client.verify_alive()
timeout = float(params.get("login_timeout", 240))
session_server = vm_server.wait_for_login(timeout=timeout)
session_client = vm_client.wait_for_login(timeout=timeout)
client_ip = vm_client.get_address()
server_ip = vm_server.get_address()
step1 = "autotest-server-install"
try:
installer_file = "install-autotest-server.sh"
installer_url = ("https://raw.github.com/autotest/autotest/master"
"/contrib/%s" % installer_file)
# Download the install script and execute it
download_cmd = ("python -c 'from urllib2 import urlopen; "
"r = urlopen(\"%s\"); "
"f = open(\"%s\", \"w\"); "
"f.write(r.read())'" % (installer_url,
installer_file))
session_server.cmd(download_cmd)
permission_cmd = ("chmod +x install-autotest-server.sh")
session_server.cmd(permission_cmd)
install_cmd = ("./install-autotest-server.sh -u Aut0t3st -d Aut0t3st "
"-g %s -b %s" % (autotest_repo, autotest_branch))
if autotest_commit:
install_cmd += " -c %s" % autotest_commit
session_server.cmd(install_cmd, timeout=autotest_install_timeout)
vm_server.copy_files_from(guest_path="/tmp/install-autotest-server*log",
host_path=test.resultsdir)
except aexpect.ShellCmdError, e:
for line in e.output.splitlines():
logging.error(line)
step_failures.append(step1)
top_commit = None
try:
session_server.cmd("test -d /usr/local/autotest/.git")
session_server.cmd("cd /usr/local/autotest")
top_commit = session_server.cmd("echo `git log -n 1 --pretty=format:%H`")
top_commit = top_commit.strip()
logging.info("Autotest top commit for repo %s, branch %s: %s",
autotest_repo, autotest_branch, top_commit)
except aexpect.ShellCmdError, e:
for line in e.output.splitlines():
logging.error(line)
if top_commit is not None:
session_server.close()
session_server = vm_server.wait_for_login(timeout=timeout,
username='autotest',
password='Aut0t3st')
step2 = "unittests"
try:
session_server.cmd("cd /usr/local/autotest")
session_server.cmd("utils/unittest_suite.py --full",
timeout=unittests_run_timeout)
except aexpect.ShellCmdError, e:
for line in e.output.splitlines():
logging.error(line)
step_failures.append(step2)
step3 = "pylint"
try:
session_server.cmd("cd /usr/local/autotest")
session_server.cmd("utils/check_patch.py --full --yes",
timeout=pylint_run_timeout)
except aexpect.ShellCmdError, e:
for line in e.output.splitlines():
logging.error(line)
step_failures.append(step3)
step4 = "client_run"
try:
session_server.cmd("cd /usr/local/autotest/client")
session_server.cmd("./autotest-local run sleeptest",
timeout=pylint_run_timeout)
session_server.cmd("rm -rf results/default")
except aexpect.ShellCmdError, e:
for line in e.output.splitlines():
logging.error(line)
step_failures.append(step4)
step5 = "server_run"
try:
session_client.cmd("iptables -F")
session_server.cmd("cd /usr/local/autotest")
session_server.cmd("server/autotest-remote -m %s --ssh-user root "
"--ssh-pass %s "
"-c client/tests/sleeptest/control" %
(client_ip, password),
timeout=pylint_run_timeout)
session_server.cmd("rm -rf results-*")
except aexpect.ShellCmdError, e:
for line in e.output.splitlines():
logging.error(line)
step_failures.append(step5)
step6 = "registering_client_cli"
try:
label_name = "label-%s" % utils_misc.generate_random_id()
create_label_cmd = ("/usr/local/autotest/cli/autotest-rpc-client "
"label create -t %s -w %s" %
(label_name, server_ip))
session_server.cmd(create_label_cmd)
list_labels_cmd = ("/usr/local/autotest/cli/autotest-rpc-client "
"label list -a -w %s" % server_ip)
list_labels_output = session_server.cmd(list_labels_cmd)
for line in list_labels_output.splitlines():
logging.debug(line)
if not label_name in list_labels_output:
raise ValueError("No label %s in the output of %s" %
(label_name, list_labels_cmd))
create_host_cmd = ("/usr/local/autotest/cli/autotest-rpc-client "
"host create -t %s %s -w %s" %
(label_name, client_ip, server_ip))
session_server.cmd(create_host_cmd)
list_hosts_cmd = ("/usr/local/autotest/cli/autotest-rpc-client "
"host list -w %s" % server_ip)
list_hosts_output = session_server.cmd(list_hosts_cmd)
for line in list_hosts_output.splitlines():
logging.debug(line)
if not client_ip in list_hosts_output:
raise ValueError("No client %s in the output of %s" %
(client_ip, create_label_cmd))
if not label_name in list_hosts_output:
raise ValueError("No label %s in the output of %s" %
(label_name, create_label_cmd))
except (aexpect.ShellCmdError, ValueError), e:
if isinstance(e, aexpect.ShellCmdError):
for line in e.output.splitlines():
logging.error(line)
elif isinstance(e, ValueError):
logging.error(e)
step_failures.append(step6)
step7 = "running_job_cli"
try:
session_client.cmd("iptables -F")
job_name = "Sleeptest %s" % utils_misc.generate_random_id()
def job_is_status(status):
list_jobs_cmd = ("/usr/local/autotest/cli/autotest-rpc-client "
"job list -a -w %s" % server_ip)
list_jobs_output = session_server.cmd(list_jobs_cmd)
if job_name in list_jobs_output:
if status in list_jobs_output:
return True
elif "Aborted" in list_jobs_output:
raise ValueError("Job is in aborted state")
elif "Failed" in list_jobs_output:
raise ValueError("Job is in failed state")
else:
return False
else:
                raise ValueError("Job %s does not show in the "
                                 "output of %s" % (job_name, list_jobs_cmd))
def job_is_completed():
return job_is_status("Completed")
def job_is_running():
return job_is_status("Running")
job_create_cmd = ("/usr/local/autotest/cli/autotest-rpc-client "
"job create --test sleeptest -m %s '%s' -w %s" %
(client_ip, job_name, server_ip))
session_server.cmd(job_create_cmd)
if not utils_misc.wait_for(job_is_running, 300, 0, 10,
"Waiting for job to start running"):
raise ValueError("Job did not start running")
# Wait for the session to become unresponsive
if not utils_misc.wait_for(lambda: not session_client.is_responsive(),
timeout=300):
            raise ValueError("Client machine did not reboot")
# Establish a new client session
session_client = vm_client.wait_for_login(timeout=timeout)
# Wait for the job to complete
if not utils_misc.wait_for(job_is_completed, 300, 0, 10,
"Waiting for job to complete"):
raise ValueError("Job did not complete")
# Copy logs back so we can analyze them
vm_server.copy_files_from(guest_path="/usr/local/autotest/results/*",
host_path=test.resultsdir)
except (aexpect.ShellCmdError, ValueError), e:
if isinstance(e, aexpect.ShellCmdError):
for line in e.output.splitlines():
logging.error(line)
elif isinstance(e, ValueError):
logging.error(e)
step_failures.append(step7)
def report_version():
if top_commit is not None:
logging.info("Autotest git repo: %s", autotest_repo)
            logging.info("Autotest git branch: %s", autotest_branch)
logging.info("Autotest top commit: %s", top_commit)
if step_failures:
logging.error("The autotest regression testing failed")
report_version()
raise error.TestFail("The autotest regression testing had the "
"following steps failed: %s" % step_failures)
else:
logging.info("The autotest regression testing passed")
report_version()
| sathnaga/virt-test | tests/autotest_regression.py | Python | gpl-2.0 | 11,381 |
from Screens.Screen import Screen
from Components.GUIComponent import GUIComponent
from Components.VariableText import VariableText
from Components.ActionMap import ActionMap
from Components.Label import Label
from Components.Button import Button
from Components.FileList import FileList
from Components.ScrollLabel import ScrollLabel
from Components.config import config, configfile
from Components.FileList import MultiFileSelectList
from Screens.MessageBox import MessageBox
from os import path, remove, walk, stat, rmdir
from time import time
from enigma import eTimer, eBackgroundFileEraser, eLabel
from glob import glob
import Components.Task
# Import smtplib for the actual sending function
import smtplib, base64
# Here are the email package modules we'll need
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.Utils import formatdate
_session = None
def get_size(start_path=None):
total_size = 0
if start_path:
for dirpath, dirnames, filenames in walk(start_path):
for f in filenames:
fp = path.join(dirpath, f)
total_size += path.getsize(fp)
return total_size
return 0
def AutoLogManager(session=None, **kwargs):
global debuglogcheckpoller
debuglogcheckpoller = LogManagerPoller()
debuglogcheckpoller.start()
class LogManagerPoller:
"""Automatically Poll LogManager"""
def __init__(self):
# Init Timer
self.TrimTimer = eTimer()
self.TrashTimer = eTimer()
def start(self):
if self.TrimTimerJob not in self.TrimTimer.callback:
self.TrimTimer.callback.append(self.TrimTimerJob)
if self.TrashTimerJob not in self.TrashTimer.callback:
self.TrashTimer.callback.append(self.TrashTimerJob)
self.TrimTimer.startLongTimer(0)
self.TrashTimer.startLongTimer(0)
def stop(self):
if self.TrimTimerJob in self.TrimTimer.callback:
self.TrimTimer.callback.remove(self.TrimTimerJob)
if self.TrashTimerJob in self.TrashTimer.callback:
self.TrashTimer.callback.remove(self.TrashTimerJob)
self.TrimTimer.stop()
self.TrashTimer.stop()
def TrimTimerJob(self):
print '[LogManager] Trim Poll Started'
Components.Task.job_manager.AddJob(self.createTrimJob())
def TrashTimerJob(self):
print '[LogManager] Trash Poll Started'
self.JobTrash()
# Components.Task.job_manager.AddJob(self.createTrashJob())
def createTrimJob(self):
job = Components.Task.Job(_("LogManager"))
task = Components.Task.PythonTask(job, _("Checking Logs..."))
task.work = self.JobTrim
task.weighting = 1
return job
def createTrashJob(self):
job = Components.Task.Job(_("LogManager"))
task = Components.Task.PythonTask(job, _("Checking Logs..."))
task.work = self.JobTrash
task.weighting = 1
return job
def openFiles(self, ctimeLimit, allowedBytes):
ctimeLimit = ctimeLimit
allowedBytes = allowedBytes
def JobTrim(self):
filename = ""
for filename in glob(config.crash.debug_path.value + '*.log'):
try:
if path.getsize(filename) > (config.crash.debugloglimit.value * 1024 * 1024):
fh = open(filename, 'rb+')
fh.seek(-(config.crash.debugloglimit.value * 1024 * 1024), 2)
data = fh.read()
fh.seek(0) # rewind
fh.write(data)
fh.truncate()
fh.close()
except:
pass
self.TrimTimer.startLongTimer(3600) #once an hour
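	# Trim sketch: fh.seek(-limit, 2) positions limit bytes before EOF,
	# so fh.read() captures only the newest bytes of the log; rewinding,
	# rewriting and truncating then leaves the file holding just that
	# tail.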
def JobTrash(self):
ctimeLimit = time() - (config.crash.daysloglimit.value * 3600 * 24)
allowedBytes = 1024*1024 * int(config.crash.sizeloglimit.value)
mounts = []
matches = []
print "[LogManager] probing folders"
f = open('/proc/mounts', 'r')
for line in f.readlines():
parts = line.strip().split()
mounts.append(parts[1])
f.close()
for mount in mounts:
if path.isdir(path.join(mount,'logs')):
matches.append(path.join(mount,'logs'))
matches.append('/home/root/logs')
print "[LogManager] found following log's:", matches
if len(matches):
for logsfolder in matches:
print "[LogManager] looking in:", logsfolder
logssize = get_size(logsfolder)
bytesToRemove = logssize - allowedBytes
candidates = []
size = 0
for root, dirs, files in walk(logsfolder, topdown=False):
for name in files:
try:
fn = path.join(root, name)
st = stat(fn)
if st.st_ctime < ctimeLimit:
print "[LogManager] " + str(fn) + ": Too old:", name, st.st_ctime
eBackgroundFileEraser.getInstance().erase(fn)
bytesToRemove -= st.st_size
else:
candidates.append((st.st_ctime, fn, st.st_size))
size += st.st_size
except Exception, e:
print "[LogManager] Failed to stat %s:"% name, e
# Remove empty directories if possible
for name in dirs:
try:
rmdir(path.join(root, name))
except:
pass
candidates.sort()
				# Now we have a list of (ctime, filename, size) tuples, sorted by ctime (=deletion time).
for st_ctime, fn, st_size in candidates:
print "[LogManager] " + str(logsfolder) + ": bytesToRemove", bytesToRemove
if bytesToRemove < 0:
break
eBackgroundFileEraser.getInstance().erase(fn)
bytesToRemove -= st_size
size -= st_size
self.TrashTimer.startLongTimer(43200) #twice a day
class LogManager(Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.logtype = 'crashlogs'
self['myactions'] = ActionMap(['ColorActions', 'OkCancelActions', 'DirectionActions'],
{
'ok': self.changeSelectionState,
'cancel': self.close,
'red': self.changelogtype,
'green': self.showLog,
'yellow': self.deletelog,
'blue': self.sendlog,
"left": self.left,
"right": self.right,
"down": self.down,
"up": self.up
}, -1)
self["key_red"] = Button(_("Debug Logs"))
self["key_green"] = Button(_("View"))
self["key_yellow"] = Button(_("Delete"))
self["key_blue"] = Button(_("Send"))
self.onChangedEntry = [ ]
self.sentsingle = ""
self.selectedFiles = config.logmanager.sentfiles.value
self.previouslySent = config.logmanager.sentfiles.value
self.defaultDir = config.crash.debug_path.value
self.matchingPattern = 'enigma2_crash_'
self.filelist = MultiFileSelectList(self.selectedFiles, self.defaultDir, showDirectories = False, matchingPattern = self.matchingPattern )
self["list"] = self.filelist
self["LogsSize"] = self.logsinfo = LogInfo(config.crash.debug_path.value, LogInfo.USED, update=False)
self.onLayoutFinish.append(self.layoutFinished)
if not self.selectionChanged in self["list"].onSelectionChanged:
self["list"].onSelectionChanged.append(self.selectionChanged)
def createSummary(self):
from Screens.PluginBrowser import PluginBrowserSummary
return PluginBrowserSummary
def selectionChanged(self):
item = self["list"].getCurrent()
desc = ""
if item:
name = str(item[0][0])
else:
name = ""
for cb in self.onChangedEntry:
cb(name, desc)
def layoutFinished(self):
self["LogsSize"].update(config.crash.debug_path.value)
idx = 0
self["list"].moveToIndex(idx)
self.setWindowTitle()
def setWindowTitle(self):
self.setTitle(self.defaultDir)
def up(self):
self["list"].up()
def down(self):
self["list"].down()
def left(self):
self["list"].pageUp()
def right(self):
self["list"].pageDown()
def saveSelection(self):
self.selectedFiles = self["list"].getSelectedList()
self.previouslySent = self["list"].getSelectedList()
config.logmanager.sentfiles.setValue(self.selectedFiles)
config.logmanager.sentfiles.save()
configfile.save()
def exit(self):
self.close(None)
def changeSelectionState(self):
try:
self.sel = self["list"].getCurrent()[0]
except:
self.sel = None
if self.sel:
self["list"].changeSelectionState()
self.selectedFiles = self["list"].getSelectedList()
def changelogtype(self):
self["LogsSize"].update(config.crash.debug_path.value)
import re
if self.logtype == 'crashlogs':
self["key_red"].setText(_("Crash Logs"))
self.logtype = 'debuglogs'
self.matchingPattern = 'Enigma2'
else:
self["key_red"].setText(_("Debug Logs"))
self.logtype = 'crashlogs'
self.matchingPattern = 'enigma2_crash_'
self["list"].matchingPattern = re.compile(self.matchingPattern)
self["list"].changeDir(self.defaultDir)
def showLog(self):
try:
self.sel = self["list"].getCurrent()[0]
except:
self.sel = None
if self.sel:
self.session.open(LogManagerViewLog, self.sel[0])
def deletelog(self):
try:
self.sel = self["list"].getCurrent()[0]
except:
self.sel = None
self.selectedFiles = self["list"].getSelectedList()
if self.selectedFiles:
message = _("Do you want to delete all selected files:\n(choose 'No' to only delete the currently selected file.)")
ybox = self.session.openWithCallback(self.doDelete1, MessageBox, message, MessageBox.TYPE_YESNO)
ybox.setTitle(_("Delete Confirmation"))
elif self.sel:
message = _("Are you sure you want to delete this log:\n") + str(self.sel[0])
ybox = self.session.openWithCallback(self.doDelete3, MessageBox, message, MessageBox.TYPE_YESNO)
ybox.setTitle(_("Delete Confirmation"))
else:
self.session.open(MessageBox, _("You have selected no logs to delete."), MessageBox.TYPE_INFO, timeout = 10)
def doDelete1(self, answer):
self.selectedFiles = self["list"].getSelectedList()
self.selectedFiles = ",".join(self.selectedFiles).replace(",", " ")
self.sel = self["list"].getCurrent()[0]
if answer is True:
message = _("Are you sure you want to delete all selected logs:\n") + self.selectedFiles
ybox = self.session.openWithCallback(self.doDelete2, MessageBox, message, MessageBox.TYPE_YESNO)
ybox.setTitle(_("Delete Confirmation"))
else:
message = _("Are you sure you want to delete this log:\n") + str(self.sel[0])
ybox = self.session.openWithCallback(self.doDelete3, MessageBox, message, MessageBox.TYPE_YESNO)
ybox.setTitle(_("Delete Confirmation"))
def doDelete2(self, answer):
if answer is True:
self.selectedFiles = self["list"].getSelectedList()
self["list"].instance.moveSelectionTo(0)
for f in self.selectedFiles:
remove(f)
config.logmanager.sentfiles.setValue("")
config.logmanager.sentfiles.save()
configfile.save()
self["list"].changeDir(self.defaultDir)
def doDelete3(self, answer):
if answer is True:
self.sel = self["list"].getCurrent()[0]
self["list"].instance.moveSelectionTo(0)
if path.exists(self.defaultDir + self.sel[0]):
remove(self.defaultDir + self.sel[0])
self["list"].changeDir(self.defaultDir)
self["LogsSize"].update(config.crash.debug_path.value)
	def sendlog(self, additionalinfo = None):
try:
self.sel = self["list"].getCurrent()[0]
except:
self.sel = None
if self.sel:
self.sel = str(self.sel[0])
self.selectedFiles = self["list"].getSelectedList()
self.resend = False
for send in self.previouslySent:
if send in self.selectedFiles:
self.selectedFiles.remove(send)
if send == (self.defaultDir + self.sel):
self.resend = True
if self.selectedFiles:
message = _("Do you want to send all selected files:\n(choose 'No' to only send the currently selected file.)")
ybox = self.session.openWithCallback(self.sendlog1, MessageBox, message, MessageBox.TYPE_YESNO)
ybox.setTitle(_("Send Confirmation"))
elif self.sel and not self.resend:
self.sendallfiles = False
message = _("Are you sure you want to send this log:\n") + self.sel
ybox = self.session.openWithCallback(self.sendlog2, MessageBox, message, MessageBox.TYPE_YESNO)
ybox.setTitle(_("Send Confirmation"))
elif self.sel and self.resend:
self.sendallfiles = False
message = _("You have already sent this log, are you sure you want to resend this log:\n") + self.sel
ybox = self.session.openWithCallback(self.sendlog2, MessageBox, message, MessageBox.TYPE_YESNO)
ybox.setTitle(_("Send Confirmation"))
else:
self.session.open(MessageBox, _("You have selected no logs to send."), MessageBox.TYPE_INFO, timeout = 10)
def sendlog1(self, answer):
if answer:
self.sendallfiles = True
message = _("Do you want to add any additional information ?")
ybox = self.session.openWithCallback(self.sendlog3, MessageBox, message, MessageBox.TYPE_YESNO)
ybox.setTitle(_("Additional Info"))
else:
self.sendallfiles = False
message = _("Are you sure you want to send this log:\n") + str(self.sel[0])
ybox = self.session.openWithCallback(self.sendlog2, MessageBox, message, MessageBox.TYPE_YESNO)
ybox.setTitle(_("Send Confirmation"))
def sendlog2(self, answer):
if answer:
self.sendallfiles = False
message = _("Do you want to add any additional information ?")
ybox = self.session.openWithCallback(self.sendlog3, MessageBox, message, MessageBox.TYPE_YESNO)
ybox.setTitle(_("Additional Info"))
def sendlog3(self, answer):
if answer:
message = _("Do you want to attach a text file to explain the log ?\n(choose 'No' to type message using virtual keyboard.)")
ybox = self.session.openWithCallback(self.sendlog4, MessageBox, message, MessageBox.TYPE_YESNO)
ybox.setTitle(_("Attach a file"))
else:
self.doSendlog()
def sendlog4(self, answer):
if answer:
self.session.openWithCallback(self.doSendlog, LogManagerFb)
else:
from Screens.VirtualKeyBoard import VirtualKeyBoard
   self.session.openWithCallback(self.doSendlog, VirtualKeyBoard, title = 'Additional Info')
 def doSendlog(self, additionalinfo = None):
ref = str(time())
# Create the container (outer) email message.
msg = MIMEMultipart()
if config.logmanager.user.value != '' and config.logmanager.useremail.value != '':
fromlogman = config.logmanager.user.value + ' <' + config.logmanager.useremail.value + '>'
tocrashlogs = '[email protected]'
msg['From'] = fromlogman
msg['To'] = tocrashlogs
msg['Cc'] = fromlogman
msg['Date'] = formatdate(localtime=True)
msg['Subject'] = 'Ref: ' + ref
if additonalinfo != "":
msg.attach(MIMEText(additonalinfo, 'plain'))
else:
msg.attach(MIMEText(config.logmanager.additionalinfo.value, 'plain'))
if self.sendallfiles:
self.selectedFiles = self["list"].getSelectedList()
for send in self.previouslySent:
if send in self.selectedFiles:
self.selectedFiles.remove(send)
self.sel = ",".join(self.selectedFiles).replace(",", " ")
self["list"].instance.moveSelectionTo(0)
for f in self.selectedFiles:
self.previouslySent.append(f)
fp = open(f, 'rb')
data = MIMEText(fp.read())
fp.close()
msg.attach(data)
self.saveSelection()
sentfiles = self.sel
else:
self.sel = self["list"].getCurrent()[0]
self.sel = str(self.sel[0])
sentfiles = self.sel
fp = open((self.defaultDir + self.sel), 'rb')
data = MIMEText(fp.read())
fp.close()
msg.attach(data)
self.sentsingle = self.defaultDir + self.sel
self.changeSelectionState()
self.saveSelection()
# Send the email via our own SMTP server.
wos_user = '[email protected]'
wos_pwd = base64.b64decode('TF95X0dCRlRFNHRDenVKN1dNdlEkZj14')
try:
print "connecting to server: mail.openld.es"
#socket.setdefaulttimeout(30)
s = smtplib.SMTP("mail.openld.es",25)
s.login(wos_user, wos_pwd)
if config.logmanager.usersendcopy.value:
s.sendmail(fromlogman, [tocrashlogs, fromlogman], msg.as_string())
s.quit()
     self.session.open(MessageBox, sentfiles + ' ' + _('has been sent to the SVN team.\nPlease quote') + ' ' + str(ref) + ' ' + _('when asking questions about this log\n\nA copy has been sent to yourself.'), MessageBox.TYPE_INFO)
else:
s.sendmail(fromlogman, tocrashlogs, msg.as_string())
s.quit()
     self.session.open(MessageBox, sentfiles + ' ' + _('has been sent to the SVN team.\nPlease quote') + ' ' + str(ref) + ' ' + _('when asking questions about this log'), MessageBox.TYPE_INFO)
except Exception,e:
self.session.open(MessageBox, _("Error:\n%s" % e), MessageBox.TYPE_INFO, timeout = 10)
else:
   self.session.open(MessageBox, _('You have not set up your user info in the setup screen\nPress MENU, and enter your info, then try again'), MessageBox.TYPE_INFO, timeout = 10)
def myclose(self):
self.close()
class LogManagerViewLog(Screen):
def __init__(self, session, selected):
self.session = session
Screen.__init__(self, session)
self.setTitle(selected)
if path.exists(config.crash.debug_path.value + selected):
log = file(config.crash.debug_path.value + selected).read()
else:
log = ""
self["list"] = ScrollLabel(str(log))
self["setupActions"] = ActionMap(["SetupActions", "ColorActions", "DirectionActions"],
{
"cancel": self.cancel,
"ok": self.cancel,
"up": self["list"].pageUp,
"down": self["list"].pageDown,
"right": self["list"].lastPage
}, -2)
def cancel(self):
self.close()
class LogManagerFb(Screen):
def __init__(self, session, logpath=None):
if logpath is None:
if path.isdir(config.logmanager.path.value):
logpath = config.logmanager.path.value
else:
logpath = "/"
self.session = session
Screen.__init__(self, session)
self["list"] = FileList(logpath, matchingPattern = "^.*")
self["red"] = Label(_("delete"))
self["green"] = Label(_("move"))
self["yellow"] = Label(_("copy"))
self["blue"] = Label(_("rename"))
self["actions"] = ActionMap(["ChannelSelectBaseActions","WizardActions", "DirectionActions", "MenuActions", "NumberActions", "ColorActions"],
{
"ok": self.ok,
"back": self.exit,
"up": self.goUp,
"down": self.goDown,
"left": self.goLeft,
"right": self.goRight,
"0": self.doRefresh,
}, -1)
self.onLayoutFinish.append(self.mainlist)
def exit(self):
config.logmanager.additionalinfo.setValue("")
if self["list"].getCurrentDirectory():
config.logmanager.path.setValue(self["list"].getCurrentDirectory())
config.logmanager.path.save()
self.close()
def ok(self):
if self.SOURCELIST.canDescent(): # isDir
self.SOURCELIST.descent()
if self.SOURCELIST.getCurrentDirectory(): #??? when is it none
self.setTitle(self.SOURCELIST.getCurrentDirectory())
else:
self.onFileAction()
def goLeft(self):
self.SOURCELIST.pageUp()
def goRight(self):
self.SOURCELIST.pageDown()
def goUp(self):
self.SOURCELIST.up()
def goDown(self):
self.SOURCELIST.down()
def doRefresh(self):
self.SOURCELIST.refresh()
def mainlist(self):
self["list"].selectionEnabled(1)
self.SOURCELIST = self["list"]
self.setTitle(self.SOURCELIST.getCurrentDirectory())
def onFileAction(self):
config.logmanager.additionalinfo.setValue(file(self.SOURCELIST.getCurrentDirectory()+self.SOURCELIST.getFilename()).read())
if self["list"].getCurrentDirectory():
config.logmanager.path.setValue(self["list"].getCurrentDirectory())
config.logmanager.path.save()
self.close()
class LogInfo(VariableText, GUIComponent):
FREE = 0
USED = 1
SIZE = 2
def __init__(self, path, type, update = True):
GUIComponent.__init__(self)
VariableText.__init__(self)
self.type = type
# self.path = config.crash.debug_path.value
if update:
self.update(path)
def update(self, path):
try:
total_size = get_size(path)
except OSError:
return -1
if self.type == self.USED:
try:
if total_size < 10000000:
total_size = "%d kB" % (total_size >> 10)
elif total_size < 10000000000:
total_size = "%d MB" % (total_size >> 20)
else:
total_size = "%d GB" % (total_size >> 30)
self.setText(_("Space used:") + " " + total_size)
except:
# occurs when f_blocks is 0 or a similar error
self.setText("-?-")
GUI_WIDGET = eLabel
| popazerty/enigma2 | lib/python/Screens/LogManager.py | Python | gpl-2.0 | 19,571 |
# This file is part of Scapy
# See http://www.secdev.org/projects/scapy for more information
# Copyright (C) Philippe Biondi <[email protected]>
# This program is published under a GPLv2 license
"""
SuperSocket.
"""
from __future__ import absolute_import
from select import select, error as select_error
import errno
import os
import socket
import struct
import time
from scapy.config import conf
from scapy.consts import LINUX, DARWIN, WINDOWS
from scapy.data import MTU, ETH_P_IP
from scapy.compat import raw, bytes_encode
from scapy.error import warning, log_runtime
import scapy.modules.six as six
import scapy.packet
from scapy.utils import PcapReader, tcpdump
class _SuperSocket_metaclass(type):
def __repr__(self):
if self.desc is not None:
return "<%s: %s>" % (self.__name__, self.desc)
else:
return "<%s>" % self.__name__
class SuperSocket(six.with_metaclass(_SuperSocket_metaclass)):
desc = None
closed = 0
nonblocking_socket = False
read_allowed_exceptions = ()
def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0): # noqa: E501
self.ins = socket.socket(family, type, proto)
self.outs = self.ins
self.promisc = None
def send(self, x):
sx = raw(x)
try:
x.sent_time = time.time()
except AttributeError:
pass
return self.outs.send(sx)
def recv_raw(self, x=MTU):
"""Returns a tuple containing (cls, pkt_data, time)"""
return conf.raw_layer, self.ins.recv(x), None
def recv(self, x=MTU):
cls, val, ts = self.recv_raw(x)
if not val or not cls:
return
try:
pkt = cls(val)
except KeyboardInterrupt:
raise
except Exception:
if conf.debug_dissector:
from scapy.sendrecv import debug
debug.crashed_on = (cls, val)
raise
pkt = conf.raw_layer(val)
if ts:
pkt.time = ts
return pkt
def fileno(self):
return self.ins.fileno()
def close(self):
if self.closed:
return
self.closed = True
if getattr(self, "outs", None):
if getattr(self, "ins", None) != self.outs:
if WINDOWS or self.outs.fileno() != -1:
self.outs.close()
if getattr(self, "ins", None):
if WINDOWS or self.ins.fileno() != -1:
self.ins.close()
def sr(self, *args, **kargs):
from scapy import sendrecv
return sendrecv.sndrcv(self, *args, **kargs)
def sr1(self, *args, **kargs):
from scapy import sendrecv
a, b = sendrecv.sndrcv(self, *args, **kargs)
if len(a) > 0:
return a[0][1]
else:
return None
def sniff(self, *args, **kargs):
from scapy import sendrecv
return sendrecv.sniff(opened_socket=self, *args, **kargs)
def tshark(self, *args, **kargs):
from scapy import sendrecv
return sendrecv.tshark(opened_socket=self, *args, **kargs)
@staticmethod
def select(sockets, remain=conf.recv_poll_rate):
"""This function is called during sendrecv() routine to select
the available sockets.
:param sockets: an array of sockets that need to be selected
:returns: an array of sockets that were selected and
 the function to be called next to get the packets (e.g. recv)
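 Example (illustrative only; s1 and s2 stand for open SuperSocket
 instances; the base implementation returns None as the second value,
 meaning recv() should be used)::
     ready, _ = SuperSocket.select([s1, s2], remain=0.05)
     for s in ready:
         pkt = s.recv()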
"""
try:
inp, _, _ = select(sockets, [], [], remain)
except (IOError, select_error) as exc:
# select.error has no .errno attribute
if exc.args[0] != errno.EINTR:
raise
return inp, None
def __del__(self):
"""Close the socket"""
self.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
"""Close the socket"""
self.close()
class L3RawSocket(SuperSocket):
desc = "Layer 3 using Raw sockets (PF_INET/SOCK_RAW)"
def __init__(self, type=ETH_P_IP, filter=None, iface=None, promisc=None, nofilter=0): # noqa: E501
self.outs = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_RAW) # noqa: E501
self.outs.setsockopt(socket.SOL_IP, socket.IP_HDRINCL, 1)
self.ins = socket.socket(socket.AF_PACKET, socket.SOCK_RAW, socket.htons(type)) # noqa: E501
if iface is not None:
self.ins.bind((iface, type))
def recv(self, x=MTU):
pkt, sa_ll = self.ins.recvfrom(x)
if sa_ll[2] == socket.PACKET_OUTGOING:
return None
if sa_ll[3] in conf.l2types:
cls = conf.l2types[sa_ll[3]]
lvl = 2
elif sa_ll[1] in conf.l3types:
cls = conf.l3types[sa_ll[1]]
lvl = 3
else:
cls = conf.default_l2
warning("Unable to guess type (interface=%s protocol=%#x family=%i). Using %s", sa_ll[0], sa_ll[1], sa_ll[3], cls.name) # noqa: E501
lvl = 3
try:
pkt = cls(pkt)
except KeyboardInterrupt:
raise
except Exception:
if conf.debug_dissector:
raise
pkt = conf.raw_layer(pkt)
if lvl == 2:
pkt = pkt.payload
if pkt is not None:
from scapy.arch import get_last_packet_timestamp
pkt.time = get_last_packet_timestamp(self.ins)
return pkt
def send(self, x):
try:
sx = raw(x)
x.sent_time = time.time()
self.outs.sendto(sx, (x.dst, 0))
except socket.error as msg:
log_runtime.error(msg)
class SimpleSocket(SuperSocket):
desc = "wrapper around a classic socket"
def __init__(self, sock):
self.ins = sock
self.outs = sock
class StreamSocket(SimpleSocket):
desc = "transforms a stream socket into a layer 2"
nonblocking_socket = True
def __init__(self, sock, basecls=None):
if basecls is None:
basecls = conf.raw_layer
SimpleSocket.__init__(self, sock)
self.basecls = basecls
def recv(self, x=MTU):
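  # Peek at the queued bytes without consuming them, dissect one packet,
  # then consume exactly the bytes that packet used; any trailing padding
  # stays queued for the next recv() call.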
pkt = self.ins.recv(x, socket.MSG_PEEK)
x = len(pkt)
if x == 0:
return None
pkt = self.basecls(pkt)
pad = pkt.getlayer(conf.padding_layer)
if pad is not None and pad.underlayer is not None:
del(pad.underlayer.payload)
from scapy.packet import NoPayload
while pad is not None and not isinstance(pad, NoPayload):
x -= len(pad.load)
pad = pad.payload
self.ins.recv(x)
return pkt
class SSLStreamSocket(StreamSocket):
desc = "similar usage than StreamSocket but specialized for handling SSL-wrapped sockets" # noqa: E501
def __init__(self, sock, basecls=None):
self._buf = b""
super(SSLStreamSocket, self).__init__(sock, basecls)
 # The default value of x, 65535, is the maximum length of a TLS record
def recv(self, x=65535):
pkt = None
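  # try to dissect a packet from previously buffered bytes first; only
  # read more data from the socket when that is not enough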
if self._buf != b"":
try:
pkt = self.basecls(self._buf)
except Exception:
# We assume that the exception is generated by a buffer underflow # noqa: E501
pass
if not pkt:
buf = self.ins.recv(x)
if len(buf) == 0:
raise socket.error((100, "Underlying stream socket tore down"))
self._buf += buf
x = len(self._buf)
pkt = self.basecls(self._buf)
pad = pkt.getlayer(conf.padding_layer)
if pad is not None and pad.underlayer is not None:
del(pad.underlayer.payload)
while pad is not None and not isinstance(pad, scapy.packet.NoPayload):
x -= len(pad.load)
pad = pad.payload
self._buf = self._buf[x:]
return pkt
class L2ListenTcpdump(SuperSocket):
desc = "read packets at layer 2 using tcpdump"
def __init__(self, iface=None, promisc=None, filter=None, nofilter=False,
prog=None, *arg, **karg):
self.outs = None
args = ['-w', '-', '-s', '65535']
if iface is not None:
if WINDOWS:
try:
args.extend(['-i', iface.pcap_name])
except AttributeError:
args.extend(['-i', iface])
else:
args.extend(['-i', iface])
elif WINDOWS or DARWIN:
args.extend(['-i', conf.iface.pcap_name if WINDOWS else conf.iface]) # noqa: E501
if not promisc:
args.append('-p')
if not nofilter:
if conf.except_filter:
if filter:
filter = "(%s) and not (%s)" % (filter, conf.except_filter)
else:
filter = "not (%s)" % conf.except_filter
if filter is not None:
args.append(filter)
self.tcpdump_proc = tcpdump(None, prog=prog, args=args, getproc=True)
self.ins = PcapReader(self.tcpdump_proc.stdout)
def recv(self, x=MTU):
return self.ins.recv(x)
def close(self):
SuperSocket.close(self)
self.tcpdump_proc.kill()
class TunTapInterface(SuperSocket):
"""A socket to act as the host's peer of a tun / tap interface.
"""
desc = "Act as the host's peer of a tun / tap interface"
def __init__(self, iface=None, mode_tun=None, *arg, **karg):
self.iface = conf.iface if iface is None else iface
self.mode_tun = ("tun" in self.iface) if mode_tun is None else mode_tun
self.closed = True
self.open()
def open(self):
"""Open the TUN or TAP device."""
if not self.closed:
return
self.outs = self.ins = open(
"/dev/net/tun" if LINUX else ("/dev/%s" % self.iface), "r+b",
buffering=0
)
if LINUX:
from fcntl import ioctl
# TUNSETIFF = 0x400454ca
# IFF_TUN = 0x0001
# IFF_TAP = 0x0002
# IFF_NO_PI = 0x1000
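   # TAP mode also requests IFF_NO_PI: 0x0002 | 0x1000 = 0x1002 below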
ioctl(self.ins, 0x400454ca, struct.pack(
"16sH", bytes_encode(self.iface),
0x0001 if self.mode_tun else 0x1002,
))
self.closed = False
def __call__(self, *arg, **karg):
"""Needed when using an instantiated TunTapInterface object for
conf.L2listen, conf.L2socket or conf.L3socket.
"""
return self
def recv(self, x=MTU):
if self.mode_tun:
data = os.read(self.ins.fileno(), x + 4)
proto = struct.unpack('!H', data[2:4])[0]
return conf.l3types.get(proto, conf.raw_layer)(data[4:])
return conf.l2types.get(1, conf.raw_layer)(
os.read(self.ins.fileno(), x)
)
def send(self, x):
sx = raw(x)
if hasattr(x, "sent_time"):
x.sent_time = time.time()
if self.mode_tun:
try:
proto = conf.l3types[type(x)]
except KeyError:
log_runtime.warning(
"Cannot find layer 3 protocol value to send %s in "
"conf.l3types, using 0",
x.name if hasattr(x, "name") else type(x).__name__
)
proto = 0
sx = struct.pack('!HH', 0, proto) + sx
try:
os.write(self.outs.fileno(), sx)
except socket.error:
log_runtime.error("%s send", self.__class__.__name__, exc_info=True) # noqa: E501
| mtury/scapy | scapy/supersocket.py | Python | gpl-2.0 | 11,654 |
import py, sys, platform
import pytest
from testing import backend_tests, test_function, test_ownlib
from cffi import FFI
import _cffi_backend
class TestFFI(backend_tests.BackendTests,
test_function.TestFunction,
test_ownlib.TestOwnLib):
TypeRepr = "<ctype '%s'>"
@staticmethod
def Backend():
return _cffi_backend
def test_not_supported_bitfield_in_result(self):
ffi = FFI(backend=self.Backend())
ffi.cdef("struct foo_s { int a,b,c,d,e; int x:1; };")
e = py.test.raises(NotImplementedError, ffi.callback,
"struct foo_s foo(void)", lambda: 42)
assert str(e.value) == ("<struct foo_s(*)(void)>: "
"cannot pass as argument or return value a struct with bit fields")
def test_inspecttype(self):
ffi = FFI(backend=self.Backend())
assert ffi.typeof("long").kind == "primitive"
assert ffi.typeof("long(*)(long, long**, ...)").cname == (
"long(*)(long, long * *, ...)")
assert ffi.typeof("long(*)(long, long**, ...)").ellipsis is True
def test_new_handle(self):
ffi = FFI(backend=self.Backend())
o = [2, 3, 4]
p = ffi.new_handle(o)
assert ffi.typeof(p) == ffi.typeof("void *")
assert ffi.from_handle(p) is o
assert ffi.from_handle(ffi.cast("char *", p)) is o
py.test.raises(RuntimeError, ffi.from_handle, ffi.NULL)
class TestBitfield:
def check(self, source, expected_ofs_y, expected_align, expected_size):
# NOTE: 'expected_*' is the numbers expected from GCC.
# The numbers expected from MSVC are not explicitly written
# in this file, and will just be taken from the compiler.
ffi = FFI()
ffi.cdef("struct s1 { %s };" % source)
ctype = ffi.typeof("struct s1")
# verify the information with gcc
ffi1 = FFI()
ffi1.cdef("""
static const int Gofs_y, Galign, Gsize;
struct s1 *try_with_value(int fieldnum, long long value);
""")
fnames = [name for name, cfield in ctype.fields
if name and cfield.bitsize > 0]
setters = ['case %d: s.%s = value; break;' % iname
for iname in enumerate(fnames)]
lib = ffi1.verify("""
struct s1 { %s };
struct sa { char a; struct s1 b; };
#define Gofs_y offsetof(struct s1, y)
#define Galign offsetof(struct sa, b)
#define Gsize sizeof(struct s1)
struct s1 *try_with_value(int fieldnum, long long value)
{
static struct s1 s;
memset(&s, 0, sizeof(s));
switch (fieldnum) { %s }
return &s;
}
""" % (source, ' '.join(setters)))
if sys.platform == 'win32':
expected_ofs_y = lib.Gofs_y
expected_align = lib.Galign
expected_size = lib.Gsize
else:
assert (lib.Gofs_y, lib.Galign, lib.Gsize) == (
expected_ofs_y, expected_align, expected_size)
# the real test follows
assert ffi.offsetof("struct s1", "y") == expected_ofs_y
assert ffi.alignof("struct s1") == expected_align
assert ffi.sizeof("struct s1") == expected_size
# compare the actual storage of the two
for name, cfield in ctype.fields:
if cfield.bitsize < 0 or not name:
continue
if int(ffi.cast(cfield.type, -1)) == -1: # signed
min_value = -(1 << (cfield.bitsize-1))
max_value = (1 << (cfield.bitsize-1)) - 1
else:
min_value = 0
max_value = (1 << cfield.bitsize) - 1
for t in [1, 2, 4, 8, 16, 128, 2813, 89728, 981729,
-1,-2,-4,-8,-16,-128,-2813,-89728,-981729]:
if min_value <= t <= max_value:
self._fieldcheck(ffi, lib, fnames, name, t)
def _fieldcheck(self, ffi, lib, fnames, name, value):
s = ffi.new("struct s1 *")
setattr(s, name, value)
assert getattr(s, name) == value
raw1 = ffi.buffer(s)[:]
t = lib.try_with_value(fnames.index(name), value)
raw2 = ffi.buffer(t, len(raw1))[:]
assert raw1 == raw2
def test_bitfield_basic(self):
self.check("int a; int b:9; int c:20; int y;", 8, 4, 12)
self.check("int a; short b:9; short c:7; int y;", 8, 4, 12)
self.check("int a; short b:9; short c:9; int y;", 8, 4, 12)
def test_bitfield_reuse_if_enough_space(self):
self.check("int a:2; char y;", 1, 4, 4)
self.check("int a:1; char b ; int c:1; char y;", 3, 4, 4)
self.check("int a:1; char b:8; int c:1; char y;", 3, 4, 4)
self.check("char a; int b:9; char y;", 3, 4, 4)
self.check("char a; short b:9; char y;", 4, 2, 6)
self.check("int a:2; char b:6; char y;", 1, 4, 4)
self.check("int a:2; char b:7; char y;", 2, 4, 4)
self.check("int a:2; short b:15; char c:2; char y;", 5, 4, 8)
self.check("int a:2; char b:1; char c:1; char y;", 1, 4, 4)
@pytest.mark.skipif("platform.machine().startswith('arm')")
def test_bitfield_anonymous_no_align(self):
L = FFI().alignof("long long")
self.check("char y; int :1;", 0, 1, 2)
self.check("char x; int z:1; char y;", 2, 4, 4)
self.check("char x; int :1; char y;", 2, 1, 3)
self.check("char x; long long z:48; char y;", 7, L, 8)
self.check("char x; long long :48; char y;", 7, 1, 8)
self.check("char x; long long z:56; char y;", 8, L, 8 + L)
self.check("char x; long long :56; char y;", 8, 1, 9)
self.check("char x; long long z:57; char y;", L + 8, L, L + 8 + L)
self.check("char x; long long :57; char y;", L + 8, 1, L + 9)
@pytest.mark.skipif("not platform.machine().startswith('arm')")
def test_bitfield_anonymous_align_arm(self):
L = FFI().alignof("long long")
self.check("char y; int :1;", 0, 4, 4)
self.check("char x; int z:1; char y;", 2, 4, 4)
self.check("char x; int :1; char y;", 2, 4, 4)
self.check("char x; long long z:48; char y;", 7, L, 8)
self.check("char x; long long :48; char y;", 7, 8, 8)
self.check("char x; long long z:56; char y;", 8, L, 8 + L)
self.check("char x; long long :56; char y;", 8, L, 8 + L)
self.check("char x; long long z:57; char y;", L + 8, L, L + 8 + L)
self.check("char x; long long :57; char y;", L + 8, L, L + 8 + L)
@pytest.mark.skipif("platform.machine().startswith('arm')")
def test_bitfield_zero(self):
L = FFI().alignof("long long")
self.check("char y; int :0;", 0, 1, 4)
self.check("char x; int :0; char y;", 4, 1, 5)
self.check("char x; int :0; int :0; char y;", 4, 1, 5)
self.check("char x; long long :0; char y;", L, 1, L + 1)
self.check("short x, y; int :0; int :0;", 2, 2, 4)
self.check("char x; int :0; short b:1; char y;", 5, 2, 6)
self.check("int a:1; int :0; int b:1; char y;", 5, 4, 8)
@pytest.mark.skipif("not platform.machine().startswith('arm')")
def test_bitfield_zero_arm(self):
L = FFI().alignof("long long")
self.check("char y; int :0;", 0, 4, 4)
self.check("char x; int :0; char y;", 4, 4, 8)
self.check("char x; int :0; int :0; char y;", 4, 4, 8)
self.check("char x; long long :0; char y;", L, 8, L + 8)
self.check("short x, y; int :0; int :0;", 2, 4, 4)
self.check("char x; int :0; short b:1; char y;", 5, 4, 8)
self.check("int a:1; int :0; int b:1; char y;", 5, 4, 8)
def test_error_cases(self):
ffi = FFI()
py.test.raises(TypeError,
'ffi.cdef("struct s1 { float x:1; };"); ffi.new("struct s1 *")')
py.test.raises(TypeError,
'ffi.cdef("struct s2 { char x:0; };"); ffi.new("struct s2 *")')
py.test.raises(TypeError,
'ffi.cdef("struct s3 { char x:9; };"); ffi.new("struct s3 *")')
def test_struct_with_typedef(self):
ffi = FFI()
ffi.cdef("typedef struct { float x; } foo_t;")
p = ffi.new("foo_t *", [5.2])
assert repr(p).startswith("<cdata 'foo_t *' ")
def test_struct_array_no_length(self):
ffi = FFI()
ffi.cdef("struct foo_s { int x; int a[]; };")
p = ffi.new("struct foo_s *", [100, [200, 300, 400]])
assert p.x == 100
assert ffi.typeof(p.a) is ffi.typeof("int *") # no length available
assert p.a[0] == 200
assert p.a[1] == 300
assert p.a[2] == 400
@pytest.mark.skipif("sys.platform != 'win32'")
def test_getwinerror(self):
ffi = FFI()
code, message = ffi.getwinerror(1155)
assert code == 1155
assert message == ("No application is associated with the "
"specified file for this operation")
ffi.cdef("void SetLastError(int);")
lib = ffi.dlopen("Kernel32.dll")
lib.SetLastError(2)
code, message = ffi.getwinerror()
assert code == 2
assert message == "The system cannot find the file specified"
code, message = ffi.getwinerror(-1)
assert code == 2
assert message == "The system cannot find the file specified"
| mhnatiuk/phd_sociology_of_religion | scrapper/build/cffi/testing/test_ffi_backend.py | Python | gpl-2.0 | 9,406 |
# -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# Copyright (C) 2009-2010 Fluendo, S.L. (www.fluendo.com).
# Copyright (C) 2009-2010 Marc-Andre Lureau <[email protected]>
# Copyright (C) 2014 Juan Font Alonso <[email protected]>
# This file may be distributed and/or modified under the terms of
# the GNU General Public License version 2 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE" in the source distribution for more information.
import logging
import re
import urllib2
class M3U8(object):
def __init__(self, url=None):
self.url = url
self._programs = [] # main list of programs & bandwidth
self._files = {} # the current program playlist
self._first_sequence = None # the first sequence to start fetching
self._last_sequence = None # the last sequence, to compute reload delay
self._reload_delay = None # the initial reload delay
  self._update_tries = None # the number of consecutive reload tries
self._last_content = None
  self._endlist = False # whether the list ended and should not be refreshed
self._encryption_method = None
self._key_url = None
self._key = None
def endlist(self):
return self._endlist
def has_programs(self):
return len(self._programs) != 0
def get_program_playlist(self, program_id=None, bitrate=None):
# return the (uri, dict) of the best matching playlist
if not self.has_programs():
   raise ValueError('no program playlists available')
_, best = min((abs(int(x['BANDWIDTH']) - bitrate), x)
for x in self._programs)
return best['uri'], best
def reload_delay(self):
# return the time between request updates, in seconds
if self._endlist or not self._last_sequence:
   raise RuntimeError('reload delay is undefined for an ended or empty playlist')
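  # back off progressively while consecutive reloads return unchanged content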
if self._update_tries == 0:
ld = self._files[self._last_sequence]['duration']
self._reload_delay = min(self.target_duration * 3, ld)
d = self._reload_delay
elif self._update_tries == 1:
d = self._reload_delay * 0.5
elif self._update_tries == 2:
d = self._reload_delay * 1.5
else:
d = self._reload_delay * 3.0
logging.debug('Reload delay is %r' % d)
return int(d)
def has_files(self):
return len(self._files) != 0
def iter_files(self):
# return an iter on the playlist media files
if not self.has_files():
return
if not self._endlist:
current = max(self._first_sequence, self._last_sequence - 3)
else:
   # treat on-demand playlists differently?
current = self._first_sequence
while True:
try:
f = self._files[current]
current += 1
yield f
if (f.has_key('endlist')):
break
except:
yield None
def update(self, content):
# update this "constructed" playlist,
  # return whether it has actually been updated
if self._last_content and content == self._last_content:
logging.info("Content didn't change")
self._update_tries += 1
return False
self._update_tries = 0
self._last_content = content
def get_lines_iter(c):
c = c.decode("utf-8-sig")
for l in c.split('\n'):
if l.startswith('#EXT'):
yield l
elif l.startswith('#'):
pass
else:
yield l
self._lines = get_lines_iter(content)
first_line = self._lines.next()
if not first_line.startswith('#EXTM3U'):
logging.error('Invalid first line: %r' % first_line)
   raise ValueError('Invalid first line: %r' % first_line)
self.target_duration = None
discontinuity = False
allow_cache = None
i = 0
new_files = []
for l in self._lines:
if l.startswith('#EXT-X-STREAM-INF'):
def to_dict(l):
i = re.findall('(?:[\w-]*="[\w\.\,]*")|(?:[\w-]*=[\w]*)', l)
d = {v.split('=')[0]: v.split('=')[1].replace('"','') for v in i}
return d
d = to_dict(l[18:])
print "stream info: " + str(d)
d['uri'] = self._lines.next()
self._add_playlist(d)
elif l.startswith('#EXT-X-TARGETDURATION'):
self.target_duration = int(l[22:])
elif l.startswith('#EXT-X-MEDIA-SEQUENCE'):
self.media_sequence = int(l[22:])
i = self.media_sequence
elif l.startswith('#EXT-X-DISCONTINUITY'):
discontinuity = True
elif l.startswith('#EXT-X-PROGRAM-DATE-TIME'):
print l
elif l.startswith('#EXT-X-ALLOW-CACHE'):
allow_cache = l[19:]
elif l.startswith('#EXT-X-KEY'):
self._encryption_method = l.split(',')[0][18:]
self._key_url = l.split(',')[1][5:-1]
response = urllib2.urlopen(self._key_url)
self._key = response.read()
response.close()
elif l.startswith('#EXTINF'):
v = l[8:].split(',')
d = dict(file=self._lines.next().strip(),
title=v[1].strip(),
duration=float(v[0]),
sequence=i,
discontinuity=discontinuity,
allow_cache=allow_cache)
discontinuity = False
i += 1
new = self._set_file(i, d)
if i > self._last_sequence:
self._last_sequence = i
if new:
new_files.append(d)
elif l.startswith('#EXT-X-ENDLIST'):
if i > 0:
self._files[i]['endlist'] = True
self._endlist = True
elif l.startswith('#EXT-X-VERSION'):
pass
elif len(l.strip()) != 0:
print l
if not self.has_programs() and not self.target_duration:
logging.error("Invalid HLS stream: no programs & no duration")
   raise ValueError('Invalid HLS stream: no programs & no duration')
if len(new_files):
logging.debug("got new files in playlist: %r", new_files)
return True
def _add_playlist(self, d):
self._programs.append(d)
def _set_file(self, sequence, d):
new = False
if not self._files.has_key(sequence):
new = True
if not self._first_sequence:
self._first_sequence = sequence
elif sequence < self._first_sequence:
self._first_sequence = sequence
self._files[sequence] = d
return new
def __repr__(self):
return "M3U8 %r %r" % (self._programs, self._files)
| juanfont/hls-proxy | m3u8.py | Python | gpl-2.0 | 7,177 |
from django.apps import AppConfig
class FinancesConfig(AppConfig):
name = "finances"
| open-austin/influence-texas | src/influencetx/finances/apps.py | Python | gpl-2.0 | 91 |
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright: Red Hat Inc. 2015
# Author: Cleber Rosa <[email protected]>
"""Extensions/plugins dispatchers."""
import copy
import logging
import sys
from stevedore import EnabledExtensionManager
from .settings import settings
from ..utils import stacktrace
class Dispatcher(EnabledExtensionManager):
"""
Base dispatcher for various extension types
"""
#: Default namespace prefix for Avocado extensions
NAMESPACE_PREFIX = 'avocado.plugins.'
def __init__(self, namespace, invoke_kwds={}):
self.load_failures = []
super(Dispatcher, self).__init__(namespace=namespace,
check_func=self.enabled,
invoke_on_load=True,
invoke_kwds=invoke_kwds,
on_load_failure_callback=self.store_load_failure,
propagate_map_exceptions=True)
def plugin_type(self):
"""
Subset of entry points namespace for this dispatcher
Given an entry point `avocado.plugins.foo`, plugin type is `foo`. If
entry point does not conform to the Avocado standard prefix, it's
returned unchanged.
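  Example: for the namespace 'avocado.plugins.cli.cmd', the plugin type
  is 'cli.cmd'.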
"""
if self.namespace.startswith(self.NAMESPACE_PREFIX):
return self.namespace[len(self.NAMESPACE_PREFIX):]
else:
return self.namespace
def fully_qualified_name(self, extension):
"""
Returns the Avocado fully qualified plugin name
:param extension: an Stevedore Extension instance
:type extension: :class:`stevedore.extension.Extension`
"""
return "%s.%s" % (self.plugin_type(), extension.entry_point.name)
def settings_section(self):
"""
Returns the config section name for the plugin type handled by itself
"""
return "plugins.%s" % self.plugin_type()
def enabled(self, extension):
disabled = settings.get_value('plugins', 'disable', key_type=list)
return self.fully_qualified_name(extension) not in disabled
def names(self):
"""
Returns the names of the discovered extensions
This differs from :func:`stevedore.extension.ExtensionManager.names`
in that it returns names in a predictable order, by using standard
:func:`sorted`.
"""
return sorted(super(Dispatcher, self).names())
def _init_plugins(self, extensions):
super(Dispatcher, self)._init_plugins(extensions)
self.extensions.sort(key=lambda x: x.name)
configured_order = settings.get_value(self.settings_section(), "order",
key_type=list, default=[])
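  # honor the explicit order from the settings file first; plugins not
  # listed there keep their name-sorted order and are appended afterwards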
ordered = []
for name in configured_order:
for ext in self.extensions:
if name == ext.name:
ordered.append(ext)
for ext in self.extensions:
if ext not in ordered:
ordered.append(ext)
self.extensions = ordered
@staticmethod
def store_load_failure(manager, entrypoint, exception):
manager.load_failures.append((entrypoint, exception))
class CLIDispatcher(Dispatcher):
"""
Calls extensions on configure/run
Automatically adds all the extension with entry points registered under
'avocado.plugins.cli'
"""
def __init__(self):
super(CLIDispatcher, self).__init__('avocado.plugins.cli')
class CLICmdDispatcher(Dispatcher):
"""
Calls extensions on configure/run
Automatically adds all the extension with entry points registered under
'avocado.plugins.cli.cmd'
"""
def __init__(self):
super(CLICmdDispatcher, self).__init__('avocado.plugins.cli.cmd')
class JobPrePostDispatcher(Dispatcher):
"""
Calls extensions before Job execution
Automatically adds all the extension with entry points registered under
'avocado.plugins.job.prepost'
"""
def __init__(self):
super(JobPrePostDispatcher, self).__init__('avocado.plugins.job.prepost')
def map_method(self, method_name, job):
for ext in self.extensions:
try:
if hasattr(ext.obj, method_name):
method = getattr(ext.obj, method_name)
method(job)
except SystemExit:
raise
except KeyboardInterrupt:
raise
except:
job.log.error('Error running method "%s" of plugin "%s": %s',
method_name, ext.name, sys.exc_info()[1])
class ResultDispatcher(Dispatcher):
def __init__(self):
super(ResultDispatcher, self).__init__('avocado.plugins.result')
def map_method(self, method_name, result, job):
for ext in self.extensions:
try:
if hasattr(ext.obj, method_name):
method = getattr(ext.obj, method_name)
method(result, job)
except SystemExit:
raise
except KeyboardInterrupt:
raise
except:
job.log.error('Error running method "%s" of plugin "%s": %s',
method_name, ext.name, sys.exc_info()[1])
class ResultEventsDispatcher(Dispatcher):
def __init__(self, args):
super(ResultEventsDispatcher, self).__init__(
'avocado.plugins.result_events',
invoke_kwds={'args': args})
self.log = logging.getLogger("avocado.app")
def map_method(self, method_name, *args):
for ext in self.extensions:
try:
if hasattr(ext.obj, method_name):
method = getattr(ext.obj, method_name)
method(*args)
except SystemExit:
raise
except KeyboardInterrupt:
raise
except:
self.log.error('Error running method "%s" of plugin "%s": %s',
method_name, ext.name, sys.exc_info()[1])
class VarianterDispatcher(Dispatcher):
def __init__(self):
super(VarianterDispatcher, self).__init__('avocado.plugins.varianter')
def __getstate__(self):
"""
Very fragile pickle which works when all Varianter plugins are
available on both machines.
TODO: Replace this with per-plugin-refresh-mechanism
"""
return {"extensions": getattr(self, "extensions")}
def __setstate__(self, state):
"""
Very fragile pickle which works when all Varianter plugins are
available on both machines.
TODO: Replace this with per-plugin-refresh-mechanism
"""
self.__init__()
self.extensions = state.get("extensions")
def _map_method(self, method_name, deepcopy=False, *args, **kwargs):
"""
:warning: **kwargs are not supported for deepcopy=True
"""
ret = []
for ext in self.extensions:
try:
if hasattr(ext.obj, method_name):
method = getattr(ext.obj, method_name)
if deepcopy:
copied_args = [copy.deepcopy(arg) for arg in args]
ret.append(method(*copied_args))
else:
ret.append(method(*args, **kwargs))
except SystemExit:
raise
except KeyboardInterrupt:
raise
except: # catch any exception pylint: disable=W0702
stacktrace.log_exc_info(sys.exc_info(), logger='avocado.debug')
log = logging.getLogger("avocado.app")
log.error('Error running method "%s" of plugin "%s": %s',
method_name, ext.name, sys.exc_info()[1])
return ret
def map_method(self, method_name, *args, **kwargs):
return self._map_method(method_name, False, *args, **kwargs)
def map_method_copy(self, method_name, *args):
"""
The same as map_method, but use copy.deepcopy on each passed arg
"""
return self._map_method(method_name, True, *args)
| thomas-schmid-ubnt/avocado | avocado/core/dispatcher.py | Python | gpl-2.0 | 8,715 |
"""
Classes and functions to handle storage devices.
This exports:
- two functions for get image/blkdebug filename
- class for image operates and basic parameters
"""
from __future__ import division
import errno
import logging
import os
import shutil
import re
import functools
import collections
from avocado.core import exceptions
from avocado.utils import process
from virttest import iscsi
from virttest import utils_misc
from virttest import utils_numeric
from virttest import virt_vm
from virttest import gluster
from virttest import lvm
from virttest import ceph
from virttest import data_dir
from virttest.compat_52lts import decode_to_text
def preprocess_images(bindir, params, env):
# Clone master image form vms.
for vm_name in params.get("vms").split():
vm = env.get_vm(vm_name)
if vm:
vm.destroy(free_mac_addresses=False)
vm_params = params.object_params(vm_name)
for image in vm_params.get("master_images_clone").split():
image_obj = QemuImg(vm_params, bindir, image)
image_obj.clone_image(vm_params, vm_name, image, bindir)
def preprocess_image_backend(bindir, params, env):
enable_gluster = params.get("enable_gluster")
gluster_image = params.get("gluster_brick")
if enable_gluster and gluster_image:
return gluster.create_gluster_vol(params)
return True
def postprocess_images(bindir, params):
for vm in params.get("vms").split():
vm_params = params.object_params(vm)
for image in vm_params.get("master_images_clone").split():
image_obj = QemuImg(vm_params, bindir, image)
image_obj.rm_cloned_image(vm_params, vm, image, bindir)
def file_exists(params, filename_path):
"""
Check if image_filename exists.
:param params: Dictionary containing the test parameters.
:param filename_path: path to file
:type filename_path: str
:param root_dir: Base directory for relative filenames.
:type root_dir: str
:return: True if image file exists else False
"""
gluster_image = params.get("gluster_brick")
if gluster_image:
return gluster.file_exists(params, filename_path)
if params.get("enable_ceph") == "yes":
image_name = params.get("image_name")
image_format = params.get("image_format", "qcow2")
ceph_monitor = params.get("ceph_monitor")
rbd_pool_name = params["rbd_pool_name"]
rbd_image_name = "%s.%s" % (image_name.split("/")[-1], image_format)
ceph_conf = params.get("ceph_conf")
keyring_conf = params.get("image_ceph_keyring_conf")
return ceph.rbd_image_exist(ceph_monitor, rbd_pool_name,
rbd_image_name, ceph_conf, keyring_conf)
return os.path.exists(filename_path)
def file_remove(params, filename_path):
"""
Remove the image
:param params: Dictionary containing the test parameters.
:param filename_path: path to file
"""
if params.get("enable_ceph") == "yes":
image_name = params.get("image_name")
image_format = params.get("image_format", "qcow2")
ceph_monitor = params.get("ceph_monitor")
rbd_pool_name = params["rbd_pool_name"]
rbd_image_name = "%s.%s" % (image_name.split("/")[-1], image_format)
ceph_conf = params.get("ceph_conf")
keyring_conf = params.get("image_ceph_keyring_conf")
return ceph.rbd_image_rm(ceph_monitor, rbd_pool_name, rbd_image_name,
ceph_conf, keyring_conf)
if params.get("gluster_brick"):
# TODO: Add implementation for gluster_brick
return
if params.get('storage_type') in ('iscsi', 'lvm', 'iscsi-direct'):
# TODO: Add implementation for iscsi/lvm
return
# skip removing raw device
if params.get('image_raw_device') == 'yes':
return
if os.path.exists(filename_path):
os.unlink(filename_path)
return
def get_image_blkdebug_filename(params, root_dir):
"""
Generate an blkdebug file path from params and root_dir.
blkdebug files allow error injection in the block subsystem.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:note: params should contain:
blkdebug -- the name of the debug file.
"""
blkdebug_name = params.get("drive_blkdebug", None)
if blkdebug_name is not None:
blkdebug_filename = utils_misc.get_path(root_dir, blkdebug_name)
else:
blkdebug_filename = None
return blkdebug_filename
def get_image_filename(params, root_dir, basename=False):
"""
Generate an image path from params and root_dir.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:param basename: True to use only basename of image_name
:note: params should contain:
image_name -- the name of the image file, without extension
image_format -- the format of the image (qcow2, raw etc)
:raise VMDeviceError: When no matching disk found (in indirect method).
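 Example: with image_name 'images/f30', image_format 'qcow2' and a
 plain local filesystem backend, this returns
 '<root_dir>/images/f30.qcow2'.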
"""
enable_gluster = params.get("enable_gluster", "no") == "yes"
enable_ceph = params.get("enable_ceph", "no") == "yes"
enable_iscsi = params.get("enable_iscsi", "no") == "yes"
image_name = params.get("image_name")
storage_type = params.get("storage_type")
if image_name:
image_format = params.get("image_format", "qcow2")
if enable_iscsi:
if storage_type == 'iscsi-direct':
portal = params.get('portal_ip')
target = params.get('target')
lun = params.get('lun', 0)
user = params.get('chap_user')
password = params.get('chap_passwd')
return iscsi.get_image_filename(portal, target, lun,
user, password)
if enable_gluster:
return gluster.get_image_filename(params, image_name, image_format)
if enable_ceph:
rbd_pool_name = params["rbd_pool_name"]
rbd_image_name = "%s.%s" % (image_name.split("/")[-1],
image_format)
ceph_conf = params.get('ceph_conf')
ceph_monitor = params.get('ceph_monitor')
return ceph.get_image_filename(ceph_monitor, rbd_pool_name,
rbd_image_name, ceph_conf)
return get_image_filename_filesytem(params, root_dir, basename=basename)
else:
logging.warn("image_name parameter not set.")
def get_image_filename_filesytem(params, root_dir, basename=False):
"""
Generate an image path from params and root_dir.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:param basename: True to use only basename of image_name
:note: params should contain:
image_name -- the name of the image file, without extension
image_format -- the format of the image (qcow2, raw etc)
:raise VMDeviceError: When no matching disk found (in indirect method).
"""
def sort_cmp(first, second):
"""
  Comparison helper used for sorting: compare names by length first,
  then by value.
"""
def cmp(x, y):
return (x > y) - (x < y)
first_contains_digit = re.findall(r'[vhs]d[a-z]*[\d]+', first)
second_contains_digit = re.findall(r'[vhs]d[a-z]*[\d]+', second)
if not first_contains_digit and not second_contains_digit:
if len(first) > len(second):
return 1
elif len(first) < len(second):
return -1
if len(first) == len(second):
if first_contains_digit and second_contains_digit:
return cmp(first, second)
elif first_contains_digit:
return -1
elif second_contains_digit:
return 1
return cmp(first, second)
image_name = params.get("image_name", "image")
if basename:
image_name = os.path.basename(image_name)
indirect_image_select = params.get("indirect_image_select")
if indirect_image_select:
re_name = image_name
indirect_image_select = int(indirect_image_select)
matching_images = decode_to_text(process.system_output("ls -1d %s" % re_name,
shell=True))
matching_images = sorted(matching_images.split('\n'),
key=functools.cmp_to_key(sort_cmp))
if matching_images[-1] == '':
matching_images = matching_images[:-1]
try:
image_name = matching_images[indirect_image_select]
except IndexError:
raise virt_vm.VMDeviceError("No matching disk found for "
"name = '%s', matching = '%s' and "
"selector = '%s'" %
(re_name, matching_images,
indirect_image_select))
for protected in params.get('indirect_image_blacklist', '').split(' '):
match_image = re.match(protected, image_name)
if match_image and match_image.group(0) == image_name:
# We just need raise an error if it is totally match, such as
# sda sda1 and so on, but sdaa should not raise an error.
raise virt_vm.VMDeviceError("Matching disk is in blacklist. "
"name = '%s', matching = '%s' and "
"selector = '%s'" %
(re_name, matching_images,
indirect_image_select))
image_format = params.get("image_format", "qcow2")
if params.get("image_raw_device") == "yes":
return image_name
if image_format:
image_filename = "%s.%s" % (image_name, image_format)
else:
image_filename = image_name
image_filename = utils_misc.get_path(root_dir, image_filename)
return image_filename
secret_dir = os.path.join(data_dir.get_data_dir(), "images/secrets")
def _make_secret_dir():
"""Create image secret directory."""
try:
os.makedirs(secret_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
class ImageSecret(object):
"""Image secret object."""
def __init__(self, image, data):
if not data:
raise ValueError("Empty image secret for %s" % image)
self.image_id = image
self.data = data
self.filename = os.path.join(secret_dir, "%s.secret" % image)
self.aid = "%s_%s" % (self.image_id, "encrypt0")
def __str__(self):
return self.aid
@classmethod
def image_secret_define_by_params(cls, image, params):
"""Get image secret from vt params."""
image_secret = params.get("image_secret")
image_format = params.get("image_format")
image_encryption = params.get("image_encryption")
if ((image_format == "qcow2" and image_encryption == "luks") or
image_format == "luks"):
return cls(image, image_secret)
def save_to_file(self):
"""Save secret data to file."""
_make_secret_dir()
with open(self.filename, "w") as fd:
fd.write(self.data)
class StorageAuth(object):
"""
Image storage authentication class.
iscsi auth: initiator + password
ceph auth: ceph key
"""
def __init__(self, image, data, data_format, storage_type, **info):
"""
:param image: image tag name
:param data: sensitive data like password
:param data_format: raw or base64
:param storage_type: ceph, gluster or iscsi-direct
:param info: other access information, such as:
iscsi-direct: initiator
gluster: debug, logfile
"""
self.image = image
self.aid = '%s_access_secret' % self.image
self.storage_type = storage_type
self.filename = os.path.join(secret_dir, "%s.secret" % self.aid)
if self.storage_type == 'iscsi-direct':
self._chap_passwd = data
self.iscsi_initiator = info.get('initiator')
elif self.storage_type == 'ceph':
self._ceph_key = data
self.data_format = data_format
if self.data is not None:
self.save_to_file()
@property
def data(self):
if self.storage_type == 'iscsi-direct':
return self._chap_passwd
elif self.storage_type == 'ceph':
return self._ceph_key
else:
return None
def save_to_file(self):
"""Save secret data to file."""
_make_secret_dir()
with open(self.filename, "w") as fd:
fd.write(self.data)
@classmethod
def auth_info_define_by_params(cls, image, params):
"""
:param image: image tag name
:param params: image specified parmas, i.e. params.object_params(image)
"""
auth = None
storage_type = params.get("storage_type")
enable_ceph = params.get("enable_ceph", "no") == "yes"
enable_iscsi = params.get("enable_iscsi", "no") == "yes"
if enable_iscsi:
if storage_type == 'iscsi-direct':
initiator = params.get('initiator')
data = params.get('chap_passwd')
data_format = params.get('data_format', 'raw')
auth = cls(image, data, data_format, storage_type,
initiator=initiator) if data or initiator else None
elif enable_ceph:
data = params.get('ceph_key')
data_format = params.get('data_format', 'base64')
auth = cls(image, data, data_format,
storage_type) if data else None
return auth
class ImageAccessInfo(object):
"""
Access info to the logical image, which can include the network
storage image only, or the image and its backing images.
"""
def __init__(self, image, image_auth, image_backing_auth):
"""
:param image: image tag name
:param image_auth: StorageAuth object to access image itself
:param image_backing_auth: a dict({image: StorageAuth object}),
used for accessing the backing images
"""
self.image = image
self.image_auth = image_auth
self.image_backing_auth = image_backing_auth
@classmethod
def access_info_define_by_params(cls, image, params):
"""
:param image: image tag name
:param params: a dict containing the test parameters
:return: an ImageAccessInfo object or None
"""
access_info = None
info = retrieve_access_info(image, params)
if info:
access = info.pop(image, None)
access_info = cls(image, access, info)
return access_info
def retrieve_access_info(image, params):
"""
Create the image and its backing images' access info objects,
keep the same order as the images in the image_chain.
:param image: image tag name
:param params: a dict containing the test parameters
:return: A dict({image: StorageAuth object or None})
"""
access_info = collections.OrderedDict()
image_chain = params.objects("image_chain")
images = image_chain if image_chain else [image]
for img in images:
auth = StorageAuth.auth_info_define_by_params(
img, params.object_params(img))
if auth is not None:
access_info[img] = auth
if img == image:
# ignore image's snapshot images
break
return access_info
def retrieve_secrets(image, params):
"""Get all image secrets in image_chain, up to image."""
secrets = []
# use image instead if image_chain is empty
# or no backing image available
image_chain = params.get("image_chain", "")
if image not in image_chain:
image_chain = image
for img in image_chain.split():
img_params = params.object_params(img)
secret = ImageSecret.image_secret_define_by_params(img, img_params)
if secret:
secrets.append(secret)
# NOTE: break here to exclude secrets of snapshots.
if img == image:
break
return secrets
class ImageEncryption(object):
"""Image encryption configuration."""
__slots__ = ("format", "key_secret", "base_key_secrets",
"cipher_alg", "cipher_mode", "ivgen_alg",
"ivgen_hash_alg", "hash_alg", "iter_time")
def __init__(self, encryption_format, key_secret, base_key_secrets,
cipher_alg, cipher_mode, ivgen_alg, ivgen_hash_alg, hash_alg,
iter_time):
"""
Initialize image encryption configuration.
  :param encryption_format: encryption format
:param key_secret: ImageSecret object for this image
:param base_key_secrets: ImageSecret objects from its backing images
:param cipher_alg: name of encryption cipher algorithm
:param cipher_mode: name of encryption cipher mode
:param ivgen_alg: name of iv generator algorithm
:param ivgen_hash_alg: name of iv generator hash algorithm
:param hash_alg: name of encryption hash algorithm
:param iter_time: time to spend in PBKDF in milliseconds
"""
self.format = encryption_format
self.key_secret = key_secret
self.base_key_secrets = base_key_secrets
self.cipher_alg = cipher_alg
self.cipher_mode = cipher_mode
self.ivgen_alg = ivgen_alg
self.ivgen_hash_alg = ivgen_hash_alg
self.hash_alg = hash_alg
self.iter_time = iter_time
def __iter__(self):
return iter(self.__slots__)
@classmethod
def encryption_define_by_params(cls, image, params):
"""Get image encryption from vt params."""
encryption_format = params.get("image_encryption")
key_secrets = retrieve_secrets(image, params)
key_secret = None
if key_secrets and key_secrets[-1].image_id == image:
key_secret = key_secrets.pop()
cipher_alg = params.get("image_cipher_alg")
cipher_mode = params.get("image_cipher_mode")
ivgen_alg = params.get("image_ivgen_alg")
ivgen_hash_alg = params.get("image_ivgen_hash_alg")
hash_alg = params.get("image_hash_alg")
iter_time = params.get("image_iter_time")
return cls(encryption_format, key_secret, key_secrets, cipher_alg,
cipher_mode, ivgen_alg, ivgen_hash_alg, hash_alg, iter_time)
@property
def image_key_secrets(self):
"""All image secrets required to use this image."""
if self.key_secret:
return self.base_key_secrets + [self.key_secret]
return self.base_key_secrets
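# A minimal usage sketch of the encryption helpers (hypothetical image tag
# and params object; illustrative only, not part of the original module):
#   encryption = ImageEncryption.encryption_define_by_params(
#       'image1', params.object_params('image1'))
#   for secret in encryption.image_key_secrets:
#       secret.save_to_file()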
def copy_nfs_image(params, root_dir, basename=False):
"""
copy image from image_path to nfs mount dir if image is not available
or corrupted.
:param params: Test dict params
:param root_dir: Base directory for relative filenames.
:param basename: True to use only basename of image name
:raise: TestSetupFail if image is unavailable/corrupted
"""
if params.get("setup_local_nfs", "no") == "yes":
# check for image availability in NFS shared path
base_dir = params["nfs_mount_dir"]
dst = get_image_filename(params, base_dir, basename=basename)
if(not os.path.isfile(dst) or
utils_misc.get_image_info(dst)['lcounts'].lower() == "true"):
source = get_image_filename(params, root_dir)
logging.debug("Checking for image available in image data "
"path - %s", source)
# check for image availability in images data directory
if(os.path.isfile(source) and not
utils_misc.get_image_info(source)['lcounts'].lower() == "true"):
logging.debug("Copying guest image from %s to %s", source,
dst)
shutil.copy(source, dst)
else:
raise exceptions.TestSetupFail("Guest image is unavailable"
"/corrupted in %s and %s" %
(source, dst))
class OptionMissing(Exception):
"""
 Option not found in the object
"""
def __init__(self, option):
self.option = option
def __str__(self):
return "%s is missing. Please check your parameters" % self.option
class QemuImg(object):
"""
A basic class for handling operations of disk/block images.
"""
def __init__(self, params, root_dir, tag):
"""
Init the default value for image object.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:param tag: Image tag defined in parameter images.
"""
self.params = params
self.image_filename = get_image_filename(params, root_dir)
self.image_format = params.get("image_format", "qcow2")
self.size = params.get("image_size", "10G")
self.storage_type = params.get("storage_type", "local fs")
self.check_output = params.get("check_output") == "yes"
self.image_blkdebug_filename = get_image_blkdebug_filename(params,
root_dir)
self.remote_keywords = params.get("remote_image",
"gluster iscsi rbd").split()
self.encryption_config = ImageEncryption.encryption_define_by_params(
tag, params)
image_chain = params.get("image_chain")
self.tag = tag
self.root_dir = root_dir
self.base_tag = None
self.snapshot_tag = None
if image_chain:
image_chain = re.split(r"\s+", image_chain)
if tag in image_chain:
index = image_chain.index(tag)
if index < len(image_chain) - 1:
self.snapshot_tag = image_chain[index + 1]
if index > 0:
self.base_tag = image_chain[index - 1]
if self.base_tag:
base_params = params.object_params(self.base_tag)
self.base_image_filename = get_image_filename(base_params,
root_dir)
self.base_format = base_params.get("image_format")
if self.snapshot_tag:
ss_params = params.object_params(self.snapshot_tag)
self.snapshot_image_filename = get_image_filename(ss_params,
root_dir)
self.snapshot_format = ss_params.get("image_format")
self.image_access = ImageAccessInfo.access_info_define_by_params(
self.tag, self.params)
def check_option(self, option):
"""
Check if object has the option required.
:param option: option should be checked
"""
if option not in self.__dict__:
raise OptionMissing(option)
def is_remote_image(self):
"""
Check if image is from a remote server or not
"""
for keyword in self.remote_keywords:
if self.image_filename.startswith(keyword):
return True
return False
def backup_image(self, params, root_dir, action, good=True,
skip_existing=False):
"""
Backup or restore a disk image, depending on the action chosen.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:param action: Whether we want to backup or restore the image.
:param good: If we are backing up a good image(we want to restore it)
or a bad image (we are saving a bad image for posterior analysis).
:note: params should contain:
image_name -- the name of the image file, without extension
image_format -- the format of the image (qcow2, raw etc)
"""
def get_backup_set(filename, backup_dir, action, good):
"""
Get all sources and destinations required for each backup.
"""
if not os.path.isdir(backup_dir):
os.makedirs(backup_dir)
basename = os.path.basename(filename)
bkp_set = []
if action not in ('backup', 'restore'):
logging.error("No backup sets for action: %s, state: %s",
action, good)
return bkp_set
if good:
src = filename
dst = os.path.join(backup_dir, "%s.backup" % basename)
if action == 'backup':
bkp_set = [[src, dst]]
elif action == 'restore':
bkp_set = [[dst, src]]
else:
# We have to make 2 backups, one of the bad image, another one
# of the good image
src_bad = filename
src_good = os.path.join(backup_dir, "%s.backup" % basename)
hsh = utils_misc.generate_random_string(4)
dst_bad = (os.path.join(backup_dir, "%s.bad.%s" %
(basename, hsh)))
dst_good = (os.path.join(backup_dir, "%s.good.%s" %
(basename, hsh)))
if action == 'backup':
bkp_set = [[src_bad, dst_bad], [src_good, dst_good]]
elif action == 'restore':
bkp_set = [[src_good, src_bad]]
return bkp_set
backup_dir = params.get("backup_dir", "")
if not os.path.isabs(backup_dir):
backup_dir = os.path.join(root_dir, backup_dir)
backup_set = get_backup_set(self.image_filename, backup_dir,
action, good)
if self.is_remote_image():
backup_func = self.copy_data_remote
elif params.get('image_raw_device') == 'yes':
backup_func = self.copy_data_raw
else:
backup_func = self.copy_data_file
if action == 'backup':
backup_size = 0
for src, dst in backup_set:
if os.path.isfile(src):
backup_size += os.path.getsize(src)
else:
# TODO: get the size of block/remote images
if self.size:
backup_size += int(
float(utils_numeric.normalize_data_size(
self.size, order_magnitude="B"))
)
s = os.statvfs(backup_dir)
image_dir_free_disk_size = s.f_bavail * s.f_bsize
logging.info("backup image size: %d, available size: %d.",
backup_size, image_dir_free_disk_size)
if not self.is_disk_size_enough(backup_size,
image_dir_free_disk_size):
return
# back up the secret file if present
if self.encryption_config.key_secret:
bk_set = get_backup_set(self.encryption_config.key_secret.filename,
secret_dir, action, good)
for src, dst in bk_set:
self.copy_data_file(src, dst)
for src, dst in backup_set:
if action == 'backup' and skip_existing and os.path.exists(dst):
logging.debug("Image backup %s already exists, skipping...",
dst)
continue
backup_func(src, dst)
def rm_backup_image(self):
"""
Remove backup image
"""
backup_dir = utils_misc.get_path(self.root_dir,
self.params.get("backup_dir", ""))
image_name = os.path.join(backup_dir, "%s.backup" %
os.path.basename(self.image_filename))
logging.debug("Removing image file %s as requested", image_name)
if os.path.exists(image_name):
os.unlink(image_name)
else:
logging.warning("Image file %s not found", image_name)
def save_image(self, params, filename, root_dir=None):
"""
Save images to a path for later debugging.
:param params: Dictionary containing the test parameters.
:param filename: new filename for saved images.
:param root_dir: directory for saved images.
"""
src = self.image_filename
if root_dir is None:
root_dir = os.path.dirname(src)
backup_func = self.copy_data_file
if self.is_remote_image():
backup_func = self.copy_data_remote
elif params.get('image_raw_device') == 'yes':
backup_func = self.copy_data_raw
backup_size = 0
if os.path.isfile(src):
backup_size = os.path.getsize(src)
else:
# TODO: get the size of block/remote images
if self.size:
backup_size += int(
float(utils_numeric.normalize_data_size(
self.size, order_magnitude="B"))
)
s = os.statvfs(root_dir)
image_dir_free_disk_size = s.f_bavail * s.f_bsize
logging.info("Checking disk size on %s.", root_dir)
if not self.is_disk_size_enough(backup_size,
image_dir_free_disk_size):
return
backup_func(src, utils_misc.get_path(root_dir, filename))
@staticmethod
def is_disk_size_enough(required, available):
"""Check if available disk size is enough for the data copy."""
minimum_disk_free = 1.2 * required
if available < minimum_disk_free:
logging.error("Free space: %s MB", (available / 1048576.))
logging.error("Backup size: %s MB", (required / 1048576.))
logging.error("Minimum free space acceptable: %s MB",
(minimum_disk_free / 1048576.))
logging.error("Available disk space is not enough. Skipping...")
return False
return True
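# Example (values assumed): backing up a 10 GiB image needs
# 1.2 * 10 GiB = 12 GiB free, so is_disk_size_enough(10 * 2**30, 11 * 2**30)
# returns False and logs the shortfall before the copy is skipped.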
def copy_data_remote(self, src, dst):
pass
@staticmethod
def copy_data_raw(src, dst):
"""Using dd for raw device."""
if os.path.exists(src):
process.system("dd if=%s of=%s bs=4k conv=sync" % (src, dst))
else:
logging.info("No source %s, skipping dd...", src)
@staticmethod
def copy_data_file(src, dst):
"""Copy for files."""
if os.path.isfile(src):
logging.debug("Copying %s -> %s", src, dst)
_dst = dst + '.part'
shutil.copy(src, _dst)
os.rename(_dst, dst)
else:
logging.info("No source file %s, skipping copy...", src)
@staticmethod
def clone_image(params, vm_name, image_name, root_dir):
"""
Clone master image to vm specific file.
:param params: Dictionary containing the test parameters.
:param vm_name: Vm name.
:param image_name: Master image name.
:param root_dir: Base directory for relative filenames.
"""
if not params.get("image_name_%s_%s" % (image_name, vm_name)):
m_image_name = params.get("image_name", "image")
vm_image_name = params.get("image_name_%s" % vm_name, "%s_%s" % (m_image_name, vm_name))
if params.get("clone_master", "yes") == "yes":
image_params = params.object_params(image_name)
image_params["image_name"] = vm_image_name
master_image = params.get("master_image_name")
if master_image:
image_format = params.get("image_format", "qcow2")
m_image_fn = "%s.%s" % (master_image, image_format)
m_image_fn = utils_misc.get_path(root_dir, m_image_fn)
else:
m_image_fn = get_image_filename(params, root_dir)
image_fn = get_image_filename(image_params, root_dir)
force_clone = params.get("force_image_clone", "no")
if not os.path.exists(image_fn) or force_clone == "yes":
logging.info("Clone master image for vms.")
process.run(params.get("image_clone_command") %
(m_image_fn, image_fn))
params["image_name_%s" % vm_name] = vm_image_name
params["image_name_%s_%s" % (image_name, vm_name)] = vm_image_name
@staticmethod
def rm_cloned_image(params, vm_name, image_name, root_dir):
"""
Remove vm specific file.
:param params: Dictionary containing the test parameters.
:param vm_name: Vm name.
:param image_name: Master image name.
:param root_dir: Base directory for relative filenames.
"""
if params.get("image_name_%s_%s" % (image_name, vm_name)):
m_image_name = params.get("image_name", "image")
vm_image_name = "%s_%s" % (m_image_name, vm_name)
if params.get("clone_master", "yes") == "yes":
image_params = params.object_params(image_name)
image_params["image_name"] = vm_image_name
image_fn = get_image_filename(image_params, root_dir)
logging.debug("Removing vm specific image file %s", image_fn)
if os.path.exists(image_fn):
process.run(params.get("image_remove_command") % (image_fn))
else:
logging.debug("Image file %s not found", image_fn)
class Rawdev(object):
"""
Base class for raw storage devices such as iscsi and local disks
"""
def __init__(self, params, root_dir, tag):
"""
Initialize the default values for the image object.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:param tag: Image tag defined in parameter images
"""
host_set_flag = params.get("host_setup_flag")
if host_set_flag is not None:
self.exec_cleanup = int(host_set_flag) & 2 == 2
else:
self.exec_cleanup = False
if params.get("force_cleanup") == "yes":
self.exec_cleanup = True
self.image_name = tag
class Iscsidev(Rawdev):
"""
Class for handling iSCSI devices for a VM
"""
def __init__(self, params, root_dir, tag):
"""
Initialize the default values for the image object.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:param tag: Image tag defined in parameter images
"""
Rawdev.__init__(self, params, root_dir, tag)
self.emulated_file_remove = False
self.emulated_image = params.get("emulated_image")
if self.emulated_image:
self.emulated_image = os.path.join(root_dir, self.emulated_image)
if params.get("emulated_file_remove", "no") == "yes":
self.emulated_file_remove = True
params["iscsi_thread_id"] = self.image_name
self.iscsidevice = iscsi.Iscsi.create_iSCSI(params, root_dir=root_dir)
self.device_id = params.get("device_id")
self.iscsi_init_timeout = int(params.get("iscsi_init_timeout", 10))
class LVMdev(Rawdev):
"""
Class for handling LVM devices for a VM
"""
def __init__(self, params, root_dir, tag):
"""
Initialize the default values for the image object.
:param params: Dictionary containing the test parameters.
:param root_dir: Base directory for relative filenames.
:param tag: Image tag defined in parameter images
"""
super(LVMdev, self).__init__(params, root_dir, tag)
if params.get("emulational_device", "yes") == "yes":
self.lvmdevice = lvm.EmulatedLVM(params, root_dir=root_dir)
else:
self.lvmdevice = lvm.LVM(params)
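# Hypothetical usage sketch (the params object, tag and directories are
# assumptions, not part of this module): an image object built on these
# classes, e.g. virttest's QemuImg subclass, is typically driven like:
#
# img = QemuImg(params.object_params("image1"), root_dir, "image1")
# img.backup_image(params, root_dir, action="backup", good=True)
# ... run the test ...
# img.backup_image(params, root_dir, action="restore", good=True)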
| balamuruhans/avocado-vt | virttest/storage.py | Python | gpl-2.0 | 36,710 |
import numpy as np
#x must be a np array
def lnbin(x, BinNum):
"""
Logarithmically bins a numpy array, returns (midpoints, Freq).
This function takes an input data vector x, which is to be binned, and
the number of bins one would like the data binned into. The output is two
vectors, one containing the normalised frequency of each bin (Freq), the
other the midpoint of each bin (midpts); i.e.: midpts, Freq = lnbin(x, BinNum).
An error estimate of the binned frequency, eFreq, is also computed
internally (as of June 30 2010), though only (midpts, Freq) is returned.
Updated 2/6/14 to change the min to scale automatically
"""
if type(x) != np.ndarray:
try:
x = np.array(x)
except:
print 'Improper input format!'
raise
x = np.sort(x)
i = 0
while x[i] <= 0:
i += 1
percent_binned = float((x.size-(i+1))) / x.size*100
#print 'Percentage of input vec binned {}'.format(percent_binned)
FPT = x[i:]
LFPT = np.log(FPT)
max1 = np.log( np.ceil(np.amax(FPT)))
#min1 = 1
min1 = np.log(np.floor(np.min(FPT)))
LFreq = np.zeros((BinNum, 1))
LTime = np.zeros((BinNum, 1))
Lends = np.zeros((BinNum, 2))
step = (max1-min1) / BinNum
#LOG Binning Data ###########################
for i in range(FPT.size):
for k in range(BinNum):
if( k*step+min1 <= LFPT[i] and LFPT[i] < (k+1)*step+min1):
LFreq[k] += 1 #check LFreq on the first bin
LTime[k] = (k+1)*step-(0.5*step)+min1
Lends[k, 0] = k*step+min1
Lends[k, 1] = (k+1)*step+min1
ends = np.exp(Lends)
widths = ends[:,1] - ends[:,0]
Freq = LFreq.T / widths / x.size
eFreq = 1.0 / np.sqrt(LFreq) * Freq
midpts = np.exp(LTime)
return (midpts[:,0], Freq.T[:,0])
| bkerster/utilities | lnbin.py | Python | gpl-2.0 | 1,964 |
#
# Copyright 2010-2011 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
"""
Generic LVM interface wrapper
Encapsulates the actual LVM mechanics.
"""
import errno
import os
import re
import pwd
import grp
import logging
from collections import namedtuple
import pprint as pp
import threading
from itertools import chain
from subprocess import list2cmdline
from vdsm import constants
import misc
import multipath
import storage_exception as se
from vdsm.config import config
import devicemapper
log = logging.getLogger("Storage.LVM")
LVM_DEFAULT_TTL = 100
PV_FIELDS = ("uuid,name,size,vg_name,vg_uuid,pe_start,pe_count,"
"pe_alloc_count,mda_count,dev_size")
VG_FIELDS = ("uuid,name,attr,size,free,extent_size,extent_count,free_count,"
"tags,vg_mda_size,vg_mda_free,lv_count,pv_count,pv_name")
LV_FIELDS = "uuid,name,vg_name,attr,size,seg_start_pe,devices,tags"
VG_ATTR_BITS = ("permission", "resizeable", "exported",
"partial", "allocation", "clustered")
LV_ATTR_BITS = ("voltype", "permission", "allocations", "fixedminor", "state",
"devopen", "target", "zero")
# Returned by lvm commands for pv_name when a pv name is not available.
UNKNOWN_DEVICE = "unknown device"
PV = namedtuple("PV", PV_FIELDS + ",guid")
VG = namedtuple("VG", VG_FIELDS + ",writeable,partial")
VG_ATTR = namedtuple("VG_ATTR", VG_ATTR_BITS)
LV = namedtuple("LV", LV_FIELDS + ",writeable,opened,active")
LV_ATTR = namedtuple("LV_ATTR", LV_ATTR_BITS)
Stub = namedtuple("Stub", "name, stale")
class Unreadable(Stub):
__slots__ = ()
def __getattr__(self, attrName):
log.warning("%s can't be reloaded, please check your storage "
"connections.", self.name)
raise AttributeError("Failed reload: %s" % self.name)
# VG states
VG_OK = "OK"
VG_PARTIAL = "PARTIAL"
VG_UNKNOWN = "UNKNOWN"
SEPARATOR = "|"
LVM_NOBACKUP = ("--autobackup", "n")
LVM_FLAGS = ("--noheadings", "--units", "b", "--nosuffix", "--separator",
SEPARATOR, "--ignoreskippedcluster")
PV_PREFIX = "/dev/mapper"
# Assuming there are no spaces in the PV name
re_pvName = re.compile(PV_PREFIX + '[^\s\"]+', re.MULTILINE)
# operations lock
LVM_OP_INVALIDATE = "lvm invalidate operation"
LVM_OP_RELOAD = "lvm reload operation"
PVS_CMD = ("pvs",) + LVM_FLAGS + ("-o", PV_FIELDS)
VGS_CMD = ("vgs",) + LVM_FLAGS + ("-o", VG_FIELDS)
LVS_CMD = ("lvs",) + LVM_FLAGS + ("-o", LV_FIELDS)
# FIXME we must use different METADATA_USER ownership for qemu-unreadable
# metadata volumes
USER_GROUP = constants.DISKIMAGE_USER + ":" + constants.DISKIMAGE_GROUP
LVMCONF_TEMPLATE = """
devices {
preferred_names = ["^/dev/mapper/"]
ignore_suspended_devices=1
write_cache_state=0
disable_after_error_count=3
obtain_device_list_from_udev=0
%s
}
global {
locking_type=1
prioritise_write_locks=1
wait_for_locks=1
use_lvmetad=0
}
backup {
retain_min = 50
retain_days = 0
}
"""
VAR_RUN_VDSM = constants.P_VDSM_RUN
VDSM_LVM_SYSTEM_DIR = os.path.join(VAR_RUN_VDSM, "lvm")
VDSM_LVM_CONF = os.path.join(VDSM_LVM_SYSTEM_DIR, "lvm.conf")
USER_DEV_LIST = filter(None, config.get("irs", "lvm_dev_whitelist").split(","))
def _buildFilter(devices):
strippeds = set(d.strip() for d in devices)
strippeds.discard('') # Who has put a blank here?
strippeds = sorted(strippeds)
dmPaths = [dev.replace(r'\x', r'\\x') for dev in strippeds]
filt = '|'.join(dmPaths)
if len(filt) > 0:
filt = "'a|" + filt + "|', "
filt = "filter = [ " + filt + "'r|.*|' ]"
return filt
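# Worked example (device names assumed):
# _buildFilter(["/dev/mapper/b", "/dev/mapper/a"]) sorts the devices and
# returns: filter = [ 'a|/dev/mapper/a|/dev/mapper/b|', 'r|.*|' ]
# i.e. accept exactly the listed devices and reject everything else.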
def _buildConfig(devList):
flt = _buildFilter(chain(devList, USER_DEV_LIST))
conf = LVMCONF_TEMPLATE % flt
return conf.replace("\n", " ")
def _updateLvmConf(conf):
# Make a convenience copy for debugging purposes
try:
if not os.path.isdir(VDSM_LVM_SYSTEM_DIR):
os.mkdir(VDSM_LVM_SYSTEM_DIR)
with open(VDSM_LVM_CONF, "w") as lvmconf:
lvmconf.write(conf)
except IOError as e:
# We are not interested in exceptions here; just log it and continue
log.warning("Cannot create %s file %s", VDSM_LVM_CONF, str(e))
#
# Make sure that "args" is suitable for consumption in interfaces
# that expect an iterable argument. A string is treated as a single
# argument and converted into a list containing that string.
# Strings have no __iter__ attribute.
#
def _normalizeargs(args=None):
if args is None:
args = []
elif not hasattr(args, "__iter__"):
args = [args]
return args
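# Examples: _normalizeargs(None) -> [], _normalizeargs("lv0") -> ["lv0"]
# (py2 strings have no __iter__), while an existing list or tuple is
# passed through unchanged.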
def _tags2Tuple(sTags):
"""
Convert a comma-separated tag string into a tuple.
Return an empty tuple for sTags == ""
"""
return tuple(sTags.split(",")) if sTags else tuple()
def makePV(*args):
guid = os.path.basename(args[1])
args += (guid,)
return PV(*args)
def makeVG(*args):
args = list(args)
# Convert tag string into tuple.
tags = _tags2Tuple(args[VG._fields.index("tags")])
args[VG._fields.index("tags")] = tags
# Convert attr string into named tuple fields.
# tuple("wz--n-") = ('w', 'z', '-', '-', 'n', '-')
sAttr = args[VG._fields.index("attr")]
attr_values = tuple(sAttr[:len(VG_ATTR._fields)])
attrs = VG_ATTR(*attr_values)
args[VG._fields.index("attr")] = attrs
# Convert pv_names list to tuple.
args[VG._fields.index("pv_name")] = \
tuple(args[VG._fields.index("pv_name")])
# Add properties. Should be ordered as VG_PROPERTIES.
args.append(attrs.permission == "w") # Writable
args.append(VG_OK if attrs.partial == "-" else VG_PARTIAL) # Partial
return VG(*args)
def makeLV(*args):
args = list(args)
# Convert tag string into tuple.
tags = _tags2Tuple(args[LV._fields.index("tags")])
args[LV._fields.index("tags")] = tags
# Convert attr string into named tuple fields.
sAttr = args[LV._fields.index("attr")]
attr_values = tuple(sAttr[:len(LV_ATTR._fields)])
attrs = LV_ATTR(*attr_values)
args[LV._fields.index("attr")] = attrs
# Add properties. Should be ordered as VG_PROPERTIES.
args.append(attrs.permission == "w") # writable
args.append(attrs.devopen == "o") # opened
args.append(attrs.state == "a") # active
return LV(*args)
class LVMCache(object):
"""
Keep all the LVM information.
"""
def _getCachedExtraCfg(self):
if not self._filterStale:
return self._extraCfg
with self._filterLock:
if not self._filterStale:
return self._extraCfg
self._extraCfg = _buildConfig(multipath.getMPDevNamesIter())
_updateLvmConf(self._extraCfg)
self._filterStale = False
return self._extraCfg
def _addExtraCfg(self, cmd, devices=tuple()):
newcmd = [constants.EXT_LVM, cmd[0]]
if devices:
conf = _buildConfig(devices)
else:
conf = self._getCachedExtraCfg()
newcmd += ["--config", conf]
if len(cmd) > 1:
newcmd += cmd[1:]
return newcmd
def invalidateFilter(self):
self._filterStale = True
def invalidateCache(self):
self.invalidateFilter()
self.flush()
def __init__(self):
self._filterStale = True
self._extraCfg = None
self._filterLock = threading.Lock()
self._oplock = misc.OperationMutex()
self._stalepv = True
self._stalevg = True
self._stalelv = True
self._pvs = {}
self._vgs = {}
self._lvs = {}
def cmd(self, cmd, devices=tuple()):
finalCmd = self._addExtraCfg(cmd, devices)
rc, out, err = misc.execCmd(finalCmd, sudo=True)
if rc != 0:
# Filter might be stale
self.invalidateFilter()
newCmd = self._addExtraCfg(cmd)
# Before blindly trying again make sure
# that the commands are not identical, because
# the devlist is sorted there is no fear
# of two identical filters looking differently
if newCmd != finalCmd:
return misc.execCmd(newCmd, sudo=True)
return rc, out, err
def __str__(self):
return ("PVS:\n%s\n\nVGS:\n%s\n\nLVS:\n%s" %
(pp.pformat(self._pvs),
pp.pformat(self._vgs),
pp.pformat(self._lvs)))
def bootstrap(self):
self._reloadpvs()
self._reloadvgs()
self._reloadAllLvs()
def _reloadpvs(self, pvName=None):
cmd = list(PVS_CMD)
pvNames = _normalizeargs(pvName)
cmd.extend(pvNames)
with self._oplock.acquireContext(LVM_OP_RELOAD):
rc, out, err = self.cmd(cmd)
if rc != 0:
log.warning("lvm pvs failed: %s %s %s", str(rc), str(out),
str(err))
pvNames = pvNames if pvNames else self._pvs.keys()
for p in pvNames:
if isinstance(self._pvs.get(p), Stub):
self._pvs[p] = Unreadable(self._pvs[p].name, True)
return dict(self._pvs)
updatedPVs = {}
for line in out:
fields = [field.strip() for field in line.split(SEPARATOR)]
pv = makePV(*fields)
if pv.name == UNKNOWN_DEVICE:
log.error("Missing pv: %s in vg: %s", pv.uuid, pv.vg_name)
continue
self._pvs[pv.name] = pv
updatedPVs[pv.name] = pv
# If we updated all the PVs drop stale flag
if not pvName:
self._stalepv = False
# Remove stalePVs
stalePVs = [staleName for staleName in self._pvs.keys()
if staleName not in updatedPVs.iterkeys()]
for staleName in stalePVs:
log.warning("Removing stale PV: %s", staleName)
self._pvs.pop((staleName), None)
return updatedPVs
def _getVGDevs(self, vgNames):
devices = []
for name in vgNames:
try:
pvs = self._vgs[name].pv_name # pv_names tuple
except (KeyError, AttributeError): # Yet unknown VG, stub
devices = tuple()
break # unknownVG = True
else:
devices.extend(pvs)
else: # All known VGs
devices = tuple(devices)
return devices
def _reloadvgs(self, vgName=None):
cmd = list(VGS_CMD)
vgNames = _normalizeargs(vgName)
cmd.extend(vgNames)
with self._oplock.acquireContext(LVM_OP_RELOAD):
rc, out, err = self.cmd(cmd, self._getVGDevs(vgNames))
if rc != 0:
log.warning("lvm vgs failed: %s %s %s", str(rc), str(out),
str(err))
vgNames = vgNames if vgNames else self._vgs.keys()
for v in vgNames:
if isinstance(self._vgs.get(v), Stub):
self._vgs[v] = Unreadable(self._vgs[v].name, True)
if not len(out):
return dict(self._vgs)
updatedVGs = {}
vgsFields = {}
for line in out:
fields = [field.strip() for field in line.split(SEPARATOR)]
uuid = fields[VG._fields.index("uuid")]
pvNameIdx = VG._fields.index("pv_name")
pv_name = fields[pvNameIdx]
if pv_name == UNKNOWN_DEVICE:
# PV is missing, e.g. device lost or target not connected
continue
if uuid not in vgsFields:
fields[pvNameIdx] = [pv_name] # Make a pv_names list
vgsFields[uuid] = fields
else:
vgsFields[uuid][pvNameIdx].append(pv_name)
for fields in vgsFields.itervalues():
vg = makeVG(*fields)
if int(vg.pv_count) != len(vg.pv_name):
log.error("vg %s has pv_count %s but pv_names %s",
vg.name, vg.pv_count, vg.pv_name)
self._vgs[vg.name] = vg
updatedVGs[vg.name] = vg
# If we updated all the VGs drop stale flag
if not vgName:
self._stalevg = False
# Remove stale VGs
staleVGs = [staleName for staleName in self._vgs.keys()
if staleName not in updatedVGs.iterkeys()]
for staleName in staleVGs:
removeVgMapping(staleName)
log.warning("Removing stale VG: %s", staleName)
self._vgs.pop((staleName), None)
return updatedVGs
def _reloadlvs(self, vgName, lvNames=None):
lvNames = _normalizeargs(lvNames)
cmd = list(LVS_CMD)
if lvNames:
cmd.extend(["%s/%s" % (vgName, lvName) for lvName in lvNames])
else:
cmd.append(vgName)
with self._oplock.acquireContext(LVM_OP_RELOAD):
rc, out, err = self.cmd(cmd, self._getVGDevs((vgName, )))
if rc != 0:
log.warning("lvm lvs failed: %s %s %s", str(rc), str(out),
str(err))
lvNames = lvNames if lvNames else self._lvs.keys()
for l in lvNames:
if isinstance(self._lvs.get(l), Stub):
self._lvs[l] = Unreadable(self._lvs[l].name, True)
return dict(self._lvs)
updatedLVs = {}
for line in out:
fields = [field.strip() for field in line.split(SEPARATOR)]
lv = makeLV(*fields)
# For LV we are only interested in its first extent
if lv.seg_start_pe == "0":
self._lvs[(lv.vg_name, lv.name)] = lv
updatedLVs[(lv.vg_name, lv.name)] = lv
# Determine if there are stale LVs
if lvNames:
staleLVs = (lvName for lvName in lvNames
if (vgName, lvName) not in updatedLVs.iterkeys())
else:
# All the LVs in the VG
staleLVs = (lvName for v, lvName in self._lvs.keys()
if (v == vgName) and
((vgName, lvName) not in updatedLVs.iterkeys()))
for lvName in staleLVs:
log.warning("Removing stale lv: %s/%s", vgName, lvName)
self._lvs.pop((vgName, lvName), None)
log.debug("lvs reloaded")
return updatedLVs
def _reloadAllLvs(self):
"""
Used only during bootstrap.
"""
cmd = list(LVS_CMD)
rc, out, err = self.cmd(cmd)
if rc == 0:
updatedLVs = set()
for line in out:
fields = [field.strip() for field in line.split(SEPARATOR)]
lv = makeLV(*fields)
# For LV we are only interested in its first extent
if lv.seg_start_pe == "0":
self._lvs[(lv.vg_name, lv.name)] = lv
updatedLVs.add((lv.vg_name, lv.name))
# Remove stales
for vgName, lvName in self._lvs.keys():
if (vgName, lvName) not in updatedLVs:
self._lvs.pop((vgName, lvName), None)
log.error("Removing stale lv: %s/%s", vgName, lvName)
self._stalelv = False
return dict(self._lvs)
def _invalidatepvs(self, pvNames):
with self._oplock.acquireContext(LVM_OP_INVALIDATE):
pvNames = _normalizeargs(pvNames)
for pvName in pvNames:
self._pvs[pvName] = Stub(pvName, True)
def _invalidateAllPvs(self):
with self._oplock.acquireContext(LVM_OP_INVALIDATE):
self._stalepv = True
self._pvs.clear()
def _invalidatevgs(self, vgNames):
vgNames = _normalizeargs(vgNames)
with self._oplock.acquireContext(LVM_OP_INVALIDATE):
for vgName in vgNames:
self._vgs[vgName] = Stub(vgName, True)
def _invalidateAllVgs(self):
with self._oplock.acquireContext(LVM_OP_INVALIDATE):
self._stalevg = True
self._vgs.clear()
def _invalidatelvs(self, vgName, lvNames=None):
with self._oplock.acquireContext(LVM_OP_INVALIDATE):
lvNames = _normalizeargs(lvNames)
# Invalidate LVs in a specific VG
if lvNames:
# Invalidate a specific LVs
for lvName in lvNames:
self._lvs[(vgName, lvName)] = Stub(lvName, True)
else:
# Invalidate all the LVs in a given VG
for lv in self._lvs.values():
if not isinstance(lv, Stub):
if lv.vg_name == vgName:
self._lvs[(vgName, lv.name)] = Stub(lv.name, True)
def _invalidateAllLvs(self):
with self._oplock.acquireContext(LVM_OP_INVALIDATE):
self._stalelv = True
self._lvs.clear()
def flush(self):
self._invalidateAllPvs()
self._invalidateAllVgs()
self._invalidateAllLvs()
def getPv(self, pvName):
# Get specific PV
pv = self._pvs.get(pvName)
if not pv or isinstance(pv, Stub):
pvs = self._reloadpvs(pvName)
pv = pvs.get(pvName)
return pv
def getAllPvs(self):
# Get everything we have
if self._stalepv:
pvs = self._reloadpvs()
else:
pvs = dict(self._pvs)
stalepvs = [pv.name for pv in pvs.itervalues()
if isinstance(pv, Stub)]
if stalepvs:
reloaded = self._reloadpvs(stalepvs)
pvs.update(reloaded)
return pvs.values()
def getVg(self, vgName):
# Get specific VG
vg = self._vgs.get(vgName)
if not vg or isinstance(vg, Stub):
vgs = self._reloadvgs(vgName)
vg = vgs.get(vgName)
return vg
def getVgs(self, vgNames):
"""Reloads all the VGs of the set.
Can block for suspended devices.
Fills the cache but does not use it.
Only returns found VGs.
"""
return [vg for vgName, vg in self._reloadvgs(vgNames).iteritems()
if vgName in vgNames]
def getAllVgs(self):
# Get everything we have
if self._stalevg:
vgs = self._reloadvgs()
else:
vgs = dict(self._vgs)
stalevgs = [vg.name for vg in vgs.itervalues()
if isinstance(vg, Stub)]
if stalevgs:
reloaded = self._reloadvgs(stalevgs)
vgs.update(reloaded)
return vgs.values()
def getLv(self, vgName, lvName=None):
# Checking self._stalelv here is suboptimal, because unnecessary
# reloads are done.
# Return vgName/lvName info
# If both 'vgName' and 'lvName' are None then return everything
# If only 'lvName' is None then return all the LVs in the given VG
# If only 'vgName' is None it is weird, so return nothing
# (we can consider returning all the LVs with a given name)
if lvName:
# vgName, lvName
lv = self._lvs.get((vgName, lvName))
if not lv or isinstance(lv, Stub):
# while we here reload all the LVs in the VG
lvs = self._reloadlvs(vgName)
lv = lvs.get((vgName, lvName))
if not lv:
log.warning("lv: %s not found in lvs vg: %s response",
lvName, vgName)
res = lv
else:
# vgName, None
# If there are any stale LVs, reload the whole VG, since it would
# cost about the same effort anyhow and the stale LVs may be in
# this vg.
# Will be better when the pvs dict will be part of the vg.
# Fix me: should not be more stubs
if self._stalelv or any(isinstance(lv, Stub)
for lv in self._lvs.values()):
lvs = self._reloadlvs(vgName)
else:
lvs = dict(self._lvs)
# lvs = self._reloadlvs()
lvs = [lv for lv in lvs.values()
if not isinstance(lv, Stub) and (lv.vg_name == vgName)]
res = lvs
return res
def getAllLvs(self):
# None, None
if self._stalelv or any(isinstance(lv, Stub)
for lv in self._lvs.values()):
lvs = self._reloadAllLvs()
else:
lvs = dict(self._lvs)
return lvs.values()
_lvminfo = LVMCache()
def bootstrap(refreshlvs=()):
"""
Bootstrap lvm module
This function builds the lvm cache and ensures that all unused lvs are
deactivated, except lvs matching refreshlvs, which are refreshed instead.
"""
_lvminfo.bootstrap()
refreshlvs = set(refreshlvs)
for vg in _lvminfo.getAllVgs():
deactivate = []
refresh = []
for lv in _lvminfo.getLv(vg.name):
if lv.active:
if lv.name in refreshlvs:
refresh.append(lv.name)
elif lv.opened:
log.debug("Skipping open lv: vg=%s lv=%s", vg.name,
lv.name)
else:
deactivate.append(lv.name)
if deactivate:
log.info("Deactivating lvs: vg=%s lvs=%s", vg.name, deactivate)
try:
_setLVAvailability(vg.name, deactivate, "n")
except se.CannotDeactivateLogicalVolume:
log.error("Error deactivating lvs: vg=%s lvs=%s", vg.name,
deactivate)
# Some lvs are inactive now
_lvminfo._invalidatelvs(vg.name, deactivate)
if refresh:
log.info("Refreshing lvs: vg=%s lvs=%s", vg.name, refresh)
try:
refreshLVs(vg.name, refresh)
except se.LogicalVolumeRefreshError:
log.error("Error refreshing lvs: vg=%s lvs=%s", vg.name,
refresh)
def invalidateCache():
_lvminfo.invalidateCache()
def _fqpvname(pv):
if pv and not pv.startswith(PV_PREFIX):
pv = os.path.join(PV_PREFIX, pv)
return pv
def _createpv(devices, metadataSize, options=tuple()):
"""
Size for pvcreate should be with units k|m|g
pvcreate on a dev that is already a PV but not in a VG returns rc = 0.
The device is re-created with the new parameters.
"""
cmd = ["pvcreate"]
if options:
cmd.extend(options)
if metadataSize != 0:
cmd.extend(("--metadatasize", "%sm" % metadataSize,
"--metadatacopies", "2",
"--metadataignore", "y"))
cmd.extend(devices)
rc, out, err = _lvminfo.cmd(cmd, devices)
return rc, out, err
def _initpvs(devices, metadataSize, force=False):
def _initpvs_removeHolders():
"""Remove holders for all devices."""
for device in devices:
try:
devicemapper.removeMappingsHoldingDevice(
os.path.basename(device))
except OSError as e:
if e.errno == errno.ENODEV:
raise se.PhysDevInitializationError("%s: %s" %
(device, str(e)))
else:
raise
if force is True:
options = ("-y", "-ff")
_initpvs_removeHolders()
else:
options = tuple()
rc, out, err = _createpv(devices, metadataSize, options)
_lvminfo._invalidatepvs(devices)
if rc != 0:
log.error("pvcreate failed with rc=%s", rc)
log.error("%s, %s", out, err)
raise se.PhysDevInitializationError(str(devices))
return (set(devices), set(), rc, out, err)
def getLvDmName(vgName, lvName):
return "%s-%s" % (vgName.replace("-", "--"), lvName)
def removeVgMapping(vgName):
"""
Removes the mapping of the specified volume group.
Utilizes the fact that the mapping created by the LVM looks like that
e45c12b0--f520--498a--82bb--c6cb294b990f-master
i.e vg name concatenated with volume name (dash is escaped with dash)
"""
mappingPrefix = getLvDmName(vgName, "")
mappings = devicemapper.getAllMappedDevices()
for mapping in mappings:
if not mapping.startswith(mappingPrefix):
continue
try:
devicemapper.removeMapping(mapping)
except Exception:
pass
# Activation of the whole vg is assumed to be used nowhere.
# This is a separate function just in case.
def _setVgAvailability(vgs, available):
vgs = _normalizeargs(vgs)
cmd = ["vgchange", "--available", available] + vgs
rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs(vgs))
for vg in vgs:
_lvminfo._invalidatelvs(vg)
if rc != 0:
# During deactivation, in vg.py (sic):
# we ignore error here because we don't care about this vg anymore
if available == "n":
log.info("deactivate vg %s failed: rc %s - %s %s (ignored)" %
(vgs, rc, out, err))
else:
raise se.VolumeGroupActionError(
"vgchange on vg(s) %s failed. %d %s %s" % (vgs, rc, out, err))
def changelv(vg, lvs, attrs):
"""
Change multiple attributes on multiple LVs.
vg: VG name
lvs: a single LV name or iterable of LV names.
attrs: an iterable of (attr, value) pairs,
e.g. (('--available', 'y'), ('--permission', 'rw'))
Note:
You may activate an activated LV without error
but lvchange returns an error (RC=5) when activating rw if already rw
"""
lvs = _normalizeargs(lvs)
# If it fails or not we (may be) change the lv,
# so we invalidate cache to reload these volumes on first occasion
lvnames = tuple("%s/%s" % (vg, lv) for lv in lvs)
cmd = ["lvchange"]
cmd.extend(LVM_NOBACKUP)
if isinstance(attrs[0], str):
# ("--attribute", "value")
cmd.extend(attrs)
else:
# (("--aa", "v1"), ("--ab", "v2"))
for attr in attrs:
cmd.extend(attr)
cmd.extend(lvnames)
rc, out, err = _lvminfo.cmd(tuple(cmd), _lvminfo._getVGDevs((vg, )))
_lvminfo._invalidatelvs(vg, lvs)
if rc != 0 and len(out) < 1:
raise se.StorageException("%d %s %s\n%s/%s" % (rc, out, err, vg, lvs))
def _setLVAvailability(vg, lvs, available):
try:
changelv(vg, lvs, ("--available", available))
except se.StorageException as e:
error = ({"y": se.CannotActivateLogicalVolumes,
"n": se.CannotDeactivateLogicalVolume}
.get(available, se.VolumeGroupActionError))
raise error(str(e))
#
# Public Object Accessors
#
def getPV(pvName):
pv = _lvminfo.getPv(_fqpvname(pvName))
if pv is None:
raise se.InaccessiblePhysDev((pvName,))
return pv
def getAllPVs():
return _lvminfo.getAllPvs()
def testPVCreate(devices, metadataSize):
"""
Only tests the pv creation; should not affect the cache state.
Receives an iterable of guids.
Returns the sets of unused and used devices.
"""
devs = tuple("%s/%s" % (PV_PREFIX, dev) for dev in devices)
options = ("--test",)
rc, out, err = _createpv(devs, metadataSize, options)
if rc == 0:
unusedDevs = set(devices)
usedDevs = set()
else:
unusedDevs = set(re_pvName.findall("\n".join(out)))
usedDevs = set(devs) - set(unusedDevs)
log.debug("rc: %s, out: %s, err: %s, unusedDevs: %s, usedDevs: %s",
rc, out, err, unusedDevs, usedDevs)
return unusedDevs, usedDevs
def resizePV(vgName, guid):
"""
If the LUN was grown on the storage server, the PV must be resized
after the multipath devices have been resized in order to pick up
the change.
Raises se.CouldNotResizePhysicalVolume if pvresize fails
"""
pvName = _fqpvname(guid)
cmd = ["pvresize", pvName]
rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, )))
if rc != 0:
raise se.CouldNotResizePhysicalVolume(pvName, err)
_lvminfo._invalidatepvs(pvName)
_lvminfo._invalidatevgs(vgName)
def getVG(vgName):
vg = _lvminfo.getVg(vgName) # returns single VG namedtuple
if not vg:
raise se.VolumeGroupDoesNotExist(vgName)
else:
return vg
def getVGs(vgNames):
return _lvminfo.getVgs(vgNames) # returns list
def getAllVGs():
return _lvminfo.getAllVgs() # returns list
# TODO: lvm VG UUID should not be exposed.
# Remove this function when hsm.public_createVG is removed.
def getVGbyUUID(vgUUID):
# cycle through all the VGs until the one with the given UUID found
for vg in getAllVGs():
try:
if vg.uuid == vgUUID:
return vg
except AttributeError as e:
# An unreloadable VG found but may be we are not looking for it.
log.debug("%s" % e.message, exc_info=True)
continue
# If not cry loudly
raise se.VolumeGroupDoesNotExist("vg_uuid: %s" % vgUUID)
def getLV(vgName, lvName=None):
lv = _lvminfo.getLv(vgName, lvName)
# getLV() should not return None
if not lv:
raise se.LogicalVolumeDoesNotExistError("%s/%s" % (vgName, lvName))
else:
return lv
#
# Public Volume Group interface
#
def createVG(vgName, devices, initialTag, metadataSize, extentsize="128m",
force=False):
pvs = [_fqpvname(pdev) for pdev in _normalizeargs(devices)]
_checkpvsblksize(pvs)
_initpvs(pvs, metadataSize, force)
# Activate the 1st PV metadata areas
cmd = ["pvchange", "--metadataignore", "n"]
cmd.append(pvs[0])
rc, out, err = _lvminfo.cmd(cmd, tuple(pvs))
if rc != 0:
raise se.PhysDevInitializationError(pvs[0])
options = ["--physicalextentsize", extentsize]
if initialTag:
options.extend(("--addtag", initialTag))
cmd = ["vgcreate"] + options + [vgName] + pvs
rc, out, err = _lvminfo.cmd(cmd, tuple(pvs))
if rc == 0:
_lvminfo._invalidatepvs(pvs)
_lvminfo._invalidatevgs(vgName)
log.debug("Cache after createvg %s", _lvminfo._vgs)
else:
raise se.VolumeGroupCreateError(vgName, pvs)
def removeVG(vgName):
cmd = ["vgremove", "-f", vgName]
rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, )))
pvs = tuple(pvName for pvName, pv in _lvminfo._pvs.iteritems()
if not isinstance(pv, Stub) and pv.vg_name == vgName)
# PVs need to be reloaded anyhow: if the vg was removed they are stale;
# if vg removal failed, something must be wrong with the devices and we
# want the cache updated as well
_lvminfo._invalidatepvs(pvs)
# If vgremove failed reintroduce the VG into the cache
if rc != 0:
_lvminfo._invalidatevgs(vgName)
raise se.VolumeGroupRemoveError("VG %s remove failed." % vgName)
else:
# Remove the vg from the cache
_lvminfo._vgs.pop(vgName, None)
def removeVGbyUUID(vgUUID):
vg = getVGbyUUID(vgUUID)
if vg:
removeVG(vg.name)
def extendVG(vgName, devices, force):
pvs = [_fqpvname(pdev) for pdev in _normalizeargs(devices)]
_checkpvsblksize(pvs, getVGBlockSizes(vgName))
vg = _lvminfo.getVg(vgName)
# Format extension PVs as all the other already in the VG
_initpvs(pvs, int(vg.vg_mda_size) / 2 ** 20, force)
cmd = ["vgextend", vgName] + pvs
devs = tuple(_lvminfo._getVGDevs((vgName, )) + tuple(pvs))
rc, out, err = _lvminfo.cmd(cmd, devs)
if rc == 0:
_lvminfo._invalidatepvs(pvs)
_lvminfo._invalidatevgs(vgName)
log.debug("Cache after extending vg %s", _lvminfo._vgs)
else:
raise se.VolumeGroupExtendError(vgName, pvs)
def chkVG(vgName):
cmd = ["vgck", vgName]
rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, )))
if rc != 0:
_lvminfo._invalidatevgs(vgName)
_lvminfo._invalidatelvs(vgName)
raise se.StorageDomainAccessError("%s: %s" % (vgName, err))
return True
def deactivateVG(vgName):
getVG(vgName) # Check existence
_setVgAvailability(vgName, available="n")
def invalidateVG(vgName):
_lvminfo._invalidatevgs(vgName)
_lvminfo._invalidatelvs(vgName)
def _getpvblksize(pv):
dev = devicemapper.getDmId(os.path.basename(pv))
return multipath.getDeviceBlockSizes(dev)
def _checkpvsblksize(pvs, vgBlkSize=None):
for pv in pvs:
pvBlkSize = _getpvblksize(pv)
logPvBlkSize, phyPvBlkSize = pvBlkSize
if logPvBlkSize not in constants.SUPPORTED_BLOCKSIZE:
raise se.DeviceBlockSizeError(pvBlkSize)
if phyPvBlkSize < logPvBlkSize:
raise se.DeviceBlockSizeError(pvBlkSize)
# WARN: This is setting vgBlkSize to the first value found by
# _getpvblksize (if not provided by the function call).
# It makes sure that all the PVs have the same block size.
if vgBlkSize is None:
vgBlkSize = pvBlkSize
if logPvBlkSize != vgBlkSize[0]:
raise se.VolumeGroupBlockSizeError(vgBlkSize, pvBlkSize)
def checkVGBlockSizes(vgUUID, vgBlkSize=None):
pvs = listPVNames(vgUUID)
if not pvs:
raise se.VolumeGroupDoesNotExist("vg_uuid: %s" % vgUUID)
_checkpvsblksize(pvs, vgBlkSize)
def getVGBlockSizes(vgUUID):
pvs = listPVNames(vgUUID)
if not pvs:
raise se.VolumeGroupDoesNotExist("vg_uuid: %s" % vgUUID)
# Returning the block size of the first pv is correct since we don't allow
# devices with different block size to be on the same VG.
return _getpvblksize(pvs[0])
#
# Public Logical volume interface
#
def createLV(vgName, lvName, size, activate=True, contiguous=False,
initialTag=None):
"""
Size units: MiB (1024 ** 2 = 2 ** 20 bytes).
"""
# WARNING! From man vgs:
# All sizes are output in these units: (h)uman-readable, (b)ytes,
# (s)ectors, (k)ilobytes, (m)egabytes, (g)igabytes, (t)erabytes,
# (p)etabytes, (e)xabytes.
# Capitalise to use multiples of 1000 (S.I.) instead of 1024.
cont = {True: "y", False: "n"}[contiguous]
cmd = ["lvcreate"]
cmd.extend(LVM_NOBACKUP)
cmd.extend(("--contiguous", cont, "--size", "%sm" % size))
if initialTag is not None:
cmd.extend(("--addtag", initialTag))
cmd.extend(("--name", lvName, vgName))
rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, )))
if rc == 0:
_lvminfo._invalidatevgs(vgName)
_lvminfo._invalidatelvs(vgName, lvName)
else:
raise se.CannotCreateLogicalVolume(vgName, lvName)
# TBD: Need to explore the option of running lvcreate w/o devmapper
# so that if activation is not needed it would be skipped in the
# first place
if activate:
lv_path = lvPath(vgName, lvName)
st = os.stat(lv_path)
uName = pwd.getpwuid(st.st_uid).pw_name
gName = grp.getgrgid(st.st_gid).gr_name
if ":".join((uName, gName)) != USER_GROUP:
cmd = [constants.EXT_CHOWN, USER_GROUP, lv_path]
if misc.execCmd(cmd, sudo=True)[0] != 0:
log.warning("Could not change ownership of one or more "
"volumes in vg (%s) - %s", vgName, lvName)
else:
_setLVAvailability(vgName, lvName, "n")
def removeLVs(vgName, lvNames):
lvNames = _normalizeargs(lvNames)
# Assert that the LVs are inactive before remove.
for lvName in lvNames:
if _isLVActive(vgName, lvName):
# Fix me
# Should not remove active LVs
# raise se.CannotRemoveLogicalVolume(vgName, lvName)
log.warning("Removing active volume %s/%s" % (vgName, lvName))
# LV exists or not in cache, attempting to remove it.
# Removing Stubs also. Active Stubs should raise.
# Destroy LV
# Fix me:removes active LVs too. "-f" should be removed.
cmd = ["lvremove", "-f"]
cmd.extend(LVM_NOBACKUP)
for lvName in lvNames:
cmd.append("%s/%s" % (vgName, lvName))
rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, )))
if rc == 0:
for lvName in lvNames:
# Remove the LV from the cache
_lvminfo._lvs.pop((vgName, lvName), None)
# If lvremove succeeded it affected VG as well
_lvminfo._invalidatevgs(vgName)
else:
# Otherwise LV info needs to be refreshed
_lvminfo._invalidatelvs(vgName, lvNames)
raise se.CannotRemoveLogicalVolume(vgName, str(lvNames))
def _resizeLV(op, vgName, lvName, size):
"""
Size units: MiB (1024 ** 2 = 2 ** 20 bytes).
"""
# WARNING! From man vgs:
# All sizes are output in these units: (h)uman-readable, (b)ytes,
# (s)ectors,(k)ilobytes, (m)egabytes, (g)igabytes, (t)erabytes,
# (p)etabytes, (e)xabytes.
# Capitalise to use multiples of 1000 (S.I.) instead of 1024.
cmd = (op,) + LVM_NOBACKUP
cmd += ("--size", "%sm" % (size,), "%s/%s" % (vgName, lvName))
rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, )))
if rc == 0:
_lvminfo._invalidatevgs(vgName)
_lvminfo._invalidatelvs(vgName, lvName)
elif rc == 3:
# In LVM we trust. Hope that 3 is only for this.
log.debug("New size (in extents) matches existing size (in extents).")
elif rc != 0:
# get the free extents size
# YaRC
vg = getVG(vgName)
free_size = int(vg.extent_size) * int(vg.free_count) # in B
if free_size < int(size) * constants.MEGAB:
raise se.VolumeGroupSizeError("%s/%s %d > %d (MiB)" %
(vgName, lvName, int(size),
free_size / constants.MEGAB))
raise se.LogicalVolumeExtendError(vgName, lvName, "%sM" % (size, ))
def extendLV(vgName, lvName, size):
_resizeLV("lvextend", vgName, lvName, size)
def reduceLV(vgName, lvName, size):
_resizeLV("lvreduce", vgName, lvName, size)
def activateLVs(vgName, lvNames):
lvNames = _normalizeargs(lvNames)
toActivate = [lvName for lvName in lvNames
if not _isLVActive(vgName, lvName)]
if toActivate:
_setLVAvailability(vgName, toActivate, "y")
def deactivateLVs(vgName, lvNames):
lvNames = _normalizeargs(lvNames)
toDeactivate = [lvName for lvName in lvNames
if _isLVActive(vgName, lvName)]
if toDeactivate:
_setLVAvailability(vgName, toDeactivate, "n")
def renameLV(vg, oldlv, newlv):
cmd = ("lvrename",) + LVM_NOBACKUP + (vg, oldlv, newlv)
rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vg, )))
if rc != 0:
raise se.LogicalVolumeRenameError("%s %s %s" % (vg, oldlv, newlv))
_lvminfo._lvs.pop((vg, oldlv), None)
_lvminfo._reloadlvs(vg, newlv)
def refreshLVs(vgName, lvNames):
# If the logical volumes are active, reload their metadata.
cmd = ['lvchange', '--refresh']
cmd.extend("%s/%s" % (vgName, lv) for lv in lvNames)
rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, )))
_lvminfo._invalidatelvs(vgName, lvNames)
if rc != 0:
raise se.LogicalVolumeRefreshError("%s failed" % list2cmdline(cmd))
# Fix me: Function name should mention LV or unify with VG version.
# may be for all the LVs in the whole VG?
def addtag(vg, lv, tag):
lvname = "%s/%s" % (vg, lv)
cmd = ("lvchange",) + LVM_NOBACKUP + ("--addtag", tag) + (lvname,)
rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vg, )))
_lvminfo._invalidatelvs(vg, lv)
if rc != 0:
# Fix me: should be se.ChangeLogicalVolumeError but this not exists.
raise se.MissingTagOnLogicalVolume("%s/%s" % (vg, lv), tag)
def changeLVTags(vg, lv, delTags=(), addTags=()):
lvname = '%s/%s' % (vg, lv)
delTags = set(delTags)
addTags = set(addTags)
if delTags.intersection(addTags):
raise se.LogicalVolumeReplaceTagError(
"Cannot add and delete the same tag lv: `%s` tags: `%s`" %
(lvname, ", ".join(delTags.intersection(addTags))))
cmd = ['lvchange']
cmd.extend(LVM_NOBACKUP)
for tag in delTags:
cmd.extend(("--deltag", tag))
for tag in addTags:
cmd.extend(('--addtag', tag))
cmd.append(lvname)
rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vg, )))
_lvminfo._invalidatelvs(vg, lv)
if rc != 0:
raise se.LogicalVolumeReplaceTagError(
'lv: `%s` add: `%s` del: `%s` (%s)' %
(lvname, ", ".join(addTags), ", ".join(delTags), err[-1]))
def addLVTags(vg, lv, addTags):
changeLVTags(vg, lv, addTags=addTags)
#
# Helper functions
#
def lvPath(vgName, lvName):
return os.path.join("/dev", vgName, lvName)
def lvDmDev(vgName, lvName):
"""Return the LV dm device.
returns: dm-X
If the LV is inactive there is no dm device
and OSError will be raised.
"""
lvp = lvPath(vgName, lvName)
return os.path.basename(os.readlink(lvp))
def _isLVActive(vgName, lvName):
"""Active volumes have a mp link.
This function should not be used out of this module.
"""
return os.path.exists(lvPath(vgName, lvName))
def changeVGTags(vgName, delTags=(), addTags=()):
delTags = set(delTags)
addTags = set(addTags)
if delTags.intersection(addTags):
raise se.VolumeGroupReplaceTagError(
"Cannot add and delete the same tag vg: `%s` tags: `%s`" %
(vgName, ", ".join(delTags.intersection(addTags))))
cmd = ["vgchange"]
for tag in delTags:
cmd.extend(("--deltag", tag))
for tag in addTags:
cmd.extend(("--addtag", tag))
cmd.append(vgName)
rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vgName, )))
_lvminfo._invalidatevgs(vgName)
if rc != 0:
raise se.VolumeGroupReplaceTagError(
"vg:%s del:%s add:%s (%s)" %
(vgName, ", ".join(delTags), ", ".join(addTags), err[-1]))
def replaceVGTag(vg, oldTag, newTag):
changeVGTags(vg, [oldTag], [newTag])
def getFirstExt(vg, lv):
return getLV(vg, lv).devices.strip(" )").split("(")
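# Example (lvs "devices" field assumed): a first segment reported as
# "/dev/mapper/pv1(0)" yields ["/dev/mapper/pv1", "0"], i.e. the backing
# device and its starting physical extent.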
def listPVNames(vgName):
try:
pvNames = _lvminfo._vgs[vgName].pv_name
except (KeyError, AttributeError):
pvNames = getVG(vgName).pv_name
return pvNames
def setrwLV(vg, lv, rw=True):
permission = {False: 'r', True: 'rw'}[rw]
try:
changelv(vg, lv, ("--permission", permission))
except se.StorageException:
l = getLV(vg, lv)
if l.writeable == rw:
# Ignore the error since lv is now rw, hoping that the error was
# because lv was already rw, see BZ#654691. We may hide here
# another lvchange error.
return
raise se.CannotSetRWLogicalVolume(vg, lv, permission)
def lvsByTag(vgName, tag):
return [lv for lv in getLV(vgName) if tag in lv.tags]
def invalidateFilter():
_lvminfo.invalidateFilter()
# Fix me: unify with addTag
def replaceLVTag(vg, lv, deltag, addtag):
"""
Removes and add tags atomically.
"""
lvname = "%s/%s" % (vg, lv)
cmd = (("lvchange",) + LVM_NOBACKUP + ("--deltag", deltag) +
("--addtag", addtag) + (lvname,))
rc, out, err = _lvminfo.cmd(cmd, _lvminfo._getVGDevs((vg, )))
_lvminfo._invalidatelvs(vg, lv)
if rc != 0:
raise se.LogicalVolumeReplaceTagError("%s/%s" % (vg, lv),
"%s,%s" % (deltag, addtag))
| kvaps/vdsm | vdsm/storage/lvm.py | Python | gpl-2.0 | 44,652 |
# -*- coding:utf-8 -*-
def decode(data):
try:
value, idx = __decode(data, 0)
retval = (True, value)
except Exception as e:
retval = (False, e.message)
finally:
return retval
def encode(data):
try:
value = __encode(data)
retval = (True, value)
except Exception as e:
retval = (False, e.message)
finally:
return retval
# Internal helpers
# Parse bencoded data
def __decode(data, start_idx):
if data[start_idx] == 'i':
value, start_idx = __decode_int(data, start_idx + 1)
elif data[start_idx].isdigit():
value, start_idx = __decode_str(data, start_idx)
elif data[start_idx] == 'l':
value, start_idx = __decode_list(data, start_idx + 1)
elif data[start_idx] == 'd':
value, start_idx = __decode_dict(data, start_idx + 1)
else:
raise ValueError('__decode: not in i, l, d')
return value, start_idx
# Parse an integer
def __decode_int(data, start_idx):
end_idx = data.index('e', start_idx)
try:
value = int(data[start_idx: end_idx])
except Exception:
raise Exception('__decode_int: error')
return value, end_idx + 1
# Parse a string
def __decode_str(data, start_idx):
try:
end_idx = data.index(':', start_idx)
str_len = int(data[start_idx: end_idx])
start_idx = end_idx + 1
end_idx = start_idx + str_len
value = data[start_idx: end_idx]
except Exception:
raise Exception('__decode_str: error')
return value, end_idx
# Parse a list
def __decode_list(data, start_idx):
values = []
while data[start_idx] != 'e':
value, start_idx = __decode(data, start_idx)
values.append(value)
return values, start_idx + 1
# Parse a dictionary
def __decode_dict(data, start_idx):
dict_value = dict()
while data[start_idx] != 'e':
key, start_idx = __decode(data, start_idx)
value, start_idx = __decode(data, start_idx)
dict_value[key] = value
return dict_value, start_idx + 1
# Encode data
def __encode(data):
if isinstance(data, int):
value = __encode_int(data)
elif isinstance(data, str):
value = __encode_str(data)
elif isinstance(data, dict):
value = __encode_dict(data)
elif isinstance(data, list):
value = __encode_list(data)
else:
raise Exception('__encode: Error')
return value
# Encode an integer
def __encode_int(data):
return 'i' + str(data) + 'e'
# Encode a string
def __encode_str(data):
str_len = len(data)
return str(str_len) + ':' + data
# Encode a list
def __encode_list(data):
ret = 'l'
for datai in data:
ret += __encode(datai)
return ret + 'e'
# Encode a dictionary
def __encode_dict(data):
ret = 'd'
for key, value in data.items():
ret += __encode(key)
ret += __encode(value)
return ret + 'e'
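# Minimal round-trip self-check (sketch; the sample values are assumptions,
# not from the original repo). Runs only when the module is executed directly.
if __name__ == '__main__':
    ok, encoded = encode({'foo': ['bar', 42]})
    assert ok and encoded == 'd3:fool3:bari42eee'
    ok, decoded = decode(encoded)
    assert ok and decoded == {'foo': ['bar', 42]}
    print 'bencode round-trip OK'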
| fupenglin/PyDHT | dht_bencode.py | Python | gpl-2.0 | 2,915 |
#This file is distributed under the terms of the GNU General Public license.
#Copyright (C) 2011 Jekin Trivedi <[email protected]> (See the file COPYING for details).
from atlas import *
from physics import *
from physics import Quaternion
from physics import Vector3D
import sys
import server
class Repairing(server.Task):
"""A very simple Repair system for Repairing structures."""
materials = ["wood"]
def consume_materials (self) :
""" A method which gets the material to be consumed from the inventory & returns the consume operation """
for item in self.character.contains:
if item.type[0] == str(self.materials[0]):
set = Operation("set", Entity(item.id, status = -1), to = item)
return set
else :
print "No Wood in inventory"
return 0
def repair_operation(self, op):
""" The repair op is FROM the the character,
TO the structure that is getting Repaired which we
term the target. """
if len(op) < 1:
sys.stderr.write("Repair task has no target in repair op")
return
# FIXME Use weak references, once we have them
self.target = server.world.get_object_ref(op[0].id)
self.tool = op.to
def tick_operation(self, op):
""" This method is called repeatedly, each time a Repair turn occurs.
In this example the interval is fixed, but it can be varied. """
# print "Repair.tick"
res=Oplist()
current_status = 0
if self.target() is None:
# print "Target is no more"
self.irrelevant()
return
if self.character.stamina <= 0:
# print "I am exhausted"
self.irrelevant()
return
if square_distance(self.character.location, self.target().location) > self.target().location.bbox.square_bounding_radius():
self.progress = current_status
self.rate = 0
return self.next_tick(1.75)
# Some entities do not have status defined. If not present, we assume the entity is unharmed & stop the task
if hasattr ( self.target(), 'status' ) :
current_status = self.target().status
else:
set = Operation("set", Entity(self.self.target(), status = 1),
to = self.target)
res.append(set)
current_status = 1.0
self.irrelevant()
if current_status < 0.9:
set=Operation("set", Entity(self.target().id, status=current_status+0.1), to=self.target())
res.append(set)
consume = self.consume_materials ()
if consume :
res.append(consume)
else :
self.irrelevant()
else:
set = Operation("set", Entity(self.target().id, status = 1),
to = self.target())
res.append(set)
self.irrelevant()
self.progress = current_status
self.rate = 0.1 / 1.75
res.append(self.next_tick(1.75))
return res
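# Usage note (inferred from this file, not an official API description):
# the server drives a task like this by calling repair_operation() once
# when the task is attached and tick_operation() on every later tick; the
# task ends itself via self.irrelevant() when the target is gone, fully
# repaired, out of reach, or materials/stamina run out.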
| alriddoch/cyphesis | rulesets/mason/world/tasks/Repairing.py | Python | gpl-2.0 | 3,147 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Yan Yan'
'''
Deployment toolkit.
'''
import os, re
from datetime import datetime
from fabric.api import *
env.user = 'michael'
env.sudo_user = 'root'
env.hosts = ['192.168.0.3']
db_user = 'www-data'
db_password = 'www-data'
_TAR_FILE = 'dist-awesome.tar.gz'
_REMOTE_TMP_TAR = '/tmp/%s' % _TAR_FILE
_REMOTE_BASE_DIR = '/srv/awesome'
def _current_path():
return os.path.abspath('.')
def _now():
return datetime.now().strftime('%y-%m-%d_%H.%M.%S')
def backup():
'''
Dump entire database on server and backup to local.
'''
dt = _now()
f = 'backup-awesome-%s.sql' % dt
with cd('/tmp'):
run('mysqldump --user=%s --password=%s --skip-opt --add-drop-table --default-character-set=utf8 --quick awesome > %s' % (db_user, db_password, f))
run('tar -czvf %s.tar.gz %s' % (f, f))
get('%s.tar.gz' % f, '%s/backup/' % _current_path())
run('rm -f %s' % f)
run('rm -f %s.tar.gz' % f)
def build():
'''
Build dist package.
'''
includes = ['static', 'templates', 'transwarp', 'favicon.ico', '*.py']
excludes = ['test', '.*', '*.pyc', '*.pyo']
local('rm -f dist/%s' % _TAR_FILE)
with lcd(os.path.join(_current_path(), 'www')):
cmd = ['tar', '--dereference', '-czvf', '../dist/%s' % _TAR_FILE]
cmd.extend(['--exclude=\'%s\'' % ex for ex in excludes])
cmd.extend(includes)
local(' '.join(cmd))
def deploy():
newdir = 'www-%s' % _now()
run('rm -f %s' % _REMOTE_TMP_TAR)
put('dist/%s' % _TAR_FILE, _REMOTE_TMP_TAR)
with cd(_REMOTE_BASE_DIR):
sudo('mkdir %s' % newdir)
with cd('%s/%s' % (_REMOTE_BASE_DIR, newdir)):
sudo('tar -xzvf %s' % _REMOTE_TMP_TAR)
with cd(_REMOTE_BASE_DIR):
sudo('rm -f www')
sudo('ln -s %s www' % newdir)
sudo('chown www-data:www-data www')
sudo('chown -R www-data:www-data %s' % newdir)
with settings(warn_only=True):
sudo('supervisorctl stop awesome')
sudo('supervisorctl start awesome')
sudo('/etc/init.d/nginx reload')
RE_FILES = re.compile('\r?\n')
def rollback():
'''
rollback to previous version
'''
with cd(_REMOTE_BASE_DIR):
r = run('ls -p -1')
files = [s[:-1] for s in RE_FILES.split(r) if s.startswith('www-') and s.endswith('/')]
files.sort(cmp=lambda s1, s2: 1 if s1 < s2 else -1)
r = run('ls -l www')
ss = r.split(' -> ')
if len(ss) != 2:
print ('ERROR: \'www\' is not a symbol link.')
return
current = ss[1]
print ('Found current symbol link points to: %s\n' % current)
try:
index = files.index(current)
except ValueError, e:
print ('ERROR: symbol link is invalid.')
return
if len(files) == index + 1:
print ('ERROR: already the oldest version.')
return
old = files[index + 1]
print ('==================================================')
for f in files:
if f == current:
print (' Current ---> %s' % current)
elif f == old:
print (' Rollback to ---> %s' % old)
else:
print (' %s' % f)
print ('==================================================')
print ('')
yn = raw_input ('continue? y/N ')
if yn != 'y' and yn != 'Y':
print ('Rollback cancelled.')
return
print ('Start rollback...')
sudo('rm -f www')
sudo('ln -s %s www' % old)
sudo('chown www-data:www-data www')
with settings(warn_only=True):
sudo('supervisorctl stop awesome')
sudo('supervisorctl start awesome')
sudo('/etc/init.d/nginx reload')
print ('ROLLBACKED OK.')
def restore2local():
'''
Restore db to local
'''
backup_dir = os.path.join(_current_path(), 'backup')
fs = os.listdir(backup_dir)
files = [f for f in fs if f.startswith('backup-') and f.endswith('.sql.tar.gz')]
files.sort(cmp=lambda s1, s2: 1 if s1 < s2 else -1)
if len(files)==0:
print 'No backup files found.'
return
print ('Found %s backup files:' % len(files))
print ('==================================================')
n = 0
for f in files:
print ('%s: %s' % (n, f))
n = n + 1
print ('==================================================')
print ('')
try:
num = int(raw_input ('Restore file: '))
except ValueError:
print ('Invalid file number.')
return
restore_file = files[num]
yn = raw_input('Restore file %s: %s? y/N ' % (num, restore_file))
if yn != 'y' and yn != 'Y':
print ('Restore cancelled.')
return
print ('Start restore to local database...')
p = raw_input('Input mysql root password: ')
sqls = [
'drop database if exists awesome;',
'create database awesome;',
'grant select, insert, update, delete on awesome.* to \'%s\'@\'localhost\' identified by \'%s\';' % (db_user, db_password)
]
for sql in sqls:
local(r'mysql -uroot -p%s -e "%s"' % (p, sql))
with lcd(backup_dir):
local('tar zxvf %s' % restore_file)
local(r'mysql -uroot -p%s awesome < backup/%s' % (p, restore_file[:-7]))
with lcd(backup_dir):
local('rm -f %s' % restore_file[:-7])
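# Typical invocation sketch (assumes the Fabric 1.x CLI; task names are the
# functions defined above):
#
# fab build deploy # package www/ and roll it onto the server
# fab rollback # symlink www back to the previous www-* dir
# fab backup restore2local # dump remote MySQL, then load a dump locally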
| Byronic94/py-blog | fabfile.py | Python | gpl-2.0 | 5,473 |
import sys
import string
f = sys.stdin
g = sys.stdout
echo = 0
while 1:
l = f.readline()
if not l: break
ll=string.strip(l)
if ll=='BEGIN-LOG':
echo = 1
elif ll=='END-LOG':
echo = 0
elif echo:
l=string.replace(l,"-0.000"," 0.000") # squish annoying negative zeros
g.write(l)
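# Usage sketch (assumed invocation): the script is a stdin-to-stdout filter,
# e.g. python trim.py < raw.log > trimmed.log
# Only lines strictly between the BEGIN-LOG and END-LOG markers are kept,
# with "-0.000" squished to " 0.000".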
| gratefulfrog/lib | python/pymol/pymol_path/test/trim.py | Python | gpl-2.0 | 319 |
# encoding=utf-8
import codecs
import sys
from src.view.FileProcessingOutput import FileProcessingOutput
class FileProcessing():
def __init__(self):
self.fileProcessingOutput = FileProcessingOutput()
def read_input_file(self, file_path, file_type):
'''
Read and process the input file.
:param file_path: path of the file to read
:param file_type: expected line format (see check_line_format)
:return: file_lines
'''
file_lines = []
line_counter = 0
self.fileProcessingOutput.print_reading_file(file_path)
try:
with codecs.open(file_path, encoding='utf8') as f:
for l in f:
line_counter += 1
line = l.strip().encode("utf-8")
if line != "":
if self.check_line_format(line, file_type, line_counter):
file_lines.append(line)
self.fileProcessingOutput.print_input_file_lines(len(file_lines))
except:
self.fileProcessingOutput.print_error_reading_file()
sys.exit()
if not file_lines:
self.fileProcessingOutput.print_error_reading_file()
sys.exit()
return file_lines
def check_line_format(self, line, file_type, line_counter):
'''
Check that a line matches the processing format for the given file type.
:param line: line to process
:param file_type: file type
:param line_counter: line counter, used to report errors.
:return: whether the line conforms to the expected format.
'''
if file_type == 0:
return True
elif file_type == 1:
if not ':' in line:
self.fileProcessingOutput.print_error_delimiter_not_found(line_counter)
sys.exit()
return True
elif file_type == 2:
if not ':' in line:
self.fileProcessingOutput.print_error_delimiter_not_found(line_counter)
sys.exit()
_splitted_line = line.split(':')
if len(_splitted_line) < 3:
self.fileProcessingOutput.print_error_format_not_correct(line_counter)
sys.exit()
return True
| r4wd3r/VAGO | src/controller/FileProcessing.py | Python | gpl-2.0 | 2,276 |
import serial
class SerialConnection:
"""RS-232 connection to the Agilent power supply"""
def __init__(self, port='/dev/usb/ttyUSB0', baudrate=9600, parity=serial.PARITY_NONE, bytesize=serial.EIGHTBITS):
"""Initialize the connection"""
self.serial = serial.Serial(port=port, baudrate=baudrate, parity=parity, bytesize=bytesize, stopbits=serial.STOPBITS_TWO, dsrdtr=True, timeout=1)
def write(self, data):
"""Send one command to the device"""
self.serial.write(data+'\n')
def readline(self):
"""Read one line from the device"""
return self.serial.readline()[:-2]
def question(self, data, cnt=0):
"""Send one query to the device and returns the answer"""
        if cnt > 2: raise Exception("Too many empty responses to query: %s" % data)
self.write(data)
res = self.readline()
        if res == "": return self.question(data, cnt+1)
return res
def open(self):
"""Open the connection"""
self.serial.open()
def close(self):
"""Close the connection"""
self.serial.close()
def isOpen(self):
"""Check if the port is opened."""
return self.serial.isOpen()
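# A minimal usage sketch (the port name and the "*IDN?" SCPI query are
# assumptions; adjust both for the actual instrument):
if __name__ == "__main__":
    conn = SerialConnection(port='/dev/ttyUSB0')
    print(conn.question('*IDN?'))
    conn.close()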
| delaere/aaptos | SerialConnection.py | Python | gpl-2.0 | 1,127 |
# encoding: utf-8
# module gtk._gtk
# from /usr/lib/python2.7/dist-packages/gtk-2.0/gtk/_gtk.so
# by generator 1.135
# no doc
# imports
import atk as __atk
import gio as __gio
import gobject as __gobject
import gobject._gobject as __gobject__gobject
class PrintStatus(__gobject.GEnum):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
__dict__ = None # (!) real value is ''
__enum_values__ = {
0: 0,
1: 1,
2: 2,
3: 3,
4: 4,
5: 5,
6: 6,
7: 7,
8: 8,
}
__gtype__ = None # (!) real value is ''
| ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/gtk/_gtk/PrintStatus.py | Python | gpl-2.0 | 792 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007 by Kai Blin
#
# Plunger is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
"""The plunger file handler for the MD3 format.
The module supports export only for now.
"""
import math
import struct
try:
from plunger import toolbox
except ImportError:
import sys
sys.path.append('..')
import toolbox
sys.path.pop()
format = "md3"
extension = ".md3"
needs_dir = False
does_export = True
does_import = False
# Info from http://icculus.org/homepages/phaethon/q3a/formats/md3format.html
# Augmented by the libmodelfile headers by Alistair Riddoch, as the specfile
# is kind of inaccurate.
MD3_IDENT = "IDP3"
MD3_VERSION = 15
MD3_MAX_FRAMES = 1024
MD3_MAX_TAGS = 16
MD3_MAX_SURFACES = 32
MD3_MAX_SHADERS = 256
MD3_MAX_VERTS = 4096
MD3_MAX_TRIANGLES = 8192
class Md3Frame:
def __init__(self):
self.min_bounds = [0,0,0]
self.max_bounds = [0,0,0]
self.local_origin = [0,0,0]
self.radius = 0.0
self.name = ""
self.fmt = "fff fff fff f 8s"
def packSize(self):
return struct.calcsize(self.fmt)
def pack(self):
pack_str = ""
pack_str += struct.pack("fff", self.min_bounds.split())
pack_str += struct.pack("fff", self.max_bounds.split())
pack_str += struct.pack("fff", self.local_origin.split())
pack_str += struct.pack("f", self.radius)
pack_str += struct.pack("8s", self.name)
return pack_str
class Md3Tag:
def __init__(self):
self.name = ""
self.origin = [0,0,0]
self.axis = [[1,0,0], [0,1,0], [0,0,1]]
self.fmt = "64s fff fff fff fff"
def packSize(self):
return struct.calcsize(self.fmt)
def pack(self):
pack_str = ""
pack_str += struct.pack("64s", self.name)
pack_str += struct.pack("fff", self.origin.split())
for row in self.axis:
pack_str += struct.pack("fff", row.split())
return pack_str
class Md3Shader:
def __init__(self):
self.name = ""
self.index = 0
self.fmt = "64s i"
def packSize(self):
return struct.calcsize(self.fmt)
def pack(self):
pack_str = ""
pack_str += struct.pack("64s", self.name)
pack_str += struct.pack("i", self.index)
class Md3Triangle:
def __init__(self):
self.indices = [0,0,0]
self.fmt = "iii"
def packSize(self):
return struct.calcsize(self.fmt)
def pack(self):
return struct.pack("iii", self.indices.split())
class Md3TexCoord:
def __init__(self):
self.uv_coords = [0,0]
self.fmt = "ff"
def packSize(self):
return struct.calcsize(self.fmt)
def pack(self):
        return struct.pack(self.fmt, *self.uv_coords)
class Md3Vertex:
def __init__(self):
self.coord = [0,0,0]
self.normal = [0,0]
self.factor = 1.0 / 64
self.fmt = "hhh BB"
def packSize(self):
return struct.calcsize(self.fmt)
def pack(self):
pack_str = ""
pack_str += struct.pack("hhh", self.coord.split())
pack_str += struct.pack("BB", self.normal.split())
return pack_str
def scaleDown(self, coords):
return [i * self.factor for i in coords]
class Md3Surface:
def __init__(self):
self.ident = MD3_IDENT
self.name = ""
self.num_frames = 0
self.num_shaders = 0
self.num_verts = 0
self.num_triangles = 0
self.shaders = []
self.triangles = []
self.uv_coords = []
self.vertices = []
self.fmt = "4s 68s iiiiiiiii"
def packSize(self):
size = struct.calcsize(self.fmt)
size += len(self.shaders) * Md3Shader().packSize()
size += len(self.triangles) * Md3Triangle().packSize()
size += len(self.uv_coords) * Md3TexCoord().packSize()
size += len(self.vertices) * Md3Vertex().packSize()
return size
def pack(self):
pack_str = ""
pack_str += struct.pack("4s", self.ident)
pack_str += struct.pack("68s", self.name)
pack_str += struct.pack("ii", self.num_frames, self.num_shaders)
pack_str += struct.pack("ii", self.num_verts, self.num_triangles)
ofs_shaders = struct.calcsize(self.fmt)
ofs_triangles = ofs_shaders + len(self.shaders) * Md3Shader().packSize()
ofs_uv_coords = ofs_triangles + len(self.triangles) * Md3Triangle().packSize()
ofs_vertices = ofs_uv_coords + len(self.uv_coords) * Md3TexCoord().packSize()
ofs_end = ofs_vertices + len(self.vertices) * Md3Vertex().packSize()
pack_str += struct.pack("ii", ofs_triangles, ofs_shaders)
pack_str += struct.pack("iii", ofs_uv_coords, ofs_vertices, ofs_end)
for shader in self.shaders:
pack_str += shader.pack()
for tri in self.triangles:
pack_str += tri.pack()
for texcoord in self.uv_coords:
pack_str += texcoord.pack()
for vert in self.vertices:
            pack_str += vert.pack()
        return pack_str
class MD3Object:
def __init__(self):
self.ident = MD3_IDENT
self.version = MD3_VERSION
self.name = ""
self.num_frames = 0
self.num_tags = 0
self.num_surfaces = 0
self.num_skins = 0
self.frames = []
self.tags = []
self.surfaces = []
def pack(self):
pack_str = ""
fmt = "4si68siiiiiiii"
pack_str += struct.pack("4s", self.ident)
pack_str += struct.pack("i", self.version)
pack_str += struct.pack("68s", self.name)
pack_str += struct.pack("i", self.num_frames)
pack_str += struct.pack("i", self.num_tags)
pack_str += struct.pack("i", self.num_surfaces)
pack_str += struct.pack("i", self.num_skins)
ofs_frames = struct.calcsize(fmt)
ofs_tags = ofs_frames + len(self.frames) * Md3Frame().packSize()
ofs_surfaces = ofs_tags + len(self.tags) * Md3Tag().packSize()
ofs_eof = ofs_surfaces + len(self.surfaces) * Md3Surface().packSize()
pack_str += struct.pack("i", ofs_frames)
pack_str += struct.pack("i", ofs_tags)
pack_str += struct.pack("i", ofs_surfaces)
pack_str += struct.pack("i", ofs_eof)
for frame in self.frames:
pack_str += frame.pack()
for tag in self.tags:
pack_str += tag.pack()
for surface in self.surfaces:
pack_str += surface.pack()
return pack_str
def importAsset(model, asset):
raise NotImplementedError
def exportAsset(model, asset):
out = toolbox.writeAny(asset)
md3_object = MD3Object()
meshes = model.getMeshes()
    #TODO: Put stuff into the MD3Object here
out.write(md3_object.pack())
out.close()
def encodeNormal(x,y,z):
"""Returns (azimuth, zenith) angles of the normal vector
"""
azimuth = math.atan2(y, x) * 255 / (2 * math.pi)
zenith = math.acos(z) * 255 / (2 * math.pi)
return (azimuth, zenith)
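# Worked example for the encoding above: encodeNormal(0, 0, 1) -> (0.0, 0.0)
# since atan2(0, 0) == 0 and acos(1) == 0, while encodeNormal(1, 0, 0)
# -> (0.0, 63.75) since acos(0) == pi/2 maps to 255/4.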
| kblin/plunger | plunger/plugins/md3.py | Python | gpl-2.0 | 7,657 |
# -*- coding: utf-8 -*-
# __author__: Yixuan LI
# __email__: [email protected]
import os
import json
import re
from optparse import OptionParser
import tweepy
import time
class UserTimeline:
def __init__(self,inputDir,outputDir):
self.inputDir = inputDir
self.outputDir = outputDir
os.system("mkdir -p %s"%(outputDir))
# Get the names of the files under the input directory and save them in a list
self.fileList = os.listdir(inputDir)
print self.fileList
self.userHash = {} # [key,value] pair to record the unique users in the tweets
self.uniqueUserCount = 0 # count unique users in the dataset
self.tweetCount = 0 # total tweets processed
self.api = None
def authentication(self):
consumer_key="z86C8djY3bYOPD1WkYV73nVP6"
consumer_secret="BT8oKrcj955MKjv0qS8Kra2Iw91E3uSMTqEVurfTmKjXfG0hNm"
access_token="746349096-Bz1n8T6vNEFBAMG2YqVdJFOtrM321d5HeupxMlxM"
access_token_secret="ZZQZsjvJXnIlyl04Mg2vCxS8g122b3AljpiytiKCKRFPL"
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
self.api = tweepy.API(auth)
print "authentication finished"
def get_user_id(self):
written = 0
if os.path.exists(self.outputDir + "/" + "uniqueUserID.txt"):
pass
else:
for tweetFile in self.fileList[1:]:
with open(self.inputDir+"/"+tweetFile,'r') as fin:
for line in fin:
try:
lineContents = json.loads(line) # load a line
self.tweetCount += 1
print self.tweetCount # for debugging
except:
continue
try:
if lineContents["coordinates"] is not None:
continue
else:
# extract user's id
userID = lineContents["user"]["id"]
# extract tweet text and convert the string to lower case (http://stackoverflow.com/questions/6797984/how-to-convert-string-to-lowercase-in-python)
#tweet = lineContents["text"].lower()
if not self.userHash.has_key(userID): # if the user has not been counted
self.uniqueUserCount += 1 # count the number of unique users
self.userHash[userID] = True
fileNum = int(self.uniqueUserCount/7250 + 1)
with open(self.outputDir + "/" + "uniqueUserID_"+str(fileNum)+".txt","a") as fileout:
written += 1
fileout.write(str(userID))
fileout.write("\n")
print written," written"
except:
continue
print "There are ", self.uniqueUserCount, "unique users"
print self.tweetCount, " tweets processed"
def get_user_timeline(self):
with open(self.outputDir + "/" + "uniqueUserID_6.txt",'r') as fin:
for userID in fin:
# store the tweets of each user in a single file named by the {userID}.json
filePath = self.outputDir + "/" + str(userID[:-1])+".json"
print userID
if os.path.exists(filePath):
with open(filePath,'r') as myfile:
count = sum(1 for line in myfile)
if count > 900:
continue
else:
# http://stackoverflow.com/questions/6996603/how-do-i-delete-a-file-or-folder-in-python
os.remove(filePath)
pageCount = 1
trialTime = 0
# get user timeline tweets
while pageCount < 6:
print "Collecting", pageCount, " -th page"
# open the output file in append mode
self.fout = open(filePath,"a")
try:
tweets = self.api.user_timeline(id=userID,count=200,page=pageCount)
pageCount += 1
except:
time.sleep(70)
trialTime += 1
if trialTime == 2:
pageCount = 8
continue
# write to file
# Note that data returned by api.user_timeline is status object
for tweet in tweets:
print tweet.text
# convert tweepy status object to json format
# http://stackoverflow.com/questions/27900451/convert-tweepy-status-object-into-json
self.fout.write(json.dumps(tweet._json))
self.fout.write('\n')
time.sleep(70) # rate limit (15 requests per 15 minutes window)
if __name__=='__main__':
#########################################################################################
# Parse the arguments
class MyParser(OptionParser):
def format_epilog(self, formatter):
return self.epilog
usage = "usage: python plot_stats.py [options]"
description = """
"""
epilog = """
"""
parser = MyParser(usage, description=description,epilog=epilog)
parser.add_option("--inputDir", "--input file of twitter data", dest="input_path", default=None,
help="input directory of twitter streaming data in JSON format [default: None]")
parser.add_option("--outputDir", "--output directory of twitter user timeline data", dest="output_path", default=None,
help="output directory of twitter user timeline data [default: None]")
(options, args) = parser.parse_args()
# input directory
inputDir = options.input_path
# output directory
outputDir = options.output_path
########################################################################
getter = UserTimeline(inputDir,outputDir)
getter.authentication()
#getter.get_user_id()
getter.get_user_timeline()
| YixuanLi/geo-tweet | twitter-timeline/get_non_locator_timeline.py | Python | gpl-2.0 | 5,282 |
import math
import random
import GameData
from Util.TileTypes import *
from Util import Line, StarCallback
def initializeRandom( x, y ):
dist = math.sqrt( x ** 2 + y ** 2 )
angle = math.atan2( x, y ) / math.pi * 5
rand = ( random.random() * 7 ) - 3.5
val = ( ( dist + angle + rand ) % 10 )
if val > 5:
return 1
else:
return 0
def circle(x0, y0, radius, endRadius, cb):
stepSize = 1.0 / endRadius
angle = math.pi / 2
while angle >= 0:
c = math.cos( angle )
s = math.sin( angle )
r = radius
while r < endRadius:
cb( int( c * r ) + x0, int( s * r ) + y0 )
cb( int( s * r ) + x0, int( c * r ) + y0 )
cb(-int( c * r ) + x0, int( s * r ) + y0 )
cb(-int( s * r ) + x0, int( c * r ) + y0 )
cb( int( c * r ) + x0,-int( s * r ) + y0 )
cb( int( s * r ) + x0,-int( c * r ) + y0 )
cb(-int( c * r ) + x0,-int( s * r ) + y0 )
cb(-int( s * r ) + x0,-int( c * r ) + y0 )
r += 0.5
angle -= stepSize
def buildFixedWalls( self, I, _buffer, val ):
#Clear center room
centerX = int( self.width / 2 )
centerY = int( self.height / 2 )
for x in range( centerX - GameData.MapGen_CenterRoom_Size[0] - 1, centerX + GameData.MapGen_CenterRoom_Size[0] + 1 ):
for y in range( centerY - GameData.MapGen_CenterRoom_Size[1] - 1, centerY + GameData.MapGen_CenterRoom_Size[1] + 1 ):
_buffer[ I( x, y ) ] = 0
#Build center room walls
for x in range( centerX - GameData.MapGen_CenterRoom_Size[0] - 1, centerX + GameData.MapGen_CenterRoom_Size[0] + 1 ):
_buffer[ I( x, centerY - GameData.MapGen_CenterRoom_Size[1] - 1 ) ] = val
_buffer[ I( x, centerY + GameData.MapGen_CenterRoom_Size[1] ) ] = val
for y in range( centerY - GameData.MapGen_CenterRoom_Size[1] - 1, centerY + GameData.MapGen_CenterRoom_Size[1] + 1 ):
_buffer[ I( centerX - GameData.MapGen_CenterRoom_Size[0] - 1, y ) ] = val
_buffer[ I( centerX + GameData.MapGen_CenterRoom_Size[0], y ) ] = val
def preIterInit( self, I, _buffer ):
#Outer wall
for x in range( self.width ):
_buffer[ I( x, 0 ) ] = 1
_buffer[ I( x, self.height - 1 ) ] = 1
for y in range( self.height ):
_buffer[ I( 0, y ) ] = 1
_buffer[ I( self.width - 1, y ) ] = 1
#Area around outer wall
for x in range( 1, self.width- 1 ):
_buffer[ I( x, 1 ) ] = 0
_buffer[ I( x, self.height - 2 ) ] = 0
for y in range( 1, self.height - 1 ):
_buffer[ I( 1, y ) ] = 0
_buffer[ I( self.width - 2, y ) ] = 0
buildFixedWalls( self, I, _buffer, 1 )
def postInit( self, I, _buffer ):
centerX = int( self.width / 2 )
centerY = int( self.height / 2 )
for x in range( self.width ):
for y in range( self.height ):
i = I( x, y )
val = _buffer[ i ]
if val == 0:
_buffer[ i ] = TILE_AIR #NOOP, but for clarity
elif val == 1:
_buffer[ i ] = TILE_WALL
else:
raise Exception( "Incorrect tile type in postInit!" )
for x in range( self.width ):
_buffer[ I( x, 0 ) ] = TILE_FIXED_WALL
_buffer[ I( x, self.height - 1 ) ] = TILE_FIXED_WALL
for y in range( self.height ):
_buffer[ I( 0, y ) ] = TILE_FIXED_WALL
_buffer[ I( self.width - 1, y ) ] = TILE_FIXED_WALL
buildFixedWalls( self, I, _buffer, TILE_FIXED_WALL )
curSurface = ( GameData.MapGen_CenterRoom_Size[0] * 2 ) * ( GameData.MapGen_CenterRoom_Size[1] * 2 )
curRadius = -1
def setFixedWall( x, y ):
_buffer[ I( int( x ), int( y ) ) ] = TILE_FIXED_WALL
circleNum = 0
while curRadius < GameData.MapGen_MaxCircleRadius:
sectionCount = max( circleNum * GameData.MapGen_CircleSectionsPerLayer, 1 )
nextSurface = curSurface + ( GameData.MapGen_BaseSurface * sectionCount )
nextRadius = int( math.sqrt( nextSurface / math.pi ) )
circle( centerX, centerY, nextRadius, nextRadius + 2, setFixedWall )
#Seperate sections in circle
if sectionCount > 1:
for i in range( sectionCount ):
angle = i * math.pi * 2 / sectionCount
s = math.sin( angle )
c = math.cos( angle )
Line( int( s * ( curRadius + 1 ) ) + centerX, int( c * ( curRadius + 1 ) ) + centerY, int( s * nextRadius ) + centerX, int( c * nextRadius ) + centerY, StarCallback( setFixedWall ) )
curRadius = nextRadius
curSurface = int( curRadius ** 2 * math.pi )
circleNum += 1
print( curRadius )
curRadius += 1
curRadiusSquared = curRadius ** 2
for x in range( self.width ):
for y in range( self.height ):
if ( ( x - centerX ) ** 2 + ( y - centerY ) ** 2 ) > curRadiusSquared:
_buffer[ I( x, y ) ] = TILE_AIR #NOOP, but for clarity
| KevinVDVelden/7DRL_2015 | Game/MapGen.py | Python | gpl-2.0 | 4,996 |
#
# Copyright (C) 2009 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Chris Lumens <[email protected]>
#
from blivet.util import stringize, unicodeize
from pykickstart.constants import AUTOPART_TYPE_PLAIN, AUTOPART_TYPE_BTRFS, AUTOPART_TYPE_LVM, \
AUTOPART_TYPE_LVM_THINP
class PartSpec(object):
def __init__(self, mountpoint=None, fstype=None, size=None, max_size=None,
grow=False, btr=False, lv=False, thin=False, weight=0,
required_space=0, encrypted=False, schemes=None):
""" Create a new storage specification. These are used to specify
the default partitioning layout as an object before we have the
storage system up and running. The attributes are obvious
except for the following:
btr -- Should this be allocated as a btrfs subvolume? If not,
it will be allocated as a partition.
lv -- Should this be allocated as a logical volume? If not,
it will be allocated as a partition.
thin -- Should this be allocated as a thin logical volume if it is
being allocated as a logical volume?
weight -- An integer that modifies the sort algorithm for partition
requests. A larger value means the partition will end up
closer to the front of the disk. This is mainly used to
make sure /boot ends up in front, and any special (PReP,
appleboot, etc.) partitions end up in front of /boot.
This value means nothing unless lv and btr are both False.
required_space -- This value is only taken into account if
lv=True, and specifies the size in MiB that the
containing VG must be for this PartSpec to even
get used. The VG's size is calculated before any
other LVs are created inside it. If not enough
space exists, this PartSpec will never get turned
into an LV.
encrypted -- Should this request be encrypted? For logical volume
requests, this is satisfied if the PVs are encrypted
as in the case of encrypted LVM autopart.
schemes -- Create the mount point only for specific schemes if any.
"""
self.mountpoint = mountpoint
self.fstype = fstype
self.size = size
self.max_size = max_size
self.grow = grow
self.lv = lv
self.btr = btr
self.thin = thin
self.weight = weight
self.required_space = required_space
self.encrypted = encrypted
self.schemes = schemes or set()
# Force str and unicode types in case any of the properties are unicode
def _to_string(self):
s = ("%(type)s instance (%(id)s) -- \n"
" mountpoint = %(mountpoint)s lv = %(lv)s"
" thin = %(thin)s btrfs = %(btrfs)s\n"
" weight = %(weight)s fstype = %(fstype)s encrypted = %(enc)s\n"
" size = %(size)s max_size = %(max_size)s grow = %(grow)s\n" %
{"type": self.__class__.__name__, "id": "%#x" % id(self),
"mountpoint": self.mountpoint, "lv": self.lv, "btrfs": self.btr,
"weight": self.weight, "fstype": self.fstype, "size": self.size,
"enc": self.encrypted, "max_size": self.max_size, "grow": self.grow,
"thin": self.thin})
return s
def is_partition(self, scheme):
"""Is the specified device a partition in the given scheme?
:param scheme: a partitioning scheme
:return: True or False
"""
return not self.is_volume(scheme)
def is_volume(self, scheme):
"""Is the specified device a volume in the given scheme?
:param scheme: a partitioning scheme
:return: True or False
"""
if scheme == AUTOPART_TYPE_PLAIN:
return False
return self.is_lvm_volume(scheme) or self.is_btrfs_subvolume(scheme)
def is_lvm_volume(self, scheme):
"""Is the specified device an LVM volume in the given scheme?
:param scheme: a partitioning scheme
:return: True or False
"""
return scheme in (AUTOPART_TYPE_LVM, AUTOPART_TYPE_LVM_THINP) and self.lv
def is_lvm_thin_volume(self, scheme):
"""Is the specified device an LVM thin volume in the given scheme?
:param scheme: a partitioning scheme
:return: True or False
"""
if not self.is_lvm_volume(scheme):
return False
return scheme == AUTOPART_TYPE_LVM_THINP and self.thin
def is_btrfs_subvolume(self, scheme):
"""Is the specified device a Btrfs subvolume in the given scheme?
:param scheme: a partitioning scheme
:return: True or False
"""
return scheme == AUTOPART_TYPE_BTRFS and self.btr
def __str__(self):
return stringize(self._to_string())
def __unicode__(self):
return unicodeize(self._to_string())
def __eq__(self, other):
return isinstance(other, PartSpec) and vars(self) == vars(other)
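# A minimal usage sketch (the values are hypothetical; "size" normally takes
# a blivet Size object, plain strings are used here only for illustration):
if __name__ == "__main__":
    boot = PartSpec(mountpoint="/boot", fstype="ext4", size="1GiB", weight=2000)
    root = PartSpec(mountpoint="/", fstype="ext4", size="1GiB", grow=True, lv=True)
    print(boot)
    print(root.is_lvm_volume(AUTOPART_TYPE_LVM))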
| sgallagher/anaconda | pyanaconda/modules/storage/partitioning/specification.py | Python | gpl-2.0 | 6,228 |
import bpy
from .utils import MultiCamContext
class MultiCamFadeError(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return repr(self.msg)
class BlendObj(object):
def __init__(self, **kwargs):
self.children = set()
p = self.parent = kwargs.get('parent')
if p is not None:
kwargs.setdefault('context', p.context)
self.context = kwargs.get('context')
self.blend_obj = kwargs.get('blend_obj')
if hasattr(self.__class__, 'fcurve_property'):
self.fcurve_property = self.__class__.fcurve_property
if not hasattr(self, 'fcurve_property'):
self.fcurve_property = kwargs.get('fcurve_property')
@property
def blend_obj(self):
return getattr(self, '_blend_obj', None)
@blend_obj.setter
def blend_obj(self, value):
old = self.blend_obj
if value == old:
return
self._blend_obj = value
self.on_blend_obj_set(value, old)
def on_blend_obj_set(self, new, old):
self._fcurve = None
@property
def context(self):
context = getattr(self, '_context', None)
if context is None:
context = bpy.context
return context
@context.setter
def context(self, value):
old = getattr(self, '_context', None)
if old == value:
return
self._context = value
self.on_context_set(value, old)
def on_context_set(self, new, old):
self._fcurve = None
for obj in self.children:
obj.context = new
@property
def fcurve(self):
fc = getattr(self, '_fcurve', None)
if fc is None:
fc = self._fcurve = self.get_fcurve()
return fc
def get_fcurve(self):
path = self.blend_obj.path_from_id()
action = self.context.scene.animation_data.action
if action is None:
return None
prop = self.fcurve_property
for fc in action.fcurves.values():
if path not in fc.data_path:
continue
if fc.data_path.split('.')[-1] != prop:
continue
return fc
def remove_fcurve(self):
if self.fcurve is None:
return
action = self.context.scene.animation_data.action
action.fcurves.remove(self.fcurve)
self._fcurve = None
def iter_keyframes(self):
for kf in self.fcurve.keyframe_points.values():
yield kf.co
def insert_keyframe(self, frame, value, prop=None, **kwargs):
if prop is None:
prop = self.fcurve_property
if self.fcurve is None:
self.blend_obj.keyframe_insert(prop, frame=frame)
kf = self.get_keyframe(frame)
kf.co[1] = value
else:
kf = self.fcurve.keyframe_points.insert(frame, value)
for key, val in kwargs.items():
setattr(kf, key, val)
return kf
def get_keyframe(self, frame):
for kf in self.fcurve.keyframe_points.values():
if kf.co[0] == frame:
return kf
def add_child(self, cls, **kwargs):
kwargs.setdefault('parent', self)
obj = cls(**kwargs)
self.children.add(obj)
return obj
def del_child(self, obj):
self.children.discard(obj)
class MultiCam(BlendObj):
fcurve_property = 'multicam_source'
def __init__(self, **kwargs):
super(MultiCam, self).__init__(**kwargs)
self.mc_fader = self.add_child(MultiCamFade)
self.cuts = {}
self.strips = {}
def bake_strips(self):
if not len(self.cuts):
self.build_cuts()
self.build_strip_keyframes()
self.blend_obj.mute = True
def build_cuts(self):
for frame, channel in self.iter_keyframes():
self.cuts[frame] = channel
if channel not in self.strips:
self.get_strip_from_channel(channel)
def build_fade(self, fade=None, frame=None):
if fade is None and frame is not None:
fade = self.mc_fader.build_fade(frame)
if fade is None:
return
for channel in range(1, self.blend_obj.channel):
if channel not in self.strips:
self.get_strip_from_channel(channel)
if channel not in self.strips:
continue
self.strips[channel].build_fade(fade)
def build_fades(self):
self.mc_fader.build_fades()
def build_strip_keyframes(self):
for strip in self.strips.values():
strip.build_keyframes()
def get_strip_from_channel(self, channel):
for s in self.context.scene.sequence_editor.sequences:
if s.channel == channel:
source = self.add_child(MulticamSource, blend_obj=s)
self.strips[channel] = source
return source
class MultiCamFade(BlendObj):
def __init__(self, **kwargs):
self.multicam = kwargs.get('parent', kwargs.get('multicam'))
self.fade_props = {}
self.fades = {}
super(MultiCamFade, self).__init__(**kwargs)
if self.blend_obj is None:
self.blend_obj = self.get_fade_prop_group()
def on_blend_obj_set(self, new, old):
for prop in self.fade_props.values():
self.del_child(prop)
self.fade_props.clear()
self.fades.clear()
if new is None:
return
self.get_fade_props()
def get_fade_prop_group(self):
mc_data_path = self.multicam.blend_obj.path_from_id()
return self.context.scene.multicam_fader_properties.get(mc_data_path)
def get_fade_props(self):
action = self.context.scene.animation_data.action
group_name = 'Multicam Fader (%s)' % (self.multicam.blend_obj.name)
group = action.groups.get(group_name)
for fc in group.channels:
key = fc.data_path.split('.')[-1]
fade_prop = self.add_child(MultiCamFadeProp, fcurve_property=key)
self.fade_props[key] = fade_prop
def build_fade(self, frame):
self.build_fades(frame)
return self.fades.get(frame)
def build_fades(self, fade_frame=None):
prop_iters = {}
for key, prop in self.fade_props.items():
prop_iters[key] = prop.iter_keyframes()
def find_next_fade(frame=None):
prop_vals = {'start':{}, 'end':{}}
start_frame = None
try:
for key, prop in prop_iters.items():
frame, value = next(prop)
if start_frame is None:
start_frame = frame
elif frame != start_frame:
raise MultiCamFadeError('keyframes are not aligned: %s' % ({'frame':frame, 'prop_vals':prop_vals}))
prop_vals['start'][key] = value
except StopIteration:
return None, None, None
end_frame = None
for key, prop in prop_iters.items():
frame, value = next(prop)
if end_frame is None:
end_frame = frame
elif frame != end_frame:
raise MultiCamFadeError('keyframes are not aligned: %s' % ({'frame':frame, 'prop_vals':prop_vals}))
prop_vals['end'][key] = value
return start_frame, end_frame, prop_vals
while True:
need_update = False
start_frame, end_frame, prop_vals = find_next_fade()
if start_frame is None:
break
if fade_frame is not None and fade_frame != start_frame:
continue
d = {
'start_frame':start_frame,
'end_frame':end_frame,
'start_source':prop_vals['start']['start_source'],
'next_source':prop_vals['start']['next_source'],
}
if start_frame not in self.fades:
need_update = True
self.fades[start_frame] = d
else:
for key, val in self.fades[start_frame].items():
if d[key] != val:
need_update = True
self.fades[start_frame][key] = d[key]
if need_update:
self.multicam.build_fade(d)
if fade_frame is not None:
break
class MultiCamFadeProp(BlendObj):
def __init__(self, **kwargs):
super(MultiCamFadeProp, self).__init__(**kwargs)
self.blend_obj = self.parent.blend_obj
class MulticamSource(BlendObj):
fcurve_property = 'blend_alpha'
def __init__(self, **kwargs):
super(MulticamSource, self).__init__(**kwargs)
self.multicam = self.parent
self.mc_fader = self.multicam.mc_fader
self._keyframe_data = None
@property
def keyframe_data(self):
d = self._keyframe_data
if d is None:
d = self._keyframe_data = self.build_keyframe_data()
return d
def build_keyframe_data(self):
d = {}
cuts = self.multicam.cuts
channel = self.blend_obj.channel
is_active = False
is_first_keyframe = True
for frame in sorted(cuts.keys()):
cut = cuts[frame]
if cut == channel:
d[frame] = True
is_active = True
elif is_active:
d[frame] = False
is_active = False
elif is_first_keyframe:
d[frame] = False
is_first_keyframe = False
return d
def build_fade(self, fade):
channel = self.blend_obj.channel
start_frame = fade['start_frame']
end_frame = fade['end_frame']
start_ch = fade['start_source']
end_ch = fade['next_source']
if channel < min([start_ch, end_ch]):
## this strip won't be affected
return
if start_ch == channel:
if end_ch < channel:
values = [1., 0.]
else:
values = [1., 1.]
elif end_ch == channel:
if start_ch < channel:
values = [0., 1.]
else:
values = [1., 1.]
elif channel > max([start_ch, end_ch]) or channel < max([start_ch, end_ch]):
values = [0., 0.]
else:
return
self.insert_keyframe(start_frame, values[0], interpolation='BEZIER')
self.insert_keyframe(end_frame, values[1], interpolation='CONSTANT')
self.insert_keyframe(end_frame+1, 1., interpolation='CONSTANT')
def build_fades(self):
for start_frame in sorted(self.mc_fader.fades.keys()):
fade = self.mc_fader.fades[start_frame]
self.build_fade(fade)
def build_keyframes(self):
self.remove_fcurve()
for frame, is_active in self.keyframe_data.items():
if is_active:
value = 1.
else:
value = 0.
self.insert_keyframe(frame, value, interpolation='CONSTANT')
class MultiCamBakeStrips(bpy.types.Operator, MultiCamContext):
'''Bakes the mulicam source into the affected strips using opacity'''
bl_idname = 'sequencer.bake_multicam_strips'
bl_label = 'Bake Multicam Strips'
def execute(self, context):
mc = MultiCam(blend_obj=self.get_strip(context),
context=context)
mc.bake_strips()
return {'FINISHED'}
def register():
bpy.utils.register_class(MultiCamBakeStrips)
def unregister():
bpy.utils.unregister_class(MultiCamBakeStrips)
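# Standard convention for running the module directly inside Blender; once
# registered, the operator is reachable as
# bpy.ops.sequencer.bake_multicam_strips() with a multicam strip selected.
if __name__ == "__main__":
    register()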
| nocarryr/blender-scripts | multicam_tools/multicam.py | Python | gpl-2.0 | 11,734 |
import time
import traceback
from .optionparser import args
debug_level=args.debug or 0
debug_file=args.debug_file
timestamp=args.time
if debug_file:
import re
debug_file = re.compile(debug_file)
if debug_level > 0: print('DEBUG_LEVEL=',debug_level)
if debug_file: print('DEBUG_FILE=',debug_file)
def debug (message, level=10):
if timestamp: ts= '%s:'%time.time()
else: ts = ''
if level <= debug_level:
stack = traceback.extract_stack()
if len(stack) >= 2:
caller=stack[-2]
finame=caller[0]
line = caller[1]
else:
finame = " ".join(stack)
line = ""
if args.debug_file:
if debug_file.search(finame):
print("DEBUG: ",ts,"%s: %s"%(finame,line),message)
else:
print("DEBUG: ",ts,"%s: %s"%(finame,line),message)
timers = {}
class TimeAction:
def __init__ (self, name, level=10):
self.level = level
if level <= debug_level:
self.name = name
self.start = time.time()
def end (self):
if self.level <= debug_level:
end = time.time()
t=end-self.start
# grab our location
stack=traceback.extract_stack()
if len(stack)>2:
caller=stack[-2]
finame=caller[0]
line = caller[1]
else:
finame = " ".join(stack)
line = ""
if not args.debug_file or debug_file.search(finame):
print("DEBUG: %s TOOK %s SECONDS"%(self.name,t))
if self.name not in timers: timers[self.name]=[t]
else: timers[self.name].append(t)
def print_timer_info ():
for n,times in list(timers.items()):
print("%s:"%n, end=' ')
for t in times: print("%.02e"%t,",", end=' ')
print("")
if __name__ == '__main__':
t=TimeAction('this is a test',0)
debug('This is a test',0)
debug('This is another test',0)
t.end()
print_timer_info()
| kirienko/gourmet | src/gourmet/gdebug.py | Python | gpl-2.0 | 2,063 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2012 Anne Archibald <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
class ContainerError(ValueError):
"""Error signaling something went wrong with container handling"""
pass
class Container(object):
"""A container is an object that manages objects it contains.
The objects in a container each have a .container attribute that
points to the container. This attribute is managed by the container
itself.
This class is a base class that provides common container functionality,
to be used to simplify implementation of list and dict containers.
"""
def _set_container(self, item):
if hasattr( item, "container" ) and item.container not in (None,self):
# raise ContainerError("Item %s was added to container %s but was already in container %s" % (item, self, item.container))
item.container.remove( item )
item.container = self
def _unset_container(self, item):
if item.container is not self:
raise ContainerError("Item %s was removed from container %s but was not in it" % (item, self))
item.container = None
def _set_container_multi(self, items):
"""Put items in the container in an all-or-nothing way"""
r = []
try:
for i in items:
self._set_container(i)
r.append(i)
r = None
finally: # Make sure items don't get added to this if any fail
if r is not None:
for i in r:
try:
self._unset_container(i)
except ContainerError:
pass
def _unset_container_multi(self, items):
"""Remove items from the container in an all-or-nothing way"""
r = []
try:
for i in items:
self._unset_container(i)
r.append(i)
r = None
finally:
if r is not None:
for i in r:
try:
self._set_container(i)
except ContainerError:
pass
class ContainerList(list,Container):
"""A ContainerList is a list whose children know they're in it.
Each element in the ContainerList has a .container attribute which points
to the ContainerList itself. This container pointer is maintained automatically.
"""
def __init__(self, items=[], owner=None):
list.__init__(self, items)
self._set_container_multi(items)
self.owner = owner
def __repr__(self):
return "<CL %s>" % list.__repr__(self)
def append(self, item):
self._set_container(item)
list.append(self,item)
def extend(self, items):
self._set_container_multi(items)
list.extend(self,items)
def insert(self, i, item):
self._set_container(item)
list.insert(self,i,item)
def remove(self, item):
self._unset_container(item)
list.remove(self,item)
def pop(self, i=-1):
self._unset_container(self[i])
return list.pop(self,i)
# These don't work because they make the elements part of more than one list, or one list more than once
def __add__(self, other):
raise NotImplementedError
def __radd__(self, other):
raise NotImplementedError
def __imul__(self,other):
raise NotImplementedError
def __mul__(self, other):
raise NotImplementedError
def __rmul__(self,other):
raise NotImplementedError
# only works if other is not also a Container
def __iadd__(self, other):
self.extend(other)
return self
def __setitem__(self, key, value):
# FIXME: check slices work okay
if isinstance(key, slice):
self._unset_container_multi(self[key])
try:
self._set_container_multi(value)
except ContainerError:
self._set_container_multi(self[key])
raise
else:
self._unset_container(self[key])
try:
self._set_container(value)
except ContainerError:
self._set_container(self[key])
raise
list.__setitem__(self,key,value)
def __delitem__(self, key):
# FIXME: check slices work okay
if isinstance(key, slice):
self._unset_container_multi(self[key])
else:
self._unset_container(self[key])
list.__delitem__(self,key)
# Needed for python2, forbidden for python3
def __delslice__(self,i,j):
del self[slice(i,j,None)]
class ContainerDict(dict,Container):
"""A ContainerDict is a dict whose children know they're in it.
Each element in the ContainerDict has a .container attribute which points
to the ContainerDict itself. This container pointer is maintained automatically.
"""
def __init__(self, contents=None, **kwargs):
if contents is None:
dict.__init__(self, **kwargs)
else:
dict.__init__(self, contents, **kwargs)
self._set_container_multi(list(self.values()))
def __repr__(self):
return "<CD %s>" % dict.__repr__(self)
def __setitem__(self, key, value):
if key in self:
self._unset_container(self[key])
try:
self._set_container(value)
except ContainerError:
if key in self:
self._set_container(self[key])
raise
dict.__setitem__(self,key,value)
def __delitem__(self, key):
if key in self:
self._unset_container(self[key])
dict.__delitem__(self,key)
def pop(self, key):
if key in self:
self._unset_container(self[key])
return dict.pop(self,key)
def popitem(self):
key, value = dict.popitem(self)
self._unset_container(value)
return key, value
def setdefault(self, key, default=None):
if key not in self:
self._set_container(default)
dict.setdefault(self, key, default)
def update(self, other):
for (k,v) in list(other.items()):
self[k] = v
if __name__=='__main__':
class Gear(object):
def __init__(self, name, container=None):
self.name = name
self.container = container
def __repr__(self):
return "<G "+str(self.name)+">"
gears = [Gear(n) for n in range(10)]
a = Gear("A")
b = Gear("B")
c = Gear("C")
d = Gear("D")
e = Gear("E")
p = ContainerList([a,b,c])
print(p)
try:
p.append(a)
except ContainerError as err:
print(err)
else:
raise AssertionError
print(p[1])
print(p[::2])
p[1] = d
print(p)
p[1] = b
p[::2] = [d,e]
print(p)
del p[:]
p2 = ContainerList([a,b,c])
print(p2)
p2.extend([d,e])
print(p2)
print(p2.pop())
print(p2)
p2.remove(d)
print(p2)
p2 += [d,e]
print(p2)
try:
d = ContainerDict(a=a, b=b, c=c)
except ContainerError as err:
print(err)
else:
raise AssertionError
del p2[:]
d = ContainerDict(a=a, b=b, c=c)
print(d)
print(d["a"])
d["a"] = a
try:
d["a"] = b
except ContainerError as err:
print(err)
else:
raise AssertionError
del d["a"]
d["a"] = a
d.pop("a")
print(d)
d["a"] = a
k,v = d.popitem()
d[k] = v
d.setdefault("e",e)
d.setdefault("e",e)
print(d)
del d["e"]
d.update(dict(e=e))
print(d)
| jwvhewitt/dmeternal | old_game/container.py | Python | gpl-2.0 | 8,515 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2002 Ben Escoto <[email protected]>
# Copyright 2007 Kenneth Loafman <[email protected]>
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
import duplicity.backend
hsi_command = "hsi"
class HSIBackend(duplicity.backend.Backend):
def __init__(self, parsed_url):
duplicity.backend.Backend.__init__(self, parsed_url)
self.host_string = parsed_url.hostname
self.remote_dir = parsed_url.path
if self.remote_dir:
self.remote_prefix = self.remote_dir + "/"
else:
self.remote_prefix = ""
def _put(self, source_path, remote_filename):
commandline = '%s "put %s : %s%s"' % (hsi_command, source_path.name, self.remote_prefix, remote_filename)
self.subprocess_popen(commandline)
def _get(self, remote_filename, local_path):
commandline = '%s "get %s : %s%s"' % (hsi_command, local_path.name, self.remote_prefix, remote_filename)
self.subprocess_popen(commandline)
def _list(self):
commandline = '%s "ls -l %s"' % (hsi_command, self.remote_dir)
l = self.subprocess_popen(commandline)[2]
l = l.split(os.linesep)[3:]
for i in range(0, len(l)):
if l[i]:
l[i] = l[i].split()[-1]
return [x for x in l if x]
def _delete(self, filename):
commandline = '%s "rm %s%s"' % (hsi_command, self.remote_prefix, filename)
self.subprocess_popen(commandline)
duplicity.backend.register_backend("hsi", HSIBackend)
duplicity.backend.uses_netloc.extend(['hsi'])
| mjuric/duplicity | duplicity/backends/hsibackend.py | Python | gpl-2.0 | 2,314 |
# -*- coding: utf-8 -*-
from canaimagnulinux.wizard.interfaces import IChat
from canaimagnulinux.wizard.interfaces import ISocialNetwork
from canaimagnulinux.wizard.utils import CanaimaGnuLinuxWizardMF as _
from collective.beaker.interfaces import ISession
from collective.z3cform.wizard import wizard
from plone import api
from plone.z3cform.fieldsets import group
from z3c.form import field
try:
from zope.browserpage import viewpagetemplatefile
except ImportError:
# Plone < 4.1
from zope.app.pagetemplate import viewpagetemplatefile
import logging
logger = logging.getLogger(__name__)
class ChatGroup(group.Group):
prefix = 'chats'
label = _(u'Chats Information')
fields = field.Fields(IChat)
class SocialNetworkGroup(group.Group):
prefix = 'socialnetwork'
label = _(u'Social Network Information')
fields = field.Fields(ISocialNetwork)
class SocialNetworkStep(wizard.GroupStep):
prefix = 'Social'
label = _(u'Social Network accounts')
description = _(u'Input your social networks details')
template = viewpagetemplatefile.ViewPageTemplateFile('templates/socialnetwork.pt')
fields = field.Fields()
groups = [ChatGroup, SocialNetworkGroup]
def __init__(self, context, request, wizard):
# Use collective.beaker for session managment
session = ISession(request, None)
self.sessionmanager = session
super(SocialNetworkStep, self).__init__(context, request, wizard)
    def load(self, context):
        member = api.user.get_current()
        data = self.getContent()
        # Chats group and Social Network group fields share the same logic
        for key in ('irc', 'telegram', 'skype',
                    'twitter', 'instagram', 'facebook'):
            if not data.get(key, None):
                value = member.getProperty(key)
                if type(value).__name__ == 'object':
                    value = None
                data[key] = value
def apply(self, context, initial_finish=False):
data = self.getContent()
return data
def applyChanges(self, data):
member = api.user.get_current()
member.setMemberProperties(mapping={
'irc': data['irc'],
'telegram': data['telegram'],
'skype': data['skype'],
'twitter': data['twitter'],
'instagram': data['instagram'],
'facebook': data['facebook']}
)
| CanaimaGNULinux/canaimagnulinux.wizard | canaimagnulinux/wizard/browser/socialnetwork.py | Python | gpl-2.0 | 3,378 |
# -*- coding: utf-8 -*-
import application
import platform
import exceptions
from ctypes import c_char_p
from libloader import load_library
import paths
if platform.architecture()[0][:2] == "32":
lib = load_library("api_keys32", x86_path=paths.app_path("keys/lib"))
else:
lib = load_library("api_keys64", x64_path=paths.app_path("keys/lib"))
# import linuxKeys
# lib = linuxKeys
keyring = None
def setup():
global keyring
    if keyring is None:
keyring = Keyring()
class Keyring(object):
def __init__(self):
super(Keyring, self).__init__()
def _call_method(self, function):
result = getattr(lib, function)
result = c_char_p(result.__call__())
return result.value
def get(self, func):
if hasattr(application,func+"_override"):
return getattr(application,func+'_override')
return getattr(self, "_call_method")("get_"+func)
| codeofdusk/ProjectMagenta | src/keys/__init__.py | Python | gpl-2.0 | 847 |
import itertools
from django.conf import settings
from django.contrib.syndication.views import Feed, FeedDoesNotExist
from django.utils import timezone
from schedule.feeds.ical import ICalendarFeed
from schedule.models import Calendar
class UpcomingEventsFeed(Feed):
feed_id = "upcoming"
def feed_title(self, obj):
return "Upcoming Events for %s" % obj.name
def get_object(self, request, calendar_id):
return Calendar.objects.get(pk=calendar_id)
def link(self, obj):
if not obj:
raise FeedDoesNotExist
return obj.get_absolute_url()
def items(self, obj):
return itertools.islice(
#obj.occurrences_after(timezone.now()),
obj.occurrences_after(timezone.localtime(timezone.now())),
getattr(settings, "FEED_LIST_LENGTH", 10),
)
def item_id(self, item):
return str(item.id)
def item_title(self, item):
return item.event.title
def item_authors(self, item):
if item.event.creator is None:
return [{"name": ""}]
return [{"name": item.event.creator.username}]
def item_updated(self, item):
return item.event.created_on
def item_content(self, item):
return "{} \n {}".format(item.event.title, item.event.description)
class CalendarICalendar(ICalendarFeed):
def items(self):
cal_id = self.args[1]
cal = Calendar.objects.get(pk=cal_id)
return cal.events.all()
def item_uid(self, item):
return str(item.id)
def item_start(self, item):
return item.start
def item_end(self, item):
return item.end
def item_summary(self, item):
return item.title
def item_created(self, item):
return item.created_on
| tommybobbins/PiThermostat | django/hotf/schedule/feeds/__init__.py | Python | gpl-2.0 | 1,787 |
import logging
from django.db import models
class BaseModel(models.Model):
class Meta:
# Makes django recognize model in split modules
app_label = 'sdn'
# Turns this into an abstract model (does not create table for it)
abstract = True
# Default exception for models in manager
class ModelException(Exception):
# Get an instance of a logger
logger = logging.getLogger(__name__)
def __init__(self, msg):
self.msg = msg
self.logger.warning(msg)
def __str__(self):
return repr(self.msg)
| ComputerNetworks-UFRGS/AuroraSDN | sdn/models/base_model.py | Python | gpl-2.0 | 601 |
# -*- coding:UTF-8 -*-
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.layers import Embedding
from keras.layers import LSTM,Flatten
import numpy as np
import loadpkl
(X_train, y_train),(X_test, y_test) = loadpkl.load_data()
#print "hello"
X_train = np.array(X_train)
y_train = np.array(y_train)
print X_train.shape
print y_train.shape
t0=X_train.shape[0]/300
#print t0
X_train = X_train.reshape(t0,50,6) # number of rows in the training set
y_train = y_train.reshape(t0,50)
print X_train.shape
print y_train.shape
X_test = np.array(X_test)
y_test = np.array(y_test)
t1=X_test.shape[0]/300
print t1
X_test = X_test.reshape(t1,50,6) # number of rows in the test set
y_test = y_test.reshape(t1,50)
#print (X_train, y_train)
#print (X_test, y_test)
#print X_train
#print y_train
Y_train = y_train
#Y_test = y_test
#model = Sequential()
#model.add(Dense(200, input_dim = 30))
#model.add(Activation('tanh'))
#model.add(Dense(100))
#model.add(Activation('sigmoid'))
#model.add(Dense(50))
#model.add(Activation('tanh'))
#model.add(Dense(30))
#model.add(Activation('tanh'))
#model.add(Dense(20))
#model.add(Activation('tanh'))
#model.add(Dense(6))
#model.add(Activation('softmax'))
#model.compile(optimizer = 'rmsprop', loss = 'mse', metrics=['accuracy'])
#model.fit(X_train, Y_train, batch_size=10, nb_epoch=100, verbose=1, validation_split=0.2, shuffle=True)
#prob = model.predict(X_test)
#prob=model.evaluate(X_test, Y_test, batch_size=32, verbose=1)
#print prob
model = Sequential()
model.add(LSTM(256, input_shape=(50,6),return_sequences=True))# 32
#model.add(Dropout(0.5))
model.add(LSTM(128,return_sequences=True))
#model.add(Dropout(0.5))
model.add(LSTM(64,return_sequences=True))
model.add(Dropout(0.5))
model.add(LSTM(50,return_sequences=True))
#model.add(Flatten())
#model.add(Dense(50))
model.compile(optimizer = 'rmsprop', loss = 'mse', metrics=['accuracy'])
model.fit(X_train, Y_train, batch_size=100, nb_epoch=10, verbose=1, validation_split=0.2, shuffle=True)
#json_string = model.to_json()
#open('my_model_architecture.json', 'w').write(json_string)
#model.save_weights('my_model_weights.h5')
prob = model.predict(X_test)
#n=0
#for i in prob:
# if n<100:
# print i
# n=n+1
# else:break
print prob
#model = Sequential()
#model.add(LSTM(32, input_shape=(6,5),return_sequences=True))
#model.add(Activation('sigmoid'))
#model.add(LSTM(16,return_sequences=True))
#model.add(Activation('sigmoid'))
#model.add(LSTM(8,return_sequences=True))
#model.add(Activation('sigmoid'))
#model.add(LSTM(6))
#model.add(Activation('tanh'))
#model.compile(optimizer = 'rmsprop', loss = 'mse', metrics=['accuracy'])
#model.fit(X_train, Y_train, batch_size=100, nb_epoch=100, verbose=1, validation_split=0.2, shuffle=True)
#prob = model.predict(X_test)
#n=0
#for i in prob:
# if n<100:
# print i
# n=n+1
# else:break
#print prob
| lefteye/superroutingchioce | model.py | Python | gpl-2.0 | 2,888 |
"""
Module with universal etree module
"""
__all__ = ('etree', )
try:
from lxml import etree
except ImportError:
try:
import xml.etree.cElementTree as etree
except (ImportError, SystemError):
import xml.etree.ElementTree as etree
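# Quick self-check that behaves the same with any of the three backends:
if __name__ == '__main__':
    root = etree.fromstring('<root><child/></root>')
    print(root.tag)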
| bacher09/gpackages-metadata | packages_metadata/generic_metadata/my_etree.py | Python | gpl-2.0 | 262 |
# Copyright 2007, Red Hat, Inc
# James Bowes <[email protected]>
# Alex Wood <[email protected]>
#
# This software may be freely redistributed under the terms of the GNU
# general public license.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import func_module
import yum
# XXX Use internal yum callback or write a useful one.
class DummyCallback(object):
def event(self, state, data=None):
pass
class Yum(func_module.FuncModule):
version = "0.0.1"
api_version = "0.1.0"
description = "Package updates through yum."
def update(self, pkg=None):
ayum = yum.YumBase()
ayum.doGenericSetup()
ayum.doRepoSetup()
try:
ayum.doLock()
            if pkg is not None:
tx_result = ayum.update(pattern=pkg)
else:
tx_result = ayum.update()
ayum.buildTransaction()
ayum.processTransaction(
callback=DummyCallback())
finally:
ayum.closeRpmDB()
ayum.doUnlock()
return map(str, tx_result)
def check_update(self, filter=[], repo=None):
"""Returns a list of packages due to be updated
You can specify a filter using the standard yum wildcards
"""
# parsePackages expects a list and doesn't react well if you send in a plain string with a wildcard in it
# (the string is broken into a list and one of the list elements is "*" which matches every package)
if type(filter) not in [list, tuple]:
filter = [filter]
ayum = yum.YumBase()
ayum.doConfigSetup()
ayum.doTsSetup()
if repo is not None:
ayum.repos.enableRepo(repo)
pkg_list = ayum.doPackageLists('updates').updates
if filter:
# exactmatch are all the packages with exactly the same name as one in the filter list
# matched are all the packages that matched under any wildcards
# unmatched are all the items in the filter list that didn't match anything
exactmatch, matched, unmatched = yum.packages.parsePackages(pkg_list, filter)
pkg_list = exactmatch + matched
return map(str, pkg_list)
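# A minimal sketch for running on a minion directly (direct instantiation is
# an assumption; the func dispatcher normally constructs the module, and the
# "kernel*" wildcard is just an example filter):
if __name__ == "__main__":
    y = Yum()
    print y.check_update(filter=["kernel*"])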
| kadamski/func | func/minion/modules/yumcmd.py | Python | gpl-2.0 | 2,371 |
#!/usr/bin/env python
# coding=utf8
#
"""
Python SDK for Weibo
Simple wrapper for weibo oauth2
author: [email protected]
"""
import time
from utils.http import request
from utils.http import SDataDict
from utils.http import encode_params
from utils.const import WEIBO_DOMAIN
from utils.const import WEIBO_VERSION
from utils.errors import WeiboAPIError
from utils.errors import SSOBaseException
class HttpObject(object):
def __init__(self, client, method):
self.client = client
self.method = method
def __getattr__(self, attr):
def wrap(**kwargs):
if self.client.is_expires():
raise WeiboAPIError('21327', 'expired_token')
return request(self.method,
'%s%s.json' % (self.client.api_url,
attr.replace('__', '/')),
self.client.access_token,
**kwargs)
return wrap
class APIClient(object):
""" API client using synchronized invocation """
def __init__(self, app_key, app_secret, redirect_uri=None, response_type='code'):
self.client_id = app_key
self.client_secret = app_secret
self.redirect_uri = redirect_uri
self.response_type = response_type
self.auth_url = 'http://%s/oauth2/' % WEIBO_DOMAIN
self.api_url = 'https://%s/%s/' % (WEIBO_DOMAIN, WEIBO_VERSION)
        self.api_url = 'http://%s/' % WEIBO_DOMAIN  # overrides the versioned URL above
self.access_token = None
self.expires = 0.0
self.get = HttpObject(self, 'GET')
self.post = HttpObject(self, 'POST')
self.upload = HttpObject(self, 'UPLOAD')
def set_access_token(self, access_token, expires_in):
self.access_token = str(access_token)
self.expires = float(expires_in)
def get_authorize_url(self, redirect_uri=None, display='default'):
""" return the authroize url that should be redirect """
redirect = redirect_uri if redirect_uri else self.redirect_uri
if not redirect:
raise WeiboAPIError('21305', 'Parameter absent: redirect_uri', 'OAuth2 request')
kwargs = dict(client_id=self.client_id,
response_type='code',
display=display,
redirect_uri=redirect)
encoded_params, _ = encode_params('GET', **kwargs)
print encoded_params
return '%s%s?%s' % (self.auth_url, 'authorize', encoded_params)
def request_access_token(self, code, redirect_uri=None):
"""
return access token as object:
{"access_token":"your-access-token","expires_in":12345678}
expires_in is standard unix-epoch-time
"""
redirect = redirect_uri if redirect_uri else self.redirect_uri
if not redirect:
raise WeiboAPIError('21305', 'Parameter absent: redirect_uri')
r = request('GET', '%s%s' % (self.auth_url, 'access_token'),
client_id=self.client_id, client_secret=self.client_secret,
redirect_uri=redirect, code=code, grant_type='authorization_code')
r.expires_in += int(time.time())
return r
def is_expires(self):
return not self.access_token or time.time() > self.expires
def __getattr__(self, attr):
return getattr(self.get, attr)
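# A minimal OAuth2 flow sketch (APP_KEY, APP_SECRET and CALLBACK_URL are
# placeholders; the authorization code arrives on the redirect):
if __name__ == '__main__':
    client = APIClient('APP_KEY', 'APP_SECRET', redirect_uri='CALLBACK_URL')
    print client.get_authorize_url()
    # r = client.request_access_token(code_from_redirect)
    # client.set_access_token(r.access_token, r.expires_in)
    # client.get.statuses__user_timeline()  # maps to statuses/user_timeline.json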
| seraphln/chat2all | chat2all/sso/weibo/api.py | Python | gpl-2.0 | 3,381 |
# -*- coding: utf-8 -*-
###############################################################################
#
# GetCategories
# Returns the latest category hierarchy for the eBay site.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class GetCategories(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the GetCategories Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(GetCategories, self).__init__(temboo_session, '/Library/eBay/Trading/GetCategories')
def new_input_set(self):
return GetCategoriesInputSet()
def _make_result_set(self, result, path):
return GetCategoriesResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return GetCategoriesChoreographyExecution(session, exec_id, path)
class GetCategoriesInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the GetCategories
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_CategoryParent(self, value):
"""
Set the value of the CategoryParent input for this Choreo. ((optional, string) Indicates the ID of the highest-level category to return. Multiple CategoryParent IDs can be specified in a comma-separated list.)
"""
super(GetCategoriesInputSet, self)._set_input('CategoryParent', value)
def set_CategorySiteID(self, value):
"""
Set the value of the CategorySiteID input for this Choreo. ((optional, string) The ID for the site for which to retrieve the category hierarchy. Use the numeric site code (e.g., 0 for US, 77 for eBay Germany, etc).)
"""
super(GetCategoriesInputSet, self)._set_input('CategorySiteID', value)
def set_DetailLevel(self, value):
"""
Set the value of the DetailLevel input for this Choreo. ((optional, string) The level of detail to return in the response. Valid values are: ReturnAll.)
"""
super(GetCategoriesInputSet, self)._set_input('DetailLevel', value)
def set_LevelLimit(self, value):
"""
Set the value of the LevelLimit input for this Choreo. ((optional, string) Indicates the maximum depth of the category hierarchy to retrieve, where the top-level categories (meta-categories) are at level 1. Default is 0.)
"""
super(GetCategoriesInputSet, self)._set_input('LevelLimit', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are: json (the default) and xml.)
"""
super(GetCategoriesInputSet, self)._set_input('ResponseFormat', value)
def set_SandboxMode(self, value):
"""
Set the value of the SandboxMode input for this Choreo. ((optional, boolean) Indicates that the request should be made to the sandbox endpoint instead of the production endpoint. Set to 1 to enable sandbox mode.)
"""
super(GetCategoriesInputSet, self)._set_input('SandboxMode', value)
def set_SiteID(self, value):
"""
Set the value of the SiteID input for this Choreo. ((optional, string) The eBay site ID that you want to access. Defaults to 0 indicating the US site.)
"""
super(GetCategoriesInputSet, self)._set_input('SiteID', value)
def set_UserToken(self, value):
"""
Set the value of the UserToken input for this Choreo. ((required, string) A valid eBay Auth Token.)
"""
super(GetCategoriesInputSet, self)._set_input('UserToken', value)
def set_ViewAllNodes(self, value):
"""
Set the value of the ViewAllNodes input for this Choreo. ((optional, boolean) A flag that controls whether all eBay categories are returned, or only leaf categories are returned. To retrieve leaf categories, set this parameter to 'false'.)
"""
super(GetCategoriesInputSet, self)._set_input('ViewAllNodes', value)
class GetCategoriesResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the GetCategories Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from eBay.)
"""
return self._output.get('Response', None)
class GetCategoriesChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return GetCategoriesResultSet(response, path)
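# Illustrative usage sketch: a minimal driver for the Choreo above, assuming a
# valid Temboo account. TembooSession and execute_with_results come from the
# Temboo SDK; the credential strings and input values are placeholders.
#
# from temboo.core.session import TembooSession
#
# session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
# choreo = GetCategories(session)
# inputs = choreo.new_input_set()
# inputs.set_UserToken('YOUR_EBAY_AUTH_TOKEN')
# inputs.set_LevelLimit('1')
# results = choreo.execute_with_results(inputs)
# print(results.get_Response())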
| willprice/arduino-sphere-project | scripts/example_direction_finder/temboo/Library/eBay/Trading/GetCategories.py | Python | gpl-2.0 | 5,654 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import cv2
from Tkinter import *
from PIL import Image, ImageTk
import tkFileDialog
appname = "example"
class App(object):
def __init__(self, root=None):
if not root:
root = Tk()
self.root = root
self.initUI()
def initUI(self):
self.root.title(appname)
menubar = Menu(self.root)
self.root.config(menu=menubar)
fileMenu = Menu(menubar, tearoff=0)
menubar.add_command(label="Tomar Foto", command=self.tomarFoto)
# Rafa
for i in range(3):
self.root.columnconfigure(i, weight=1)
for i in range(20):
self.root.rowconfigure(i, weight=1)
self.etiqueta = Label(self.root, text="Hola")
        # end Rafa
self.canvas = Canvas(self.root)
# self.canvas.pack(side=LEFT, fill=BOTH)
self.canvas.pack(side=BOTTOM, fill=X)
self.scrollbar_vert = Scrollbar(self.root)
self.scrollbar_vert.pack(side=RIGHT, fill=Y)
self.scrollbar_hor = Scrollbar(self.root)
self.scrollbar_hor.config(orient=HORIZONTAL)
self.scrollbar_hor.pack(side=BOTTOM, fill=X)
def onExit(self):
self.root.quit()
def tomarFoto(self):
        # Block: take the photo from the webcam and save it in PGM format
video_capture = cv2.VideoCapture(0)
ret, frame = video_capture.read()
cv2.imshow('Video', frame)
params = list()
params.append(cv2.cv.CV_IMWRITE_PXM_BINARY)
params.append(1)
print "hola"
frame2 = cv2.cvtColor(frame, cv2.cv.CV_BGR2GRAY) # convert to grayscale
cv2.imwrite('cara2.pgm', frame2, params)
cv2.imwrite('cara2.PGM', frame2, params)
video_capture.release()
cv2.destroyAllWindows()
        # End of block: take the photo from the webcam and save it in PGM format
filename = 'cara2.pgm'
self.img = Image.open(filename)
self.photo_image = ImageTk.PhotoImage(self.img)
self.canvas.pack_forget()
self.canvas = Canvas(self.root, width=self.img.size[0], height=self.img.size[1])
self.canvas.create_image(10, 10, anchor=NW, image=self.photo_image)
self.canvas.pack(side=LEFT, fill=BOTH)
self.canvas.config(yscrollcommand=self.scrollbar_vert.set)
self.canvas.config(xscrollcommand=self.scrollbar_hor.set)
self.canvas.config(scrollregion=self.canvas.bbox(ALL))
self.scrollbar_vert.config(command=self.canvas.yview)
self.scrollbar_hor.config(command=self.canvas.xview)
def run(self):
self.root.mainloop()
def main():
root = Tk()
root.geometry("250x150+300+300")
app = App(root)
app.run()
if __name__ == '__main__':
main()
| zrafa/ev | python+tk+opencv/ejemplo-funciona.py | Python | gpl-2.0 | 2,626 |
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Description: File system resilience testing application
# Author: Hubert Kario <[email protected]>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Copyright (c) 2015 Hubert Kario. All rights reserved.
#
# This copyrighted material is made available to anyone wishing
# to use, modify, copy, or redistribute it subject to the terms
# and conditions of the GNU General Public License version 2.
#
# This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free
# Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# compatibility with Python 2.6, for that we need unittest2 package,
# which is not available on 3.3 or 3.4
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
import mock
from mock import call
except ImportError:
import unittest.mock as mock
from unittest.mock import call
from fsresck.nbd.request import NBDRequestSocket, recvexactly, Error, \
NBDRequest
from fsresck.compat import compat_str
class TestError(unittest.TestCase):
def test___repr__(self):
with self.assertRaises(Error) as exception:
raise Error('test')
self.assertIn("request.Error('test'", repr(exception.exception))
class TestNBDRequest(unittest.TestCase):
def test___init__(self):
request = NBDRequest(None, None, None, None)
self.assertIsNotNone(request)
def test___ne__(self):
request1 = NBDRequest(1, 2, 3, 4)
request2 = NBDRequest(1, 2, 3, 4)
self.assertFalse(request1 != request2)
class TestRecvexactly(unittest.TestCase):
def test_zero_read(self):
sock = None
data = recvexactly(sock, 0)
self.assertEqual(bytearray(0), data)
def test_full_read(self):
sock = mock.MagicMock()
sock.recv_into.return_value = 10
data = recvexactly(sock, 10)
self.assertEqual(bytearray(10), data)
sock.recv_into.assert_called_once_with(data, 10, 0)
def test_partial_reads(self):
sock = mock.MagicMock()
sock.recv_into.side_effect = (4, 6)
data = recvexactly(sock, 10)
self.assertEqual(bytearray(10), data)
self.assertEqual(len(sock.recv_into.call_args_list), 2)
call = sock.recv_into.call_args_list[0]
self.assertEqual(call[0][1:], (10, 0))
call = sock.recv_into.call_args_list[1]
self.assertEqual(call[0][1:], (6, 0))
def test_broken_read(self):
sock = mock.MagicMock()
sock.recv_into.side_effect = (4, 0)
with self.assertRaises(Error):
recvexactly(sock, 10)
class TestNBDRequestSocket(unittest.TestCase):
def test___init__(self):
sock = NBDRequestSocket(None)
self.assertIsNotNone(sock)
@mock.patch('fsresck.nbd.request.recvexactly')
def test_recv(self, mock_mthd):
mock_mthd.return_value = bytearray(
b'\x25\x60\x95\x13' # magic value
b'\x00\x00\x00\x00' # command type - read
b'\x50\xe4\x93\x01\x00\x88\xff\xff' # handle
b'\x00\x00\x00\x00\x00\x00\x00\x00' # offset
b'\x00\x00\x40\x00' # length
)
obj = NBDRequestSocket(None).recv()
self.assertEqual(NBDRequest(0, 0x50e493010088ffff, 0, 0x4000), obj)
@mock.patch('fsresck.nbd.request.recvexactly')
def test_recv_write(self, mock_mthd):
mock_mthd.side_effect = (bytearray(
b'\x25\x60\x95\x13' # magic value
b'\x00\x00\x00\x01' # command type - write
b'\x50\xe4\x93\x01\x00\x88\xff\xff' # handle
b'\x00\x00\x00\x00\x00\x00\x00\x00' # offset
b'\x00\x00\x00\x04'), # length
bytearray(
b'\xff\xff\xff\xff' # payload
))
obj = NBDRequestSocket(None).recv()
self.assertEqual(bytearray(b'\xff'*4), obj.data)
self.assertEqual(NBDRequest(1, 0x50e493010088ffff, 0, 0x04,
bytearray(b'\xff'*4)), obj)
@mock.patch('fsresck.nbd.request.recvexactly')
def test_recv_bad_write(self, mock_mthd):
mock_mthd.return_value = bytearray(
b'\x25\x60\x95\x14' # bad magic value
b'\x00\x00\x00\x00' # command type - read
b'\x50\xe4\x93\x01\x00\x88\xff\xff' # handle
b'\x00\x00\x00\x00\x00\x00\x00\x00' # offset
b'\x00\x00\x40\x00' # length
)
sock = NBDRequestSocket(None)
with self.assertRaises(Error):
sock.recv()
def test_send_read(self):
raw_sock = mock.MagicMock()
raw_sock.sendall.return_value = None
cmd = NBDRequest(0, 0x134, 0, 0x4000)
sock = NBDRequestSocket(raw_sock)
sock.send(cmd)
raw_sock.sendall.assert_called_once_with(compat_str(bytearray(
b'\x25\x60\x95\x13'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x014'
b'\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00@\x00')))
def test_send_write(self):
raw_sock = mock.MagicMock()
raw_sock.sendall.return_value = None
cmd = NBDRequest(1, 0x134, 0, 0x04, bytearray(b'\xff'*4))
sock = NBDRequestSocket(raw_sock)
sock.send(cmd)
raw_sock.sendall.assert_called_once_with(compat_str(bytearray(
b'\x25\x60\x95\x13'
b'\x00\x00\x00\x01'
b'\x00\x00\x00\x00\x00\x00\x014'
b'\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x04'
b'\xff\xff\xff\xff')))
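# Reference sketch: an implementation of recvexactly consistent with the
# mocked behaviour exercised above -- fill a bytearray through a sliding
# memoryview and raise Error on a premature zero-byte read. The real fsresck
# implementation may differ; it is kept commented out here so it does not
# shadow the imported name under test.
#
# def recvexactly(sock, size):
#     data = bytearray(size)
#     view = memoryview(data)
#     received = 0
#     while received < size:
#         count = sock.recv_into(view[received:], size - received, 0)
#         if count == 0:
#             raise Error("connection closed after %d of %d bytes"
#                         % (received, size))
#         received += count
#     return data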
| tomato42/fsresck | tests/nbd/test_request.py | Python | gpl-2.0 | 6,139 |
'''
Created on Jan 19, 2013
@author: dsnowdon
'''
import os
import tempfile
import datetime
import json
import logging
from naoutil.jsonobj import to_json_string, from_json_string
from naoutil.general import find_class
import robotstate
from event import *
from action import *
from naoutil.naoenv import make_environment
'''
Here we define the memory locations used to store state
'''
MEM_SECURITY_DISTANCE = "WandererSecurityDistance"
MEM_HEADING = "WandererWalkHeading"
MEM_WALK_PATH = "WandererWalkPath"
MEM_DETECTED_FACE_DIRECTION = "WandererFaceDirection"
MEM_PLANNED_ACTIONS = "WandererActionsPlanned"
MEM_CURRENT_ACTIONS = "WandererActionsInProgress"
MEM_COMPLETED_ACTIONS = "WandererActionsCompleted"
MEM_CURRENT_EVENT = "WandererEvent"
MEM_MAP = "WandererMap"
MEM_LOCATION = "WandererLocation"
EVENT_LOOK_FOR_PEOPLE = "WandererEventLookForPeople"
DEFAULT_CONFIG_FILE = "wanderer"
PROPERTY_PLANNER_CLASS = "plannerClass"
DEFAULT_PLANNER_CLASS = "wanderer.randomwalk.RandomWalk"
PROPERTY_EXECUTOR_CLASS = "executorClass"
DEFAULT_EXECUTOR_CLASS = "wanderer.wanderer.PlanExecutor"
PROPERTY_MAPPER_CLASS = "mapperClass"
DEFAULT_MAPPER_CLASS = "wanderer.wanderer.NullMapper"
PROPERTY_UPDATER_CLASSES = "updaterClasses"
PROPERTY_HTTP_PORT = "httpPort"
DEFAULT_HTTP_PORT = 8080
PROPERTY_DATA_COLLECTOR_HOST = "dataCollectorHost"
PROPERTY_DATA_COLLECTOR_PORT = "dataCollectorPort"
PROPERTY_LOOK_FOR_PEOPLE = "lookForPeople"
STATIC_WEB_DIR = "web"
CENTRE_BIAS = False
HEAD_HORIZONTAL_OFFSET = 0
WANDERER_NAME = "wanderer"
# START GLOBALS
# We put instances of planners, executors and mappers here so we don't need to continually create
# new instances
planner_instance = None
executor_instance = None
mapper_instance = None
updater_instances = None
# END GLOBALS
wanderer_logger = logging.getLogger("wanderer.wanderer")
def init_state(env, startPos):
# declare events
    env.memory.declareEvent(EVENT_LOOK_FOR_PEOPLE)
# getData & removeData throw errors if the value is not set,
# so ensure all the memory locations we want to use are initialised
env.memory.insertData(MEM_CURRENT_EVENT, None)
# set "security distance"
env.memory.insertData(MEM_SECURITY_DISTANCE, "0.25")
# should we look for people as we go?
lookForPeople = env.get_property(DEFAULT_CONFIG_FILE, PROPERTY_LOOK_FOR_PEOPLE)
if lookForPeople:
env.memory.raiseEvent(EVENT_LOOK_FOR_PEOPLE, True)
env.log("Looking for people")
else:
env.memory.raiseEvent(EVENT_LOOK_FOR_PEOPLE, False)
env.log("Not looking for people")
# set initial position (in list of positions)
env.memory.insertData(MEM_WALK_PATH, [startPos])
# current actions and completed actions
env.memory.insertData(MEM_PLANNED_ACTIONS, "")
env.memory.insertData(MEM_CURRENT_ACTIONS, "")
env.memory.insertData(MEM_COMPLETED_ACTIONS, "")
def shutdown(env):
planner = get_planner_instance(env)
planner.shutdown()
executor = get_executor_instance(env, None)
executor.shutdown()
mapper = get_mapper_instance(env)
mapper.shutdown()
updater_instances = get_updaters(env)
for updater in updater_instances:
updater.shutdown()
'''
Base class for wanderer planning.
Handles generating plans and reacting to events
'''
class Planner(object):
def __init__(self, env_):
super(Planner, self).__init__()
self.env = env_
def handleEvent(self, event, state):
plan = self.dispatch(event, state)
save_plan(self.env, plan)
log_plan(self.env, "New plan", plan)
return plan
# return true if this event should cause the current plan to be executed and
# a new plan created to react to it
def does_event_interrupt_plan(self, event, state):
return True
def dispatch(self, event, state):
methodName = 'handle'+ event.name()
try:
method = getattr(self, methodName)
return method(event, state)
except AttributeError:
self.env.log("Unimplemented event handler for: {}".format(event.name()))
def shutdown(self):
pass
'''
Base class for executing plans. Since we may need to trigger choreographe
boxes we delegate actually performing a single action to an actionExecutor
which in most cases will be the choreographe box that called us.
The actionExecutor must implement do_action(action) and all_done()
'''
class PlanExecutor(object):
def __init__(self, env, actionExecutor):
super(PlanExecutor, self).__init__()
self.env = env
self.actionExecutor = actionExecutor
def perform_next_action(self):
self.env.log("perform next action")
# save completed action to history if there is one
completedAction = get_current_action(self.env)
self.env.log("Completed action = {}".format(repr(completedAction)))
if not completedAction is None:
if not isinstance(completedAction, NullAction):
push_completed_action(self.env, completedAction)
# if we have moved, then save current location
if isinstance(completedAction, Move):
self._have_moved_wrapper()
self.env.log("set current action to NullAction")
# ensure that current action is cleared until we have another one
set_current_action(self.env, NullAction())
self.env.log("pop from plan")
# pop first action from plan
action = pop_planned_action(self.env)
if action is None:
self.env.log("No next action")
self.actionExecutor.all_done()
else:
self.env.log("Next action = {}".format(repr(action)))
set_current_action(self.env, action)
self.actionExecutor.do_action(action)
self.env.log("perform_next_action done")
# get current and previous positions and call have_moved
# it's not intended that this method be overridden
def _have_moved_wrapper(self):
self.env.log("Have moved")
pos = get_position(self.env)
lastPos = get_last_position(self.env)
self.have_moved(lastPos, pos)
save_waypoint(self.env, pos)
# hook for base classes to implement additional functionality
# after robot has moved
def have_moved(self, previousPos, currentPos):
pass
def save_position(self):
pos = get_position(self.env)
save_waypoint(self.env, pos)
def shutdown(self):
pass
'''
Abstract mapping class
'''
class AbstractMapper(object):
def __init__(self, env):
super(AbstractMapper, self).__init__()
self.env = env
# update map based on new sensor data
def update(self, position, sensors):
pass
# return the current map
def get_map(self):
return None
def shutdown(self):
pass
'''
Null mapper - does nothing, just a place holder for when no mapping is actually required
'''
class NullMapper(AbstractMapper):
def __init__(self, env):
super(NullMapper, self).__init__(env)
'''
Mapper that does no actual mapping, but logs all data to file for future analysis
'''
class FileLoggingMapper(AbstractMapper):
def __init__(self, env, save_data=True):
super(FileLoggingMapper, self).__init__(env)
self.save_data = save_data
if self.save_data:
self.open_data_file()
# save the data to file
def update(self, position, sensors):
if self.save_data:
self.save_update_data(position, sensors)
def open_data_file(self):
self.logFilename = tempfile.mktemp()
self.env.log("Saving sensor data to {}".format(self.logFilename))
self.first_write = True
try:
self.logFile = open(self.logFilename, 'r+')
except IOError:
self.env.log("Failed to open file: {}".format(self.logFilename))
self.logFile = None
def save_update_data(self, position, sensors):
if self.logFile:
data = { 'timestamp' : self.timestamp(),
'position' : position,
'leftSonar' : sensors.get_sensor('LeftSonar'),
'rightSonar' : sensors.get_sensor('RightSonar') }
jstr = json.dumps(data)
#self.env.log("Mapper.update: "+jstr)
if not self.first_write:
self.logFile.write(",\n")
self.logFile.write(jstr)
self.first_write = False
self.logFile.flush()
def timestamp(self):
return datetime.datetime.now().strftime('%Y%m%d%H%M%S%f')
# TODO should really block write access while doing this
def write_sensor_data_to_file(self, fp, buffer_size=1024):
if self.logFile:
self.logFile.seek(0)
fp.write('[\n')
while 1:
copy_buffer = self.logFile.read(buffer_size)
if copy_buffer:
fp.write(copy_buffer)
else:
break
fp.write(' ]\n')
self.logFile.seek(0, 2)
def shutdown(self):
if self.logFile:
self.logFile.close()
'''
Get the instance of the planner, creating an instance of the configured class if we don't already
have a planner instance
'''
def get_planner_instance(env):
global planner_instance
if not planner_instance:
fqcn = env.get_property(DEFAULT_CONFIG_FILE, PROPERTY_PLANNER_CLASS, DEFAULT_PLANNER_CLASS)
env.log("Creating a new planner instance of {}".format(fqcn))
klass = find_class(fqcn)
planner_instance = klass(env)
return planner_instance
'''
Get the instance of the plan executor, creating an instance of the class specified in the configuration
file if necessary.
'''
def get_executor_instance(env, actionExecutor):
global executor_instance
if not executor_instance:
fqcn = env.get_property(DEFAULT_CONFIG_FILE, PROPERTY_EXECUTOR_CLASS, DEFAULT_EXECUTOR_CLASS)
env.log("Creating a new executor instance of {}".format(fqcn))
klass = find_class(fqcn)
executor_instance = klass(env, actionExecutor)
# NOT THREAD SAFE
# even if we already had an instance of an executor the choreographe object might have become
# stale so we refresh it. We only have one executor instance at once so this should be OK
executor_instance.actionExecutor = actionExecutor
return executor_instance
'''
Get the instance of the mapper to use
'''
def get_mapper_instance(env):
global mapper_instance
if not mapper_instance:
fqcn = env.get_property(DEFAULT_CONFIG_FILE, PROPERTY_MAPPER_CLASS, DEFAULT_MAPPER_CLASS)
env.log("Creating a new mapper instance of {}".format(fqcn))
klass = find_class(fqcn)
mapper_instance = klass(env)
return mapper_instance
def run_updaters(env, position, sensors):
global wanderer_logger
# do the map update
mapper = get_mapper_instance(env)
if mapper:
try:
mapper.update(position, sensors)
except TypeError as e:
wanderer_logger.error("Error running mapper {0} update: {1}".format(repr(mapper), e))
# run any other updaters
updater_instances = get_updaters(env)
for updater in updater_instances:
try:
            updater.update(position, sensors)
except TypeError as e:
wanderer_logger.error("Error running updater {0} update: {1}".format(repr(updater), e))
def get_updaters(env):
global updater_instances
if not updater_instances:
updater_instances = []
fqcns = env.get_property(DEFAULT_CONFIG_FILE, PROPERTY_UPDATER_CLASSES)
if fqcns:
for fqcn in fqcns:
env.log("Creating a new updater instance of {}".format(fqcn))
klass = find_class(fqcn)
updater = klass(env)
if updater:
updater_instances.append(updater)
return updater_instances
def make_wanderer_environment(box_):
env = make_environment(box_)
env.set_application_name(WANDERER_NAME)
return env
def load_event(env):
return from_json_string(env.memory.getData(MEM_CURRENT_EVENT))
def save_event(env, event):
env.memory.insertData(MEM_CURRENT_EVENT, to_json_string(event))
def load_plan(env):
return from_json_string(env.memory.getData(MEM_PLANNED_ACTIONS))
def save_plan(env, plan):
env.memory.insertData(MEM_PLANNED_ACTIONS, to_json_string(plan))
def load_completed_actions(env):
return from_json_string(env.memory.getData(MEM_COMPLETED_ACTIONS))
def save_completed_actions(env, actions):
env.memory.insertData(MEM_COMPLETED_ACTIONS, to_json_string(actions))
def pop_planned_action(env):
plan = load_plan(env)
action = None
if not plan is None:
if len(plan) > 0:
action = plan[0]
plan = plan[1:]
else:
plan = []
save_plan(env, plan)
return action
def get_current_action(env):
return from_json_string(env.memory.getData(MEM_CURRENT_ACTIONS))
def set_current_action(env, action):
env.memory.insertData(MEM_CURRENT_ACTIONS, to_json_string(action))
def push_completed_action(env, action):
actions = load_completed_actions(env)
if actions is None:
actions = []
actions.append(action)
save_completed_actions(env, actions)
def log_plan(env, msg, plan):
env.log(msg)
for p in plan:
env.log(str(p))
def save_direction(env, hRad):
env.memory.insertData(MEM_HEADING, hRad)
'''
Get the entire path
'''
def get_path(env):
return env.memory.getData(MEM_WALK_PATH)
def set_path(env, path):
env.memory.insertData(MEM_WALK_PATH, path)
'''
Get the last position the robot was at by looking at the path
'''
def get_last_position(env):
path = get_path(env)
pos = None
if not path is None:
try:
pos = path[-1]
except IndexError:
pass
return pos
'''
Get the current position of the robot
'''
def get_position(env):
# 1 = FRAME_WORLD
return env.motion.getPosition("Torso", 1, True)
def save_waypoint(env, waypoint):
path = get_path(env)
if path is None:
path = []
path.append(waypoint)
env.log("Path = "+str(path))
set_path(env, path)
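# Illustrative control flow: how the pieces above are typically wired together.
# `box` and `actionExecutor` are stand-ins for the Choregraphe objects the
# robot framework would provide, and the None state argument is a placeholder.
#
# env = make_wanderer_environment(box)
# init_state(env, get_position(env))
# planner = get_planner_instance(env)
# plan = planner.handleEvent(load_event(env), None)    # plan is also saved
# executor = get_executor_instance(env, actionExecutor)
# executor.perform_next_action()   # pops one action, hands it to the box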
| davesnowdon/nao-wanderer | wanderer/src/main/python/wanderer/wanderer.py | Python | gpl-2.0 | 14,410 |
#! /usr/bin/python3
import re
err = "La contraseña no es segura"
msg = "Escriba una contraseña al menos 8 caracteres alfanumericos"
def ismayor8(a):
"""
    Check that the password is at least 8 characters long
"""
if (len(a) < 8):
return False
return True
def minus(a):
"""
    Check whether at least one lowercase letter is present
"""
patron = ('[a-z]')
flag = False
for letra in a:
if (re.match(patron, letra)):
flag = True
return flag
def mayus(a):
"""
    Check whether at least one uppercase letter is present
"""
patron = ('[A-Z]')
flag = False
for letra in a:
if (re.match(patron, letra)):
flag = True
return flag
def unnum(a):
"""
    Check whether at least one digit is present
"""
patron = ('[0-9]')
flag = False
for letra in a:
if (re.match(patron, letra)):
flag = True
return flag
def alfanumeric(a):
"""
    Check whether the string is alphanumeric
"""
if (a.isalnum()):
return True
else:
return False
def vpass():
"""
    Validate the password
"""
salida = False
while salida is False:
try:
print (msg, end='\n')
paswd = str(input('passwd: '))
if (ismayor8(paswd)):
if (alfanumeric(paswd)):
if (minus(paswd) and mayus(paswd) and unnum(paswd)):
salida = True
else:
print (err, end='\n')
else:
print (err, end='\n')
except (KeyboardInterrupt, EOFError):
print (msg, end='\n')
return salida
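# Equivalent compact checks (illustrative; is_secure is a hypothetical helper,
# not part of the original script): the per-character loops above can be
# replaced by re.search over the whole string, e.g.:
#
# def is_secure(password):
#     return (len(password) >= 8 and password.isalnum()
#             and re.search('[a-z]', password) is not None
#             and re.search('[A-Z]', password) is not None
#             and re.search('[0-9]', password) is not None)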
| IntelBUAP/Python3 | Evaluaciones/tuxes/eva2/validapass.py | Python | gpl-2.0 | 1,660 |
from typing import List
class Solution:
def transformArray2(self, arr: List[int]) -> List[int]:
while True:
arr2 = [a for a in arr]
changed = 0
for id in range(1, len(arr) - 1):
l = arr[id - 1]
r = arr[id + 1]
m = arr[id]
if l > m and r > m:
m += 1
changed += 1
elif l < m and r < m:
m -= 1
changed += 1
arr2[id] = m
arr = arr2
if changed == 0:
break
return arr
def transformArray(self, A):
for _ in range(100):
A = A[:1] + [b + (a > b < c) - (a < b > c) for a, b, c in zip(A, A[1:], A[2:])] + A[-1:]
return A
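    # How the one-liner works: zip(A, A[1:], A[2:]) pairs each interior
    # element b with its neighbours a and c. Python booleans act as 0/1, so
    # b + (a > b < c) - (a < b > c) increments strict local minima and
    # decrements strict local maxima; A[:1] and A[-1:] keep the endpoints
    # unchanged. 100 passes appears to be a generous upper bound for
    # convergence given the small value range in this problem.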
if __name__ == '__main__':
assert Solution().transformArray([6, 2, 3, 4]) == [6, 3, 3, 4]
assert Solution().transformArray([1, 6, 3, 4, 3, 5]) == [1, 4, 4, 4, 4, 5]
| lmmsoft/LeetCode | LeetCode-Algorithm/1243. Array Transformation/1243.py | Python | gpl-2.0 | 992 |
#!/usr/bin/env python3
# Copyright 2009-2017 BHG http://bw.org/
import sqlite3
def main():
print('connect')
db = sqlite3.connect('db-api.db')
cur = db.cursor()
print('create')
cur.execute("DROP TABLE IF EXISTS test")
cur.execute("""
CREATE TABLE test (
id INTEGER PRIMARY KEY, string TEXT, number INTEGER
)
""")
print('insert row')
cur.execute("""
INSERT INTO test (string, number) VALUES ('one', 1)
""")
print('insert row')
cur.execute("""
INSERT INTO test (string, number) VALUES ('two', 2)
""")
print('insert row')
cur.execute("""
INSERT INTO test (string, number) VALUES ('three', 3)
""")
print('commit')
db.commit()
print('count')
cur.execute("SELECT COUNT(*) FROM test")
count = cur.fetchone()[0]
print(f'there are {count} rows in the table.')
print('read')
for row in cur.execute("SELECT * FROM test"):
print(row)
print('drop')
cur.execute("DROP TABLE test")
print('close')
db.close()
if __name__ == '__main__': main()
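# A possible extension (insert_rows is a hypothetical helper): the three
# separate INSERT statements above can be collapsed into one parameterized
# executemany call, which also avoids quoting issues. Table and column names
# match the script above.
#
# def insert_rows(cur, rows):
#     cur.executemany("INSERT INTO test (string, number) VALUES (?, ?)", rows)
#
# insert_rows(cur, [('one', 1), ('two', 2), ('three', 3)])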
| ketan-analytics/learnpython | IntermediatePython/Lynda_Bill_PYEssential/SQL.py | Python | gpl-2.0 | 1,160 |
# coding: utf8
# OeQ autogenerated lookup function for 'Window/Wall Ratio East in correlation to year of construction, based on the source data of the survey for the "German Building Typology" developed by the "Institut für Wohnen und Umwelt", Darmstadt/Germany, 2011-2013'
import math
import numpy as np
import oeqLookuptable as oeq
def get(*xin):
l_lookup = oeq.lookuptable(
[
1849,0,
1850,0,
1851,0,
1852,0,
1853,0,
1854,0,
1855,0,
1856,0,
1857,0,
1858,0,
1859,0,
1860,0,
1861,0,
1862,0,
1863,0,
1864,0,
1865,0,
1866,0,
1867,0,
1868,0,
1869,0,
1870,0,
1871,0,
1872,0,
1873,0,
1874,0,
1875,0,
1876,0,
1877,0,
1878,0,
1879,0,
1880,0,
1881,0,
1882,0,
1883,0,
1884,0,
1885,0,
1886,0,
1887,0,
1888,0,
1889,0,
1890,0,
1891,0,
1892,0,
1893,0,
1894,0,
1895,0,
1896,0,
1897,0,
1898,0,
1899,0,
1900,0,
1901,0,
1902,0,
1903,0,
1904,0,
1905,0,
1906,0,
1907,0,
1908,0,
1909,0,
1910,0,
1911,0,
1912,0,
1913,0,
1914,0,
1915,0,
1916,0,
1917,0,
1918,0,
1919,0,
1920,0,
1921,0,
1922,0,
1923,0,
1924,0,
1925,0,
1926,0,
1927,0,
1928,0,
1929,0,
1930,0,
1931,0,
1932,0,
1933,0,
1934,0,
1935,0,
1936,0,
1937,0,
1938,0,
1939,0,
1940,0,
1941,0,
1942,0,
1943,0,
1944,0,
1945,0,
1946,0,
1947,0,
1948,0,
1949,0,
1950,0,
1951,0,
1952,0,
1953,0,
1954,0,
1955,0,
1956,0,
1957,0,
1958,0.001,
1959,0.002,
1960,0.002,
1961,0,
1962,0,
1963,0,
1964,0,
1965,0,
1966,0.019,
1967,0.046,
1968,0.077,
1969,0.11,
1970,0.141,
1971,0.169,
1972,0.195,
1973,0.22,
1974,0.22,
1975,0.22,
1976,0.22,
1977,0.22,
1978,0.161,
1979,0.089,
1980,0.028,
1981,0,
1982,0.019,
1983,0.07,
1984,0.131,
1985,0.18,
1986,0.2,
1987,0.199,
1988,0.188,
1989,0.18,
1990,0.184,
1991,0.192,
1992,0.195,
1993,0.18,
1994,0.142,
1995,0.09,
1996,0.038,
1997,0,
1998,0,
1999,0,
2000,0.007,
2001,0.025,
2002,0.038,
2003,0.045,
2004,0.049,
2005,0.05,
2006,0.05,
2007,0.051,
2008,0.05,
2009,0.05,
2010,0.05,
2011,0.05,
2012,0.05,
2013,0.05,
2014,0.05,
2015,0.05,
2016,0.05,
2017,0.05,
2018,0.05,
2019,0.05,
2020,0.05,
2021,0.05])
return(l_lookup.lookup(xin))
| UdK-VPT/Open_eQuarter | mole/stat_corr/window_wall_ratio_east_SDH_by_building_age_lookup.py | Python | gpl-2.0 | 1,995 |
# -*- coding: utf-8 -*-
from shutil import rmtree
from tempfile import mkdtemp
from omdbapi import OMDbAPI
from scrusubtitles import ScruSubtitles
from scrusubtitles import ScruSubtitlesListener
from scrusubtitles import ScruSubtitlesLogger
class TestService(ScruSubtitlesListener, ScruSubtitlesLogger):
def __init__(self):
super(TestService, self).__init__()
self._omdbapi = OMDbAPI()
self._omdbapi.logger = self
self._scrusubtitles = ScruSubtitles()
self._scrusubtitles.listener = self
self._scrusubtitles.logger = self
self._scrusubtitles.workdir = mkdtemp()
self._num_subtitles_downloaded = 0
self._num_subtitles_found = 0
def cleanup(self):
rmtree(self._scrusubtitles.workdir)
def lookup(self, title, year):
return self._omdbapi.search(title, year)
def download(self, url, filename):
self._num_subtitles_downloaded = 0
self._scrusubtitles.download(url, filename)
self.info(u'{0} subtitles downloaded'.format(self._num_subtitles_downloaded))
def search(self, imdb_id, languages):
self._num_subtitles_found = 0
self._scrusubtitles.search(imdb_id, languages)
self.info(u'{0} subtitles found'.format(self._num_subtitles_found))
def on_subtitle_found(self, subtitle):
self._num_subtitles_found += 1
self.info(u'Found {0} subtitle {1}'.format(subtitle['language'], subtitle['filename']))
for key in subtitle:
self.debug(u' {0}: {1}'.format(key, subtitle[key]))
def on_subtitle_downloaded(self, path):
self._num_subtitles_downloaded += 1
self.info(u'Subtitle {0} downloaded'.format(path))
def debug(self, message):
print u'DEBUG: {0}'.format(message)
def info(self, message):
print u'INFO: {0}'.format(message)
def warn(self, message):
print u'WARN: {0}'.format(message)
def error(self, message):
print u'ERROR: {0}'.format(message)
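# Illustrative driver: the title, year, IMDb id and language list below are
# example values only.
#
# service = TestService()
# try:
#     service.lookup('Forrest Gump', 1994)
#     service.search('tt0109830', ['en', 'ru'])
# finally:
#     service.cleanup()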
| systemcrash/service.subtitles.subs_com_ru | resources/lib/scrutest.py | Python | gpl-2.0 | 2,013 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-03 10:44
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('domains', '0003_auto_20161103_1031'),
]
operations = [
migrations.RemoveField(
model_name='domain',
name='subtopics',
),
migrations.AddField(
model_name='subtopic',
name='dmain',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='subtopics', to='domains.domain'),
),
]
| adithyabhatkajake/kompile | domains/migrations/0004_auto_20161103_1044.py | Python | gpl-2.0 | 678 |
'''
adaboost.py trains and tests AdaBoost on MNIST data. The weak learner is
currently a decision tree; an SGDClassifier variant is kept in the source but
disabled. See the command-line options for more information.
'''
import argparse
import cPickle as pickle
import numpy as np
import pylab as pl
import time
from sklearn.ensemble import AdaBoostClassifier
from sklearn.linear_model import SGDClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import accuracy_score
from sklearn.metrics import zero_one_loss
def write(model, model_file):
'''
Writes the trained model to the fiven file.
'''
pickle.dump(model,open(model_file, 'wb'))
def load_data(data_file):
'''
Loads X and Y data from the pickled data_file
'''
data = pickle.load(open(data_file))
return (data['X'], data['Y'])
def get_adboost_classifier(algo, num_estimators, wl_loss, wl_penalty, passes):
'''
Constructs a adaboost classifier object based on the algorithm, number of
estimators, loss and penalty function given. Configures the object to run on
all cores.
'''
'''
weak_learner = SGDClassifier(loss=wl_loss, penalty=wl_penalty,
n_jobs=-1, n_iter = passes, shuffle = True)
'''
weak_learner = DecisionTreeClassifier(max_depth=30)
ab_classifier = AdaBoostClassifier( weak_learner, n_estimators =
num_estimators, algorithm = algo)
return ab_classifier
def train(ab_classifier, train_file, validation_file, model_file, graph_file):
'''
Takes a configured adaboost classifier object and train it with the training
data from the data_file and write the learned model to the model_file.
'''
s = time.time()
train_x, train_y = load_data(train_file)
ab_classifier = ab_classifier.fit(train_x, train_y)
write(ab_classifier, model_file)
valid_x, valid_y = load_data(validation_file)
# find out stage wise training error
n_estimators = len(ab_classifier.estimators_)
train_err = np.zeros((n_estimators,))
valid_err = np.zeros((n_estimators,))
for i, y_pred in enumerate(ab_classifier.staged_predict(train_x)):
train_err[i] = zero_one_loss(y_pred, train_y)
for i, y_pred in enumerate(ab_classifier.staged_predict(valid_x)):
valid_err[i] = zero_one_loss(y_pred, valid_y)
save_fig(train_err, valid_err, n_estimators, graph_file)
print 'Training time:', time.time() - s, 'seconds'
def save_fig(train_err, valid_err, n_estimators, file_name):
fig = pl.figure()
ax = fig.add_subplot(111)
ax.plot(np.arange(n_estimators) + 1, train_err, label='Train Error', color='red')
ax.plot(np.arange(n_estimators) + 1, valid_err, label='Validation Error',
color='green')
ax.set_ylim((0.0, 1.0))
ax.set_xlabel('Number of Learners')
ax.set_ylabel('Error')
ax.set_title('Adaboost SAMME on MNIST dataset')
ax.xaxis.grid(True)
ax.yaxis.grid(True)
leg = ax.legend(loc='upper right', fancybox=True)
leg.get_frame().set_alpha(0.7)
pl.savefig(file_name)
def test(model_file, test_file):
'''
Tests the model on the test data in data_file using the model in model_file.
Prints accuracy to report the performance of the classifier.
'''
test_x, test_y = load_data(test_file)
ab_classifier = pickle.load(open(model_file))
pred_y = ab_classifier.predict(test_x)
correct = np.count_nonzero(test_y == pred_y)
print 'Accuracy: ', correct / (1.0 * len(test_y))
def parse_train_args(args):
'''
parsers args required for training and calls the appropriate function.
'''
ab_classifier = get_adboost_classifier('SAMME.R', args.num_learners,
args.loss, args.pen, args.epochs)
train(ab_classifier, args.train_file, args.validation_file, args.model_file,
args.graph_file)
def parse_test_args(args):
'''
parsers args required for testing and calls the appropriate function.
'''
test(args.model_file, args.test_file)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(help = 'sub-command help')
train_parser = subparsers.add_parser('train', help= 'train adaboost')
train_parser.add_argument('train_file', help='path to training data')
train_parser.add_argument('validation_file', help='path to validation data')
train_parser.add_argument('model_file', help='filepath for model')
train_parser.add_argument('graph_file', help='filepath for training graph')
train_parser.add_argument('epochs', help='number of epochs for weak \
learners', type = int)
train_parser.add_argument('num_learners', nargs='?', help='number of weak \
learners', default = 10, type=int)
loss_gp = train_parser.add_mutually_exclusive_group()
loss_gp.set_defaults(loss = 'log')
loss_gp.add_argument('--log_loss', action = 'store_const', dest = 'loss',
const = 'log', help = 'use log loss function for training weak\
learners')
loss_gp.add_argument('--hinge_loss', action = 'store_const', dest = 'loss',
const = 'hinge', help = 'use hinge loss function for training weak\
learners')
penalty_gp = train_parser.add_mutually_exclusive_group()
penalty_gp.set_defaults(pen = 'l2')
penalty_gp.add_argument('--l1', action = 'store_const', dest = 'pen', const
= 'l1', help = 'use l1 penalty for training weak learners')
penalty_gp.add_argument('--l2', action = 'store_const', dest = 'pen', const
= 'l2', help = 'use l2 penalty for training weak learners')
train_parser.set_defaults(func = parse_train_args)
    test_parser = subparsers.add_parser('test', help = 'test adaboost')
test_parser.add_argument('test_file', help='path to test data')
test_parser.add_argument('model_file', help='filepath for model')
test_parser.set_defaults(func = parse_test_args)
args = parser.parse_args()
args.func(args)
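# Example invocations (file names and numbers are placeholders; the positional
# order follows the parsers defined above):
#
#   python adaboost.py train train.pkl valid.pkl model.pkl curve.png 5 100 --log_loss --l2
#   python adaboost.py test test.pkl model.pkl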
| vmr2117/nnet | src/adaboost/adaboostSAMME.py | Python | gpl-2.0 | 5,974 |
from collections import namedtuple
import fcntl
import os
import shutil
from pcs.common.tools import format_os_error
# TODO add logging (logger / debug reports ?) to the RawFile class; be aware
# the class is used both in pcs.cli and pcs.lib packages
FileMetadata = namedtuple(
"FileMetadata",
[
"file_type_code",
"path",
"owner_user_name",
"owner_group_name",
"permissions",
"is_binary",
],
)
class RawFileError(Exception):
# So far there has been no need to have a separate exception for each
    # action. Actions must be passed in a report and we certainly do not want
# a separate report for each action.
ACTION_CHMOD = "chmod"
ACTION_CHOWN = "chown"
ACTION_READ = "read"
ACTION_REMOVE = "remove"
ACTION_WRITE = "write"
def __init__(self, metadata, action, reason=""):
"""
FileMetadata metadata -- describes the file involved in the error
string action -- possible values enumerated in RawFileError
string reason -- plain text error details
"""
super().__init__()
self.metadata = metadata
self.action = action
self.reason = reason
class FileAlreadyExists(RawFileError):
def __init__(self, metadata):
"""
FileMetadata metadata -- describes the file involved in the error
"""
super().__init__(metadata, RawFileError.ACTION_WRITE)
class RawFileInterface:
def __init__(self, metadata):
"""
FileMetadata metadata -- describes the file and provides its metadata
"""
self.__metadata = metadata
@property
def metadata(self):
return self.__metadata
def exists(self):
"""
Return True if file exists, False otherwise
"""
raise NotImplementedError()
def read(self):
"""
Return content of the file as bytes
"""
raise NotImplementedError()
def write(self, file_data, can_overwrite=False):
"""
Write file_data to the file
bytes file_data -- data to be written
bool can_overwrite -- raise if False and the file already exists
"""
raise NotImplementedError()
class RawFile(RawFileInterface):
def exists(self):
# Returns False if the file is not accessible, does not raise.
return os.path.exists(self.metadata.path)
def read(self):
try:
mode = "rb" if self.metadata.is_binary else "r"
with open(self.metadata.path, mode) as my_file:
# the lock is released when the file gets closed on leaving the
# with statement
fcntl.flock(my_file.fileno(), fcntl.LOCK_SH)
content = my_file.read()
return (
content
if self.metadata.is_binary
else content.encode("utf-8")
)
except OSError as e:
            # A specific exception for a missing file is not needed; anyone
            # can and should check that using the exists method.
raise RawFileError(
self.metadata, RawFileError.ACTION_READ, format_os_error(e)
) from e
def write(self, file_data, can_overwrite=False):
try:
mode = "{write_mode}{binary_mode}".format(
write_mode="w" if can_overwrite else "x",
binary_mode="b" if self.metadata.is_binary else "",
)
# It seems pylint cannot process constructing the mode variable and
# gives a false positive.
# pylint: disable=bad-open-mode
with open(self.metadata.path, mode) as my_file:
# the lock is released when the file gets closed on leaving the
# with statement
fcntl.flock(my_file.fileno(), fcntl.LOCK_EX)
# Set the ownership and permissions to cover the case when we
# just created the file. If the file already existed, make sure
# the ownership and permissions are correct before writing any
# data into it.
if (
self.metadata.owner_user_name is not None
or self.metadata.owner_group_name is not None
):
try:
shutil.chown(
self.metadata.path,
self.metadata.owner_user_name,
self.metadata.owner_group_name,
)
except LookupError as e:
raise RawFileError(
self.metadata, RawFileError.ACTION_CHOWN, str(e)
) from e
except OSError as e:
raise RawFileError(
self.metadata,
RawFileError.ACTION_CHOWN,
format_os_error(e),
) from e
if self.metadata.permissions is not None:
try:
os.chmod(my_file.fileno(), self.metadata.permissions)
except OSError as e:
raise RawFileError(
self.metadata,
RawFileError.ACTION_CHMOD,
format_os_error(e),
) from e
# Write file data
my_file.write(
file_data
if self.metadata.is_binary
else file_data.decode("utf-8")
)
except FileExistsError as e:
raise FileAlreadyExists(self.metadata) from e
except OSError as e:
raise RawFileError(
self.metadata, RawFileError.ACTION_WRITE, format_os_error(e)
) from e
def remove(self, fail_if_file_not_found=True):
get_raw_file_error = lambda e: RawFileError(
self.metadata, RawFileError.ACTION_REMOVE, format_os_error(e)
)
try:
os.remove(self.metadata.path)
except FileNotFoundError as e:
if fail_if_file_not_found:
raise get_raw_file_error(e) from e
except OSError as e:
raise get_raw_file_error(e) from e
def backup(self):
# TODO implement
raise NotImplementedError()
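

# A minimal in-memory implementation sketch of RawFileInterface, illustrating
# the exists/read/write contract documented above (including FileAlreadyExists
# on a non-overwriting write). InMemoryRawFile is not part of pcs itself and
# ignores ownership and permission metadata.
class InMemoryRawFile(RawFileInterface):
    def __init__(self, metadata, file_data=None):
        super().__init__(metadata)
        self._file_data = file_data

    def exists(self):
        return self._file_data is not None

    def read(self):
        if self._file_data is None:
            raise RawFileError(
                self.metadata, RawFileError.ACTION_READ, "file does not exist"
            )
        return self._file_data

    def write(self, file_data, can_overwrite=False):
        if self._file_data is not None and not can_overwrite:
            raise FileAlreadyExists(self.metadata)
        self._file_data = file_data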
| feist/pcs | pcs/common/file.py | Python | gpl-2.0 | 6,496 |
import gobject
import gtk
class Downloader(gtk.Dialog):
def __init__(self, path):
self.__is_cancelled = False
gtk.Dialog.__init__(self, title = "", buttons = (gtk.STOCK_CANCEL,
gtk.RESPONSE_CANCEL))
self.set_default_size(300, 100)
self.connect("response", self.__on_cancel)
vbox = self.vbox
hbox = gtk.HBox(False, 12)
hbox.set_border_width(12)
vbox.pack_start(hbox, True, True)
import os
img = gtk.Image()
img.set_from_file(os.path.join(path, "download.png"))
hbox.pack_start(img, False, False)
vbox = gtk.VBox()
hbox.pack_end(vbox, True, True)
lbl = gtk.Label("<b>" + _("Retrieving:") + "</b>")
lbl.set_use_markup(True)
align = gtk.Alignment(0.0, 0.0)
align.add(lbl)
vbox.add(align)
self.__label = gtk.Label("")
self.__label.set_use_markup(True)
align = gtk.Alignment(0.0, 0.0)
align.add(self.__label)
vbox.add(align)
self.__bar = gtk.ProgressBar()
vbox.add(self.__bar)
def __on_cancel(self, src, response):
self.__is_cancelled = True
def download(self, url, dest):
name = url
if (len(name) >= 60):
name = name[:30] + "..." + name[-30:]
gobject.timeout_add(0, self.__label.set_text, "%s" % (name))
gobject.timeout_add(0, self.__bar.set_fraction, 0)
gobject.timeout_add(0, self.__bar.set_text, "Contacting...")
gobject.timeout_add(0, self.show_all)
self.__is_cancelled = False
dest_fd = open(dest, "w")
import gconf
client = gconf.client_get_default()
use_proxy = client.get_bool('/system/http_proxy/use_http_proxy')
if (use_proxy != 0):
host = client.get_string('/system/http_proxy/host')
port = client.get_int('/system/http_proxy/port')
if (host != ""):
http_proxy = "http://" + host + ':' + str(port)
else:
http_proxy = None
else:
http_proxy = None
import urllib2
if (http_proxy is not None):
proxy_support = urllib2.ProxyHandler({"http" : http_proxy})
opener = urllib2.build_opener(proxy_support)
urllib2.install_opener(opener)
src_fd = urllib2.urlopen(url)
total_size = src_fd.info().get("Content-Length", 0)
so_far = 0
while (not self.__is_cancelled):
data = src_fd.read(4096)
if (not data):
break
dest_fd.write(data)
so_far += len(data)
value = (100 * so_far / max(0.1, float(total_size)))
gobject.timeout_add(0, self.__bar.set_fraction, value / 100.0)
gobject.timeout_add(0, self.__bar.set_text, "%i%%" % (value))
src_fd.close()
dest_fd.close()
gobject.timeout_add(0, self.hide)
| RaumZeit/gdesklets-core | shell/plugins/PackageInstaller/Downloader.py | Python | gpl-2.0 | 3,008 |
# PyJVM (pyjvm.org) Java Virtual Machine implemented in pure Python
# Copyright (C) 2014 Andrew Romanenco ([email protected])
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''Class path for jar files and directories. Cache all jars content.
JAVA_HOME must be set.
Class path is list of jar files and folders for classes lookup.
Separator ":", (";", ",") are also supported
See START.txt for details
'''
import os
import zipfile
def read_class_path(class_path):
'''Cache content of all jars.
Begin with rt.jar
'''
# folders for lookup for class files
lookup_paths = []
# content of all jars (name->path to jar)
jars = {}
# content of rt.jar
rt = {}
# first check local rt.jar
local_path = os.path.dirname(os.path.realpath(__file__))
RT_JAR = os.path.join(local_path, "../rt/rt.jar")
if not os.path.isfile(RT_JAR):
JAVA_HOME = os.environ.get('JAVA_HOME')
if JAVA_HOME is None:
raise Exception("JAVA_HOME is not set")
if not os.path.isdir(JAVA_HOME):
raise Exception("JAVA_HOME must be a folder: %s" % JAVA_HOME)
RT_JAR = os.path.join(JAVA_HOME, "lib/rt.jar")
if not os.path.exists(RT_JAR) or os.path.isdir(RT_JAR):
RT_JAR = os.path.join(JAVA_HOME, "jre/lib/rt.jar")
if not os.path.exists(RT_JAR) or os.path.isdir(RT_JAR):
raise Exception("rt.jar not found")
if not zipfile.is_zipfile(RT_JAR):
raise Exception("rt.jar is not a zip: %s" % RT_JAR)
read_from_jar(RT_JAR, rt)
current = os.getcwd()
splitter = None
if ":" in class_path:
splitter = ":"
elif ";" in class_path:
splitter = ";"
elif "," in class_path:
splitter = ","
else:
splitter = ":"
cpaths = class_path.split(splitter)
for p in cpaths:
p = p.strip()
path = os.path.join(current, p)
if not os.path.exists(path):
raise Exception("Wrong class path entry: %s (path not found %s)",
p, path)
if os.path.isdir(path):
lookup_paths.append(path)
else:
if zipfile.is_zipfile(path):
read_from_jar(path, jars)
else:
raise Exception("Class path entry %s is not a jar file" % path)
return (lookup_paths, jars, rt)
def read_from_jar(jar, dict_data):
'''Read file list from a jar'''
if not zipfile.is_zipfile(jar):
raise Exception("Not a jar file: %s" % jar)
with zipfile.ZipFile(jar, "r") as j:
for name in j.namelist():
if name.endswith(".class"): # at some point save all files
dict_data[name] = jar
| andrewromanenco/pyjvm | pyjvm/class_path.py | Python | gpl-3.0 | 3,299 |
"""
3D.py is the interface for plotting Skeleton Wireframes in a 3D
perspective using matplotlib
"""
# 3D Plotting tool
from matplotlib import pyplot as plt
from matplotlib import animation
from mpl_toolkits.mplot3d import Axes3D
# PyKinect XEF modules
import Colour
# 3D View object
class View:
def __init__(self, bodies, **kwargs):
""" data is a from Load.BodyData """
# Performance data
self.bodies = bodies
# Setup Plot
self.fig = plt.figure()
self.axis = self.fig.add_subplot(111, projection='3d')
self.axis.axis('on')
# Define the lines being used
kwargs['marker'] = '.'
kwargs['ms'] = 5
self.lines = []
for i, body in enumerate(self.bodies):
body_lines = []
for joint in body:
for child in joint.children():
body_lines += self.axis.plot([],[],[], c = Colour.plt[i], **kwargs)
self.lines.append(body_lines)
# Settings
self.axis.set_ylim(-2,2)
self.axis.set_xlim(0,4)
self.axis.set_zlim(-2,2)
self.rate = 1.0 / 3.0
def update(self, frame):
""" Used to animate the drawing - passes a blitting=True argument to draw() """
self.draw(frame, blitting=True)
return self.lines
def draw(self, frame, blitting=False):
""" Draw the bodies at this frame """
for body in range(len(self.bodies)):
try:
self.draw_body(body, frame)
            except Exception:
                # skip bodies with no joint data at this frame
                pass
if blitting:
self.fig.canvas.blit()
else:
self.display()
return
@staticmethod
def display():
plt.show()
def draw_body(self, body, frame):
""" Draws one body at frame """
bone = 0
for joint in self.bodies[body]:
for start, end in joint.bones_3D(frame):
self.draw_bone(body, start, end, bone)
bone += 1
def draw_bone(self, body, a, b, i):
""" Draws update line i to draw a line between a and b """
# Re-order axes
# Kinect Axis Z is depth (matplot X)
# Kinect Axis Y is height (matplot Z)
# Kinect Axis X is width (matplot y)
y, z, x = [(a[n],b[n]) for n in range(3)]
self.lines[body][i].set_data(x, y)
self.lines[body][i].set_3d_properties(z)
def animate(view):
""" Takes a 3D.View object and 'plays' the frames """
try:
mov = animation.FuncAnimation(view.fig, view.update, interval=view.rate, blit=False)
view.display()
    except Exception:
        # window closed or animation interrupted
        pass
if __name__ == "__main__":
# debug
import Load
a = View(Load.BodyData(5))
animate(a)
| Qirky/PyKinectTk | PyKinectTk/utils/3D.py | Python | gpl-3.0 | 2,821 |
# This file is part of Indico.
# Copyright (C) 2002 - 2017 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
import json
from flask_pluginengine import current_plugin
def get_json_from_remote_server(func, default={}, **kwargs):
"""
Safely manage calls to the remote server by encapsulating JSON creation
from Piwik data.
"""
rawjson = func(**kwargs)
try:
data = json.loads(rawjson)
if isinstance(data, dict) and data.get('result') == 'error':
current_plugin.logger.error('The Piwik server responded with an error: %s', data['message'])
return {}
return data
except Exception:
current_plugin.logger.exception('Unable to load JSON from source %s', rawjson)
return default
def reduce_json(data):
"""Reduce a JSON object"""
return reduce(lambda x, y: int(x) + int(y), data.values())
def stringify_seconds(seconds=0):
"""
Takes time as a value of seconds and deduces the delta in human-readable
HHh MMm SSs format.
"""
seconds = int(seconds)
minutes = seconds / 60
ti = {'h': 0, 'm': 0, 's': 0}
if seconds > 0:
ti['s'] = seconds % 60
ti['m'] = minutes % 60
ti['h'] = minutes / 60
return "%dh %dm %ds" % (ti['h'], ti['m'], ti['s'])
| nop33/indico-plugins | piwik/indico_piwik/queries/utils.py | Python | gpl-3.0 | 1,914 |
"""Test MQTT connections."""
import unittest
from infopanel import mqtt
from infopanel.tests import load_test_config
class TestMqtt(unittest.TestCase):
"""Test connectivity with MQTT."""
@classmethod
def setUpClass(cls):
cls.conf = load_test_config()
def setUp(self):
"""Set up each test."""
data = {}
self.client = mqtt.MQTTClient(data, self.conf["mqtt"])
@unittest.skip(
"Something wrong with the test.mosquitto.org connection from travis ci"
)
def test_connect(self):
"""
Make sure we can connect.
This relies on the test.mosquitto.org test server.
"""
self.client.start()
self.client.stop()
if __name__ == "__main__":
# import sys;sys.argv = ['', 'Test.testName']
unittest.main()
| partofthething/infopanel | infopanel/tests/test_mqtt.py | Python | gpl-3.0 | 818 |
# Author: Paul Wollaston
# Contributions: Luke Mullan
#
# This client script allows connection to Deluge Daemon directly, completely
# circumventing the requirement to use the WebUI.
import json
import traceback
from base64 import b64encode
import sickbeard
from sickbeard import logger
from .generic import GenericClient
from synchronousdeluge import DelugeClient
class DelugeDAPI(GenericClient):
drpc = None
def __init__(self, host=None, username=None, password=None):
super(DelugeDAPI, self).__init__('DelugeD', host, username, password)
def _get_auth(self):
if not self.connect():
return None
return True
def connect(self, reconnect = False):
hostname = self.host.replace("/", "").split(':')
if not self.drpc or reconnect:
self.drpc = DelugeRPC(hostname[1], port = hostname[2], username = self.username, password = self.password)
return self.drpc
def _add_torrent_uri(self, result):
label = sickbeard.TORRENT_LABEL
if result.show.is_anime:
label = sickbeard.TORRENT_LABEL_ANIME
options = {
'add_paused': sickbeard.TORRENT_PAUSED
}
remote_torrent = self.drpc.add_torrent_magnet(result.url, options, result.hash)
if not remote_torrent:
return None
result.hash = remote_torrent
return remote_torrent
def _add_torrent_file(self, result):
label = sickbeard.TORRENT_LABEL
if result.show.is_anime:
label = sickbeard.TORRENT_LABEL_ANIME
        if not result.content:
            return None
options = {
'add_paused': sickbeard.TORRENT_PAUSED
}
remote_torrent = self.drpc.add_torrent_file(result.name + '.torrent', result.content, options, result.hash)
if not remote_torrent:
return None
result.hash = remote_torrent
return remote_torrent
def _set_torrent_label(self, result):
label = sickbeard.TORRENT_LABEL
if result.show.is_anime:
label = sickbeard.TORRENT_LABEL_ANIME
if ' ' in label:
logger.log(self.name + u': Invalid label. Label must not contain a space', logger.ERROR)
return False
if label:
if self.drpc.set_torrent_label(result.hash, label):
return True
return False
def _set_torrent_ratio(self, result):
return True
def _set_torrent_path(self, result):
path = sickbeard.TORRENT_PATH
if path:
if self.drpc.set_torrent_path(result.hash, path):
return True
return False
def _set_torrent_pause(self, result):
if sickbeard.TORRENT_PAUSED:
return self.drpc.pause_torrent(result.hash)
return True
def testAuthentication(self):
if self.connect(True) and self.drpc.test():
return True, 'Success: Connected and Authenticated'
else:
return False, 'Error: Unable to Authenticate! Please check your config!'
class DelugeRPC(object):
host = 'localhost'
port = 58846
username = None
password = None
client = None
def __init__(self, host = 'localhost', port = 58846, username = None, password = None):
super(DelugeRPC, self).__init__()
self.host = host
self.port = port
self.username = username
self.password = password
def connect(self):
self.client = DelugeClient()
self.client.connect(self.host, int(self.port), self.username, self.password)
def test(self):
try:
self.connect()
except:
return False
return True
def add_torrent_magnet(self, torrent, options, torrent_hash):
torrent_id = False
try:
self.connect()
torrent_id = self.client.core.add_torrent_magnet(torrent, options).get()
if not torrent_id:
torrent_id = self._check_torrent(torrent_hash)
except Exception as err:
return False
finally:
if self.client:
self.disconnect()
return torrent_id
def add_torrent_file(self, filename, torrent, options, torrent_hash):
torrent_id = False
try:
self.connect()
torrent_id = self.client.core.add_torrent_file(filename, b64encode(torrent), options).get()
if not torrent_id:
torrent_id = self._check_torrent(torrent_hash)
except Exception as err:
return False
finally:
if self.client:
self.disconnect()
return torrent_id
def set_torrent_label(self, torrent_id, label):
try:
self.connect()
self.client.label.set_torrent(torrent_id, label).get()
except Exception as err:
            logger.log('DelugeD: Failed to set label for torrent: ' + str(err) + ' ' + traceback.format_exc(), logger.ERROR)
return False
finally:
if self.client:
self.disconnect()
return True
def set_torrent_path(self, torrent_id, path):
try:
self.connect()
self.client.core.set_torrent_move_completed_path(torrent_id, path).get()
self.client.core.set_torrent_move_completed(torrent_id, 1).get()
except Exception as err:
            logger.log('DelugeD: Failed to set path for torrent: ' + str(err) + ' ' + traceback.format_exc(), logger.ERROR)
return False
finally:
if self.client:
self.disconnect()
return True
def pause_torrent(self, torrent_ids):
try:
self.connect()
self.client.core.pause_torrent(torrent_ids).get()
except Exception as err:
            logger.log('DelugeD: Failed to pause torrent: ' + str(err) + ' ' + traceback.format_exc(), logger.ERROR)
return False
finally:
if self.client:
self.disconnect()
return True
def disconnect(self):
self.client.disconnect()
def _check_torrent(self, torrent_hash):
torrent_status = self.client.core.get_torrent_status(torrent_hash, {}).get()
if torrent_status and torrent_status.get('hash'):
logger.log('DelugeD: Torrent already exists in Deluge', logger.DEBUG)
return torrent_hash
return False
api = DelugeDAPI()
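#
# Hedged usage sketch, not part of SickRage itself: exercising the DelugeRPC
# wrapper above directly. The host, port and credentials below are
# placeholders, not values read from the application's configuration.
#
# drpc = DelugeRPC(host='localhost', port=58846, username='deluge', password='deluge')
# print drpc.test()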
| eXistenZNL/SickRage | sickbeard/clients/deluged_client.py | Python | gpl-3.0 | 6,499 |
from core.engine import hplayer
# PLAYER
player = hplayer.addplayer('mpv', 'gadagne')
# Interfaces
player.addInterface('osc', 4000, 4001)
player.addInterface('http', 8080)
# player.addInterface('gpio', [16,19,20,21,26])
# GADAGNE logic
defaultFile = 'media0.mp4'
push1File = 'media1.mp4'
push2File = 'media2.mp4'
push3File = 'media3.mp4'
# Loop default file
player.on('end', lambda: player.play(defaultFile))
# HTTP + GPIO events
player.on(['push1', 'gpio20'], lambda: player.play(push1File))
player.on(['push2', 'gpio21'], lambda: player.play(push2File))
player.on(['push3', 'gpio26'], lambda: player.play(push3File))
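# Hedged example (not in the original profile): a fourth trigger could be
# bound the same way; 'push4', gpio 19 and 'media4.mp4' are hypothetical.
# player.on(['push4', 'gpio19'], lambda: player.play('media4.mp4'))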
fails = 5
# RUN
hplayer.setBasePath("/home/pi/Videos/")
hplayer.run()
| Hemisphere-Project/HPlayer2 | profiles/_legacy/gadagne.py | Python | gpl-3.0 | 696 |
#!/usr/bin/env python3
"""
Loader
import modules from zip files that are stored in ./libs/ directory
author: Steve Göring
contact: [email protected]
2014
"""
import os
import sys
for m in filter(lambda x: ".zip" in x, os.listdir(os.path.dirname(os.path.realpath(__file__)) + "/libs")):
sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)) + "/libs/" + m)
sys.path.insert(0, os.path.dirname(os.path.realpath(__file__)) + "/libs/")
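# Hedged usage sketch: once the paths above are prepended, a package shipped
# as ./libs/<name>.zip imports like any other module; 'examplepkg' below is a
# hypothetical name, not a real dependency of this project.
# import examplepkg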
| stg7/papyrus | loader.py | Python | gpl-3.0 | 464 |
# Name: mapper_opendap_sentinel1.py
# Purpose: Nansat mapping for ESA Sentinel-1 data from the Norwegian ground segment
# Author: Morten W. Hansen
# Licence: This file is part of NANSAT. You can redistribute it or modify
# under the terms of GNU General Public License, v.3
# http://www.gnu.org/licenses/gpl-3.0.html
import os
from datetime import datetime
import json
import warnings
import numpy as np
from netCDF4 import Dataset
from nansat.utils import gdal
try:
import scipy
except ImportError:
IMPORT_SCIPY = False
else:
IMPORT_SCIPY = True
import pythesint as pti
from nansat.mappers.sentinel1 import Sentinel1
from nansat.mappers.opendap import Opendap
from nansat.vrt import VRT
from nansat.nsr import NSR
from nansat.utils import initial_bearing
from nansat.exceptions import NansatReadError
class Mapper(Opendap, Sentinel1):
baseURLs = [
'http://nbstds.met.no/thredds/dodsC/NBS/S1A',
'http://nbstds.met.no/thredds/dodsC/NBS/S1B',
]
timeVarName = 'time'
xName = 'x'
yName = 'y'
timeCalendarStart = '1981-01-01'
srcDSProjection = NSR().wkt
def __init__(self, filename, gdal_dataset, gdal_metadata, date=None,
ds=None, bands=None, cachedir=None, *args, **kwargs):
self.test_mapper(filename)
if not IMPORT_SCIPY:
raise NansatReadError('Sentinel-1 data cannot be read because scipy is not installed')
timestamp = date if date else self.get_date(filename)
self.create_vrt(filename, gdal_dataset, gdal_metadata, timestamp, ds, bands, cachedir)
Sentinel1.__init__(self, filename)
self.add_calibrated_nrcs(filename)
self.add_nrcs_VV_from_HH(filename)
def add_calibrated_nrcs(self, filename):
layer_time_id, layer_date = Opendap.get_layer_datetime(None,
self.convert_dstime_datetimes(self.get_dataset_time()))
polarizations = [self.ds.polarisation[i:i+2] for i in range(0,len(self.ds.polarisation),2)]
for pol in polarizations:
dims = list(self.ds.variables['dn_%s' %pol].dimensions)
dims[dims.index(self.timeVarName)] = layer_time_id
src = [
self.get_metaitem(filename, 'Amplitude_%s' %pol, dims)['src'],
self.get_metaitem(filename, 'sigmaNought_%s' %pol, dims)['src']
]
dst = {
'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
'PixelFunctionType': 'Sentinel1Calibration',
'polarization': pol,
'suffix': pol,
}
self.create_band(src, dst)
self.dataset.FlushCache()
def add_nrcs_VV_from_HH(self, filename):
if 'Amplitude_HH' not in self.ds.variables:
return
layer_time_id, layer_date = Opendap.get_layer_datetime(None,
self.convert_dstime_datetimes(self.get_dataset_time()))
dims = list(self.ds.variables['dn_HH'].dimensions)
dims[dims.index(self.timeVarName)] = layer_time_id
src = [
self.get_metaitem(filename, 'Amplitude_HH', dims)['src'],
self.get_metaitem(filename, 'sigmaNought_HH', dims)['src'],
{'SourceFilename': self.band_vrts['inciVRT'].filename, 'SourceBand': 1}
]
dst = {
'wkv': 'surface_backwards_scattering_coefficient_of_radar_wave',
'PixelFunctionType': 'Sentinel1Sigma0HHToSigma0VV',
'polarization': 'VV',
'suffix': 'VV'}
self.create_band(src, dst)
self.dataset.FlushCache()
@staticmethod
def get_date(filename):
"""Extract date and time parameters from filename and return
it as a formatted (isoformat) string
Parameters
----------
filename: str
Sentinel-1 product filename, whose fifth underscore-separated
field is the start time as YYYYmmddTHHMMSS
Returns
-------
str, YYYY-mm-ddThh:MMZ
"""
_, filename = os.path.split(filename)
t = datetime.strptime(filename.split('_')[4], '%Y%m%dT%H%M%S')
return datetime.strftime(t, '%Y-%m-%dT%H:%M:%SZ')
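# Hedged example (added note): for a hypothetical product name such as
# 'S1A_IW_GRDH_1SDV_20170201T055000_...', field 5 (index 4) is the start
# timestamp, so get_date returns '2017-02-01T05:50:00Z'.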
def convert_dstime_datetimes(self, ds_time):
"""Convert time variable to np.datetime64"""
ds_datetimes = np.array(
[(np.datetime64(self.timeCalendarStart).astype('M8[s]')
+ np.timedelta64(int(sec), 's').astype('m8[s]')) for sec in ds_time]).astype('M8[s]')
return ds_datetimes
def get_geotransform(self):
""" Return fake and temporary geotransform. This will be replaced by gcps in
Sentinel1.__init__
"""
xx = self.ds.variables['lon'][0:100:50, 0].data
yy = self.ds.variables['lat'][0, 0:100:50].data
return xx[0], xx[1]-xx[0], 0, yy[0], 0, yy[1]-yy[0]
| nansencenter/nansat | nansat/mappers/mapper_opendap_sentinel1.py | Python | gpl-3.0 | 4,824 |
import sys
import os
import io
from pkg_resources import parse_version
import wx
if parse_version(wx.__version__) < parse_version('2.9'):
tmpApp = wx.PySimpleApp()
else:
tmpApp = wx.App(False)
from psychopy import experiment
from psychopy.experiment.components import getAllComponents
# usage: generate or compare all Component.param settings & options
# motivation: catch deviations introduced during refactoring
# use --out to re-generate componsTemplate.txt
# ignore attributes that are there because they are inherited from object
ignoreObjectAttribs = True
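# Hedged usage sketch of the two modes described above (invocations are
# illustrative):
#   python genComponsTemplate.py --out   # regenerate componsTemplate.txt
#   python genComponsTemplate.py         # compare current params against it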
# should not need a wx.App with fetchIcons=False
try:
allComp = getAllComponents(fetchIcons=False)
except Exception:
import wx
if parse_version(wx.__version__) < parse_version('2.9'):
tmpApp = wx.PySimpleApp()
else:
tmpApp = wx.App(False)
try:
from psychopy.app import localization
except Exception:
pass # not needed if can't import it
allComp = getAllComponents(fetchIcons=False)
exp = experiment.Experiment()
relPath = os.path.join(os.path.split(__file__)[0], 'componsTemplate.txt')
if '--out' not in sys.argv:
with io.open(relPath, 'r', encoding='utf-8-sig') as f:
target = f.read()
targetLines = target.splitlines()
targetTag = {}
for line in targetLines:
try:
t, val = line.split(':',1)
targetTag[t] = val
except ValueError:
# need more than one value to unpack; this is a weak way to
# handle multi-line default values, eg TextComponent.text.default
targetTag[t] += '\n' + line # previous t value
else:
outfile = open(relPath,'w')
param = experiment.Param('', '') # want its namespace
ignore = ['__doc__', '__init__', '__module__', '__str__', 'next']
if '--out' not in sys.argv:
# these are for display only (cosmetic) but no harm in gathering initially:
ignore += ['hint',
'label', # comment-out to not ignore labels when checking
'categ'
]
for field in dir(param):
if field.startswith("__"):
ignore.append(field)
fields = set(dir(param)).difference(ignore)
mismatches = []
for compName in sorted(allComp):
comp = allComp[compName](parentName='x', exp=exp)
order = '%s.order:%s' % (compName, comp.order)
out = [order]
if '--out' in sys.argv:
outfile.write(order+'\n')
elif not order+'\n' in target:
tag = order.split(':', 1)[0]
try:
err = order + ' <== ' + targetTag[tag]
except KeyError: # missing
err = order + ' <==> NEW (no matching param in original)'
print(err)
mismatches.append(err)
for parName in sorted(comp.params):
# default is what you get from param.__str__, which returns its value
default = '%s.%s.default:%s' % (compName, parName, comp.params[parName])
out.append(default)
lineFields = []
for field in sorted(fields):
if parName == 'name' and field == 'updates':
continue
# ignore: never want to change the name *during an experiment*
# the default name.updates value varies across components
# skip private attributes
if field.startswith("_"):
continue
# get value of the field
fieldValue = str(getattr(comp.params[parName], field))
# remove memory address from the string representation
if "at 0x" in fieldValue:
fieldValue = fieldValue.split(" at 0x")[0] + ">"
f = '%s.%s.%s:%s' % (compName, parName, field, fieldValue)
lineFields.append(f)
for line in [default] + lineFields:
if '--out' in sys.argv:
if not ignoreObjectAttribs:
outfile.write(line+'\n')
else:
if (not ":<built-in method __" in line and
not ":<method-wrapper '__" in line and
not ":<bound method " in line):
outfile.write(line+'\n')
elif not line+'\n' in target:
# mismatch, so report on the tag from orig file
# match checks tag + multi-line
# because line is multi-line and target is whole file
tag = line.split(':', 1)[0]
try:
err = line + ' <== ' + targetTag[tag]
except KeyError: # missing
err = line + ' <==> NEW (no matching param in original)'
print(err)
mismatches.append(err)
# return mismatches
| psychopy/psychopy | psychopy/tests/test_experiment/needs_wx/genComponsTemplate.py | Python | gpl-3.0 | 4,702 |
# This file is part of PARPG.
# PARPG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# PARPG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with PARPG. If not, see <http://www.gnu.org/licenses/>.
from fife.extensions.pychan import Icon
class Slot(Icon):
def _setImage(self, source):
self._image = source
def _getImage(self):
return self._image
image = property(_getImage, _setImage)
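# Hedged usage note (not from the original file): the property simply stores
# and returns the assigned source, e.g. slot.image = 'gui/icons/empty_slot.png'
# (the path here is hypothetical).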
| parpg/parpg | parpg/gui/slot.py | Python | gpl-3.0 | 889 |
#!/usr/bin/python
# This file is part of PARPG.
# PARPG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# PARPG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with PARPG. If not, see <http://www.gnu.org/licenses/>.
"""Containes classes defining concrete container game objects like crates,
barrels, chests, etc."""
__all__ = ["WoodenCrate",]
from composed import ImmovableContainer
class WoodenCrate(ImmovableContainer):
def __init__ (self, ID, name = 'Wooden Crate', \
text = 'A battered crate', gfx = 'crate', **kwargs):
ImmovableContainer.__init__(self, ID = ID, name = name, gfx = gfx, \
text = text, **kwargs)
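# Hedged example (not in the original file): WoodenCrate(ID='crate01') creates
# a container with the default name, text and gfx above; the ID is illustrative.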
| orlandov/parpg-game | scripts/objects/containers.py | Python | gpl-3.0 | 1,128 |
#
# FLOGGER
#
# This program reads records from the OGN network processing only
# those received from a specified site and registration marks, eg aircraft belonging to
# a specific club.
# It writes each record to a database and at the end of each day process
# them to determine the flight times of each flight for each machine.
# Phase 1 will just collect the data.
# Phase 2 will process the data into a new table
# Phase 3 will then format that information with the intention of
# it being used to be checked against the manual log books.
# Phase 4 will remove old flight and track file older than a certain date
# The intention is that it will collect the data between the hours of daylight,
# producing the summary at the end of the day.
# This program could be run on a Raspberry Pi as it is so low powered
#
# Altitude in metres.
# Land speed in km/h.
# Latitude, west is negative decimal degrees.
# Longitude, south is negative decimal degrees.
#
# This program is covered by the GNU GENERAL PUBLIC LICENSE.
# See the file 'LICENSE' for details
#
#
# 20150312: First working version
# Usage: Run flogger.py to collect the daily flight data then
# run process.py which processes the raw data into a table flights in the database flogger.sql3
# This first version is very experimental; it is a proof of concept of the processes. The code needs to
# be 'improved'.
# To be done: 1) The program should be run each day between 0900 and sunset. This should be handled by cron
# to start the program at a time specified in settings which then calculates sunrise and suspends
# until then. Once running the program determines sunset and stopping itself at that time. It also needs
# to handle power outages (not sure how at the moment)
# 2) The Flarm code to registration code needs to addressed using OGNs new database.
# 20150505 Second working version
# Only need to run flogger.py, it now handles collection of data during daylight hours and processes
# after sunset (assumes gliders only fly during daylight hours)
# Now reads aircraft registration data from Flarmnet to build own internal table
# 20150515 Third working version
# 1) APRS user and APRS passcode have to be supplied on the command line and not in settings
# 2) Changes to flogger_process_log_old to correct errors - still in testing
#
# 20150520 Fourth working version (V0.1.0)
# 1) On aircraft stop set altitude to initial value else highest value for any flight of the day
# will be the one compared against as the maximum and not the max for a specific flight.
# Bug 20150520-1 Assigned
# 2) Flights table only contains flights for one day and not all previous days flights
# Bug 20150520-2 Assigned
#
# 20150527 Fifth working version (V0.1.1)
# Test version for:
# 1) Bug 20150520-1
# 2) Bug 20150520-2
#
# 20150529 First beta test version (V0.2.0)
# 1) Bug 20150520-1 Solved
# 2) Bug 20150520-2 Solved
# 3) Enhancement - dump days flights table as .csv file
#
# 20150530 Correction to first beta test version (V0.2.1)
# 1) Correction to dump flights to .csv - to make it work!
#
# 20150604 Added enhancements to version V0.2 (V0.2.2)
# 1) Allowance for short duration flight
# 2) Use of geocoding to determine airfield position data - proposed by D.Spreitz
#
# To be done: 1) Tidy up code, remove all redundant testing comments
# 2) A lot more testing - some features might still not work!
# 3) Consider how this may be run as a service with standard start, stop etc options
# 4) Consider adding full logging with levels
# 5) Review the algorithm to determine if aircraft is on the ground. At the moment it determines
# this by the GPS ground speed being zero (ie below a defined value); the ground speed could be zero
# if the wind speed and airspeed are the same but opposite, eg when ridge flying. The algorithm could use
# the altitude as well, eg if ground speed is zero but altitude is greater than home airfield altitude then
# 'we're flying'. Note this still has issues!
# 6) Need to consider sending 'keep alives' when in the sleep state. Solved, not needed
# 7) There's a problem concerning character codes when building the flarm database which needs solving, only show in 1 record
#
# 20160208 1) Add modification to sequence tracks per flight by flarm record timestamp. Using multiple beacons can result in
# track points that are out of sequence when based on order received due to Internet time delays, hence
# use the GPS timestamp recorded in the data taken and sent by flarm (assumes timestamp is from Flarm!).
# 2) Also added graceful exit on Cntrl-C
#
# 20160323 1) Added optional output of track data in IGC format
# 2) Added optional deletion of old flight .csv and track .csv/.igc files
#
# 20160514 1) Use $ pipreqs --force /path/to/project to generate requirements.txt for pip install
#
# 20160518 1) Added attempt to load earlier version Linux libfap if current fails
#
# 20161026 1) Added flogger_find_tug code. This tries to determine which tug, if any, launched a particular glider.
# Note this doesn't always get the right result, but then nor does OGN Flight Log! This could be due to tugs
# sometimes powering down if a launch is not imminent. Gliders are likely to be always powered on and Flarm operating.
# Hence when it becomes time to launch the tug powers up, Flarm is now on but takes some time for the signal to be
# acquired and put onto and processed by the APRS system. It is therefore possible for the launch to take place
# with the take-off times for tug and glider to be too far displaced (from the APRS data) for flogger-find-tug
# to determine the launch has happened. The solution is possibly to increase the time delta used between glider and
# tug take-off but this could result in false positives, some fine tuning maybe needed. Interested to know if
# OGN Flight Log has similar reasoning.
#
# 20161108 1) Rewrote phase 2 flight log processing to be much simpler. Phase 2 puts flights into the flight_group
# table such that all flights by a single aircraft have the same group id. This enables each flight to
# be determined to be a distinct flight from its predecessor or not.
#
# 20170201: 1) Added simple function test_YorN to test for Y|y or N|n
# 2) Started developing using Eclipse Neon.2 (4.6.2)
#
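#
# Hedged sketch (not part of the original program) of the ground/flying
# heuristic discussed above: treat the aircraft as flying if its ground speed
# exceeds a small threshold OR it is well above field elevation. The 10 km/h
# threshold and 50 m margin below are illustrative assumptions only:
#
# def probably_flying(speed_kph, altitude_m, field_elevation_m):
#     return speed_kph > 10.0 or altitude_m > field_elevation_m + 50.0
#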
import socket
#from libfap import *
#import flogger_settings
import string
import datetime
import time
import sqlite3
import pytz
from datetime import timedelta
import sys
from flarm_db import flarmdb
from pysqlite2 import dbapi2 as sqlite
from open_db import opendb
import ephem
#from flogger_process_log_old import process_log
#from flogger_process_log import process_log
import argparse
from flogger_dump_flights import dump_flights
from flogger_dump_tracks import dump_tracks2
from flogger_get_coords import get_coords
from flogger_signals import sig_handler
import signal
import os
import os.path
from flogger_dump_IGC import dump_IGC
from flogger_email_log import email_log2
from flogger_landout import landout_check
from geopy.distance import vincenty
from flogger_email_msg import email_msg
from flogger_find_tug import find_tug
from flogger_test_YorN import test_YorN
from flogger_gui import *
from flogger_settings import *
from threading import Thread
from flogger_aprs_parser import *
#
# This added to make it simpler after going to gui version
#
global settings
class flogger3(MyApp):
def __init__(self, interval=1):
print "init flogger3"
print "flogger3 initialized"
return
def flogger_run(self, settings):
print "flogger_run called"
print "settings.FLOGGER_SMTP_SERVER_URL: ", settings.FLOGGER_SMTP_SERVER_URL
# print "settings.FLOGGER_SMTP_SERVER_PORT: ", settings.FLOGGER_SMTP_SERVER_PORT
# print "settings.FLOGGER_DB_SCHEMA: ", settings.FLOGGER_DB_SCHEMA
self.thread = Thread(target=self.flogger_start, name= "flogger", args=(settings,))
print "Thread setup"
self.thread.daemon = True # Daemonize thread
self.thread.start()
print "flogger thread running"
return
def floggerStop(self):
print "floggerStop called"
# libfap_cleanup()
return
# def flogger_start(self, settings):
# def flogger_start(self, settings):
def flogger_start(self, local_settings):
print "flogger_start called\n"
settings = local_settings
# print "settings.FLOGGER_SMTP_SERVER_URL: ", settings.FLOGGER_SMTP_SERVER_URL
# print "settings.FLOGGER_SMTP_SERVER_PORT: ", settings.FLOGGER_SMTP_SERVER_PORT
# print "settings.FLOGGER_DB_SCHEMA: ", settings.FLOGGER_DB_SCHEMA
prev_vals = {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0}
nprev_vals = {"G-CKLW": {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0, 'maxA': 0},
"G-CKFN": {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0, 'maxA': 0}
}
values = {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0}
nvalues = {"G-CKLW": {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0, 'maxA': 0},
"G-CKFN": {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0, 'maxA': 0}
}
L_SMALL = float(0.001) # Small latitude or longitude delta of a 0.001 degree
A_SMALL = float(0.01) # Small altitude delta of 0.01 a metre, ie 1cm
V_SMALL = float(settings.FLOGGER_V_SMALL) # Small velocity delta of 10.0 kph counts as zero ie not moving
V_TAKEOFF_MIN = float(settings.FLOGGER_V_TAKEOFF_MIN)
V_LANDING_MIN = float(settings.FLOGGER_V_LANDING_MIN)
frst_time = False
AIRFIELD = "SuttonBnk"
flight_no = {} # A dictionary {callsign: flight_no}
track_no = {} # A dictionary {callsign: track_no}
# Coded 001-099: Gliders,
# 101-199: Tugs,
# 201-299: Motor Gliders,
# 301-399: Other
aircraft = {"G-CKLW": 1, "G-CKLN": 2, "G-CJVZ": 3, "G-CHEF": 4, "G-CKFN": 5,
"G-CHVR": 6, "G-CKJH": 7, "G-CKRN": 8, "G-CGBK": 9, "G-CDKC": 10,
"G-BFRY": 101, "G-BJIV": 102, "G-MOYR": 103,
"G-OSUT": 201,
"FLRDDF9C4": 301, "FLRDDE5FC": 302, "FLRDDBF13": 303, "FLRDDA884": 304, "FLRDDA886": 305, "FLRDDACAE": 306, "FLRDDA7E9": 307,
"FLRDDABF7": 308, "FLRDDE671": 309}
def CheckPrev(callsignKey, dataKey, value):
print "CheckPrev if callsign in nprev_vals: ", callsignKey, " key: ", dataKey, " Value: ", value
if nprev_vals.has_key(callsignKey) == 1:
print "nprev_vals already has entry: ", callsignKey
else:
print "nprev_vals doesn't exist for callsignKey: ", callsignKey
nprev_vals[callsignKey] = {}
nprev_vals[callsignKey] = {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0, 'maxA': 0}
nprev_vals[callsignKey][dataKey] = value
print "nprev_vals for callsignKey: ", callsignKey, " is: ", nprev_vals[callsignKey]
# print "nprev_vals is now: ", nprev_vals
return
def CheckVals(callsignKey, dataKey, value):
print "CheckVals if callsign in nvalues: ", callsignKey, " key: ", dataKey, " Value: ", value
if nvalues.has_key(callsignKey) == 1:
print "nvalues already has entry: ", callsignKey
else:
print "nvalues doesn't exist for callsignKey: ", callsignKey
nvalues[callsignKey] = {}
nvalues[callsignKey] = {'latitude': 0, 'longitude': 0, "altitude": 0, "speed": 0, 'maxA': 0}
nvalues[callsignKey][dataKey] = value
print "nvalues for callsignKey: ", callsignKey, " is: ", nvalues[callsignKey]
# print "nvalues is now: ", nvalues
return
def isDayLight ():
return True
def fleet_check(call_sign):
if aircraft.has_key(call_sign):
return True
else:
return False
def comp_vals(set1, set2):
# Works out if the difference in positions is small and both speeds are close to zero
# Return True is yes and False if no
# Set1 are new values, set2 old values
print "Set1 value for key latitude is: ", set1["latitude"], " value: ", float(set1["latitude"])
# lat1 = float(set1["latitude"])
# lat2 = float(set2["latitude"])
delta_latitude = float(set1["latitude"]) - float(set2["latitude"])
delta_longitude = float(set1["longitude"]) - float(set2["longitude"])
delta_altitude = float(set1["altitude"]) - float(set2["altitude"])
delta_speed = float(set1["speed"]) - float(set2["speed"])
print "Delta positions. Lat: ", delta_latitude, " Long: ", delta_longitude, " Alt: ", delta_altitude, " Speed: ", delta_speed
# if (delta_latitude < L_SMALL) and (delta_longitude < L_SMALL) and (delta_altitude < A_SMALL) and (delta_speed < V_SMALL):
if delta_speed <> 0.0:
print "Delta speed not zero, check others"
# if (delta_latitude == 0.0) and (delta_longitude == 0.0) and (delta_altitude == 0.0) and (delta_speed == 0.0):
if (delta_latitude == 0.0) and (delta_longitude == 0.0) and (delta_altitude == 0.0):
print "Positions same"
return True
else:
print "Positions different"
return False
else:
print "Delta speed zero, return same"
return True
def set_keepalive(sock, after_idle_sec=1, interval_sec=3, max_fails=5):
"""Set TCP keepalive on an open socket.
It activates after 1 second (after_idle_sec) of idleness,
then sends a keepalive ping once every 3 seconds (interval_sec),
and closes the connection after 5 failed ping (max_fails), or 15 seconds
"""
print "set_keepalive for idle after: ", after_idle_sec
sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, after_idle_sec)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, interval_sec)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, max_fails)
return
def is_dst(zonename):
# Determine if daylight saving time (DST) is currently in effect in zonename
tz = pytz.timezone(zonename)
now = pytz.utc.localize(datetime.datetime.utcnow())
return now.astimezone(tz).dst() != timedelta(0)
def fleet_check_new(callsign):
#
# This has become a little confusing! If FLOGGER_FLEET_CHECK == n|N then FLOGGER_AIRFIELD_NAME is not used in
# the flarm_db search so a non-fleet aircraft can be found, but the later code checks whether the aircraft
# has taken off at FLOGGER_AIRFIELD_NAME; if it hasn't it won't be included in the flights, if it has it will.
#
# This logic and code needs to be re-thought!
# Note, there is a difference between aircraft registered to a location and in a designated 'fleet' for
# that location and whether the aircraft has taken off from a location.
# The fleet_check is intended to check whether an aircraft is a member of a designated fleet, not whether
# it has taken off from the designated location. The intention if the fleet check is to enable recording only
# flights undertaken by the club fleet.
#
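# Hedged illustration of that distinction (added note, not original): fleet
# membership ("is this callsign in flarm_db, optionally for
# FLOGGER_AIRFIELD_NAME?") is decided here, whereas "did it take off here?"
# is decided later in the main loop by comparing takeoff_dist against
# settings.FLOGGER_AIRFIELD_LIMIT.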
print "In fleet check for: ", callsign
# cursor.execute('''SELECT ROWID FROM aircraft WHERE registration =? or flarm_id=? ''', (callsign,callsign,))
# row = cursor.fetchone()
# flarm_id = callsign[3:]
# print "search for flarm_id: ", flarm_id
# cursor.execute('''SELECT ROWID FROM flarm_db WHERE flarm_id =?''', (flarm_id,))
# if settings.FLOGGER_FLEET_CHECK == "N" or settings.FLOGGER_FLEET_CHECK == "n":
if not test_YorN(settings.FLOGGER_FLEET_CHECK):
print "Fleet Check: ", settings.FLOGGER_FLEET_CHECK
fleet_name = "Fleet Name: Not used"
cursor.execute('''SELECT ROWID, registration FROM flarm_db WHERE registration =? OR flarm_id =? ''', (callsign,callsign[3:],))
else:
print "Fleet Check for Airfield: ", settings.FLOGGER_AIRFIELD_NAME
fleet_name = settings.FLOGGER_AIRFIELD_NAME
cursor.execute('''SELECT ROWID FROM flarm_db WHERE registration =? OR flarm_id =? AND airport=?''', (callsign,callsign[3:],settings.FLOGGER_AIRFIELD_NAME,))
#cursor.execute('''SELECT ROWID FROM flarm_db WHERE registration =? OR flarm_id =? AND airport=?''', (callsign,callsign[3:],settings.FLOGGER_AIRFIELD_NAME,))
row1 = cursor.fetchone()
if row1 == None:
print "Registration not found in flarm_db: ", callsign, " for: ", fleet_name
return False
else:
print "Aircraft: ", callsign, " found in flarm db at: ", row1[0], " for: ", fleet_name
reg = callsign_trans(callsign)
# if settings.FLOGGER_FLEET_CHECK <> "N":
print "settings.FLOGGER_FLEET_CHECK: ", settings.FLOGGER_FLEET_CHECK
# if not test_YorN(settings.FLOGGER_FLEET_CHECK):
if test_YorN(settings.FLOGGER_FLEET_CHECK):
# if settings.FLOGGER_FLEET_LIST[reg] > 100 and settings.FLOGGER_FLEET_LIST[reg] < 200 and settings.FLOGGER_LOG_TUGS == "N":
if settings.FLOGGER_FLEET_LIST[reg] > 100 and settings.FLOGGER_FLEET_LIST[reg] < 200 and (not test_YorN(settings.FLOGGER_LOG_TUGS)):
print "Don't log tug: %s" % reg
return False
else:
print "Tug flight: ", reg
# At least 1 match for the callsign has been found
return True
def callsign_trans(callsign):
# Translates a callsign supplied as a flarm_id
# into the aircraft registration using a local db based on flarmnet or OGN
# Note if OGN db is being used then callsigns don't start with FLR or ICA, this is denoted by the 'Type' field
# cursor.execute('''SELECT registration, flarm_id FROM aircraft WHERE registration =? or flarm_id=? ''', (callsign,callsign,))
if callsign.startswith("FLR") or callsign.startswith("ICA") :
# Callsign starts with "FLR" or ICA so remove it
str = callsign[3:]
ncallsign = "%s" % str
print "Removing FLR or ICA string. Callsign is now: ", ncallsign
else:
ncallsign = "%s" % callsign
cursor.execute('''SELECT registration FROM flarm_db WHERE flarm_id=? ''', (ncallsign,))
row = cursor.fetchone()
if row <> None:
# Registration found for flarm_id so return registration
registration = "%s" % row
print "In flarm db return: ", registration
return registration
else:
# Registration not found for flarm_id so return flarm_id
print "Not in flarm db return: ", callsign
return ncallsign
def APRS_connect (settings):
#
#-----------------------------------------------------------------
# Connect to the APRS server to receive flarm data
#-----------------------------------------------------------------
#
# create socket & connect to server
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
set_keepalive(sock, after_idle_sec=60, interval_sec=3, max_fails=5)
sock.connect((settings.APRS_SERVER_HOST, settings.APRS_SERVER_PORT))
except Exception, e:
print "Socket failure on connect: ", e
print "Socket sock connected"
try:
# sock.send('user %s pass %s vers OGN_Flogger 0.0.2 filter r/+54.228833/-1.209639/25\n ' % (settings.APRS_USER, settings.APRS_PASSCODE))
APRSparm = ('user %s pass %s vers %s %s filter r/%s/%s/%s\n ' % (settings.APRS_USER,
settings.APRS_PASSCODE,
settings.FLOGGER_NAME,
settings.FLOGGER_VER,
settings.FLOGGER_LATITUDE,
settings.FLOGGER_LONGITUDE,
settings.FLOGGER_RAD))
# print "APRSparm is: ", APRSparm
# s = "user %s pass %s vers OGN_Flogger 0.2.2 filter r/%s/%s/25\n " % (settings.APRS_USER, settings.APRS_PASSCODE, settings.FLOGGER_LATITUDE, settings.FLOGGER_LONGITUDE)
# print "Socket connect string is: ", s
sock.send(APRSparm)
except Exception, e:
print "Socket send failure: ", e
exit()
print "Socket send ok"
# Make the connection to the server
# start_time = datetime.datetime.now()
# keepalive_time = time.time()
# sock_file = sock.makefile()
print "APRS connection made"
return sock
def addTrack(cursor,flight_no,track_no,longitude,latitude,altitude,course,speed,timeStamp):
#
#-----------------------------------------------------------------
# Add gps track data to track record if settings.FLOGGER_TRACK is "Y" ie yes
# and if flight_no != None which it will be if flight has not taken off at FLOGGER_AIRFIELD_NAME
#-----------------------------------------------------------------
#
# dt = str(datetime.datetime.now()) # Get the datetime this track point is created as string
# sdt = dt[0:10] + "T" + dt[11:19] + "Z" # Convert to string format for gpx, ie YYYY-MM-DDTHH:MM:SSZ
# sdt = "%sT%sZ" % (dt[0:10],dt[11:19]) # Convert to string format for gpx, ie YYYY-MM-DDTHH:MM:SSZ
if settings.FLOGGER_TRACKS == "Y" and flight_no != None:
print "Flight_no is: ", flight_no
print "Track point nos is: ", track_no
# dt = str(datetime.datetime.now()) # Get the datetime this track point is created as string
# sdt = dt[0:10] + "T" + dt[11:19] + "Z" # Convert to string format for gpx, ie YYYY-MM-DDTHH:MM:SSZ
# This print doesn't work as one of the values is of none-type, not sure why?
# print "Adding track data to: %i, %i, %f, %f, %f, %f %f " % (flight_no,track_no,latitude,longitude,altitude,course,speed)
try:
cursor.execute('''INSERT INTO track(flight_no,track_no,latitude,longitude,altitude,course,speed,timeStamp)
VALUES(:flight_no,:track_no,:latitude,:longitude,:altitude,:course,:speed,:timeStamp)''',
{'flight_no':flight_no,'track_no':track_no,'latitude':latitude,'longitude':longitude,'altitude':altitude,'course':course,'speed':speed,'timeStamp':timeStamp})
except:
print "Add trackpoint failed on insert: ignore trackpoint"
else:
print "Don't add track point"
return
def endTrack():
return
def CheckTrackData(cursor, flight_no, track_no, callsignKey):
# print "check flight_no if callsign in flight_no{}: ", flight_no, " Track_no is: ", track_no, " CallsignKey is: ", callsignKey
if flight_no.has_key(callsignKey) == 1:
print "flight_no already has entry: ", callsignKey
else:
try:
cursor.execute('''SELECT max(id) FROM flight_log2 WHERE src_callsign =?''', (callsignKey,))
except:
print "!!!ERROR - No record in flight_log2 for: ", callsignKey
# If this crashes need to think about adding record for flight_log2, but why?
exit()
row_id = cursor.fetchone()[0] # value of id for row just inserted use as flight_no for flight
print "Last row ID of flight_log2 for callsign: ", callsignKey, " inserted was: ", row_id
flight_no[callsignKey] = row_id
track_no[callsignKey] = 1
print "flight_no for callsignKey: ", callsignKey, " is: ", flight_no[callsignKey]
return
def check_position_packet (packet_str):
#
#-----------------------------------------------------------------
# This function determines if airfield is in the list of APRS
# base stations used for receiving position fixes.
#
# base_list should be set up as part of the main code initialisation
#-----------------------------------------------------------------
#
# for base in APRS_base_list:
for base in settings.FLOGGER_APRS_BASES:
if string.find(str(packet_str), base) <> -1:
print "Found in list of APRS base stations: ", base
return base
print "Not found base station in packet"
return -1
def delete_table (table):
#
#-----------------------------------------------------------------
# This function deletes the SQLite3 table
# with the name supplied by "table".
#-----------------------------------------------------------------
#
# print "delete_table. settings.FLOGGER_MODE: ", settings.FLOGGER_MODE
if settings.FLOGGER_MODE == "test":
print "Test only. Table %s not deleted" % (table)
return
parm = "DELETE FROM %s" % (table)
try:
cursor.execute(parm)
print "New Delete %s table ok" % (table)
except:
print "New Delete %s table failed or no records in tables" % (table)
return
def delete_flogger_file(folder, filename, days):
#
#-----------------------------------------------------------------
# This function deletes the files whose name contain filename in folder folder
# if they were created up to and including the number of days in the past
# specified by the days parameter.
# If days is zero then no deletions are performed
#-----------------------------------------------------------------
#
print "folder: ", folder
print "filename: ", filename
if days <= 0:
print "Don't delete old files, return"
return
now = time.time()
path = os.path.dirname(os.path.abspath(__file__))
if os.path.isdir(os.path.join(path, folder)):
# flist = os.listdir(folder)
flist = os.listdir(os.path.join(path, folder))
else:
print "Not found: ", folder
return
## print "delete flist: ", flist
for f in flist:
# print "Pathname is: ", os.path.join(folder, f), " st_mtime is: ", os.stat(os.path.join(folder, f)).st_mtime
full_file = os.path.join(folder, f)
file_find = string.find(full_file, filename) <> -1
file_time = os.stat(full_file).st_mtime
# print "File_find is: ", file_find, ". File_time is: ", file_time, "Now is: ", now - days * 86400
if (file_find == True) and (file_time <= (now - days * 86400)):
print "Delete file: ", full_file
os.remove(full_file)
# else:
# print "File not deleted: %s" % full_file
return
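# Hedged usage sketch (values illustrative): delete_flogger_file("tracks",
# "track", 30) would remove files under the tracks folder beside this script
# whose names contain "track" and whose modification time is at least 30 days
# in the past.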
def connect_APRS(sock):
#
#-----------------------------------------------------------------
#
# This function tries to shutdown the specified sock and if it
# fails closes it and then creates a new one and reconnects to the APRS system
#
#-----------------------------------------------------------------
#
try:
sock.shutdown(0)
except socket.error, e:
if 'not connected' in e:
print '*** Transport endpoint is not connected ***'
print "socket no longer open so can't be closed, create new one"
else:
print "Socket still open so close it"
sock.close()
print "Create new socket"
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.connect((settings.APRS_SERVER_HOST, settings.APRS_SERVER_PORT))
except Exception, e:
print "Connection refused. Errno: ", e
exit()
APRSparm = ('user %s pass %s vers %s %s filter r/%s/%s/%s\n ' % (settings.APRS_USER,
settings.APRS_PASSCODE,
settings.FLOGGER_NAME,
settings.FLOGGER_VER,
settings.FLOGGER_LATITUDE,
settings.FLOGGER_LONGITUDE,
settings.FLOGGER_RAD))
# print "APRSparm is: ", APRSparm
# sock.send('user %s pass %s vers Python_Example 0.0.1 filter r/+54.228833/-1.209639/25\n ' % (settings.APRS_USER, settings.APRS_PASSCODE))
sock.send(APRSparm)
# Make the connection to the server
sock_file = sock.makefile()
return sock_file
#
#-----------------------------------------------------------------
# Start of main code
#-----------------------------------------------------------------
#
print "FLOGGER_AIRFIELD_NAME from class is: " + settings.FLOGGER_AIRFIELD_NAME
# path = os.path.dirname(os.path.abspath(__file__))
settings.FLOGGER_BS = os.path.dirname(os.path.abspath(__file__))
settings.FLOGGER_TRACKS_FOLDER = settings.FLOGGER_BS + "/tracks" # Setup 'tracks' folder name - not defined in settings class anymore
settings.FLOGGER_FLIGHTS_LOG = settings.FLOGGER_BS + "/flight_logs"
print "settings.FLOGGER_TRACKS_FOLDER: ", settings.FLOGGER_TRACKS_FOLDER
#
# User and passcode now mandatory positional parameters
# Mode is an optional positional parameter, default is "live"
#
# try:
# parser = argparse.ArgumentParser()
# parser.add_argument("--user", help="user and passcode must be supplied, see http://www.george-smart.co.uk/wiki/APRS_Callpass for how to obtain")
# parser.add_argument("--passcode", help="user and passcode must be supplied", type=int)
# parser.add_argument("--mode", help="mode is test or live, test modifies behaviour to add output for testing", default="test")
# parser.add_argument('-s', '--smtp', help="URL of smtp server")
# parser.add_argument('-t', '--tx', help="email address of sender")
# parser.add_argument('-r', '--rx', help="email address of receiver")
# except:
# print "Parsing cmd line args failed"
print "Cmd line args parsed"
try:
args = parser.parse_args()
#
# Check parameters. If an smtp server address is specified then the sender and receiver email
# addresses must also be supplied either in the call line or the config file
#
if (args.smtp == None and settings.FLOGGER_SMTP_SERVER_URL == ""):
print "SMTP url not specified, don't send email"
else:
print "Set to send email"
if (args.smtp <> None):
settings.FLOGGER_SMTP_SERVER_URL = args.smtp
if (args.tx <> None):
settings.FLOGGER_SMTP_TX = args.tx
if (args.rx <> None):
print "args.rx is: ", args.rx
settings.FLOGGER_SMTP_RX = args.rx
elif ((args.tx == None or args.rx == None) and (settings.FLOGGER_SMTP_TX == "" or settings.FLOGGER_SMTP_RX == "")):
print "Email option parameters or config not valid. smtp=%s, SERVER_URL=%s, tx=%s, rx=%s, SMTP_TX=%s, SMTP_RX=%s" % \
(args.smtp, settings.FLOGGER_SMTP_SERVER_URL, args.tx, args.rx, settings.FLOGGER_SMTP_TX, settings.FLOGGER_SMTP_RX)
print "Exit"
exit()
print "Email parameters are now: smtp=%s, SERVER_URL=%s, tx=%s, rx=%s, SMTP_TX=%s, SMTP_RX=%s" % \
(args.smtp, settings.FLOGGER_SMTP_SERVER_URL, args.tx, args.rx, settings.FLOGGER_SMTP_TX, settings.FLOGGER_SMTP_RX)
if (args.user <> None):
settings.APRS_USER = args.user
else:
print "Taken from APRS_USER: ", settings.APRS_USER
if args.passcode <> None:
settings.APRS_PASSCODE = args.passcode
else:
print "Taken from form APRS_PASSCODE: ", settings.APRS_PASSCODE
if args.mode <> None:
print "Taken from args.mode: ", settings.FLOGGER_MODE
settings.FLOGGER_MODE = args.mode
else:
print "Taken from FLOGGER_MODE: ", settings.FLOGGER_MODE
except :
print "Failed in command line arg parser"
# print "user=", args.user, " passcode=", args.passcode, "mode=", args.mode, "smtp=", args.smtp, "tx=", args.tx, "rx=", args.rx
# settings.APRS_USER = args.user
# settings.APRS_PASSCODE = args.passcode
# settings.FLOGGER_MODE = args.mode
# Creates or opens a file called flogger.sql3 as an SQLite3 DB
#
#-----------------------------------------------------------------
# Build flogger db using schema
# Delete SQLite3 database file if it already exists; stops it getting
# too large during testing
#-----------------------------------------------------------------
#
# if os.path.isfile(settings.FLOGGER_DB_NAME):
if os.path.isfile(settings.FLOGGER_DB_NAME) and settings.FLOGGER_MODE <> "test":
print "SQLite3 db file exists so delete it"
os.remove(settings.FLOGGER_DB_NAME)
else:
print "SQLite3 db file exists but in test mode so DON'T delete it!"
db = sqlite3.connect(settings.FLOGGER_DB_NAME)
cursor = db.cursor() # Get a cursor object
# f = open(settings.FLOGGER_DB_SCHEMA, 'rt') # Open the db schema file for reading
f = open(settings.FLOGGER_DB_SCHEMA, 'rt') # Open the db schema file for reading
schema = f.read()
cursor.executescript(schema)
## cursor.executescript(schema) ### # Build flogger db from schema
print "End of building db: ", settings.FLOGGER_DB_NAME, " using schema: ", settings.FLOGGER_DB_SCHEMA
#
#-----------------------------------------------------------------
# Build local database from flarmnet of aircraft
#-----------------------------------------------------------------
#
if flarmdb(settings.FLOGGER_FLARMNET_DB_URL, cursor, db, "flarm_data", settings) == True:
print "Flarmnet db built"
else:
print "Flarmnet db build failed, exit"
exit()
#
#-----------------------------------------------------------------
# Determine location details, latitude, longitude and elevation
#-----------------------------------------------------------------
#
if settings.FLOGGER_AIRFIELD_DETAILS <> "":
loc = get_coords(settings.FLOGGER_AIRFIELD_DETAILS)
i = 1
while loc == False and i<=100:
# while loc[2] == None:
# print "get_coords returned loc[2] as None, retry", " Retry count get_coords: ", i
print "get_coords returned False, retry", " Retry count get_coords: ", i
loc = get_coords(settings.FLOGGER_AIRFIELD_DETAILS)
i = i + 1
# time.sleep (1)
if loc == False:
if settings.FLOGGER_LATITUDE <> "" and settings.FLOGGER_LONGITUDE <> "" and settings.FLOGGER_QNH >=0 :
print "Geolocator failed use values from settings"
else:
print "Geoloactor failed and no value for lat, long, QNH. Run again, might work"
exit(2)
else:
settings.FLOGGER_LATITUDE = str(loc[0]) # Held as string
settings.FLOGGER_LONGITUDE = str(loc[1]) # Held as string
settings.FLOGGER_QNH = loc[2] # Held as number
if settings.FLOGGER_QNH == None:
print "Probable Geolocator error, set FLOGGER_QNH default 0, loc[2]: ", loc[2]
settings.FLOGGER_QNH = 0
exit(3)
print "Location is: ", settings.FLOGGER_AIRFIELD_DETAILS, " latitude: ", loc[0], " longitude: ", loc[1], " elevation: ", loc[2]
# print "Location is: ", settings.FLOGGER_AIRFIELD_DETAILS, " latitude: ", settings.FLOGGER_LATITUDE , \
# " longitude: ", settings.FLOGGER_LONGITUDE, " elevation: ", settings.FLOGGER_QNH
else:
print "Use location data from settings"
#
#-----------------------------------------------------------------
# Set up list of APRS base stations to be used
# (Note this code could be nicer but will do for now)
#-----------------------------------------------------------------
#
#APRS_base_list = [settings.FLOGGER_APRS_BASE_1,
# settings.FLOGGER_APRS_BASE_2,
# settings.FLOGGER_APRS_BASE_3,
# settings.FLOGGER_APRS_BASE_4,]
# APRS_base_list = settings.FLOGGER_APRS_BASES
#
#-----------------------------------------------------------------
# Initialise API for computing sunrise and sunset
#-----------------------------------------------------------------
#
location = ephem.Observer()
location.pressure = 0
#location.horizon = '-0:34' # Adjustments for angle to horizon
location.horizon = settings.FLOGGER_LOCATION_HORIZON # Adjustments for angle to horizon
location.lat = settings.FLOGGER_LATITUDE
location.lon = settings.FLOGGER_LONGITUDE
print "Location for ephem is: ", settings.FLOGGER_AIRFIELD_DETAILS, " latitude: ", location.lat, " longitude: ", location.lon, " elevation: ", settings.FLOGGER_QNH
date = datetime.datetime.now()
next_sunrise = location.next_rising(ephem.Sun(), date)
next_sunset = location.next_setting(ephem.Sun(), date)
print "Sunrise today: ", date, " is: ", next_sunrise
print "Sunset today: ", date, " is: ", next_sunset
#
#-----------------------------------------------------------------
# Make the connection to the APRS server
#-----------------------------------------------------------------
#
start_time = datetime.datetime.now()
keepalive_time = time.time()
#sock_file = sock.makefile()
print "Start time!"
sock = APRS_connect(settings)
sock_file = sock.makefile()
# print "libfap_init"
# rtn = libfap.fap_init()
# if rtn <> 0:
# print "Failed to connect to APRS, check parameters"
# exit()
# print "Libfap return: ", rtn
#
#-----------------------------------------------------------------
# Set up paths for data, logs and tracks
#-----------------------------------------------------------------
#
SB_DATA = "SB_data" + str(start_time)
SB_Log = "SB_Log" + str(start_time)
SB_DATA = str(SB_DATA).replace(" ","_")
SB_Log = str(SB_Log).replace(" ","_")
SB_DATA = str(SB_DATA).replace(":","-")
SB_Log = str(SB_Log).replace(":","-")
print "Checking log paths: ", settings.FLOGGER_LOG_PATH
if settings.FLOGGER_LOG_PATH <> "":
SB_DATA = os.path.abspath(settings.FLOGGER_LOG_PATH) + "/" + SB_DATA
SB_Log = os.path.abspath(settings.FLOGGER_LOG_PATH) + "/" + SB_Log
if not os.path.isdir(settings.FLOGGER_LOG_PATH):
print "Log path is not a directory, try to create it"
try:
#print "Creating log folder"
os.makedirs(settings.FLOGGER_LOG_PATH)
print "Created: ", settings.FLOGGER_LOG_PATH
except:
print "FLOGGER_LOG_PATH could not be created. Please check settings."
exit()
print "SB data file is: ", SB_DATA
print "SB log file is: ", SB_Log
#sys.stdout = open(SB_Log, 'w')
#print "Datafile open"
test = False
if test == True:
datafile = open (SB_DATA, 'r')
print "In test mode"
else:
datafile = open (SB_DATA, 'w')
print "In live mode"
#
#-----------------------------------------------------------------
# Setup cntrl-c handler
#
#-----------------------------------------------------------------
#
# print "Setup cntrl-c handler"
# sig_handler(db, cursor)
#time.sleep(5) # Press Ctrl+c here # Just for testing
#
#-----------------------------------------------------------------
# Main loop reading data from APRS server and processing records
# This continues until sunset after which the data recorded is processed
#-----------------------------------------------------------------
#
i = 0
try:
# while 1:
while settings.FLOGGER_RUN:
print "FLOGGER_RUN: ", settings.FLOGGER_RUN
# for i in range(1000000):
i = i + 1
datetime_now = datetime.datetime.now()
previous_sunrise = location.previous_rising(ephem.Sun(), date).datetime()
next_sunrise = location.next_rising(ephem.Sun(), date).datetime()
previous_sunset = location.previous_setting(ephem.Sun(), date).datetime()
next_sunset = location.next_setting(ephem.Sun(), date).datetime()
# Set datetime to current time + FLOGGER_LOG_TIME_DELTA to start processing flight log
# that number of hours before sunset
log_datetime = datetime.datetime.now() + datetime.timedelta(hours=settings.FLOGGER_LOG_TIME_DELTA)
# print "Log datetime is: ", log_datetime
location.date = ephem.Date(log_datetime)
print "Ephem date is: ", location.date
s = ephem.Sun()
s.compute(location)
twilight = -6 * ephem.degree # Defn of Twilight is: Centre of Sun is 6, 12, 18 degrees below horizon (civil, nautical, astronomical)
# daylight = s.alt > twilight
# print "Just for testing aprs_parse"
# daylight = True
# if daylight:
if s.alt > twilight:
print "Is it light at Location? Yes", location, " Ephem date is: ", ephem.Date(location.date), " Next sunset at: ", location.next_setting(ephem.Sun())
else:
print "Is it light at Location? No", location, " Ephem date is: ", ephem.Date(location.date), " Next sunrise at: ", location.next_rising(ephem.Sun())
process_log(cursor,db, settings)
#
# Dump tracks from flights table as .gpx
# This updates each flight in flights table with trackfile name
#
print "Dump tracks"
dump_tracks2(cursor, db, settings)
dump_IGC(cursor, db, settings)
#
# Experimental. Find tug used for each launch
#
find_tug(cursor, db, settings)
print "Find tug phase end"
#
# Dump flights table as cvs file
# If no flights then returns ""
#
print "Dump flights table"
csv_file = dump_flights(settings)
#
# Email flights csv file if required
# email_log2 sorts out if there are no flights on any one day
# FLOGGER_SMTP_SERVER_TX is either set in config by user or value taken from cmd line --smtp parm.
#
if settings.FLOGGER_SMTP_SERVER_URL <> "":
print "Email today's flight log. RX: " + settings.FLOGGER_SMTP_RX
email_log2(settings.FLOGGER_SMTP_TX, settings.FLOGGER_SMTP_RX, csv_file, datetime.date.today(), settings)
else:
print "Don't email flight log, no flights"
#
# Delete entries from daily flight logging tables etc
#
delete_table("flight_log")
delete_table("flight_log2")
delete_table("flight_log_final")
delete_table("flight_group")
##DEL delete_table("flights")
delete_table("track")
delete_table("trackFinal")
delete_table("flarm_db") # flarm_db should be rebuilt at start of each day
db.commit()
# Wait for sunrise
# wait_time = next_sunrise - datetime_now
datetime_now = datetime.datetime.now()
date = datetime.datetime.now()
location.date = ephem.Date(datetime.datetime.now())
next_sunrise = location.next_rising(ephem.Sun(), date).datetime()
print "Location Date now: ", location.date, " Next sunrise is: ", next_sunrise
wait_time = location.next_rising(ephem.Sun(), date).datetime() - datetime_now
print "Next sunrise at: ", location.next_rising(ephem.Sun(), date).datetime(), " Datetime now is: ", datetime_now
# Wait an additional 2 hours (in seconds) more before resuming.
# Just a bit of security, not an issue as unlikely to start flying so early
wait_time_secs = int(wait_time.total_seconds()) + (2 * 60 * 60)
# close socket -- not needed. Create new one at sunrise
try:
sock.shutdown(0)
except socket.error as msg:
print "Socket failed to shutdown, ignore. Msg is: " , msg
sock.close()
#
# Delete historic files as specified
#
print "+++++++Phase 4 Start Delete out of date files+++++++"
delete_flogger_file(settings.FLOGGER_TRACKS_FOLDER, "track", settings.FLOGGER_DATA_RETENTION)
delete_flogger_file(settings.FLOGGER_FLIGHTS_LOG, "flights.csv", settings.FLOGGER_DATA_RETENTION)
print "-------Phase 4 End-------"
#
# Sleep till sunrise
# Then open new socket, set ephem date to new day
#
print "Wait till after sunrise at: ", next_sunrise, " Elapsed time: ", wait_time, ". Wait seconds: ", wait_time_secs
# self.RunningLabel.setText("Sleeping")
time.sleep(wait_time_secs)
# Sun has now risen so recommence logging flights
location.date = ephem.Date(datetime.datetime.now())
print "Woken up. Date time is now: ", datetime.datetime.now()
print "Ephem datetime on wakeup is: ", ephem.Date(location.date)
# Make new socket as old one will have timed out during the 'big' sleep, reset the timers
start_time = datetime.datetime.now()
keepalive_time = time.time()
sock = APRS_connect(settings)
sock_file = sock.makefile() # Note both sock & sock_file get used
#
#-----------------------------------------------------------------
# Build local database from flarmnet of aircraft for today
# Note source flarm_db may have changed during previous day
#-----------------------------------------------------------------
#
if flarmdb(settings.FLOGGER_FLARMNET_DB_URL, cursor, db, "flarm_data", settings) == True:
print "Flarmnet db built for today"
else:
print "Flarmnet db re-build failed, exit"
exit()
i = 0 # Count of todays APRS reads reset
flight_no = {} # Re-initialise flight_no dictionary at start of day
track_no = {} # Re-initialise track_no dictionary at start of day
continue
current_time = time.time()
elapsed_time = int(current_time - keepalive_time)
print "Elapsed time is: ", elapsed_time
if (current_time - keepalive_time) > settings.FLOGGER_KEEPALIVE_TIME:
try:
print "Socket open for: ", (current_time - keepalive_time), " seconds, send keepalive"
rtn = sock_file.write("#Python Example App\n\n")
sock_file.flush() # Make sure it gets sent
print "Send keepalive", elapsed_time, " rtn is: ", rtn
keepalive_time = current_time
except Exception, e:
print ('something\'s wrong with socket write. Exception type is %s' % (`e`))
sock_file = connect_APRS(sock)
print "New connection to APRS made"
continue
else:
print "No keepalive sent"
print "In while loop. Count= ", i
try:
if test == False:
# In live mode so use socket read
print "Read socket"
packet_str = sock_file.readline()
print "Raw APRS Packet: ", packet_str
# datafile.write(packet_str)
else:
# In test mode so file read
packet_str = datafile.readline()
except socket.error:
print "Socket error on readline"
print "packet string length is: ", len(packet_str), " packet is: ", packet_str
try:
len_packet_str = len(packet_str)
except TypeError:
packet_str_hex = ":".join("{:02x}".format(ord(c)) for c in packet_str)
len_packet_str = len(packet_str_hex) / 3
print "TypeError on packet_str length. Now is: ", len_packet_str
if len_packet_str == 0:
# create new socket & connect to server
print "Read returns zero length string on iteration: ", i
# Wait 20 seconds
time.sleep(20)
# continue
try:
sock.shutdown(0)
except socket.error, e:
if 'not connected' in e:
print '*** Transport endpoint is not connected ***'
print "socket no longer open so can't be closed, create new one"
else:
print "Socket still open so close it"
sock.close()
print "Create new socket"
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.connect((settings.APRS_SERVER_HOST, settings.APRS_SERVER_PORT))
except Exception, e:
print "Connection refused. Errno: ", e
exit()
APRSparm = ('user %s pass %s vers %s %s filter r/%s/%s/%s\n ' % (settings.APRS_USER,
settings.APRS_PASSCODE,
settings.FLOGGER_NAME,
settings.FLOGGER_VER,
settings.FLOGGER_LATITUDE,
settings.FLOGGER_LONGITUDE,
settings.FLOGGER_RAD))
# print "APRSparm is: ", APRSparm
# sock.send('user %s pass %s vers Python_Example 0.0.1 filter r/+54.228833/-1.209639/25\n ' % (settings.APRS_USER, settings.APRS_PASSCODE))
sock.send(APRSparm)
# Make the connection to the server
sock_file = sock.makefile()
# Delete following line when not running in test mode
# exit()
continue
#
# Parse the returned packet into fields
# Note: libfap parsing was removed 20180424 (see below); packets are now
# parsed with aprs_parse
#
packet = aprs_parse(packet_str, settings)
if packet:
print "aprs_parse rtnd: ", packet
else:
print "aprs_parse Failed. Not glider position packet"
continue
src_callsign = packet["from"]
latitude = packet["latitude"]
longitude = packet["longitude"]
altitude = packet["altitude"]
speed = packet["speed"]
course = packet["course"]
timestamp = packet["timestamp"]
CheckVals(src_callsign, "latitude", latitude)
nvalues[src_callsign]["longitude"] = longitude
nvalues[src_callsign]["altitude"] = altitude
nvalues[src_callsign]["speed"] = speed
#
# Removed libfap parsing 20180424
#
# Check if callsign is in the fleet
if fleet_check_new(str(src_callsign)) == False:
print "Aircraft ", src_callsign, " not registered at ", settings.FLOGGER_AIRFIELD_NAME, " , ignore"
print "-----------------End of Packet: ", i, " ------------------------------"
continue
else:
print "Aircraft ", src_callsign, " is in ", settings.FLOGGER_AIRFIELD_NAME, " fleet, process"
# Use registration if it is in aircraft table else just use Flarm_ID
# src_callsign = callsign_trans(src_callsign)
# print "Aircraft callsign is now: ", src_callsign
registration = callsign_trans(src_callsign)
print "Aircraft registration is: ", registration, " FLARM code is: ", src_callsign
# Check with this aircraft callsign has been seen before
CheckPrev(src_callsign, 'latitude', 0)
CheckVals(src_callsign, 'latitude', 0)
# Current and previous data values created
local_time = datetime.datetime.now()
fl_date_time = local_time.strftime("%D:%H:%M:%S")
fl_date = local_time.strftime("%y/%m/%d")
# fl_date = local_time.strftime("%D")
fl_time = local_time.strftime("%H:%M:%S")
print "src_callsign matched: ", src_callsign, " ", fl_date_time, " Latitude is: ", latitude
# print "Line ", i, " ", packet[0].orig_packet
# if nprev_vals[src_callsign]['speed'] == 0 and nvalues[src_callsign]['speed'] <> 0:
# af_loc = (settings.FLOGGER_LATITUDE, settings.FLOGGER_LONGITUDE)
# takeoff_loc = (latitude, longitude)
takeoff_dist = vincenty((settings.FLOGGER_LATITUDE, settings.FLOGGER_LONGITUDE), (latitude, longitude)).meters
print "Test for was stopped now moving. nprevs[speed] is: " + str(nprev_vals[src_callsign]['speed']) + " nvalues[speed] is: "+ str(nvalues[src_callsign]['speed'])
# if nprev_vals[src_callsign]['speed'] <= V_SMALL and nvalues[src_callsign]['speed'] > V_SMALL:
# if nprev_vals[src_callsign]['speed'] <= V_SMALL and nvalues[src_callsign]['speed'] > V_SMALL and takeoff_dist < settings.FLOGGER_AIRFIELD_LIMIT:
if nprev_vals[src_callsign]['speed'] <= V_SMALL and nvalues[src_callsign]['speed'] > V_TAKEOFF_MIN and takeoff_dist < settings.FLOGGER_AIRFIELD_LIMIT:
# The previous speed means it was probably stopped, the current speed means it is probably moving and the position is within the airfield
# The following test is for the case where the Flarm unit is switched on for the first
# time while stationary at an altitude greater than settings.FLOGGER_QNH, i.e. a special
# case of initial location. nprev_vals gets set to zero when the aircraft is first
# detected by Flarm. Doesn't work. Needs thought
# if (nprev_vals[src_callsign]['speed'] <= V_SMALL and nvalues[src_callsign]['speed'] > V_SMALL) or (nprev_vals[src_callsign]['speed'] == nvalues[src_callsign]['speed'] and nvalues[src_callsign]['speed']> V_SMALL):
print "New test true for switch-on"
print "Takeoff point is: ", (latitude, longitude), "Distance is: ", takeoff_dist
email_msg(settings.FLOGGER_SMTP_TX, settings.FLOGGER_SMTP_RX, registration, fl_time, settings)
# aircraft was stopped, now isn't
# Enhancement. At this point create new Track table record for the flight.
# Set track_no to current value and increment for use by next new flight.
# Flight_no (ie flight_log2 id field) has to copied to the Track table record
# each time new track data record for the flight is added.
print "Aircraft ", src_callsign, " was stopped, now moving. Create new record"
cursor.execute('''INSERT INTO flight_log2(sdate, stime, edate, etime, duration, src_callsign, max_altitude, speed, registration)
VALUES(:sdate,:stime,:edate,:etime,:duration,:src_callsign,:max_altitude,:speed, :registration)''',
{'sdate':fl_date, 'stime':fl_time, 'edate': "", 'etime':"", 'duration': "", 'src_callsign':src_callsign, 'max_altitude':altitude, 'speed':0, 'registration': registration})
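# The end-of-flight fields (edate, etime, duration) are deliberately left
# empty here; they are filled in by the UPDATE issued when the aircraft
# is later detected as stopped.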
nprev_vals[src_callsign]['speed'] = nvalues[src_callsign]['speed']
print "Storing initial track data"
cursor.execute('''SELECT max(id) FROM flight_log2''')
lastrow_id = cursor.fetchone()[0] # value of id for row just inserted use as flight_no for flight
print "Last row ID of flight_log2 inserted was: ", lastrow_id
flight_no[src_callsign] = lastrow_id
# flight_no[src_callsign] = cursor.lastrowid # Unique value of row just created
track_no[src_callsign] = 1 # Initialise trackpoint number for this flight
addTrack(cursor, flight_no[src_callsign],track_no[src_callsign],longitude,latitude,altitude,course,speed,timestamp)
track_no[src_callsign] += 1 # Increment trackpoint number for this flight
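# flight_no and track_no are per-callsign dicts: flight_no maps a callsign to
# the flight_log2 row id of its open flight, track_no to the next trackpoint index.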
# if nprev_vals[src_callsign]['speed'] <> 0 and nvalues[src_callsign]['speed'] == 0:
# print "Test for was moving is now stopped"
print "Test for was moving is now stopped. nprev=: ", nprev_vals[src_callsign]['speed'], " nval=: ", nvalues[src_callsign]['speed'], " V_LANDING_MIN=: ", V_LANDING_MIN
# if nprev_vals[src_callsign]['speed'] > V_SMALL and nvalues[src_callsign]['speed'] <= V_SMALL:
if nprev_vals[src_callsign]['speed'] > V_LANDING_MIN and nvalues[src_callsign]['speed'] <= V_LANDING_MIN:
# aircraft was moving is now stopped
print "Aircraft ", src_callsign, " was moving, now stopped. Update record for end date & time"
# Add final track record
try:
addTrack(cursor, flight_no[src_callsign],track_no[src_callsign],longitude,latitude,altitude,course,speed,timestamp)
# Find latest record for this callsign
except KeyError, reason:
print "addTrack failed. Trackpoint ignored. Reason: ", reason
#
# Bug 20150520-1 Test Start
#
try:
cursor.execute('''SELECT max(id) FROM flight_log2 WHERE src_callsign =?''', (src_callsign,))
r = cursor.fetchone()
try:
rowid = r[0]
cursor.execute('''SELECT sdate, stime, max_altitude FROM flight_log2 WHERE ROWID =?''', (rowid,))
row = cursor.fetchone()
print "Test Bug 20150520-1 ok, row is: ", row
except Exception, e:
print "Select for sdate/stime failed for row: ", r, " Reason: ", e
except Exception, e:
print "Select max(id) failed for: ", src_callsign, " Reason: ", e
#
# Bug 20150520-1 Test End
#
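# Fetch the latest flight record for this callsign; this assumes flight_log2.id
# is the table's INTEGER PRIMARY KEY and therefore identical to its ROWID.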
cursor.execute('''SELECT sdate, stime, max_altitude, id FROM flight_log2 WHERE
ROWID IN (SELECT max(id) FROM flight_log2 WHERE src_callsign =? )''', (src_callsign,))
row = cursor.fetchone()
#
# Bug 20150520-1 Start
# Guard against the SELECT above returning no row for this callsign (row is None)
#
if row is None:
print "Bug 20150520-1. We have a problem with: ", src_callsign
continue
#
# Bug 20150520-1 End
#
# for r in row:
# print "Returned row for callsign: ", src_callsign, " is: ", r
# end_time = datetime.strptime(fl_time,'%H:%M:%S')
end_time = datetime.datetime.now() # Current local time as a datetime (not seconds since epoch)
start_date = row[0] # In %y/%m/%d format
start_time = row[1] # In %H:%M:%S format
max_altitude = row[2]
flight = row[3] # id field of flight_log2
fl_end_datetime = datetime.datetime.now()
fl_end_date = fl_end_datetime.strftime("%y/%m/%d")
fl_end_time_str = fl_end_datetime.strftime("%H:%M:%S")
# fl_end_time = fl_end_time_str
fl_end_time = datetime.datetime.strptime(fl_end_time_str, "%H:%M:%S")
print "Flight End date and time are: ", fl_end_date, " , ", fl_end_time_str
print "Flight Start date and time are: ", start_date, " , ", start_time
fl_start_time = datetime.datetime.strptime(start_time, "%H:%M:%S") # Convert flight start time to type time
fl_duration_datetime = fl_end_time - fl_start_time # timedelta giving the flight duration
# fl_duration_time = datetime.datetime.strptime(fl_duration_datetime, "%H:%M:%S")
c = fl_duration_datetime
# fl_duration_time = "%.2dh: %.2dm: %.2ds" % (c.seconds//3600,(c.seconds//60)%60, c.seconds%60)
fl_duration_time = "%.2d: %.2d: %.2d" % (c.seconds//3600,(c.seconds//60)%60, c.seconds%60)
fl_duration_time_str = str(fl_duration_time)
print "Start time: ", fl_start_time, "End time: ", fl_end_time_str, "Duration: ", fl_duration_time, " Max altitude: ", max_altitude
# Add record to flight_log_final
cursor.execute('''INSERT INTO flight_log_final(sdate, stime, edate, etime, duration, src_callsign, max_altitude, speed, registration, flight_no)
VALUES(:sdate,:stime,:edate,:etime,:duration,:src_callsign,:max_altitude,:speed, :registration,:flight_no)''',
{'sdate':start_date, 'stime':start_time, 'edate': fl_end_date, 'etime':fl_end_time_str,
'duration': fl_duration_time_str, 'src_callsign':src_callsign, 'max_altitude':max_altitude, 'speed':0, 'registration': registration, 'flight_no': flight})
print "Updated flight_log_final", src_callsign
# flogger_landout_check(flight_reg, af_centre, radius, landing_coords, mode)
af_loc = (settings.FLOGGER_LATITUDE, settings.FLOGGER_LONGITUDE)
cursor.execute('''SELECT land_out FROM flight_log_final WHERE flight_no=?''', (flight,))
row = cursor.fetchone()
if row[0] is None:
# Check whether land_out already been logged
# This is needed since using input from multiple base stations, landout can be logged more than once
res = landout_check(registration, flight, af_loc, settings.FLOGGER_AIRFIELD_LIMIT, (latitude, longitude), settings.FLOGGER_LANDOUT_MODE, settings)
print "Landout check is: ", res
if res == True:
landout_status = "yes"
else:
landout_status = "no"
cursor.execute('''UPDATE flight_log_final SET land_out=? WHERE flight_no=?''', (landout_status,flight))
else:
print "Landout check. row[0]: ", row[0]
# Update flight record in flight_log2
cursor.execute(''' SELECT max(id) FROM flight_log2 WHERE src_callsign =?''', (src_callsign,))
row = cursor.fetchone()
rowid = row[0]
print "Update row: ", rowid
try:
cursor.execute('''UPDATE flight_log2 SET edate=?, etime=?, duration=?, max_altitude=?, speed=? WHERE ROWID=?''',
(fl_end_date, fl_end_time_str, fl_duration_time_str, max_altitude, 0, rowid))
print "Updated flight_log2", src_callsign, " Row: ", rowid
except Exception, e:
print "Failed to update flight_log2: ", src_callsign, " Row: ", rowid, " Reason: ", e
nprev_vals[src_callsign]['speed'] = nvalues[src_callsign]['speed'] # ie set to '0'
#
# Bug 20150520-1
# Re-initialise altitude for stopped aircraft to zero
#
print "Bug 20150520-1. Re-initialise altitude in nvalues & nprev_vals for: ", src_callsign
nprev_vals[src_callsign]['altitude'] = 0
nvalues[src_callsign]['altitude'] = 0
# Check updated record
print "Check fields in flight_log2: ", src_callsign, " Row: ", rowid
cursor.execute('''SELECT ROWID, sdate, stime, edate, etime, duration, max_altitude FROM flight_log2 WHERE
ROWID IN (SELECT max(id) FROM flight_log2 WHERE src_callsign =? )''', (src_callsign,))
row = cursor.fetchone()
for r in row:
print "Returned row for callsign: ", src_callsign, " is: ", r
db.commit()
print "-----------------End of Packet: ", i, " ------------------------------"
continue
# if nprev_vals[src_callsign]['speed'] == 0 and nvalues[src_callsign]['speed'] == 0:
print "Is Aircraft %s moving? nprev.speed=%d, nvalues.speed=%d, nvalues.altitude=%d" % (src_callsign, nprev_vals[src_callsign]['speed'], nvalues[src_callsign]['speed'], nvalues[src_callsign]['altitude'])
# if nprev_vals[src_callsign]['speed'] <= V_SMALL and nvalues[src_callsign]['speed'] <= V_SMALL and nvalues[src_callsign]['altitude'] <= settings.FLOGGER_QNH:
if nprev_vals[src_callsign]['speed'] <= V_TAKEOFF_MIN and nvalues[src_callsign]['speed'] <= V_TAKEOFF_MIN and nvalues[src_callsign]['altitude'] <= settings.FLOGGER_QNH:
# Aircraft hasn't moved and is not at an altitude greater than Sutton Bank (settings.FLOGGER_QNH).
print "Aircraft: ", src_callsign, " Not moving. Speed was: ", nprev_vals[src_callsign]['speed'], " Speed is: ", nvalues[src_callsign]['speed']
else:
# aircraft is moving. Check whether current altitude is greater than previous
# Enhancement. Add new record to Tracks table for this flight here. Track_no for flight is initialised
# when flight record is created, initial Track table record for flight is also created at that time
print "Aircraft ", src_callsign, " is still moving"
# Check whether a track list has been set up. May have to add flight_log2 record as well??
CheckTrackData(cursor, flight_no, track_no, src_callsign)
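# CheckTrackData presumably (re)creates the flight_no/track_no entries for this
# callsign, e.g. when flogger was restarted while the aircraft was already airborne.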
print "Flight details are: ", flight_no[src_callsign]
# Add track record for moving aircraft
addTrack(cursor, flight_no[src_callsign],track_no[src_callsign],longitude,latitude,altitude,course,speed,timestamp)
track_no[src_callsign] += 1 # Next trackpoint number for this flight
print "Old height was: ", nprev_vals[src_callsign]['altitude'], " New height is: ", nvalues[src_callsign]['altitude']
if nvalues[src_callsign]['altitude'] > nprev_vals[src_callsign]['altitude']:
print "Aircraft ", src_callsign, " is now higher than max height, was: ", nprev_vals[src_callsign]['altitude'], " now: ", nvalues[src_callsign]['altitude']
cursor.execute('''UPDATE flight_log2 SET max_altitude=? WHERE src_callsign=? ''', (altitude, src_callsign))
nprev_vals[src_callsign]['altitude'] = nvalues[src_callsign]['altitude'] # Now higher
else:
print "Aircraft callsign: ", src_callsign, " is moving but is not higher than max height: ", nvalues[src_callsign]['altitude'], " Speed is: ", nvalues[src_callsign]['speed'], " Was: ", nprev_vals[src_callsign]['speed']
# Set previous speed values to current
nprev_vals[src_callsign]['speed'] = nvalues[src_callsign]['speed']
continue
print "Values for callsign Commit: ", src_callsign, " Values are: ", nvalues[src_callsign], " Prev_vals are: ", nprev_vals[src_callsign]
db.commit()
print "-----------------End of Packet: ", i, " ------------------------------"
# libfap.fap_free(packet)
except KeyboardInterrupt:
print "Keyboard input received, ignore"
# db.commit()
pass
# print "libfap_cleanup. If not called results in memory leak"
# libfap.fap_cleanup()
# close socket -- must be closed to avoid buffer overflow
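# (shutdown(0) == socket.SHUT_RD, i.e. stop further receives before closing)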
sock.shutdown(0)
sock.close()
# Close the database. Note this should be on all forms of exit
db.close()
| tobiz/OGN-Flight-Logger_V3 | flogger3.py | Python | gpl-3.0 | 77,519 |