| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
#!/usr/bin/env python
from collections import defaultdict
import operator
counters = defaultdict(int)
trueCounters = defaultdict(int)
fr = open('allworks','r')
wc = 0
for line in fr:
line = line.strip()
words = ''.join(c for c in line if c.isalpha() or c.isspace()).split()
for word in words:
wc += 1
threshold = 0.01 * wc
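# Two-pass heavy hitters: Pass 1 keeps a Misra-Gries style summary of at
# most 99 candidate words (it can only over-report candidates), and Pass 2
# re-counts the surviving candidates exactly, keeping those whose true
# frequency is at least 1% of all words.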
# 1st Pass
fr.seek(0)
for line in fr:
line = line.strip()
words = ''.join(c for c in line if c.isalpha() or c.isspace()).split()
for word in words:
if word in counters:
counters[word] += 1
elif len(counters) < 99:
counters[word] = 1
else:
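            # Summary is full: decrement every tracked counter and evict
            # any counter that reaches zero (the Misra-Gries eviction step).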
delCounters = []
for key in counters:
counters[key] -= 1
if counters[key] == 0:
delCounters.append(key)
for word in delCounters:
del counters[word]
# 2nd Pass: exact counts for the candidates, then delete those below threshold
fr.seek(0)
for line in fr:
line = line.strip()
words = ''.join(c for c in line if c.isalpha() or c.isspace()).split()
for word in words:
if word in counters:
if word in trueCounters:
trueCounters[word] += 1
else:
trueCounters[word] = 1
delCounters = []
for word in trueCounters:
    if trueCounters[word] < threshold:
delCounters.append(word)
for word in delCounters:
del trueCounters[word]
for key, value in sorted(trueCounters.items(), key=operator.itemgetter(1), reverse=True):
    print(key, value)
fr.close()
| fortesit/data-structure | Data Stream Algorithm/Heavy Hitters.py | Python | mit | 1,605 |
from email.mime.text import MIMEText
from smtplib import SMTP
class Gmail(object):
"""Send an email with Google Mail
Can easily be used with other providers
by editing the server and port in send()
Args:
credentials (tuple): (username, password)
"""
def __init__(self, credentials):
self.user, self.password = credentials
def send(self, receiver, subject, body):
"""Send email
Args:
receiver (str): Address of receiver
subject (str): Subject of email
body (str): Body of email
"""
message = MIMEText(body)
message['Subject'] = subject
message['From'] = self.user
message['To'] = receiver
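        # Connect via STARTTLS on the standard mail submission port (587).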
server = SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(self.user, self.password)
server.sendmail(self.user, receiver, message.as_string())
server.quit()
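# Example usage (hypothetical credentials; Gmail may require an app password):
#   mailer = Gmail(('user@gmail.com', 'app-password'))
#   mailer.send('friend@example.com', 'Hello', 'Hi there!')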
| alexanderteves/OpsBox | mail.py | Python | mit | 937 |
"""
Module which groups all the aggregated precomputed information in order to
save computational power.
"""
import pandas as pd
from FirmsLocations.Preprocess.preprocess_cols import cp2str
def read_agg(filepath):
"Read file of aggregated info."
table = pd.read_csv(filepath, sep=';')
table = cp2str(table)
return table
def read_aggregation(filepath, typevars):
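    "Read aggregated info and split it into locations and features tables."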
## TODO
aggtable = read_agg(filepath)
aggfeatures = aggtable[typevars['feat_vars']]
agglocs = aggtable[typevars['loc_vars']]
return agglocs, aggfeatures
| tgquintela/Mscthesis | FirmsLocations/IO/io_aggfile.py | Python | mit | 557 |
from ..style import use
use("km3pipe-notebook")
| tamasgal/km3pipe | km3pipe/style/km3pipe_notebook.py | Python | mit | 49 |
default_app_config = 'django_sendgrid_parse.apps.DjangoSendgridParseAppConfig'
| letops/django-sendgrid-parse | django_sendgrid_parse/__init__.py | Python | mit | 139 |
"""User-friendly exception handler for swood."""
import http.client
import traceback
import sys
import os
__file__ = os.path.abspath(__file__)
class ComplainToUser(Exception):
"""When used with ComplaintFormatter, tells the user what error (of theirs) caused the failure and exits."""
pass
def can_submit():
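    """Return 1 if the user has consented to bug reports, 0 otherwise.

    The answer is cached in ~/.swood/submit-bugs; if no cached answer
    exists, the user is prompted and the choice is saved (best-effort).
    """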
if not os.path.isdir(os.path.expanduser("~/.swood")):
os.mkdir(os.path.expanduser("~/.swood"))
sbpath = os.path.expanduser("~/.swood/submit-bugs")
if os.path.isfile(sbpath):
try:
with open(sbpath) as sb:
resp = sb.read(1)
if resp == "1":
return 1
elif resp == "0":
return 0
except:
pass
while True:
resp = input(
"Something went wrong. Do you want to send an anonymous bug report? (Type Y or N): ").lower()
if resp in ("yes", "y", "true"):
try:
with open(sbpath, "w") as sb:
sb.write("1")
except:
pass
return 1
elif resp in ("no", "n", "false"):
try:
with open(sbpath, "w") as sb:
sb.write("0")
except:
pass
return 0
class ComplaintFormatter:
"""Notifies the user when the program fails predictably and uploads bug reports.
When used in a with statement, ComplaintFormatter catches all exceptions. If the
exception is a ComplainToUser exception, it will simply print the error message
and exit (with an exit code of 1). If the exception is something else (i.e. an
actual, unexpected exception), it will upload the traceback to the swood debug
server (unless the user has opted out of sending bug reports.)
"""
def __init__(self, version=None):
self.version = version
def __enter__(self):
pass
def __exit__(self, exc_type, exc, tb):
if isinstance(exc, ComplainToUser):
print("Error: {}".format(exc), file=sys.stderr)
sys.exit(1)
elif isinstance(exc, Exception):
# scrub stack of full path names for extra privacy
# also normalizes the paths, helping to detect dupes
scrubbed_stack = traceback.extract_tb(tb)
# cut off traces of stuff that isn't ours
others_cutoff = next(idx for idx, fs in enumerate(scrubbed_stack) if os.path.samefile(
os.path.dirname(fs.filename), os.path.dirname(__file__)))
scrubbed_stack = scrubbed_stack[others_cutoff:]
# rewrite paths so they contain only relative directories
# (hides username on Windows and Linux)
dirstart = os.path.abspath(
os.path.join(os.path.dirname(__file__), ".."))
for fs in scrubbed_stack:
fs.filename = os.path.relpath(
fs.filename, start=dirstart).replace("\\", "/")
str_tb = "Traceback (most recent call last):\n" + \
"".join(traceback.format_list(scrubbed_stack)) + \
"".join(traceback.format_exception_only(exc_type, exc))
if self.version is not None:
str_tb = "# " + self.version + "\n" + str_tb
if "--optout" in sys.argv or "-o" in sys.argv:
print(
"Something went wrong. A bug report will not be sent because of your command-line flag.", file=sys.stderr)
return False
elif os.environ.get("SWOOD_OPTOUT") == "1":
print(
"Something went wrong. A bug report will not be sent because of your environment variable.", file=sys.stderr)
return False
elif not can_submit():
print(
"Something went wrong. A bug report will not be sent because of your config setting.", file=sys.stderr)
return False
else:
print(
"Something went wrong. A bug report will be sent to help figure it out. (see --optout)", file=sys.stderr)
try:
conn = http.client.HTTPSConnection("meme.institute")
conn.request("POST", "/swood/bugs/submit", str_tb)
resp = conn.getresponse().read().decode("utf-8")
if resp == "done":
print("New bug submitted!", file=sys.stderr)
elif resp == "dupe":
print(
"This bug is already in the queue to be fixed.", file=sys.stderr)
else:
raise Exception
except Exception:
print("Submission of bug report failed.", file=sys.stderr)
traceback.print_exc()
return True
| milkey-mouse/swood | swood/complain.py | Python | mit | 4,905 |
#!/usr/bin/env python
__author__ = "Andrew Hankinson ([email protected])"
__version__ = "1.5"
__date__ = "2011"
__copyright__ = "Creative Commons Attribution"
__license__ = """The MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE."""
import multiprocessing
from optparse import OptionParser
import os
import sys
import hashlib
import codecs
import re
from pybagit.exceptions import *
# declare a default hashalgorithm
HASHALG = 'sha1'
ENCODING = "utf-8"
def write_manifest(datadir, encoding, update=False):
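    """Write manifest-<alg>.txt for datadir, checksumming files in parallel.

    With update=True, checksums already recorded in an existing manifest
    are reused and only files missing from it are hashed.
    """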
bag_root = os.path.split(os.path.abspath(datadir))[0]
manifest_file = os.path.join(bag_root, "manifest-{0}.txt".format(HASHALG))
checksums = dict()
files_to_checksum = set(dirwalk(datadir))
if update and os.path.isfile(manifest_file):
for line in codecs.open(manifest_file, 'rb', encoding):
checksum, file_ = line.strip().split(' ', 1)
full_file = os.path.join(bag_root, file_)
if full_file in files_to_checksum:
files_to_checksum.remove(full_file)
checksums[os.path.join(bag_root, file_)] = checksum
p = multiprocessing.Pool(processes=multiprocessing.cpu_count())
result = p.map_async(csumfile, files_to_checksum)
checksums.update((k, v) for v, k in result.get())
p.close()
p.join()
mfile = codecs.open(manifest_file, 'wb', encoding)
for file_, checksum in sorted(checksums.iteritems()):
rp = os.path.relpath(file_, bag_root)
fl = ensure_unix_pathname(rp)
mfile.write(u"{0} {1}\n".format(checksum, fl))
mfile.close()
def dirwalk(datadir):
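    """Recursively collect the paths of all files under datadir."""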
datafiles = []
for dirpath, dirnames, filenames in os.walk(u"{0}".format(datadir)):
for fn in filenames:
datafiles.append(os.path.join(dirpath, fn))
return datafiles
def csumfile(filename):
""" Based on
http://abstracthack.wordpress.com/2007/10/19/calculating-md5-checksum/
"""
    hashalg = getattr(hashlib, HASHALG)()  # e.g. hashlib.sha1() or hashlib.md5()
blocksize = 0x10000
def __upd(m, data):
m.update(data)
return m
fd = open(filename, 'rb')
try:
contents = iter(lambda: fd.read(blocksize), "")
m = reduce(__upd, contents, hashalg)
finally:
fd.close()
return (m.hexdigest(), filename)
def ensure_unix_pathname(pathname):
# it's only windows we have to worry about
if sys.platform != "win32":
return pathname
replace = re.compile(r"\\", re.UNICODE)
fnm = re.sub(replace, "/", pathname)
return fnm
if __name__ == "__main__":
    usage = "%prog [options] datadir"
    parser = OptionParser(usage=usage)
parser.add_option("-a", "--algorithm", action="store", help="checksum algorithm to use (sha1|md5)")
parser.add_option("-c", "--encoding", action="store", help="File encoding to write manifest")
parser.add_option("-u", "--update", action="store_true", help="Only update new/removed files")
(options, args) = parser.parse_args()
if options.algorithm:
if not options.algorithm in ('md5', 'sha1'):
raise BagCheckSumNotValid('You must specify either "md5" or "sha1" as the checksum algorithm')
HASHALG = options.algorithm
if options.encoding:
ENCODING = options.encoding
if len(args) < 1:
parser.error("You must specify a data directory")
write_manifest(args[0], ENCODING, update=options.update)
| ahankinson/pybagit | pybagit/multichecksum.py | Python | mit | 4,668 |
"""Basic TCP Server that will listen to port 6633."""
import logging
from socket import error as SocketError
from socketserver import BaseRequestHandler, TCPServer, ThreadingMixIn
from threading import current_thread
from kytos.core.connection import CONNECTION_STATE, Connection
from kytos.core.events import KytosEvent
__all__ = ('KytosServer', 'KytosRequestHandler')
log = logging.getLogger(__name__)
class KytosServer(ThreadingMixIn, TCPServer):
"""Abstraction of a TCPServer to listen to packages from the network.
The KytosServer will listen on the specified port
for any new TCP request from the network and then instantiate the
specified RequestHandler to handle the new request.
It creates a new thread for each Handler.
"""
allow_reuse_address = True
main_threads = {}
def __init__(self, server_address, RequestHandlerClass, controller):
"""Constructor of KytosServer.
Args:
server_address (tuple): Address which the server is listening.
example: ('127.0.0.1', 80)
RequestHandlerClass(socketserver.BaseRequestHandler):
Class that will be instantiated to handle each request.
controller (:class:`~kytos.core.controller.Controller`):
An instance of Kytos Controller class.
"""
super().__init__(server_address, RequestHandlerClass,
bind_and_activate=False)
self.controller = controller
def serve_forever(self, poll_interval=0.5):
"""Handle requests until an explicit shutdown() is called."""
try:
self.server_bind()
self.server_activate()
log.info("Kytos listening at %s:%s", self.server_address[0],
self.server_address[1])
super().serve_forever(poll_interval)
except Exception:
log.error('Failed to start Kytos TCP Server.')
self.server_close()
raise
class KytosRequestHandler(BaseRequestHandler):
"""The socket/request handler class for our controller.
It is instantiated once per connection between each switch and the
controller.
The setup method will dispatch a KytosEvent (``kytos/core.connection.new``)
on the controller, that will be processed by a Core App.
    The finish method will close the connection and dispatch a KytosEvent
    (``kytos/core.connection.lost``) on the controller.
"""
known_ports = {
6633: 'openflow'
}
def __init__(self, request, client_address, server):
"""Contructor takes the parameters below.
Args:
request (socket.socket):
Request sent by client.
client_address (tuple):
Client address, tuple with host and port.
server (socketserver.BaseServer):
Server used to send messages to client.
"""
super().__init__(request, client_address, server)
self.connection = None
def setup(self):
"""Method used to setup the new connection.
This method builds a new controller Connection, and places a
``kytos/core.connection.new`` KytosEvent in the app buffer.
"""
self.ip = self.client_address[0]
self.port = self.client_address[1]
log.info("New connection from %s:%s", self.ip, self.port)
self.connection = Connection(self.ip, self.port, self.request) # noqa
server_port = self.server.server_address[1]
if server_port in self.known_ports:
protocol_name = self.known_ports[server_port]
else:
protocol_name = f'{server_port:04d}'
self.connection.protocol.name = protocol_name
self.request.settimeout(30)
self.exception = None
event_name = \
f'kytos/core.{self.connection.protocol.name}.connection.new'
event = KytosEvent(name=event_name,
content={'source': self.connection})
self.server.controller.buffers.app.put(event)
def handle(self):
"""Handle each request and places its data in the raw event buffer.
This method loops reading the binary data from the connection socket,
and placing a ``kytos/core.messages.new`` KytosEvent in the raw event
buffer.
"""
curr_thread = current_thread()
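        # Read at most 64 KiB from the socket per iteration.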
MAX_SIZE = 2**16
while True:
try:
new_data = self.request.recv(MAX_SIZE)
except (SocketError, OSError, InterruptedError,
ConnectionResetError) as exception:
self.exception = exception
log.debug('Socket handler exception while reading: %s',
exception)
break
if new_data == b'':
self.exception = 'Request closed by client.'
break
if not self.connection.is_alive():
continue
log.debug("New data from %s:%s at thread %s", self.ip,
self.port, curr_thread.name)
content = {'source': self.connection,
'new_data': new_data}
event_name = \
f'kytos/core.{self.connection.protocol.name}.raw.in'
event = KytosEvent(name=event_name,
content=content)
self.server.controller.buffers.raw.put(event)
def finish(self):
"""Method is called when the client connection is finished.
This method closes the connection socket and generates a
``kytos/core.connection.lost`` KytosEvent in the App buffer.
"""
log.info("Connection lost with Client %s:%s. Reason: %s",
self.ip, self.port, self.exception)
self.connection.state = CONNECTION_STATE.FINISHED
self.connection.close()
content = {'source': self.connection}
if self.exception:
content['exception'] = self.exception
event_name = \
f'kytos/core.{self.connection.protocol.name}.connection.lost'
event = KytosEvent(name=event_name,
content=content)
self.server.controller.buffers.app.put(event)
| erickvermot/kytos | kytos/core/tcp_server.py | Python | mit | 6,260 |
# -*- coding: utf-8 -*-
import smtplib
from django.contrib.auth.models import Permission
from django.test import TestCase
from principal.forms import *
from principal.models import *
from principal.services import DepartmentService, CertificationService, UserService, ImpartSubjectService, \
AdministratorService
from gestionalumnos.settings import *
from django.core import mail
from django.test.utils import override_settings
class CertificationTestCase(TestCase):
def setUp(self):
# Departments
self.department_lsi = Departamento.objects.create(
codigo='1',
nombre='Departamento de Lenguajes y Sistemas Informaticos',
web='http://www.lsi.us.es'
)
self.department_dte = Departamento.objects.create(
codigo='2',
nombre='Departamento de Tecnologia Electronica',
web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores'
)
self.department_atc = Departamento.objects.create(
codigo='3',
nombre='Departamento de Arquitectura y Tecnologia de Computadores',
web='http://www.atc.us.es/'
)
# Subjects
self.subject_egc = Asignatura.objects.create(
cuatrimestre='1',
nombre='Evolucion y gestion de la configuracion',
curso='4',
codigo='2050032',
creditos='6',
duracion='C',
web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=111',
tipo_asignatura='OB',
departamento=self.department_lsi,
)
self.subject_rc = Asignatura.objects.create(
cuatrimestre='1',
nombre='Redes de computadores',
curso='2',
codigo='2050013',
creditos='6',
duracion='C',
web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores',
tipo_asignatura='OB',
departamento=self.department_dte,
)
self.subject_cm = Asignatura.objects.create(
cuatrimestre='1',
nombre='Computacion Movil',
curso='4',
codigo='2060045',
creditos='6',
duracion='C',
web='http://www.us.es/estudios/grados/plan_206/asignatura_2060045',
tipo_asignatura='OP',
departamento=self.department_atc,
)
self.subject_ispp = Asignatura.objects.create(
cuatrimestre='2',
nombre='Ingenieria del Software y Practica Profesional',
curso='4',
codigo='2050039',
creditos='6',
duracion='C',
web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
tipo_asignatura='OB',
departamento=self.department_lsi,
)
# Certifications
self.certification_isw = Titulacion.objects.create(
codigo='1',
nombre='Grado en Informatica - Ingenieria del Software',
)
self.certification_isw.asignaturas.add(self.subject_rc, self.subject_ispp, self.subject_egc)
self.certification_isc = Titulacion.objects.create(
codigo='2',
nombre='Grado en Informatica - Ingenieria de Computadores',
)
self.certification_isc.asignaturas.add(self.subject_rc)
self.certification_iti = Titulacion.objects.create(
codigo='3',
nombre='Grado en Informatica - Tecnologias Informaticas',
)
self.certification_iti.asignaturas.add(self.subject_cm, self.subject_rc)
def test_create_and_save_ok_1(self):
data_form = {
'code': '123456',
'name': 'Grado en Informatica - Tecnologias Informaticas'
}
form = CertificationEditForm(data=data_form)
self.assertEqual(form.is_valid(), True)
certification = CertificationService.create_and_save(form)
certification_bd = Titulacion.objects.get(codigo=123456)
self.assertEqual(certification_bd, certification)
def test_create_and_save_error_1(self):
data_form = {
'code': '1',
'name': 'Grado en Informatica - Ingenieria del Software'
}
form = CertificationEditForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_find_all_ok_1(self):
certifications = list(CertificationService.find_all())
list_certifications = [self.certification_isc, self.certification_isw, self.certification_iti]
self.assertListEqual(certifications, list_certifications)
def test_find_by_code_ok_1(self):
certification = CertificationService.find_by_code('2')
self.assertEqual(certification, self.certification_isc)
def test_find_by_code_error_1(self):
certification = CertificationService.find_by_code('99')
self.assertEqual(certification, None)
def test_find_by_subject_ok_1(self):
certifications = list(CertificationService.find_by_subject(self.subject_rc.id))
list_certifications = [self.certification_isw, self.certification_isc, self.certification_iti]
self.assertListEqual(certifications, list_certifications)
def test_find_by_subject_ok_2(self):
certifications = list(CertificationService.find_by_subject(self.subject_ispp.id))
list_certifications = [self.certification_isw]
self.assertListEqual(certifications, list_certifications)
def test_find_by_subject_ok_3(self):
certifications = list(CertificationService.find_by_subject('4874'))
self.assertListEqual(certifications, [])
def test_search_ok_1(self):
certifications = list(CertificationService.search('Grado'))
list_certifications = [self.certification_isc, self.certification_isw, self.certification_iti]
self.assertListEqual(certifications, list_certifications)
def test_search_ok_2(self):
certifications = list(CertificationService.search('i'))
list_certifications = [self.certification_isc, self.certification_isw, self.certification_iti]
self.assertListEqual(certifications, list_certifications)
def test_search_ok_3(self):
certifications = list(CertificationService.search('Tecnologias'))
list_certifications = [self.certification_iti]
self.assertListEqual(certifications, list_certifications)
def test_search_ok_4(self):
certifications = list(CertificationService.search('z'))
self.assertListEqual(certifications, [])
def test_find_one_ok_1(self):
certification = CertificationService.find_one(self.certification_isw.id)
self.assertEqual(certification, self.certification_isw)
class AdministratorTestCase(TestCase):
def setUp(self):
# Administrators
self.administrator1 = Administrador.objects.create(
username='admin',
is_staff=True,
is_superuser=False
)
self.administrator1.set_password('admin')
self.administrator1.user_permissions.add(Permission.objects.get(codename='administrator'))
self.administrator1.user_permissions.add(Permission.objects.get(codename='view_subject_details'))
def test_find_one_ok_1(self):
administrator = AdministratorService.find_one(self.administrator1.id)
self.assertEqual(administrator, self.administrator1)
class DepartmentTestCase(TestCase):
def setUp(self):
# Departments
self.department_lsi = Departamento.objects.create(
codigo='1',
nombre='Departamento de Lenguajes y Sistemas Informaticos',
web='http://www.lsi.us.es'
)
self.department_dte = Departamento.objects.create(
codigo='2',
nombre='Departamento de Tecnologia Electronica',
web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores'
)
self.department_atc = Departamento.objects.create(
codigo='3',
nombre='Departamento de Arquitectura y Tecnologia de Computadores',
web='http://www.atc.us.es/'
)
def test_reconstruct_and_save_ok_1(self):
data_form = {
'code': '4',
'name': 'Departamento de Fisica',
'web': 'http://www.fisica.us.es/'
}
form = DepartmentEditForm(data=data_form)
self.assertEqual(form.is_valid(), True)
department = DepartmentService.reconstruct_and_save(form)
department_bd = Departamento.objects.get(codigo=4)
self.assertEqual(department_bd, department)
def test_reconstruct_and_save_ok_2(self):
data_form = DepartmentService.get_form_data(self.department_lsi)
data_form['name'] = 'Test'
form = DepartmentEditForm(data=data_form)
self.assertEqual(form.is_valid(), True)
department = DepartmentService.reconstruct_and_save(form)
department_bd = Departamento.objects.get(id=self.department_lsi.id)
self.assertEqual(department_bd, department)
self.assertEqual(department_bd.nombre, 'Test')
def test_reconstruct_and_save_error_1(self):
data_form = DepartmentService.get_form_data(self.department_lsi)
data_form['code'] = '3'
form = DepartmentEditForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_reconstruct_and_save_error_2(self):
data_form = DepartmentService.get_form_data(self.department_lsi)
data_form['id'] = '4944'
form = DepartmentEditForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_reconstruct_and_save_error_3(self):
data_form = DepartmentService.get_form_data(self.department_lsi)
data_form['id'] = None
form = DepartmentEditForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_find_all_ok_1(self):
departments = list(DepartmentService.find_all())
list_departments = [self.department_atc, self.department_lsi, self.department_dte]
self.assertListEqual(departments, list_departments)
def test_find_by_code_ok_1(self):
department = DepartmentService.find_by_code('3')
self.assertEqual(department, self.department_atc)
def test_find_by_code_error_1(self):
department = DepartmentService.find_by_code('99')
self.assertEqual(department, None)
def test_get_form_data_ok_1(self):
data_form = DepartmentService.get_form_data(self.department_atc)
data_form1 = {
'id': self.department_atc.id,
'code': self.department_atc.codigo,
'name': self.department_atc.nombre,
'web': self.department_atc.web
}
self.assertDictEqual(data_form, data_form1)
def test_get_form_data_error_1(self):
data_form = DepartmentService.get_form_data(self.department_atc)
data_form1 = {
'id': self.department_atc.id,
'code': '324245',
'name': self.department_atc.nombre,
'web': self.department_atc.web
}
self.assertNotEqual(data_form, data_form1)
def test_search_ok_1(self):
departments = list(DepartmentService.search('Departamento'))
list_departments = [self.department_atc, self.department_lsi, self.department_dte]
self.assertListEqual(departments, list_departments)
def test_search_ok_2(self):
departments = list(DepartmentService.search('i'))
list_departments = [self.department_atc, self.department_lsi, self.department_dte]
self.assertListEqual(departments, list_departments)
def test_search_ok_3(self):
departments = list(DepartmentService.search('Lenguajes'))
list_departments = [self.department_lsi]
self.assertListEqual(departments, list_departments)
def test_search_ok_4(self):
departments = list(DepartmentService.search('zz'))
self.assertListEqual(departments, [])
def test_get_form_data_xml_ok_1(self):
department = {
'codigo': self.department_atc.codigo,
'nombre': self.department_atc.nombre,
'web': self.department_atc.web
}
data_form = DepartmentService.get_form_data_xml(department)
data_form1 = {
'code': self.department_atc.codigo,
'name': self.department_atc.nombre,
'web': self.department_atc.web
}
self.assertDictEqual(data_form, data_form1)
def test_get_form_data_xml_error_1(self):
department = {
'codigo': '946514',
'nombre': self.department_atc.nombre,
'web': self.department_atc.web
}
data_form = DepartmentService.get_form_data_xml(department)
data_form1 = {
'code': self.department_atc.codigo,
'name': self.department_atc.nombre,
'web': self.department_atc.web
}
self.assertNotEqual(data_form, data_form1)
def test_get_form_data_csv_ok_1(self):
department = [
self.department_atc.codigo,
self.department_atc.nombre,
self.department_atc.web
]
data_form = DepartmentService.get_form_data_csv(department)
data_form1 = {
'code': self.department_atc.codigo,
'name': self.department_atc.nombre,
'web': self.department_atc.web
}
self.assertDictEqual(data_form, data_form1)
def test_get_form_data_csv_error_1(self):
department = [
'49498',
self.department_atc.nombre,
self.department_atc.web
]
data_form = DepartmentService.get_form_data_csv(department)
data_form1 = {
'code': self.department_atc.codigo,
'name': self.department_atc.nombre,
'web': self.department_atc.web
}
self.assertNotEqual(data_form, data_form1)
def test_rollback_ok_1(self):
departments = list(DepartmentService.find_all())
list_departments = [self.department_atc, self.department_lsi, self.department_dte]
self.assertListEqual(departments, list_departments)
DepartmentService.rollback(list_departments)
departments = list(DepartmentService.find_all())
self.assertListEqual([], departments)
def test_find_one_ok_1(self):
department = DepartmentService.find_one(self.department_atc.id)
self.assertEqual(department, self.department_atc)
class ImpartSubjectTestCase(TestCase):
def setUp(self):
# Departments
self.department_lsi = Departamento.objects.create(
codigo='1',
nombre='Departamento de Lenguajes y Sistemas Informaticos',
web='http://www.lsi.us.es'
)
self.department_dte = Departamento.objects.create(
codigo='2',
nombre='Departamento de Tecnologia Electronica',
web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores'
)
self.department_atc = Departamento.objects.create(
codigo='3',
nombre='Departamento de Arquitectura y Tecnologia de Computadores',
web='http://www.atc.us.es/'
)
# Subjects
self.subject_egc = Asignatura.objects.create(
cuatrimestre='1',
nombre='Evolucion y gestion de la configuracion',
curso='4',
codigo='2050032',
creditos='6',
duracion='C',
web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=111',
tipo_asignatura='OB',
departamento=self.department_lsi,
)
self.subject_rc = Asignatura.objects.create(
cuatrimestre='1',
nombre='Redes de computadores',
curso='2',
codigo='2050013',
creditos='6',
duracion='C',
web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores',
tipo_asignatura='OB',
departamento=self.department_dte,
)
self.subject_cm = Asignatura.objects.create(
cuatrimestre='1',
nombre='Computacion Movil',
curso='4',
codigo='2060045',
creditos='6',
duracion='C',
web='http://www.us.es/estudios/grados/plan_206/asignatura_2060045',
tipo_asignatura='OP',
departamento=self.department_atc,
)
self.subject_ispp = Asignatura.objects.create(
cuatrimestre='2',
nombre='Ingenieria del Software y Practica Profesional',
curso='4',
codigo='2050039',
creditos='6',
duracion='C',
web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
tipo_asignatura='OB',
departamento=self.department_lsi,
)
# Lecturers
self.lecturer_benavides = Profesor.objects.create(
username='benavides',
email='[email protected]',
categoria='Profesor Titular de Universidad',
telefono='954559897',
despacho='F 0.48',
web='http://www.lsi.us.es/~dbc/',
first_name='David',
last_name='Benavides Cuevas',
tutoriaactivada=True,
dni='55555555X'
)
self.lecturer_benavides.set_password('practica')
self.lecturer_benavides.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_benavides.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_benavides.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.lecturer_corchuelo = Profesor.objects.create(
username='corchu',
email='[email protected]',
categoria='Profesor Titular de Universidad',
telefono='954552770',
despacho='F 1.63',
first_name='Rafael',
last_name='Corchuelo Gil',
web='https://www.lsi.us.es/personal/pagina_personal.php?id=12',
tutoriaactivada=True,
dni='66666666X'
)
self.lecturer_corchuelo.set_password('practica')
self.lecturer_corchuelo.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_corchuelo.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_corchuelo.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.lecturer_muller = Profesor.objects.create(
username='cmuller',
email='[email protected]',
categoria='Becario FPI',
telefono='954553868',
despacho='F 0.43',
first_name='Carlos',
last_name='Muller Cejas',
web='https://www.lsi.us.es/personal/pagina_personal.php?id=108',
tutoriaactivada=True,
dni='77777777X'
)
self.lecturer_muller.set_password('practica')
self.lecturer_muller.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_muller.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_muller.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.lecturer_veronica = Profesor.objects.create(
username='averonica',
email='[email protected]',
categoria='Profesor Titular de Universidad ',
telefono='954557095 ',
despacho='G 1.69',
first_name='Ana Veronica',
last_name='Medina Rodriguez',
web='http://www.dte.us.es/personal/vmedina/',
tutoriaactivada=True,
dni='88888888X'
)
self.lecturer_veronica.set_password('practica')
self.lecturer_veronica.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_veronica.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_veronica.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
def test_reconstruct_and_save_ok_1(self):
data_form = {
'subject_id': self.subject_ispp.id,
'lecturer_id': self.lecturer_corchuelo.id,
'lecturer': self.lecturer_corchuelo.first_name + self.lecturer_corchuelo.last_name,
'position': 'Coordinador'
}
form = UserLinkSubjectForm(data=data_form)
self.assertEqual(form.is_valid(), True)
impart_subject = ImpartSubjectService.reconstruct_and_save(form)
impart_subject_bd = Imparteasignatura.objects.get(profesor=self.lecturer_corchuelo,
asignatura=self.subject_ispp)
self.assertEqual(impart_subject, impart_subject_bd)
def test_reconstruct_and_save_error_1(self):
data_form = {
'subject_id': '',
'lecturer_id': self.lecturer_corchuelo.id,
'lecturer': self.lecturer_corchuelo.first_name + self.lecturer_corchuelo.last_name,
'position': 'Coordinador'
}
form = UserLinkSubjectForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_reconstruct_and_save_error_2(self):
data_form = {
'subject_id': self.subject_ispp.id,
'lecturer_id': '',
'lecturer': self.lecturer_corchuelo.first_name + self.lecturer_corchuelo.last_name,
'position': 'Coordinador'
}
form = UserLinkSubjectForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_reconstruct_and_save_error_3(self):
data_form = {
'subject_id': self.subject_ispp.id,
'lecturer_id': self.lecturer_corchuelo.id,
'lecturer': self.lecturer_corchuelo.first_name + self.lecturer_corchuelo.last_name,
'position': ''
}
form = UserLinkSubjectForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_reconstruct_and_save_error_4(self):
data_form = {
'subject_id': '99854',
'lecturer_id': self.lecturer_corchuelo.id,
'lecturer': self.lecturer_corchuelo.first_name + self.lecturer_corchuelo.last_name,
'position': ''
}
form = UserLinkSubjectForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_reconstruct_and_save_error_5(self):
data_form = {
'subject_id': self.subject_ispp.id,
'lecturer_id': '74985',
'lecturer': self.lecturer_corchuelo.first_name + self.lecturer_corchuelo.last_name,
'position': ''
}
form = UserLinkSubjectForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_get_form_data_xml_ok_1(self):
lecturer = {
'uvus': self.lecturer_muller.username,
'cargo': 'Profesor'
}
data_form = ImpartSubjectService.get_form_data_xml(lecturer, self.subject_ispp)
data = {
'subject_id': self.subject_ispp.id,
'lecturer_id': self.lecturer_muller.id,
'lecturer': self.lecturer_muller.first_name + self.lecturer_muller.last_name,
'position': 'Profesor'
}
self.assertDictEqual(data_form, data)
def test_get_form_data_xml_error_1(self):
lecturer = {
'uvus': self.lecturer_muller.username,
'cargo': 'Profesor'
}
data_form = ImpartSubjectService.get_form_data_xml(lecturer, self.subject_ispp)
data = {
'subject_id': self.subject_ispp.id,
'lecturer_id': '-1',
'lecturer': self.lecturer_muller.first_name + self.lecturer_muller.last_name,
'position': 'Profesor'
}
self.assertNotEqual(data_form, data)
def test_get_form_data_xml_error_2(self):
lecturer = {
'uvus': self.lecturer_muller.username,
'cargo': 'Profesor'
}
data_form = ImpartSubjectService.get_form_data_xml(lecturer, self.subject_ispp)
data = {
'subject_id': '-1',
'lecturer_id': self.lecturer_muller.id,
'lecturer': self.lecturer_muller.first_name + self.lecturer_muller.last_name,
'position': 'Profesor'
}
self.assertNotEqual(data_form, data)
# def test_get_form_data_csv_ok_1(self):
#
# lecturer = [
# 'Profesor',
# self.lecturer_muller.dni,
# self.lecturer_muller.last_name + "," + self.lecturer_muller.first_name,
# self.lecturer_muller.username,
# 'null',
# 'Coordinador'
# ]
# data_form = ImpartSubjectService.get_form_data_csv(lecturer, self.subject_ispp)
# data = {
# 'subject_id': self.subject_ispp.id,
# 'lecturer_id': self.lecturer_muller.id,
# 'lecturer': "" + self.lecturer_muller.first_name + " " + self.lecturer_muller.last_name,
# 'position': 'Profesor'
# }
# self.assertEqual(data_form, data)
class UserTestCase(TestCase):
def setUp(self):
# Departments
self.department_lsi = Departamento.objects.create(
codigo='1',
nombre='Departamento de Lenguajes y Sistemas Informaticos',
web='http://www.lsi.us.es'
)
self.department_dte = Departamento.objects.create(
codigo='2',
nombre='Departamento de Tecnologia Electronica',
web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores'
)
self.department_atc = Departamento.objects.create(
codigo='3',
nombre='Departamento de Arquitectura y Tecnologia de Computadores',
web='http://www.atc.us.es/'
)
# Subjects
self.subject_egc = Asignatura.objects.create(
cuatrimestre='1',
nombre='Evolucion y gestion de la configuracion',
curso='4',
codigo='2050032',
creditos='6',
duracion='C',
web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=111',
tipo_asignatura='OB',
departamento=self.department_lsi,
)
self.subject_rc = Asignatura.objects.create(
cuatrimestre='1',
nombre='Redes de computadores',
curso='2',
codigo='2050013',
creditos='6',
duracion='C',
web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores',
tipo_asignatura='OB',
departamento=self.department_dte,
)
self.subject_cm = Asignatura.objects.create(
cuatrimestre='1',
nombre='Computacion Movil',
curso='4',
codigo='2060045',
creditos='6',
duracion='C',
web='http://www.us.es/estudios/grados/plan_206/asignatura_2060045',
tipo_asignatura='OP',
departamento=self.department_atc,
)
self.subject_ispp = Asignatura.objects.create(
cuatrimestre='2',
nombre='Ingenieria del Software y Practica Profesional',
curso='4',
codigo='2050039',
creditos='6',
duracion='C',
web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
tipo_asignatura='OB',
departamento=self.department_lsi,
)
        # Students
self.student_carborgar = Alumno.objects.create(
username='carborgar',
first_name='Carlos',
last_name='Borja Garcia - Baquero',
email='[email protected]',
dni='47537495X'
)
self.student_carborgar.set_password('practica')
self.student_carborgar.user_permissions.add(Permission.objects.get(codename='alumno'))
self.student_carborgar.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.student_carborgar.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.student_carborgar.user_permissions.add(Permission.objects.get(codename='view_subject_details'))
self.student_juamaiosu = Alumno.objects.create(
username='juamaiosu',
first_name='Juan Elias',
last_name='Maireles Osuna',
email='[email protected]',
dni='47537560X'
)
self.student_juamaiosu.set_password('practica')
self.student_juamaiosu.user_permissions.add(Permission.objects.get(codename='alumno'))
self.student_juamaiosu.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.student_juamaiosu.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.student_juamaiosu.user_permissions.add(Permission.objects.get(codename='view_subject_details'))
self.student_rubgombar = Alumno.objects.create(
username='rubgombar',
first_name='Ruben',
last_name='Gomez Barrera',
email='[email protected]',
dni='11111111X'
)
self.student_rubgombar.set_password('practica')
self.student_rubgombar.user_permissions.add(Permission.objects.get(codename='alumno'))
self.student_rubgombar.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.student_rubgombar.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.student_rubgombar.user_permissions.add(Permission.objects.get(codename='view_subject_details'))
self.student_davjimvar = Alumno.objects.create(
username='davjimvar',
first_name='David',
last_name='Jimenez Vargas',
email='[email protected]',
dni='22222222X'
)
self.student_davjimvar.set_password('practica')
self.student_davjimvar.user_permissions.add(Permission.objects.get(codename='alumno'))
self.student_davjimvar.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.student_davjimvar.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.student_davjimvar.user_permissions.add(Permission.objects.get(codename='view_subject_details'))
self.student_javrodleo = Alumno.objects.create(
username='javrodleo',
first_name='Javier',
last_name='Rodriguez Leon',
email='[email protected]',
dni='33333333X'
)
self.student_javrodleo.set_password('practica')
self.student_javrodleo.user_permissions.add(Permission.objects.get(codename='alumno'))
self.student_javrodleo.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.student_javrodleo.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.student_javrodleo.user_permissions.add(Permission.objects.get(codename='view_subject_details'))
# Lecturers
self.lecturer_benavides = Profesor.objects.create(
username='benavides',
email='[email protected]',
categoria='Profesor Titular de Universidad',
telefono='954559897',
despacho='F 0.48',
web='http://www.lsi.us.es/~dbc/',
first_name='David',
last_name='Benavides Cuevas',
tutoriaactivada=True,
dni='55555555X'
)
self.lecturer_benavides.set_password('practica')
self.lecturer_benavides.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_benavides.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_benavides.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.lecturer_corchuelo = Profesor.objects.create(
username='corchu',
email='[email protected]',
categoria='Profesor Titular de Universidad',
telefono='954552770',
despacho='F 1.63',
first_name='Rafael',
last_name='Corchuelo Gil',
web='https://www.lsi.us.es/personal/pagina_personal.php?id=12',
tutoriaactivada=True,
dni='66666666X'
)
self.lecturer_corchuelo.set_password('practica')
self.lecturer_corchuelo.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_corchuelo.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_corchuelo.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.lecturer_muller = Profesor.objects.create(
username='cmuller',
email='[email protected]',
categoria='Becario FPI',
telefono='954553868',
despacho='F 0.43',
first_name='Carlos',
last_name='Muller Cejas',
web='https://www.lsi.us.es/personal/pagina_personal.php?id=108',
tutoriaactivada=True,
dni='77777777X'
)
self.lecturer_muller.set_password('practica')
self.lecturer_muller.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_muller.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_muller.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.lecturer_veronica = Profesor.objects.create(
username='averonica',
email='[email protected]',
categoria='Profesor Titular de Universidad ',
telefono='954557095 ',
despacho='G 1.69',
first_name='Ana Veronica',
last_name='Medina Rodriguez',
web='http://www.dte.us.es/personal/vmedina/',
tutoriaactivada=True,
dni='88888888X'
)
self.lecturer_veronica.set_password('practica')
self.lecturer_veronica.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_veronica.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_veronica.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.impart_ispp = Imparteasignatura.objects.create(
cargo='Coordinador',
profesor=self.lecturer_corchuelo,
asignatura=self.subject_ispp
)
self.impart_ispp = Imparteasignatura.objects.create(
cargo='Profesor',
profesor=self.lecturer_muller,
asignatura=self.subject_ispp
)
self.impart_egc = Imparteasignatura.objects.create(
cargo='Coordinador',
profesor=self.lecturer_benavides,
asignatura=self.subject_egc
)
self.student_carborgar.asignaturas = [self.subject_egc, self.subject_ispp]
self.student_juamaiosu.asignaturas = [self.subject_egc]
def test_find_by_username_ok_1(self):
user = UserService.find_by_username(self.student_carborgar.username)
user_db = User.objects.get(username=self.student_carborgar.username)
self.assertEqual(user, user_db)
def test_find_by_username_error_1(self):
user = UserService.find_by_username('ghslih')
self.assertEqual(user, None)
def test_delete_ok_1(self):
username = self.student_carborgar.username
UserService.delete(self.student_carborgar)
error = False
try:
User.objects.get(username=username)
except User.DoesNotExist:
error = True
self.assertTrue(error)
def test_rollback_users_ok_1(self):
user_create = {self.lecturer_muller: 'password', self.lecturer_corchuelo: 'password'}
len_list1 = len(list(UserService.find_all()))
UserService.rollback_users(user_create)
len_list2 = len(list(UserService.find_all()))
self.assertIs(len_list1 - 2, len_list2)
def test_rollback_ok_1(self):
number_link_student_carborgar1 = len(list(self.student_carborgar.asignaturas.all()))
number_link_student_juamaiosu1 = len(list(self.student_juamaiosu.asignaturas.all()))
number_link_lecturer_benavides1 = len(list(self.lecturer_benavides.imparteasignatura_set.all()))
student_link = [self.student_juamaiosu, self.student_carborgar]
lecturer_link = [self.lecturer_benavides]
user_create = [self.lecturer_veronica]
username = self.lecturer_veronica.username
UserService.rollback(user_create, student_link, lecturer_link, self.subject_egc.id)
number_link_student_carborgar2 = len(list(self.student_carborgar.asignaturas.all()))
number_link_student_juamaiosu2 = len(list(self.student_juamaiosu.asignaturas.all()))
number_link_lecturer_benavides2 = len(list(self.lecturer_benavides.imparteasignatura_set.all()))
self.assertEqual(number_link_student_carborgar1 - 1, number_link_student_carborgar2)
self.assertEqual(number_link_student_juamaiosu1 - 1, number_link_student_juamaiosu2)
self.assertEqual(number_link_lecturer_benavides1 - 1, number_link_lecturer_benavides2)
error = False
try:
User.objects.get(username=username)
except User.DoesNotExist:
error = True
self.assertTrue(error)
class SubjectTestCase(TestCase):
def setUp(self):
# Departments
self.department_lsi = Departamento.objects.create(
codigo='1',
nombre='Departamento de Lenguajes y Sistemas Informaticos',
web='http://www.lsi.us.es'
)
self.department_dte = Departamento.objects.create(
codigo='2',
nombre='Departamento de Tecnologia Electronica',
web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores'
)
self.department_atc = Departamento.objects.create(
codigo='3',
nombre='Departamento de Arquitectura y Tecnologia de Computadores',
web='http://www.atc.us.es/'
)
# Subjects
self.subject_egc = Asignatura.objects.create(
cuatrimestre='1',
nombre='Evolucion y gestion de la configuracion',
curso='4',
codigo='1',
creditos='6',
duracion='C',
web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=111',
tipo_asignatura='OB',
departamento=self.department_lsi,
)
self.subject_rc = Asignatura.objects.create(
cuatrimestre='1',
nombre='Redes de computadores',
curso='2',
codigo='2',
creditos='6',
duracion='C',
web='https://www.dte.us.es/docencia/etsii/gii-is/redes-de-computadores',
tipo_asignatura='OB',
departamento=self.department_dte,
)
self.subject_cm = Asignatura.objects.create(
cuatrimestre='1',
nombre='Computacion Movil',
curso='4',
codigo='3',
creditos='6',
duracion='C',
web='http://www.us.es/estudios/grados/plan_206/asignatura_2060045',
tipo_asignatura='OP',
departamento=self.department_atc,
)
self.subject_ispp = Asignatura.objects.create(
cuatrimestre='2',
nombre='Ingenieria del Software y Practica Profesional',
curso='4',
codigo='4',
creditos='6',
duracion='C',
web='http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
tipo_asignatura='OB',
departamento=self.department_lsi,
)
        # Students
self.student_carborgar = Alumno.objects.create(
username='carborgar',
first_name='Carlos',
last_name='Borja Garcia - Baquero',
email='[email protected]',
dni='47537495X'
)
self.student_carborgar.set_password('practica')
self.student_carborgar.user_permissions.add(Permission.objects.get(codename='alumno'))
self.student_carborgar.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.student_carborgar.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.student_carborgar.user_permissions.add(Permission.objects.get(codename='view_subject_details'))
self.student_juamaiosu = Alumno.objects.create(
username='juamaiosu',
first_name='Juan Elias',
last_name='Maireles Osuna',
email='[email protected]',
dni='47537560X'
)
self.student_juamaiosu.set_password('practica')
self.student_juamaiosu.user_permissions.add(Permission.objects.get(codename='alumno'))
self.student_juamaiosu.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.student_juamaiosu.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.student_juamaiosu.user_permissions.add(Permission.objects.get(codename='view_subject_details'))
self.student_rubgombar = Alumno.objects.create(
username='rubgombar',
first_name='Ruben',
last_name='Gomez Barrera',
email='[email protected]',
dni='11111111X'
)
self.student_rubgombar.set_password('practica')
self.student_rubgombar.user_permissions.add(Permission.objects.get(codename='alumno'))
self.student_rubgombar.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.student_rubgombar.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.student_rubgombar.user_permissions.add(Permission.objects.get(codename='view_subject_details'))
self.student_davjimvar = Alumno.objects.create(
username='davjimvar',
first_name='David',
last_name='Jimenez Vargas',
email='[email protected]',
dni='22222222X'
)
self.student_davjimvar.set_password('practica')
self.student_davjimvar.user_permissions.add(Permission.objects.get(codename='alumno'))
self.student_davjimvar.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.student_davjimvar.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.student_davjimvar.user_permissions.add(Permission.objects.get(codename='view_subject_details'))
self.student_javrodleo = Alumno.objects.create(
username='javrodleo',
first_name='Javier',
last_name='Rodriguez Leon',
email='[email protected]',
dni='33333333X'
)
self.student_javrodleo.set_password('practica')
self.student_javrodleo.user_permissions.add(Permission.objects.get(codename='alumno'))
self.student_javrodleo.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.student_javrodleo.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.student_javrodleo.user_permissions.add(Permission.objects.get(codename='view_subject_details'))
# Lecturers
self.lecturer_benavides = Profesor.objects.create(
username='benavides',
email='[email protected]',
categoria='Profesor Titular de Universidad',
telefono='954559897',
despacho='F 0.48',
web='http://www.lsi.us.es/~dbc/',
first_name='David',
last_name='Benavides Cuevas',
tutoriaactivada=True,
dni='55555555X'
)
self.lecturer_benavides.set_password('practica')
self.lecturer_benavides.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_benavides.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_benavides.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.lecturer_corchuelo = Profesor.objects.create(
username='corchu',
email='[email protected]',
categoria='Profesor Titular de Universidad',
telefono='954552770',
despacho='F 1.63',
first_name='Rafael',
last_name='Corchuelo Gil',
web='https://www.lsi.us.es/personal/pagina_personal.php?id=12',
tutoriaactivada=True,
dni='66666666X'
)
self.lecturer_corchuelo.set_password('practica')
self.lecturer_corchuelo.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_corchuelo.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_corchuelo.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.lecturer_muller = Profesor.objects.create(
username='cmuller',
email='[email protected]',
categoria='Becario FPI',
telefono='954553868',
despacho='F 0.43',
first_name='Carlos',
last_name='Muller Cejas',
web='https://www.lsi.us.es/personal/pagina_personal.php?id=108',
tutoriaactivada=True,
dni='77777777X'
)
self.lecturer_muller.set_password('practica')
self.lecturer_muller.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_muller.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_muller.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.lecturer_veronica = Profesor.objects.create(
username='averonica',
email='[email protected]',
categoria='Profesor Titular de Universidad ',
telefono='954557095 ',
despacho='G 1.69',
first_name='Ana Veronica',
last_name='Medina Rodriguez',
web='http://www.dte.us.es/personal/vmedina/',
tutoriaactivada=True,
dni='88888888X'
)
self.lecturer_veronica.set_password('practica')
self.lecturer_veronica.user_permissions.add(Permission.objects.get(codename='profesor'))
self.lecturer_veronica.user_permissions.add(Permission.objects.get(codename='view_certification_list'))
self.lecturer_veronica.user_permissions.add(Permission.objects.get(codename='view_tutorial_request_list'))
self.impart_ispp_corchu = Imparteasignatura.objects.create(
cargo='Coordinador',
profesor=self.lecturer_corchuelo,
asignatura=self.subject_ispp
)
self.impart_ispp_muller = Imparteasignatura.objects.create(
cargo='Profesor',
profesor=self.lecturer_muller,
asignatura=self.subject_ispp
)
self.impart_ispp_benavides = Imparteasignatura.objects.create(
cargo='Coordinador',
profesor=self.lecturer_benavides,
asignatura=self.subject_ispp
)
self.impart_egc_benavides = Imparteasignatura.objects.create(
cargo='Coordinador',
profesor=self.lecturer_benavides,
asignatura=self.subject_egc
)
self.student_carborgar.asignaturas = [self.subject_egc, self.subject_ispp]
self.student_juamaiosu.asignaturas = [self.subject_egc]
def test_get_student_subjects_ok_1(self):
subjects = list(SubjectService.get_student_subjects(self.student_carborgar.id))
subjects1 = [self.subject_egc, self.subject_ispp]
self.assertListEqual(subjects, subjects1)
def test_get_lecturer_subjects_ok_1(self):
subjects = list(SubjectService.get_lecturer_subjects(self.lecturer_benavides.id))
subjects1 = [self.subject_egc, self.subject_ispp]
self.assertListEqual(subjects, subjects1)
def test_create_and_save_ok_1(self):
data_form = {
'name': 'Prueba',
'course': '1',
'code': '5',
'quarter': '1',
'credits': '6',
'web': 'http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
'duration': 'C',
'type': 'OB',
'departament': self.department_lsi.id,
}
form = SubjectEditForm(data=data_form)
self.assertEqual(form.is_valid(), True)
subject = SubjectService.create(form)
SubjectService.save(subject)
subject_bd = Asignatura.objects.get(codigo=subject.codigo)
self.assertEqual(subject, subject_bd)
def test_create_and_save_error_1(self):
data_form = {
'name': 'Prueba',
'course': '1',
'code': '4',
'quarter': '1',
'credits': '6',
'web': 'http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
'duration': 'C',
'type': 'OB',
'departament': self.department_lsi.id,
}
form = SubjectEditForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_create_and_save_error_2(self):
data_form = {
'name': 'Prueba',
'course': '10',
'code': '5',
'quarter': '1',
'credits': '6',
'web': 'http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
'duration': 'C',
'type': 'OB',
'departament': self.department_lsi.id,
}
form = SubjectEditForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_create_and_save_error_3(self):
data_form = {
'name': 'Prueba',
'course': '1',
'code': '5',
'quarter': '8',
'credits': '6',
'web': 'http://www.lsi.us.es/docencia/pagina_asignatura.php?id=110',
'duration': 'C',
'type': 'OB',
'departament': self.department_lsi.id,
}
form = SubjectEditForm(data=data_form)
self.assertEqual(form.is_valid(), False)
def test_find_by_code_ok_1(self):
subject = SubjectService.find_by_code(self.subject_ispp.codigo)
self.assertEqual(subject, self.subject_ispp)
def test_find_by_code_error_1(self):
subject = SubjectService.find_by_code('5')
self.assertEqual(subject, None)
def test_find_one_ok_1(self):
subject = SubjectService.find_one(self.subject_ispp.id)
self.assertEqual(subject, self.subject_ispp)
def test_find_one_error_1(self):
subject = SubjectService.find_one('-1')
self.assertEqual(subject, None)
@override_settings(EMAIL_BACKEND='django.core.mail.backends.smtp.EmailBackend')
class EmailTestCase(TestCase):
def test_send_email(self):
try:
mail_sent_success = mail.send_mail('Test',
'Test',
EMAIL_HOST_USER, [EMAIL_HOST_USER],
fail_silently=True)
self.assertEqual(mail_sent_success, 1)
except Exception:
            self.fail('Could not send the email')
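# Usage sketch (a hedged example; the repo path suggests this module is
# the "principal" app's tests.py, so the suites above should run with
# the standard Django test runner):
#   python manage.py test principal.tests.EmailTestCase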
| carborgar/gestionalumnostfg | principal/tests.py | Python | mit | 52,227 |
"""
Base class for exporters
"""
# Standard library modules.
import os
# Third party modules.
# Local modules.
from pyhmsa.util.monitorable import _Monitorable, _MonitorableThread
# Globals and constants variables.
class _ExporterThread(_MonitorableThread):
def __init__(self, datafile, dirpath, *args, **kwargs):
args = (datafile, dirpath,) + args
super().__init__(args=args, kwargs=kwargs)
def _run(self, datafile, dirpath, *args, **kwargs):
raise NotImplementedError
class _Exporter(_Monitorable):
def _create_thread(self, datafile, dirpath, *args, **kwargs):
args = (datafile, dirpath,) + args
super()._create_thread(*args, **kwargs)
def validate(self, datafile):
pass
def can_export(self, datafile):
try:
self.validate(datafile)
        except Exception:
return False
else:
return True
def export(self, datafile, dirpath):
self.validate(datafile)
if not os.path.exists(dirpath):
raise ValueError('Path does not exist: %s' % dirpath)
if not os.path.isdir(dirpath):
raise ValueError('Path is not a directory: %s' % dirpath)
self._start(datafile, dirpath)
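# A minimal sketch of a concrete exporter built on the bases above; the
# output filename and the repr() dump are illustrative assumptions, not
# part of pyHMSA:
class _ExporterTextThread(_ExporterThread):
    def _run(self, datafile, dirpath, *args, **kwargs):
        # Write a plain-text dump of the datafile and return its path.
        filepath = os.path.join(dirpath, 'datafile.txt')
        with open(filepath, 'w') as fp:
            fp.write(repr(datafile))
        return filepath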
| pyhmsa/pyhmsa | pyhmsa/fileformat/exporter/exporter.py | Python | mit | 1,242 |
from __future__ import absolute_import
from .base import *
from bundle_config import config
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': config['postgres']['database'],
'USER': config['postgres']['username'],
'PASSWORD': config['postgres']['password'],
'HOST': config['postgres']['host'],
}
}
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': '{host}:{port}'.format(
host=config['redis']['host'],
port=config['redis']['port']),
'OPTIONS': {
'PASSWORD': config['redis']['password'],
},
'VERSION': config['core']['version'],
},
}
DEBUG = False
| almet/whiskerboard | settings/epio.py | Python | mit | 751 |
from django.apps import AppConfig
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
class AppConfig(AppConfig):
name = '.'.join(__name__.split('.')[:-1])
label = 'icekit_plugins_iiif'
verbose_name = "IIIF Basics"
def ready(self):
# Create custom permission pointing to User, because we have no other
# model to hang it off for now...
# TODO This is a hack, find a better way
User = get_user_model()
try:
# this doesn't work if migrations haven't been updated, resulting
# in "RuntimeError: Error creating new content types. Please make
# sure contenttypes is migrated before trying to migrate apps
# individually."
content_type = ContentType.objects.get_for_model(User)
Permission.objects.get_or_create(
codename='can_use_iiif_image_api',
name='Can Use IIIF Image API',
content_type=content_type,
)
except RuntimeError:
pass
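# Usage sketch (hedged): once created, the permission is checked on a
# request user in the usual Django way; the app label comes from the
# user model's content type, typically "auth":
#   request.user.has_perm('auth.can_use_iiif_image_api')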
| ic-labs/django-icekit | icekit/plugins/iiif/apps.py | Python | mit | 1,144 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-12-08 21:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0021_auto_20161208_1214'),
]
operations = [
migrations.AlterField(
model_name='tournamentteam',
name='name',
field=models.CharField(max_length=255, verbose_name='holdnavn'),
),
migrations.AlterField(
model_name='tournamentteam',
name='profiles',
field=models.ManyToManyField(to='main.Profile', verbose_name='medlemmer'),
),
]
| bomjacob/htxaarhuslan | main/migrations/0022_auto_20161208_2216.py | Python | mit | 679 |
from functools import wraps
def retry_task(f):
@wraps(f)
def decorated_function(*args, **kwargs):
        retry = kwargs.get('retry', False)
        if retry == 0:
            # No retries requested (0, or the default False): call once.
            return f(*args, **kwargs)
        elif retry > 0:
            # Retry up to `retry` times while the result reports a 500,
            # then return one final attempt regardless of its status.
            for x in range(0, retry):
                result = f(*args, **kwargs)
                if result['status'] != 500:
                    return result
            return f(*args, **kwargs)
        elif retry == -1:
            # Retry indefinitely until the result is not a 500.
            while retry:
                result = f(*args, **kwargs)
                if result['status'] != 500:
                    return result
return decorated_function
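# Usage sketch (hypothetical task; the decorated function must accept a
# `retry` keyword and return a dict with a 'status' key):
#   @retry_task
#   def fetch(url, retry=0):
#       return {'status': 200}
#   fetch('http://example.com', retry=3)  # retried while status == 500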
| JR--Chen/flasky | app/spider/decorators.py | Python | mit | 641 |
#------------------------------------------------------------------------------
# Copyright (C) 2009 Richard W. Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#------------------------------------------------------------------------------
""" Defines an action for moving the workspace to the parent directory.
"""
#------------------------------------------------------------------------------
# Imports:
#------------------------------------------------------------------------------
from os.path import dirname
from enthought.traits.api import Bool, Instance
from enthought.pyface.api import ImageResource
from enthought.pyface.action.api import Action
from enthought.envisage.ui.workbench.api import WorkbenchWindow
from puddle.resource.resource_view import RESOURCE_VIEW
from common import IMAGE_LOCATION
#------------------------------------------------------------------------------
# "UpAction" class:
#------------------------------------------------------------------------------
class UpAction(Action):
""" Defines an action for moving the workspace to the parent directory.
"""
#--------------------------------------------------------------------------
# "Action" interface:
#--------------------------------------------------------------------------
# A longer description of the action:
description = "Move workspace to the parent directory"
# The action"s name (displayed on menus/tool bar tools etc):
name = "&Up"
# A short description of the action used for tooltip text etc:
tooltip = "Open parent directory"
# Keyboard accelerator:
accelerator = "Alt+Up"
# The action's image (displayed on tool bar tools etc):
image = ImageResource("up", search_path=[IMAGE_LOCATION])
#--------------------------------------------------------------------------
# "UpAction" interface:
#--------------------------------------------------------------------------
window = Instance(WorkbenchWindow)
#--------------------------------------------------------------------------
# "Action" interface:
#--------------------------------------------------------------------------
def perform(self, event):
""" Perform the action.
"""
# Note that we always offer the service via its name, but look it up
# via the actual protocol.
from puddle.resource.i_workspace import IWorkspace
workspace = self.window.application.get_service(IWorkspace)
workspace.path = dirname(workspace.absolute_path)
view = self.window.get_view_by_id(RESOURCE_VIEW)
if view is not None:
workspace = self.window.application.get_service(IWorkspace)
view.tree_viewer.refresh(workspace)
# EOF -------------------------------------------------------------------------
| rwl/puddle | puddle/resource/action/up_action.py | Python | mit | 3,878 |
#!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from pkg_resources import parse_version
def check_dependencies():
'''
setuptools causes problems for installing packages (especially
statsmodels). Use this function to abort installation instead.
'''
try:
import cython
except ImportError:
raise ImportError("Install cython before installing TurbuStat.")
try:
import matplotlib
mpl_version = matplotlib.__version__
if parse_version(mpl_version) < parse_version('1.2'):
print("***Before installing, upgrade matplotlib to 1.2***")
raise ImportError
except:
raise ImportError(
"Install or upgrade matplotlib before installing TurbuStat.")
try:
from numpy.version import version as np_version
if parse_version(np_version) < parse_version('1.6'):
print("***Before installing, upgrade numpy to 1.6***")
raise ImportError
except:
raise ImportError(
"Install or upgrade numpy before installing TurbuStat.")
try:
from scipy.version import version as sc_version
if parse_version(sc_version) < parse_version('0.12'):
print("***Before installing, upgrade scipy to 0.12***")
raise ImportError
except:
raise ImportError(
"Install or upgrade scipy before installing TurbuStat.")
try:
from pandas.version import version as pa_version
if parse_version(pa_version) < parse_version('0.13'):
print("***Before installing, upgrade pandas to 0.13***")
raise ImportError
except:
raise ImportError(
"Install or upgrade pandas before installing TurbuStat.")
try:
from statsmodels.version import version as sm_version
if parse_version(sm_version) < parse_version('0.4.0'):
print("***Before installing, upgrade statsmodels to 0.4.0***")
raise ImportError
except:
raise ImportError(
"Install or upgrade statsmodels before installing TurbuStat.")
try:
import sklearn
skl_version = sklearn.__version__
if parse_version(skl_version) < parse_version('0.13.0'):
print("***Before installing, upgrade sklearn to 0.13.0***")
raise ImportError
except:
raise ImportError(
"Install or upgrade sklearn before installing TurbuStat.")
try:
from astropy.version import version as ast_version
if parse_version(ast_version[:3]) < parse_version('0.4'):
print(("""***Before installing, upgrade astropy to 0.4.
NOTE: This is the dev version as of 17/06/14.***"""))
raise ImportError("")
except:
raise ImportError(
"Install or upgrade astropy before installing TurbuStat.")
try:
import astrodendro
except:
raise ImportError(("""Install or upgrade astrodendro before installing
TurbuStat. ***NOTE: Need dev version as
of 17/06/14.***"""))
if __name__ == "__main__":
check_dependencies()
setup(name='turbustat',
version='0.0',
description='Distance metrics for comparing spectral line data cubes.',
author='Eric Koch, Caleb Ward, Jason Loeppky and Erik Rosolowsky',
author_email='[email protected]',
url='http://github.com/Astroua/TurbuStat',
scripts=[],
packages=find_packages(
exclude=["*.tests", "*.tests.*", "tests.*", "tests"])
)
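# Typical invocation (a sketch; the dependency checks above run first):
#   python setup.py install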
| keflavich/TurbuStat | setup.py | Python | mit | 3,706 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import shutil
import time
import subprocess
import numpy as np
from .phonopy_conf_creator import PhonopyConfCreator
from vasp.poscar import Poscar
from autotools import symlink_force
class PhononCalculator(object):
def __init__(self,
directory_data="./",
poscar_filename="POSCAR",
poscar_average_filename=None,
is_average_mass=False,
dim_sqs=None,
is_primitive=False,
is_band=True,
is_partial_dos=False,
is_tetrahedron=False,
is_tprop=False,
mesh=None,
nac=None):
if dim_sqs is None:
dim_sqs = np.array([1, 1, 1])
if mesh is None:
mesh = np.array([1, 1, 1])
self._variables = None
self._home = os.path.expanduser("~")
        # decode() so the path is a str rather than bytes under Python 3
        self._phonopy = subprocess.check_output(["which", "phonopy"]).decode().strip()
print("phonopy_path:", self._phonopy)
self._directory_data = directory_data
self._poscar_filename = poscar_filename
self._poscar_average_filename = poscar_average_filename
self._is_average_mass = is_average_mass
self.set_dim_sqs(dim_sqs)
self._is_band = is_band
self.set_is_tetrahedron(is_tetrahedron)
self.set_is_partial_dos(is_partial_dos)
self.set_is_tprop(is_tprop)
self._is_primitive = is_primitive
self._mesh = np.array(mesh)
self._nac = nac
def set_dim_sqs(self, dim_sqs):
self._dim_sqs = dim_sqs
def set_is_tetrahedron(self, is_tetrahedron):
self._is_tetrahedron = is_tetrahedron
def set_is_partial_dos(self, is_partial_dos):
self._is_partial_dos = is_partial_dos
def set_is_tprop(self, is_tprop):
self._is_tprop = is_tprop
def set_mesh(self, mesh):
self._mesh = mesh
def set_variables(self, variables):
self._variables = variables
def run(self):
self.copy_files()
self.create_phonopy_conf()
conf_files = self.gather_conf_files()
for conf_file in conf_files:
self.run_phonopy(conf_file)
def copy_files(self):
dir_data = self._directory_data
symlink_force(os.path.join(dir_data, 'writefc.conf'), 'writefc.conf')
symlink_force(os.path.join(dir_data, 'POSCAR'), 'POSCAR')
symlink_force(os.path.join(dir_data, 'POSCAR_ideal'), 'POSCAR_ideal')
symlink_force(os.path.join(dir_data, 'FORCE_CONSTANTS'), 'FORCE_CONSTANTS')
def create_phonopy_conf(self):
directory_data = self._directory_data
dim_sqs = self._dim_sqs
variables = self._variables
mesh = self._mesh.copy()
print("directory_data:", directory_data)
print("mesh:", mesh)
spg_number = self.create_spg_number()
# Get band path for the specific space group
phonopy_conf_creator = PhonopyConfCreator(
spg_number,
mesh=mesh,
tmax=3000,
dim_sqs=dim_sqs,
is_average_mass=self._is_average_mass,
is_primitive=self._is_primitive,
band_points=101,
poscar_name="POSCAR", # For getting the chemical symbols
magmom_line=None,
variables=variables,
nac=self._nac,
)
phonopy_conf_creator.run()
def create_spg_number(self):
"""
spg_number is used to determine the primitive axis and band paths.
"""
if self._poscar_average_filename is not None:
poscar_filename = self._poscar_average_filename
else:
poscar_filename = self._poscar_filename
print('SPG number is searched from {}'.format(poscar_filename))
spg_number = Poscar(poscar_filename).get_symmetry_dataset()["number"]
print("spg_number:", spg_number)
return spg_number
def gather_conf_files(self):
conf_files = [
"dos_smearing.conf",
]
if self._is_band:
conf_files.append("band.conf")
if self._is_tetrahedron:
conf_files.append("dos_tetrahedron.conf")
if self._is_partial_dos:
conf_files.append("partial_dos_smearing.conf")
if self._is_tetrahedron and self._is_partial_dos:
conf_files.append("partial_dos_tetrahedron.conf")
if self._is_tprop:
conf_files.append("tprop.conf")
return conf_files
def run_phonopy(self, conf_file):
root = os.getcwd()
home = self._home
phonopy = self._phonopy
print("=" * 80)
print(conf_file)
print("=" * 80)
dir_name = conf_file.replace(".conf", "_calc")
log_file = conf_file.replace(".conf", ".log")
if os.path.exists(dir_name):
shutil.rmtree(dir_name)
os.mkdir(dir_name)
os.chdir(dir_name)
for fn in [conf_file, "POSCAR", "FORCE_CONSTANTS", "BORN"]:
if os.path.exists(os.path.join("..", fn)):
os.symlink("../" + fn, fn)
if os.path.exists(log_file):
os.remove(log_file)
time1 = time.time()
with open(log_file, "w") as f:
subprocess.call(
[phonopy, conf_file, "-v"],
stdout=f,
)
time2 = time.time()
dtime = time2 - time1
print("Time for calc.: {:12.6f} s".format(dtime))
if conf_file == "tprop.conf":
subprocess.call(
["python", home + "/script/python/phonopy_tprop_arranger.py"]
)
os.chdir(root)
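# Usage sketch, equivalent to the CLI in main() below (the data
# directory is an assumption for illustration):
#   calc = PhononCalculator(directory_data='..', is_tetrahedron=True)
#   calc.run()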
def main():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--datadir",
default="..",
type=str,
help="Data directory")
parser.add_argument("--tetrahedron",
action="store_true",
help="Calculate using tetrahedron method.")
parser.add_argument("--partial_dos",
action="store_true",
help="Calculate partial DOS.")
parser.add_argument("--tprop",
action="store_true",
help="Calculate thermal properties.")
args = parser.parse_args()
phonon_analyzer = PhononCalculator(
directory_data=args.datadir,
is_tetrahedron=args.tetrahedron,
is_partial_dos=args.partial_dos,
is_tprop=args.tprop,
)
phonon_analyzer.run()
if __name__ == "__main__":
main()
| yuzie007/ph_analysis | ph_analysis/phonon_calculator.py | Python | mit | 6,728 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'CryptoKnocker.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', include("mainpage.urls")),
url(r"^login/$", "mainpage.views.login_form"),
url(r'^management/$', "management.views.index"),
url(r'^management/login$', "management.views.user_login"),
url(r'^logout/$', "management.views.user_logout"),
url(r'^management/registration/$', "management.views.registration"),
url(r'^management/keys/$', "management.views.manageKeys"),
url(r'^management/keys/changeKey$', "management.views.changeKey"),
url(r'^management/getPorts/$', "management.views.getPorts"),
url(r'^(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT})
)
| bb111189/CryptoKnocker | CryptoKnocker/CryptoKnocker/urls.py | Python | mit | 970 |
# -*- coding: utf-8 -*-
"""
annotations_line2d module
Created on Thu Sep 10 21:51:23 2015
@author: James Sorenson
"""
import matplotlib
import matplotlib.pyplot as plt
# This is to prevent overlapping annotations from being dragged simultaneously
# due to the multi-threaded nature of the matplotlib gui.
import threading
###########################
# Globals
###########################
attr_name = 'annotations_line2d'
_event = None  # Used for debugging
###########################
# Class definitions
###########################
class DraggableAnnotationLine2D(matplotlib.offsetbox.DraggableBase):
"""This class is like Matplotlib’s DraggableAnnotation, but this one actually works.
Apparently, the original class can't handle annotations that are created
using 'offset points' from a data point. This class ONLY works with those.
Left-click to move the annotation without changing the data point.
Middle-click to slide the annotation to a different data point.
Right-click to delete the annotation.
The original annotation artist is in self.ref_artist.
We save additional info in self.line, self.index, and self.formatter.
"""
# Class-level lock to make sure only ONE annotation is moved at a time.
    # Due to Qt's multi-threaded nature, it's best to use a real thread lock.
    _drag_lock = threading.Lock()
    _counter = 0  # Just a counter to give each annotation a unique ID.
def __init__(self, ref_artist, line=None, index=None, formatter=None, use_blit=True):
        # Use the base init (This isn't C++ where the parent is called automatically.)
super().__init__(ref_artist, use_blit=use_blit)
# Store the other parameters
self.line=line
self.index=index
self.formatter=formatter
# Create a unique ID for this annotation (for debugging)
DraggableAnnotationLine2D._counter += 1
DraggableAnnotationLine2D._counter %= 2**31 # Not too big
self.id = DraggableAnnotationLine2D._counter
#print('Init',self.id)
if formatter is not None:
# Get and set the text
self.ref_artist.set_text(self.formatter(line, index))
#Update the canvas to make sure the annotation is visible
self.canvas.draw()
def artist_picker(self, artist, event):
"""
Determines if the artist should enable move for this mouse button event
"""
# Make sure this only happens with a click. Ignore scroll.
# Left or Right click works on all of these annotations
# Middle click (slide) requires that line and index are assigned
if (event.button in (1,3)) or \
           (event.button == 2 and self.line is not None and self.index is not None):
# Good action. We only want to drag if the cursor is inside the
# box, not the arrow and the area around it.
# contains(event) returns (bool,attr)
#print('Picked',self.id)
drag = self.ref_artist.get_bbox_patch().contains(event)
if drag[0]:
                # Make sure no other annotations are dragging.
# wait=False means no block. True if a successful lock.
if DraggableAnnotationLine2D._drag_lock.acquire(False):
# Record the mouse button
self.button=event.button
#print('Claim',self.id)
return drag
# If we made it here, then we're not moving
return (False, None)
def save_offset(self):
"""
On button-down, this saves the current location of the annotation.
Annotation object is in self.ref_artist.
"""
#print('Save',self.id)
if self.button == 1:
# Left-click. Move the annotation while pointing at the same data.
# Get the starting position of the artist in points (relative to data point)
self.drag_start_text_points = self.ref_artist.get_position()
            # Get the inverted transform so we can convert pixels to points.
self.drag_trans_mat = self.ref_artist.get_transform().inverted().get_matrix()
elif self.button == 2:
# Middle-click. We need some additional information to slide the data.
self.xydata=self.line.get_xydata() #just makes it easier (this does NOT copy)
# we need the pixels of the starting data point (not the cursor)
self.drag_start_pixels = self.ref_artist.get_axes().transData.transform(self.ref_artist.xy)
# Get the translation from pixels to data for annotation.xy
self.drag_trans_pix2dat = self.ref_artist.get_axes().transData.inverted()
def update_offset(self, dx, dy):
"""
dx and dy is the total pixel offset from the point where the mouse
drag started.
"""
        if self.button == 1: # Left-click
# Scale delta pixels to delta points using parts of annotation transform.
# The full transform includes the data offset, but set position already does that.
new_position=(self.drag_start_text_points[0] + dx * self.drag_trans_mat[0,0],
self.drag_start_text_points[1] + dy * self.drag_trans_mat[1,1])
# Apply as delta points from data point
self.ref_artist.set_position(new_position)
        elif self.button == 2: # Middle-click
            # We may have a logarithmic scale, but update_offset only gives us delta pixels.
# Add the delta to the starting pixels, then convert to data coordinates
pixels_dxy = matplotlib.numpy.array((dx,dy))
new_data_xy = self.drag_trans_pix2dat.transform(self.drag_start_pixels+pixels_dxy)
# Determine if the new data coordinates reach or exceed the next line data point.
index=self.index
while (index > 0) and (self.xydata[index-1][0] > new_data_xy[0]):
                # Move left
index -= 1
while (index < self.xydata.shape[0] - 1) and (self.xydata[index+1][0] < new_data_xy[0]):
# Move right
index += 1
if index != self.index:
# we moved an index! Update the annotation
self.ref_artist.xy=self.xydata[index,:]
self.index=index
if self.formatter is not None:
# Update the text in the annotation
self.ref_artist.set_text(self.formatter(self.line, index))
def finalize_offset(self):
"""Called when the mouse button is released, if this was picked in the first place."""
#print('Finalize',self.id)
if self.button == 2 and self.formatter is not None:
# Print out annotation text for the user to copy/paste
self.print_annotation()
elif self.button == 3:
# Delete annotation
self.remove()
def on_release(self,event):
"""
Called when the mouse button is released, whether or not this was picked.
We extend this function so that we are guaranteed to release the thread lock.
"""
# Call the original
super().on_release(event)
        # Everyone tries to release the lock, just in case the controlling annotation was removed.
try:
DraggableAnnotationLine2D._drag_lock.release()
except RuntimeError:
pass # Already released. Not a concern.
def print_annotation(self):
"""Does exactly what you think it does"""
print('Annotation: {0}, ind={1}\n{2}'.format(self.line.get_label(), self.index, self.ref_artist.get_text()))
def remove(self):
"""Disconnect and delete the annotation."""
#print('Remove',self.id)
self.disconnect() # Disconnect the callbacks
self.ref_artist.remove() # Delete the annotation artist
self.got_artist=False # Tell this class it no longer has an artist
self.canvas.draw() # Update the whole canvas so the annotation disappears
class AnnotationPicker(object):
"""
A class to enable convenient annotations to any plot.
This is meant only for 2D lines.
Left-click to move the annotation without changing the data point.
Middle-click to slide the annotation to a different data point.
Right-click to delete the annotation.
Optional arguments:
artists: (default None) A single or list of artists to attach this to as 'artist annotations'
tolerance : (default 5) Picker tolerance to a line's data point to create an annotation.
formatter : function to generate the string in the annotation. fcn(Line2D artist, index)
    All other keyword arguments will be passed to the annotation.
"""
def __init__(self, artists=None, tolerance=5, formatter=None, button=1, key = 'control', use_blit=True, **kwargs):
# Parse the arguments
self.tolerance = tolerance
self.use_blit = use_blit
self.button = button
self.key=key
if formatter is None: # Use default
self.formatter=self._annotate_line_str
else:
self.formatter = formatter
# Save the annotation parameters
self.annotation_kwargs = dict(xycoords='data', textcoords='offset points',
fontsize=11, picker=True, xytext=(20, 20),
bbox=dict(boxstyle='round,pad=0.5', fc='yellow', alpha=0.5),
arrowprops=dict(shrink=0.05, headwidth=5, width=1))
# Add in additional/modified user parameters
self.annotation_kwargs.update(kwargs)
# Apply this annotation instance to the given artists and children
if artists is not None:
self.apply(artists)
def apply(self, artists):
"""
Enable picker on lines so that annotations are activated.
        This particular AnnotationPicker instance will be applied to this artist and
        its children (unless the children already have their own instance).
        Use _clear_annotations() if you wish to override the children's settings.
"""
# This is overly complex, but it allows the user to throw anything at it (figure, axes, line, etc)
# Make it iterable for convenience
artists = _make_iterable(artists)
for artist in artists:
if artist is None:
continue
# Attach this instance to the given artists
setattr(artist, attr_name, self)
# Enable picker to any line contained in this artist that is not already enabled.
if isinstance(artist, matplotlib.lines.Line2D) and not artist.pickable():
lines = [artist]
elif isinstance(artist, matplotlib.axes.Axes):
lines = [line for line in artist.get_lines() if not line.pickable()]
elif isinstance(artist, matplotlib.figure.Figure):
lines = [line for ax in artist.get_axes() for line in ax.get_lines() if not line.pickable()]
else:
lines=[]
for line in lines:
line.set_picker(self.tolerance)
# Make sure the callbacks are enabled for the parent canvas
enable_callbacks(artist)
def annotate(self, line, index, text=None):
"""
Makes a draggable, interactive annotation on the given line,
at the given index, with the given text.
line : Line2D object to annotate
index : The index of the line to put the annotation
text : The text to fill the annotation with. If None, then use default.
Returns a DraggableAnnotationLine2D instance where the annotation artist is in self.ref_artist.
"""
if text is None:
# Get the text from the formatter
formatter=self.formatter
else:
# Manual text is given. Don't use the formatter
formatter = None
# Create the annotation at the designated point
ax=line.get_axes()
annot=ax.annotate(text, line.get_xydata()[index,:], **self.annotation_kwargs)
# Make it draggable using our class, then return the object
return DraggableAnnotationLine2D(annot, line, index, formatter, use_blit=self.use_blit)
def _annotate_line_str(self, line, index):
"""
The default function to take a Line2D artist and index and generate a
string for the annotation box.
"""
xy=line.get_xydata()[index]
return '{0}[{1}]:\nx={2:.9}\ny:{3:.9}'.format(line.get_label(),index,xy[0],xy[1])
def _onpick(self,event):
"""Called by canvas pick event."""
if event.mouseevent.button == self.button and \
event.mouseevent.key == self.key and \
isinstance(event.artist, matplotlib.lines.Line2D):
# More than one index may be in range. Determine the middle index.
ind = event.ind[len(event.ind)//2]
global _event
_event=event
# Generate the annotation
self.annotate(event.artist, ind)
###########################
# Module functions
###########################
def enable_callbacks(artist):
"""
Enable annotation callbacks within this canvas/figure.
This adds the .annotations attribute to the canvas to hold the callbacks.
"""
if isinstance(artist, matplotlib.figure.Figure):
canvas=artist.canvas
elif hasattr(artist, 'get_figure'):
canvas=artist.get_figure().canvas
else:
canvas=artist
if not hasattr(canvas,attr_name):
# Add the callbacks and store as a list in the canvas attribute
callbacks=[]
callbacks.append(canvas.mpl_connect('pick_event', _on_pick_event))
callbacks.append(canvas.mpl_connect('figure_enter_event', _on_figure_enter_event))
setattr(canvas, attr_name, callbacks)
def disable_callbacks(canvas):
"""
Disable all annotation callbacks pertaining to this callback.
We leave the pickers and annotation instances in the artists.
We just get rid of the callback attached to the canvas.
"""
if isinstance(canvas, matplotlib.figure.Figure):
canvas=canvas.canvas # We were given the figure instead
for callback in getattr(canvas, attr_name, []):
canvas.mpl_disconnect(callback)
delattr(canvas, attr_name)
    print('AnnotationPicker callbacks removed from canvas.')
def annotate(line, index, text=None):
"""
Wrapper function around AnnotationPicker.annotate()
This will find the controlling instance of Annotations for the given line
and create an interactive annotation at the given index with the given text.
Input:
line: The matplotlib line object to annotate (plt.figure(1).axes[0].lines[0])
index: The index of the line to annotate.
text: The annotation text. It None, then the AnnotationPicker.formatter()
is used to generate text at the given line and index.
Returns:
DraggableAnnotationLine2D object
"""
annotations_instance = _find_annotations_instance(line)
if annotations_instance is None:
# Create a default annotation for this line
annotations_instance = AnnotationPicker(line)
setattr(line, attr_name, annotations_instance)
    return annotations_instance.annotate(line, index, text)
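# Usage sketch for the wrapper above (assumes an existing Line2D):
#   line, = plt.gca().plot(range(10))
#   annotate(line, 5)            # text generated by the formatter
#   annotate(line, 2, 'manual')  # explicit annotation text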
def subplots(*args, anno=None, **kwargs):
"""
Identical to plt.subplots(), but also assigns an AnnotationPicker class
to the figure. Use "anno=AnnotationPickerInstance" to use a specific instance
of the AnnotationPicker.
"""
# Since we are using plt.subplots, this will show immediately if interactive.
# gca and gcf will also be updated.
fig,ax_list=plt.subplots(*args, **kwargs)
if anno is None:
# Create default AnnotationPicker that will be connected to the figure
AnnotationPicker(fig)
else:
anno.apply(fig)
return (fig,ax_list)
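# Usage sketch: with the default AnnotationPicker (button=1,
# key='control'), ctrl+left-click near a data point drops an annotation:
#   fig, ax = subplots()
#   ax.plot(range(10), label='demo')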
###########################
# Private Utilities
###########################
def _make_iterable(obj):
"""Return obj as a list if it is not already an iterable object"""
if hasattr(obj,'__iter__'):
return obj
else:
# Make it iterable for consistency
return [obj]
def _find_annotations_instance(artist):
"""
Find the controlling Annotations instance for this artists.
It could be attached to the artist itself, or on the parent axes or figure.
Returns the controlling Annotations instance.
"""
if hasattr(artist, attr_name):
# Instance is attached to the artist itself
return getattr(artist, attr_name)
elif hasattr(artist, 'get_axes' ) and hasattr(artist.get_axes(), attr_name):
# Instance is attached to the axes
return getattr(artist.get_axes(), attr_name)
elif hasattr(artist, 'get_figure') and hasattr(artist.get_figure(), attr_name):
# Instance is attached to the figure
return getattr(artist.get_figure(), attr_name)
# No instance found
return None
def _clear_annotations(artist):
"""
Call this on any artist to clear the annotation instances for that artist
and all of its children. Mostly useful for debugging.
"""
artists = _make_iterable(artist)
for artist in artists:
if hasattr(artist, attr_name):
delattr(artist, attr_name)
        if hasattr(artist, 'get_children'):
_clear_annotations(artist.get_children())
print('All annotations in artist and children deleted.')
###########################
# Canvas Callback functions
###########################
def _on_pick_event(event):
"""
This is what initially gets called when ANY artist in the figure with
picking enabled is picked.
    Starting with the artist itself, this function will determine the closest
AnnotationPicker instance to call. This permits different settings per
line or per axes.
"""
annotations_instance = _find_annotations_instance(event.artist)
if annotations_instance is not None:
# Call the controlling Annotations instance
annotations_instance._onpick(event)
def _on_figure_enter_event(event):
"""
When the mouse enters the figure, this will make sure all lines have
picker enabled so that new lines can be annotated.
"""
fig=event.canvas.figure
# Only lines that are not already pickable will be updated.
lines=[line for ax in fig.axes for line in ax.lines if not line.pickable()]
for line in lines:
# The controlling Annotations instance is either in the axes or figure.
annotations_instance=_find_annotations_instance(line)
if annotations_instance is not None:
line.set_picker(annotations_instance.tolerance)
# We may need to update legends if the user manually plotted or deleted a line.
#legend_update(fig, draw=True) #Draw if a change was detected
###########################
# TEST
###########################
if __name__ == '__main__':
import numpy as np
plt.ion()
# Use our subplots wrapper to make sure annotations are enabled
fig,ax=subplots(2,1)
ax[0].set_title('click on points')
x=np.r_[-5:5:.1]
y=x**2-5*x+3
lines=[]
lines += ax[0].plot(x,x**2-5*x+3, '-.',label='My Line')
lines += ax[1].plot(x,5*x+4,label='Line2')
# Enable Annotations
anno=AnnotationPicker(fig)
an=anno.annotate(ax[0].lines[0],30, 'A manual annotation')
# Add a legend
#leg=legend(ax)
    # Add another line and see if entering the figure with the mouse catches it
ax[1].plot(x,2*x+7, label='New line')
# Create custom string for 2nd axes
def custom_text(line,ind):
xy=line.get_xydata()[ind]
custom='Custom text\nData[{0}]: {1:.9}, {2:.9}'.format(ind,xy[0],xy[1])
return custom
anno2=AnnotationPicker(ax[1],formatter=custom_text, key=None)
ax[1].plot(x,y, '.-',label='No picker yet') # See if the picker gets enabled
ax[1].legend()
plt.draw()
| J-Sorenson/annnotations-line2d | annotations_line2d.py | Python | cc0-1.0 | 20,221 |
#!/usr/bin/env python3
# Combine SVG Images of Italic Practice Sheets into an OpenDocument Document
# Written in 2014 by Jordan Vaughan
#
# To the extent possible under law, the author(s) have dedicated all copyright
# and related and neighboring rights to this software to the public domain
# worldwide. This software is distributed without any warranty.
#
# You should have received a copy of the CC0 Public Domain Dedication along
# with this software. If not, see
# <http://creativecommons.org/publicdomain/zero/1.0/>.
import argparse
import base64
import datetime
from decimal import Decimal
import math
import os.path
import sys
parser = argparse.ArgumentParser(description="Combine images of Italic calligraphy practice sheets into a single OpenDocument file. Note that this program does not verify that the specified images will fit and retain their aspect ratios within the specified page dimensions: You must verify that yourself. The generated flat OpenDocument file is printed on standard output.")
parser.add_argument("-d", "--description", default="", help="""description of the file (added before the public domain dedication [see -p], if any; default is blank)""")
parser.add_argument("-p", "--public-domain-dedication", metavar="AUTHOR", default=None, help="""add a Creative Commons CC0 Public Domain Dedication to the generated image using the specified AUTHOR""")
parser.add_argument("-t", "--title", default="Italic Calligraphy Practice Sheets", help="""the document's title in its metadata (default: "Italic Calligraphy Practice Sheets")""")
parser.add_argument("-u", "--units", default="mm", help="""units used for page and margin dimensions (can be any unit suffix recognized by the OpenDocument standard; default: mm)""")
parser.add_argument("width", type=Decimal, help="""the width of the page""")
parser.add_argument("height", type=Decimal, help="""the height of the page""")
parser.add_argument("margin", type=Decimal, help="""the width of page margins""")
parser.add_argument("sheetimage", nargs="+", help="""a list of SVG images of Italic calligraphy practice sheets""")
errors = False
def error(message):
global errors
sys.stderr.write(os.path.basename(sys.argv[0]) + ": error: " + message + "\n")
errors = True
if __name__ == "__main__":
try:
args = parser.parse_args()
except Exception:
error("invalid command line arguments (invalid syntax?)")
sys.exit(1)
if args.width <= 0:
error("width must be positive")
if args.height <= 0:
error("height must be positive")
if args.margin < 0:
error("margin must be positive or zero")
if args.margin > args.width * Decimal(0.5):
error("margin exceeds horizontal page dimensions (i.e., it's too large!)")
if args.margin > args.height * Decimal(0.5):
error("margin exceeds vertical page dimensions (i.e., it's too large!)")
if args.units not in {"mm", "cm", "m", "km", "pt", "pc", "inch", "ft", "mi"}:
error("unrecognized units: must be one of mm, cm, m, km, pt, pc, inch, ft, or mi")
if errors:
sys.exit(1)
if not args.sheetimage:
sys.exit(0)
imgwidth = args.width - 2 * args.margin
imgheight = args.height - 2 * args.margin
now = datetime.datetime.today()
sys.stdout.write("""<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<office:document xmlns:office="urn:oasis:names:tc:opendocument:xmlns:office:1.0" xmlns:style="urn:oasis:names:tc:opendocument:xmlns:style:1.0" xmlns:text="urn:oasis:names:tc:opendocument:xmlns:text:1.0" xmlns:draw="urn:oasis:names:tc:opendocument:xmlns:drawing:1.0" xmlns:fo="urn:oasis:names:tc:opendocument:xmlns:xsl-fo-compatible:1.0" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:meta="urn:oasis:names:tc:opendocument:xmlns:meta:1.0" xmlns:svg="urn:oasis:names:tc:opendocument:xmlns:svg-compatible:1.0" office:version="1.2" office:mimetype="application/vnd.oasis.opendocument.text">
<office:meta>
<meta:creation-date>{0}</meta:creation-date>
<dc:description>{1}Pages are {2}{5}x{3}{5} with {4}{5} margins.""".format(now.strftime("%FT%TZ"), "{0}\n\n".format(args.description) if args.description else "", args.width, args.height, args.margin, args.units))
if args.public_domain_dedication:
sys.stdout.write("""
Created on {0} by {1}.
To the extent possible under law, {1} has waived all copyright and related or neighboring rights to this image. You can copy, modify, distribute and perform this image, even for commercial purposes, all without asking permission. Please see <http://creativecommons.org/publicdomain/zero/1.0/> for more information.""".format(now.strftime("%F"), args.public_domain_dedication.strip()))
sys.stdout.write("""</dc:description>
<dc:title>{0}</dc:title>
<dc:date>{1}</dc:date>
</office:meta>
<office:styles>
<style:style style:name="Standard" style:family="paragraph" style:class="text"/>
<style:style style:name="Graphics" style:family="graphic">
<style:graphic-properties text:anchor-type="paragraph" svg:x="0mm" svg:y="0mm" style:wrap="dynamic" style:number-wrapped-paragraphs="no-limit" style:wrap-contour="false" style:vertical-pos="top" style:vertical-rel="paragraph" style:horizontal-pos="center" style:horizontal-rel="paragraph"/>
</style:style>
</office:styles>
<office:automatic-styles>
<style:style style:name="P1" style:family="paragraph" style:parent-style-name="Standard">
<style:paragraph-properties fo:break-before="page"/>
</style:style>
<style:style style:name="fr1" style:family="graphic" style:parent-style-name="Graphics">
<style:graphic-properties style:mirror="none"/>
</style:style>
<style:page-layout style:name="pm1">
<style:page-layout-properties fo:page-width="{2}{5}" fo:page-height="{3}{5}" fo:margin-top="{4}{5}" fo:margin-bottom="{4}{5}" fo:margin-left="{4}{5}" fo:margin-right="{4}{5}"/>
</style:page-layout>
</office:automatic-styles>
<office:master-styles>
<style:master-page style:name="Standard" style:page-layout-name="pm1"/>
</office:master-styles>
<office:body>
<office:text>\n""".format(args.title, now.strftime("%FT%TZ"), args.width, args.height, args.margin, args.units))
def add_image(path, imgno, paragraph_style):
sys.stdout.write(""" <text:p text:style-name="{0}"><draw:frame draw:style-name="fr1" draw:name="n{1}" text:anchor-type="paragraph" svg:width="{2}{4}" svg:height="{3}{4}" draw:z-index="0"><draw:image><office:binary-data>""".format(paragraph_style, imgno, imgwidth, imgheight, args.units))
data = None
try:
with open(path, "rb") as imgfile:
data = imgfile.read()
except OSError as e:
error("unable to read " + path + ": " + e.strerror)
if data:
sys.stdout.write(str(base64.b64encode(data), encoding="UTF-8"))
sys.stdout.write("""</office:binary-data></draw:image></draw:frame></text:p>\n""")
for index, path in enumerate(args.sheetimage):
    add_image(path, index, "Standard" if index == 0 else "P1")
sys.stdout.write(""" </office:text>
</office:body>
</office:document>\n""")
if errors:
sys.exit(2)
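# Example invocation (an illustrative sketch; 210x297 mm pages with
# 15 mm margins, output redirected to a flat ODT file):
#   python3 fodtitalicsheets.py -p "Jane Doe" 210 297 15 sheet1.svg > sheets.fodt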
| jtvaughan/calligraphy | fodtitalicsheets.py | Python | cc0-1.0 | 6,988 |
import OOMP
newPart = OOMP.oompItem(9452)
newPart.addTag("oompType", "RESE")
newPart.addTag("oompSize", "0805")
newPart.addTag("oompColor", "X")
newPart.addTag("oompDesc", "O271")
newPart.addTag("oompIndex", "67")
OOMP.parts.append(newPart)
| oomlout/oomlout-OOMP | old/OOMPpart_RESE_0805_X_O271_67.py | Python | cc0-1.0 | 243 |
"""Cascade UserAffiliation deletes
Revision ID: 5de499ab5b62
Revises: 14f51f27a106
Create Date: 2016-12-13 00:21:39.842218
"""
# revision identifiers, used by Alembic.
revision = '5de499ab5b62'
down_revision = '14f51f27a106'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
globals()["upgrade_%s" % engine_name]()
def downgrade(engine_name):
globals()["downgrade_%s" % engine_name]()
def upgrade_data_broker():
### commands auto generated by Alembic - please adjust! ###
op.drop_constraint('user_affiliation_user_fk', 'user_affiliation', type_='foreignkey')
op.drop_constraint('user_affiliation_cgac_fk', 'user_affiliation', type_='foreignkey')
op.create_foreign_key('user_affiliation_user_fk', 'user_affiliation', 'users', ['user_id'], ['user_id'], ondelete='CASCADE')
op.create_foreign_key('user_affiliation_cgac_fk', 'user_affiliation', 'cgac', ['cgac_id'], ['cgac_id'], ondelete='CASCADE')
### end Alembic commands ###
def downgrade_data_broker():
### commands auto generated by Alembic - please adjust! ###
op.drop_constraint('user_affiliation_cgac_fk', 'user_affiliation', type_='foreignkey')
op.drop_constraint('user_affiliation_user_fk', 'user_affiliation', type_='foreignkey')
op.create_foreign_key('user_affiliation_cgac_fk', 'user_affiliation', 'cgac', ['cgac_id'], ['cgac_id'])
op.create_foreign_key('user_affiliation_user_fk', 'user_affiliation', 'users', ['user_id'], ['user_id'])
### end Alembic commands ###
| fedspendingtransparency/data-act-broker-backend | dataactcore/migrations/versions/5de499ab5b62_cascade_useraffiliation_deletes.py | Python | cc0-1.0 | 1,559 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
# do this when > 1.6!!!
# from django.db import migrations, models
from gazetteer.models import GazSource,GazSourceConfig,LocationTypeField,CodeFieldConfig,NameFieldConfig
from skosxl.models import Concept, Scheme, MapRelation
from gazetteer.settings import TARGET_NAMESPACE_FT
def load_base_ft():
(sch,created) = Scheme.objects.get_or_create(uri=TARGET_NAMESPACE_FT[:-1], defaults = { 'pref_label' :"Gaz Feature types" })
try:
(ft,created) = Concept.objects.get_or_create(term="ADMIN", defaults = { 'pref_label' :"Populated Place", 'definition':"Populated place"} , scheme = sch)
except:
pass
# now set up cross references from NGA feature types namespace
# now set up harvest config
def load_ft_mappings():
pass
def load_config():
try:
GazSourceConfig.objects.filter(name="TM_WorldBoundaries").delete()
except:
pass
config=GazSourceConfig.objects.create(lat_field="lat", name="TM_WorldBoundaries", long_field="lon")
NameFieldConfig.objects.create(config=config,language="en", as_default=True, languageNamespace="", field="name", languageField="")
LocationTypeField.objects.create(field='"ADMIN"',namespace=TARGET_NAMESPACE_FT, config=config)
CodeFieldConfig.objects.create(config=config,field="iso3",namespace="http://mapstory.org/id/countries/iso3")
CodeFieldConfig.objects.create(config=config,field="iso2",namespace="http://mapstory.org/id/countries/iso2")
CodeFieldConfig.objects.create(config=config,field="un",namespace="http://mapstory.org/id/countries/un")
CodeFieldConfig.objects.create(config=config,field="fips",namespace="http://mapstory.org/id/countries/fips")
(s,created) = GazSource.objects.get_or_create(source="tm_world_borders", config=config, source_type="mapstory")
print (s,created)
"""
class Migration(migrations.Migration):
initial = True
dependencies = [
#('yourappname', '0001_initial'),
]
operations = [
migrations.RunPython(load_ft_mappings),
migrations.RunPython(load_config),
]
"""
| rob-metalinkage/django-gazetteer | gazetteer/fixtures/mapstory_tm_world_config.py | Python | cc0-1.0 | 2,139 |
from flask import *
from playhouse.flask_utils import *
import string
from app import app
from model import Major, Minor, Store, Transaction, Item
@app.route('/major', methods=['GET', 'POST'])
def major_list():
query = Major \
.select(Major, Minor) \
.join(Minor, on=(Major.id == Minor.major).alias('minor')) \
.order_by(Major.id)
last = None
minors = []
majors = []
for major in query:
minor = { 'id': major.minor.id, 'name': major.minor.name }
if last != None and major.id != last.id:
majors.append({'id': last.id, 'income': last.income,
'name': last.name, 'minors': minors})
minors = [minor]
else:
minors.append(minor)
last = major
if last != None:
majors.append({'id': last.id, 'income': last.income,
'name': last.name, 'minors': minors})
return render_template('major.html', majors=majors)
@app.route('/major/add', methods=['GET', 'POST'])
def major_add():
if request.method == 'POST':
if request.form.get('major_id'):
major = get_object_or_404(Major, Major.id == request.form['major_id'])
minors = Minor.listWithStats(request.form['major_id'])
major.name = request.form['name']
major.income = bool(request.form.get('income'))
major.save()
flash('Category #%d updated successfully.' % major.id, 'success')
else:
major = Major.create(name=request.form['name'],
income=bool(request.form.get('income')))
minors = []
for minor_name in string.split(request.form['minors'], ','):
if len(minor_name) > 0:
minor = Minor.create(name=string.strip(minor_name), major=major)
minors.append(minor)
flash('A category created successfully.', 'success')
return render_template('major.html', major=major, minors=minors)
return render_template('major.html')
@app.route('/major/<int:id>', methods=['GET', 'POST'])
def major_detail(id):
major = get_object_or_404(Major, Major.id == id)
minors = Minor.listWithStats(id)
num_items = 0
for minor in minors:
num_items += minor.count
return render_template('major.html',
major=major, minors=minors, num_items=num_items)
@app.route('/major/delete/<int:id>', methods=['GET', 'POST'])
def major_delete(id):
major = get_object_or_404(Major, Major.id == id)
major.delete_instance()
minors = Minor.delete().where(Minor.major == id).execute()
flash('Category #%d is deleted.' % id, 'success')
return jsonify(success=True)
@app.route('/_minor/add', methods=['POST'])
def minor_add():
    major_id = request.form.get('major_id')
    try:
        major = get_object_or_404(Major, Major.id == major_id)
        minor = Minor.create(name=request.form['name'], major=major)
    except Exception:
        flash('Category #%s not found.' % major_id, 'danger')
        return jsonify(success=False)
flash('A new subcategory is added.', 'success')
return jsonify(success=True)
@app.route('/_minor/delete/<int:id>', methods=['GET'])
def minor_delete(id):
try:
minor = get_object_or_404(Minor, Minor.id == id)
minor.delete_instance()
    except Exception:
return jsonify(success=False)
return jsonify(success=True)
@app.route('/minor/<int:id>', methods=['GET'])
def minor_detail(id):
minor = get_object_or_404(Minor, Minor.id == id)
majors = Major.select().order_by(Major.id)
return render_template('minor.html', minor=minor, majors=majors)
@app.route('/_minor/edit/<int:id>', methods=['POST'])
def minor_edit(id):
try:
minor = Minor.get(Minor.id == id)
minor.name = request.form['name']
minor.major = request.form['major_id']
minor.save()
    except Exception:
return jsonify(success=False)
return jsonify(success=True)
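# Usage sketch for the AJAX endpoints above (host, port and form values
# are assumptions for illustration):
#   curl -X POST -d 'major_id=1&name=Utilities' http://localhost:5000/_minor/add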
| ilhamwk/accounting | view_major.py | Python | cc0-1.0 | 4,006 |
#!/usr/bin/env python
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
window = Gtk.Window()
window.set_default_size(200, 200)
window.connect("destroy", Gtk.main_quit)
overlay = Gtk.Overlay()
window.add(overlay)
textview = Gtk.TextView()
textview.set_wrap_mode(Gtk.WrapMode.WORD_CHAR)
textbuffer = textview.get_buffer()
textbuffer.set_text("Welcome to the PyGObject Tutorial\n\nThis guide aims to provide an introduction to using Python and GTK+.\n\nIt includes many sample code files and exercises for building your knowledge of the language.", -1)
overlay.add(textview)
button = Gtk.Button(label="Overlayed Button")
button.set_valign(Gtk.Align.CENTER)
button.set_halign(Gtk.Align.CENTER)
overlay.add_overlay(button)
overlay.show_all()
window.show_all()
Gtk.main()
| Programmica/python-gtk3-tutorial | _examples/overlay.py | Python | cc0-1.0 | 798 |
"""Tests to ensure that the html5lib tree builder generates good trees."""
import warnings
try:
from bs4.builder import HTML5TreeBuilder
HTML5LIB_PRESENT = True
except ImportError, e:
HTML5LIB_PRESENT = False
from bs4.element import SoupStrainer
from bs4.testing import (
HTML5TreeBuilderSmokeTest,
SoupTest,
skipIf,
)
@skipIf(
not HTML5LIB_PRESENT,
"html5lib seems not to be present, not testing its tree builder.")
class HTML5LibBuilderSmokeTest(SoupTest, HTML5TreeBuilderSmokeTest):
"""See ``HTML5TreeBuilderSmokeTest``."""
@property
def default_builder(self):
return HTML5TreeBuilder
def test_soupstrainer(self):
# The html5lib tree builder does not support SoupStrainers.
strainer = SoupStrainer("b")
markup = "<p>A <b>bold</b> statement.</p>"
with warnings.catch_warnings(record=True) as w:
soup = self.soup(markup, parse_only=strainer)
self.assertEqual(
soup.decode(), self.document_for(markup))
self.assertTrue(
"the html5lib tree builder doesn't support parse_only" in
str(w[0].message))
def test_correctly_nested_tables(self):
"""html5lib inserts <tbody> tags where other parsers don't."""
markup = ('<table id="1">'
'<tr>'
"<td>Here's another table:"
'<table id="2">'
'<tr><td>foo</td></tr>'
'</table></td>')
self.assertSoupEquals(
markup,
'<table id="1"><tbody><tr><td>Here\'s another table:'
'<table id="2"><tbody><tr><td>foo</td></tr></tbody></table>'
'</td></tr></tbody></table>')
self.assertSoupEquals(
"<table><thead><tr><td>Foo</td></tr></thead>"
"<tbody><tr><td>Bar</td></tr></tbody>"
"<tfoot><tr><td>Baz</td></tr></tfoot></table>")
def test_xml_declaration_followed_by_doctype(self):
markup = '''<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE html>
<html>
<head>
</head>
<body>
<p>foo</p>
</body>
</html>'''
soup = self.soup(markup)
# Verify that we can reach the <p> tag; this means the tree is connected.
self.assertEqual(b"<p>foo</p>", soup.p.encode())
def test_reparented_markup(self):
markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>'
soup = self.soup(markup)
self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p></body>", soup.body.decode())
self.assertEqual(2, len(soup.find_all('p')))
def test_reparented_markup_ends_with_whitespace(self):
markup = '<p><em>foo</p>\n<p>bar<a></a></em></p>\n'
soup = self.soup(markup)
self.assertEqual(u"<body><p><em>foo</em></p><em>\n</em><p><em>bar<a></a></em></p>\n</body>", soup.body.decode())
self.assertEqual(2, len(soup.find_all('p')))
def test_reparented_markup_containing_identical_whitespace_nodes(self):
"""Verify that we keep the two whitespace nodes in this
document distinct when reparenting the adjacent <tbody> tags.
"""
markup = '<table> <tbody><tbody><ims></tbody> </table>'
soup = self.soup(markup)
space1, space2 = soup.find_all(string=' ')
tbody1, tbody2 = soup.find_all('tbody')
assert space1.next_element is tbody1
assert tbody2.next_element is space2
def test_reparented_markup_containing_children(self):
markup = '<div><a>aftermath<p><noscript>target</noscript>aftermath</a></p></div>'
soup = self.soup(markup)
noscript = soup.noscript
self.assertEqual("target", noscript.next_element)
target = soup.find(string='target')
# The 'aftermath' string was duplicated; we want the second one.
final_aftermath = soup.find_all(string='aftermath')[-1]
# The <noscript> tag was moved beneath a copy of the <a> tag,
# but the 'target' string within is still connected to the
# (second) 'aftermath' string.
self.assertEqual(final_aftermath, target.next_element)
self.assertEqual(target, final_aftermath.previous_element)
def test_processing_instruction(self):
"""Processing instructions become comments."""
markup = b"""<?PITarget PIContent?>"""
soup = self.soup(markup)
assert str(soup).startswith("<!--?PITarget PIContent?-->")
def test_cloned_multivalue_node(self):
markup = b"""<a class="my_class"><p></a>"""
soup = self.soup(markup)
a1, a2 = soup.find_all('a')
self.assertEqual(a1, a2)
assert a1 is not a2
def test_foster_parenting(self):
markup = b"""<table><td></tbody>A"""
soup = self.soup(markup)
self.assertEqual(u"<body>A<table><tbody><tr><td></td></tr></tbody></table></body>", soup.body.decode())
def test_extraction(self):
"""
Test that extraction does not destroy the tree.
https://bugs.launchpad.net/beautifulsoup/+bug/1782928
"""
markup = """
<html><head></head>
<style>
</style><script></script><body><p>hello</p></body></html>
"""
soup = self.soup(markup)
[s.extract() for s in soup('script')]
[s.extract() for s in soup('style')]
self.assertEqual(len(soup.find_all("p")), 1)
def test_empty_comment(self):
"""
Test that empty comment does not break structure.
https://bugs.launchpad.net/beautifulsoup/+bug/1806598
"""
markup = """
<html>
<body>
<form>
<!----><input type="text">
</form>
</body>
</html>
"""
soup = self.soup(markup)
inputs = []
for form in soup.find_all('form'):
inputs.extend(form.find_all('input'))
self.assertEqual(len(inputs), 1)
def test_tracking_line_numbers(self):
# The html.parser TreeBuilder keeps track of line number and
# position of each element.
markup = "\n <p>\n\n<sourceline>\n<b>text</b></sourceline><sourcepos></p>"
soup = self.soup(markup)
self.assertEqual(2, soup.p.sourceline)
self.assertEqual(5, soup.p.sourcepos)
self.assertEqual("sourceline", soup.p.find('sourceline').name)
# You can deactivate this behavior.
soup = self.soup(markup, store_line_numbers=False)
self.assertEqual("sourceline", soup.p.sourceline.name)
self.assertEqual("sourcepos", soup.p.sourcepos.name)
| listyque/TACTIC-Handler | thlib/side/bs42/tests/test_html5lib.py | Python | epl-1.0 | 6,494 |
"""
Python environments and packages
================================
This module provides tools for using Python `virtual environments`_
and installing Python packages using the `pip`_ installer.
.. _virtual environments: http://www.virtualenv.org/
.. _pip: http://www.pip-installer.org/
"""
from __future__ import with_statement
from contextlib import contextmanager
from distutils.version import StrictVersion as V
from pipes import quote
import os
import posixpath
import re
from fabric.api import cd, hide, prefix, run, settings, sudo
from fabric.utils import puts
from fabtools.files import is_file
from fabtools.utils import abspath, download, run_as_root
GET_PIP_URL = 'https://raw.githubusercontent.com/pypa/pip/master/contrib/get-pip.py'
def is_pip_installed(version=None, pip_cmd='pip'):
"""
Check if `pip`_ is installed.
.. _pip: http://www.pip-installer.org/
"""
with settings(hide('running', 'warnings', 'stderr', 'stdout'), warn_only=True):
res = run('%(pip_cmd)s --version 2>/dev/null' % locals())
if res.failed:
return False
if version is None:
return res.succeeded
else:
m = re.search(r'pip (?P<version>.*) from', res)
if m is None:
return False
installed = m.group('version')
if V(installed) < V(version):
puts("pip %s found (version >= %s required)" % (installed, version))
return False
else:
return True
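# Usage sketch (mirrors the install_pip() docstring below):
#   if not is_pip_installed():
#       install_pip()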
def install_pip(python_cmd='python', use_sudo=True):
"""
Install the latest version of `pip`_, using the given Python
interpreter.
::
import fabtools
if not fabtools.python.is_pip_installed():
fabtools.python.install_pip()
.. note::
pip is automatically installed inside a virtualenv, so there
is no need to install it yourself in this case.
.. _pip: http://www.pip-installer.org/
"""
with cd('/tmp'):
download(GET_PIP_URL)
command = '%(python_cmd)s get-pip.py' % locals()
if use_sudo:
run_as_root(command, pty=False)
else:
run(command, pty=False)
run('rm -f get-pip.py')
def is_installed(package, pip_cmd='pip'):
"""
Check if a Python package is installed (using pip).
Package names are case insensitive.
Example::
from fabtools.python import virtualenv
import fabtools
with virtualenv('/path/to/venv'):
fabtools.python.install('Flask')
assert fabtools.python.is_installed('flask')
.. _pip: http://www.pip-installer.org/
"""
with settings(hide('running', 'stdout', 'stderr', 'warnings'), warn_only=True):
res = run('%(pip_cmd)s freeze' % locals())
packages = [line.split('==')[0].lower() for line in res.splitlines()]
return (package.lower() in packages)
def install(packages, upgrade=False, download_cache=None, allow_external=None,
allow_unverified=None, quiet=False, pip_cmd='pip', use_sudo=False,
user=None, exists_action=None):
"""
Install Python package(s) using `pip`_.
Package names are case insensitive.
Starting with version 1.5, pip no longer scrapes insecure external
urls by default and no longer installs externally hosted files by
default. Use ``allow_external=['foo', 'bar']`` or
``allow_unverified=['bar', 'baz']`` to change these behaviours
for specific packages.
Examples::
import fabtools
# Install a single package
fabtools.python.install('package', use_sudo=True)
# Install a list of packages
fabtools.python.install(['pkg1', 'pkg2'], use_sudo=True)
.. _pip: http://www.pip-installer.org/
"""
if isinstance(packages, basestring):
packages = [packages]
if allow_external in (None, False):
allow_external = []
elif allow_external == True:
allow_external = packages
if allow_unverified in (None, False):
allow_unverified = []
elif allow_unverified == True:
allow_unverified = packages
options = []
if upgrade:
options.append('--upgrade')
if download_cache:
options.append('--download-cache="%s"' % download_cache)
if quiet:
options.append('--quiet')
for package in allow_external:
options.append('--allow-external="%s"' % package)
for package in allow_unverified:
options.append('--allow-unverified="%s"' % package)
if exists_action:
options.append('--exists-action=%s' % exists_action)
options = ' '.join(options)
packages = ' '.join(packages)
command = '%(pip_cmd)s install %(options)s %(packages)s' % locals()
if use_sudo:
sudo(command, user=user, pty=False)
else:
run(command, pty=False)
def install_requirements(filename, upgrade=False, download_cache=None,
allow_external=None, allow_unverified=None,
quiet=False, pip_cmd='pip', use_sudo=False,
user=None, exists_action=None):
"""
Install Python packages from a pip `requirements file`_.
::
import fabtools
fabtools.python.install_requirements('project/requirements.txt')
.. _requirements file: http://www.pip-installer.org/en/latest/requirements.html
"""
if allow_external is None:
allow_external = []
if allow_unverified is None:
allow_unverified = []
options = []
if upgrade:
options.append('--upgrade')
if download_cache:
options.append('--download-cache="%s"' % download_cache)
for package in allow_external:
options.append('--allow-external="%s"' % package)
for package in allow_unverified:
options.append('--allow-unverified="%s"' % package)
if quiet:
options.append('--quiet')
if exists_action:
options.append('--exists-action=%s' % exists_action)
options = ' '.join(options)
command = '%(pip_cmd)s install %(options)s -r %(filename)s' % locals()
if use_sudo:
sudo(command, user=user, pty=False)
else:
run(command, pty=False)
def create_virtualenv(directory, system_site_packages=False, venv_python=None,
use_sudo=False, user=None, clear=False, prompt=None,
virtualenv_cmd='virtualenv'):
"""
Create a Python `virtual environment`_.
::
import fabtools
fabtools.python.create_virtualenv('/path/to/venv')
.. _virtual environment: http://www.virtualenv.org/
"""
options = ['--quiet']
if system_site_packages:
options.append('--system-site-packages')
if venv_python:
options.append('--python=%s' % quote(venv_python))
if clear:
options.append('--clear')
if prompt:
options.append('--prompt=%s' % quote(prompt))
options = ' '.join(options)
directory = quote(directory)
command = '%(virtualenv_cmd)s %(options)s %(directory)s' % locals()
if use_sudo:
sudo(command, user=user)
else:
run(command)
def virtualenv_exists(directory):
"""
Check if a Python `virtual environment`_ exists.
.. _virtual environment: http://www.virtualenv.org/
"""
return is_file(posixpath.join(directory, 'bin', 'python'))
@contextmanager
def virtualenv(directory, local=False):
"""
Context manager to activate an existing Python `virtual environment`_.
::
from fabric.api import run
from fabtools.python import virtualenv
with virtualenv('/path/to/virtualenv'):
run('python -V')
.. _virtual environment: http://www.virtualenv.org/
"""
path_mod = os.path if local else posixpath
# Build absolute path to the virtualenv activation script
venv_path = abspath(directory)
activate_path = path_mod.join(venv_path, 'bin', 'activate')
# Source the activation script
with prefix('. %s' % quote(activate_path)):
yield
| juanantoniofm/accesible-moodle | fabtools/python.py | Python | gpl-2.0 | 8,076 |
from setuptools import setup
def readme():
with open('README.rst.example') as f:
return f.read()
setup(name='manifold_gui',
version='0.1',
description='GUI for a manifold technique',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Alpha',
'Environment :: Console',
'Environment :: X11 Applications',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Programming Language :: Python :: 2.7 :: chimera',
'Intended Audience :: End Users/Desktop',
],
keywords='manifold chimera',
author='Hstau Y Liao',
platform='linux chimera',
author_email='[email protected]',
packages=['gui'],
include_package_data=True,
zip_safe=False)
| hstau/manifold-cryo | setup.py | Python | gpl-2.0 | 792 |
#!/usr/bin/python
import csv
import datetime
import os
import copy
class Trip:
dols = []
dists = []
gals = []
octs = []
eths = []
drivers = []
tires = []
miles = 0
gallons = 0
actualGals = 0
days = 0
octane = 0
snowtires = 0
make = 0
model = 0
year = 0
engineIV = 0
enginecyl = 0
engineL = 0
ethanol = 0
driver = 0
avgMileage = 0
beginDate = 0
hybrid = 0
def write(self):
out = [self.miles,self.gallons,self.actualGals,self.dollars,self.days,self.octane,self.snowtires,self.make,self.model,self.year,self.engineIV,self.enginecyl,self.engineL,self.ethanol,self.driver,self.avgMileage,self.beginDate,self.hybrid]
return out
def clear(self):
self.dols[:] = []
self.dists[:] = []
self.gals[:] = []
self.octs[:] = []
self.eths[:] = []
self.drivers[:] = []
self.tires[:] = []
self.miles = 0
self.gallons = 0
self.actualGals = 0
self.days = 0
self.octane = 0
self.snowtires = 0
self.make = 0
self.model = 0
self.year = 0
self.engineIV = 0
self.enginecyl = 0
self.engineL = 0
self.ethanol = 0
self.driver = ""
self.avgMileage = 0
self.beginDate = 0
self.hybrid = 0
def wavg(series, weight):
avg = 0
if (weight[0] <= 0):
weight = weight[1:]
assert(len(series) == len(weight))
for i in range(len(weight)):
avg += float(series[i])*float(weight[i])/float(sum(weight))
return avg
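# Worked example (hypothetical numbers): wavg([10, 20], [1, 3])
# = 10*(1/4) + 20*(3/4) = 17.5, i.e. an average weighted by the second
# list (gallons or miles elsewhere in this script).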
def octaneCode(inOct):
if (inOct == 1):
return 87;
elif (inOct == 2):
return 89;
elif (inOct == 3):
return 93;
else:
print "Unknown octane code", inOct
assert(1 == 0)
def driverCode(driver):
if (driver == "Mark"):
return 0
elif (driver == "Mary"):
return 1
elif (driver == "Andy"):
return 2
elif (driver == "Jeff"):
return 3
else:
print "Unknown driver: ", driver
assert(1 == 0)
def makeCode(make):
if (make == "Chevrolet"):
return 0
elif (make == "Buick"):
return 1
elif (make == "Oldsmobile"):
return 2
elif (make == "Mercury"):
return 3
elif (make == "Plymouth"):
return 4
elif (make == "Volkswagen"):
return 5
elif (make == "Toyota"):
return 6
elif (make == "Honda"):
return 7
else:
print "Unknown make: ", make
assert(1 == 0)
def modelCode(model):
if (model == "Concourse"):
return 0
elif (model == "Vega"):
return 1
elif (model == "Century"):
return 2
elif (model == "Cierra"):
return 3
elif (model == "Sable"):
return 4
elif (model == "Voyager"):
return 5
elif (model == "Highlander"):
return 6
elif (model == "CRV"):
return 7
elif (model == "Jetta"):
return 8
else:
print "Unknown model: ", model
assert(1 == 0)
def gasTank(model,year):
if (model == "Concourse"):
return 21.0
elif (model == "Vega"):
return 16.0
elif (model == "Century"):
return 15.0
elif (model == "Cierra"):
return 15.7
elif (model == "Sable"):
return 16.0
elif (model == "Voyager"):
return 20.0
elif (model == "Highlander"):
if (year == 2003):
return 19.8
elif (year == 2008):
return 17.2
elif (model == "CRV"):
return 15.3
elif (model == "Jetta"):
return 14.5
else:
print "Unknown model: ", model
assert(1 == 0)
def dateMaker(date):
start = 0
while date.find("/",start) > -1:
start = date.find("/",start) + 1
year = date[start:]
if len(year) == 2:
if (int(year) > 50):
year = 1900 + int(year)
if (int(year) <= 50):
year = 2000 + int(year)
return date[0:start] + str(year)
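# Worked examples of the two-digit year pivot above: dateMaker("1/2/49")
# -> "1/2/2049" (00-50 map to 20xx), dateMaker("1/2/51") -> "1/2/1951".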
def check(fill,gastype,driver,snowtires,ethanol,hybrid):
assert(fill == 0 or fill == 1)
assert(gastype == 1 or gastype == 2 or gastype == 3)
assert(driver == "Andy" or driver == "Mark" or driver == "Mary" or driver == "Jeff")
assert(snowtires == 0 or snowtires == 1)
assert(ethanol == 0 or ethanol == 1)
assert(hybrid == 0 or hybrid == 1)
#ethanol
def checkTrip(a):
a.miles = sum(a.dists)
a.dollars = sum(a.dols)
a.actualGals = sum(i for i in a.gals if i > 0)
a.gallons = sum(a.gals)
a.octane = wavg(a.octs,a.gals)
print "octane",a.octane
a.ethanol = wavg(a.eths,a.gals)
print "ethanol",a.ethanol
a.snowtires = wavg(a.tires,a.dists)
a.driver = sorted(a.drivers)[len(a.drivers)/2]
print a.beginDate
assert(min(a.dists) > 0)
assert(min(a.dols) > 0)
assert(a.days > 0)
assert(a.miles > 0)
assert(a.dollars > 0)
assert(a.gallons > 0)
def checkInterTrip(a,b):
print a.beginDate
print "mpg: ", a.miles/a.gallons, b.miles/b.gallons
print "price: ", a.dollars/a.actualGals, b.dollars/b.actualGals
if(abs((a.miles/a.gallons)/(b.miles/b.gallons) - 1) > 0.5):
status = raw_input("Press Enter to continue... (mpg)")
if(abs((a.dollars/a.actualGals)/(b.dollars/b.actualGals) - 1) > 0.2):
status = raw_input("Press Enter to continue... (price)")
print ""
def main(dir,outfile):
trips = []
for file in os.listdir(dir):
if not file.endswith('.csv'):
continue
print file
f = open(dir+file,'rU')
datareader = csv.reader(f, dialect = csv.excel_tab)
lineNum = 0
beginMiles = 0
beginDate = 0
for row in datareader:
lineNum += 1
line = str(row)
line = line[2:-2].split(',')
if (line[0] == "Date"):
continue
date = dateMaker(str(line[0]))
odometer = int(line[1])
fill = int(line[2])
gastype = int(line[3])
gallons = float(line[4])
dollars = float(line[5])
driver = str(line[6])
snowtires = int(line[7])
ethanol = int(line[8])
make = str(line[9])
model = str(line[10])
year = int(line[11])
engineL = float(line[12])
enginecyl = int(line[13])
engineIV = int(line[14])
hybrid = int(line[15])
if (fill == -1):
#begin trip
                #make trip object
a = Trip()
beginMiles = odometer
beginDate = date
beginOctane = 87
beginEthanol = 0
if (year >= 1994):
beginEthanol = 1
a.gals.append(gallons)
else:
#check and add to trip
a.dols.append(dollars)
a.gals.append(gallons)
a.dists.append(odometer - beginMiles)
a.octs.append(beginOctane)
a.eths.append(beginEthanol)
a.drivers.append(driverCode(driver))
a.tires.append(snowtires)
check(fill,gastype,driver,snowtires,ethanol,hybrid)
beginMiles = odometer
#update gas contents
tank = gasTank(model, year)
beginOctane = (gallons * octaneCode(gastype) + (tank - gallons) * beginOctane) / tank
beginEthanol = (gallons * ethanol + (tank - gallons) * beginEthanol) / tank
if (fill == 1):
#end trip
tripMiles = sum(a.dists)
dateobj1 = datetime.datetime.strptime(beginDate,'%m/%d/%Y').date()
dateobj2 = datetime.datetime.strptime(date,'%m/%d/%Y').date()
tripDate = dateobj2 - dateobj1
tripDays = tripDate.days
if (tripDays == 0):
tripDays += 1
a.days = tripDays
a.make = makeCode(make)
a.model = modelCode(model)
a.year = year
a.engineIV = engineIV
a.enginecyl = enginecyl
a.engineL = engineL
a.beginDate = beginDate
a.hybrid = hybrid
a.avgMileage = odometer - 0.5*tripMiles
#check and save trip
checkTrip(a)
if (len(trips) > 0):
checkInterTrip(a,trips[-1])
trips.append(copy.deepcopy(a))
#reset dollars and gallons
                #make trip object
a.clear()
beginDate = date
beginMiles = odometer
fo = open(outfile,'wb')
datareader = csv.writer(fo, delimiter=',')
#print trips
for thisTrip in trips:
out = thisTrip.write()
datareader.writerow(out)
dir = './raw/'
outfile = './car_data.csv'
main(dir,outfile)
| andybond13/fontina | fontina.py | Python | gpl-2.0 | 7,414 |
import itertools
import subprocess
import sys
#http://pastebin.com/zj72xk4N
#run when the system password box is showing, e.g. the keychain password dialog
#apple script for automating dialog box input
sys_script = '''
tell application "System Events" to tell process "SecurityAgent"
set value of text field 1 of window 1 to $(PASS)
click button 1 of group 1 of window 1
end tell
'''
#fill this array with chars for combination
keys = ['s','t','a','r','t']
def automate_login():
for l in xrange(0, len(keys)+1):
for subset in itertools.permutations(keys, l):
guess = ''.join(subset)
tmp = sys_script.replace('$(PASS)', '"%s"' % guess)
try:
subprocess.check_output('osascript -e \'%s\'' % tmp, shell=True)
sys.stdout.write('\rtrying %s ' % guess)
sys.stdout.flush()
except subprocess.CalledProcessError:
print('\nfailed')
return
return
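# With the 5 keys above, the loops enumerate permutations by position:
# sum(P(5, l) for l in 0..5) = 1 + 5 + 20 + 60 + 120 + 120 = 326 guesses
# (the duplicated 't' means some strings repeat; the empty guess counts).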
automate_login() | Jacobious52/PythonLab | osxpasscrack.py | Python | gpl-2.0 | 886 |
'''
Created on 17/2/2015
@author: PC06
First change to the project
'''
from include import app
if __name__ == '__main__':
app.run("127.0.0.1", 9000, debug=True) | javiteri/reposdmpdos | miltonvz/run.py | Python | gpl-2.0 | 176 |
# Copyright (c) 2008 Duncan Fordyce
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import socket
import time
import traceback
from oyoyo.parse import *
from oyoyo import helpers
from oyoyo.cmdhandler import CommandError
class IRCClientError(Exception):
pass
class IRCClient:
""" IRC Client class. This handles one connection to a server.
This can be used either with or without IRCApp ( see connect() docs )
"""
def __init__(self, cmd_handler, **kwargs):
""" the first argument should be an object with attributes/methods named
as the irc commands. You may subclass from one of the classes in
oyoyo.cmdhandler for convenience but it is not required. The
methods should have arguments (prefix, args). prefix is
normally the sender of the command. args is a list of arguments.
        It's recommended you subclass oyoyo.cmdhandler.DefaultCommandHandler;
        this class provides defaults for callbacks that are required for
normal IRC operation.
all other arguments should be keyword arguments. The most commonly
used will be nick, host and port. You can also specify an "on connect"
callback. ( check the source for others )
        Warning: By default this class will not block on socket operations;
        this means that if you use a plain while loop your app will consume
        100% CPU. To enable blocking, pass blocking=True.
        >>> from oyoyo import helpers
        >>> class My_Handler(DefaultCommandHandler):
... def privmsg(self, prefix, command, args):
... print "%s said %s" % (prefix, args[1])
...
>>> def connect_callback(c):
... helpers.join(c, '#myroom')
...
>>> cli = IRCClient(My_Handler,
... host="irc.freenode.net",
... port=6667,
... nick="myname",
... connect_cb=connect_callback)
...
>>> cli_con = cli.connect()
>>> while 1:
... cli_con.next()
...
"""
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.nick = None
self.real_name = None
self.host = None
self.port = None
self.connect_cb = None
self.blocking = False
self.__dict__.update(kwargs)
self.command_handler = cmd_handler(self)
self._end = 0
def send(self, *args, **kwargs):
""" send a message to the connected server. all arguments are joined
with a space for convenience, for example the following are identical
>>> cli.send("JOIN %s" % some_room)
>>> cli.send("JOIN", some_room)
In python 2, all args must be of type str or unicode, *BUT* if they are
unicode they will be converted to str with the encoding specified by
the 'encoding' keyword argument (default 'utf8').
In python 3, all args must be of type str or bytes, *BUT* if they are
str they will be converted to bytes with the encoding specified by the
'encoding' keyword argument (default 'utf8').
"""
# Convert all args to bytes if not already
encoding = kwargs.get('encoding') or 'utf8'
bargs = []
for arg in args:
if isinstance(arg, str):
bargs.append(bytes(arg, encoding))
elif isinstance(arg, bytes):
bargs.append(arg)
elif type(arg).__name__ == 'unicode':
bargs.append(arg.encode(encoding))
else:
                raise IRCClientError('Refusing to send one of the args provided: %s'
% repr([(type(arg), arg) for arg in args]))
msg = bytes(" ", "ascii").join(bargs)
logging.info('---> send "%s"' % msg)
self.socket.send(msg + bytes("\r\n", "ascii"))
def connect(self):
""" initiates the connection to the server set in self.host:self.port
and returns a generator object.
>>> cli = IRCClient(my_handler, host="irc.freenode.net", port=6667)
>>> g = cli.connect()
>>> while 1:
... g.next()
"""
try:
logging.info('connecting to %s:%s' % (self.host, self.port))
self.socket.connect(("%s" % self.host, self.port))
if self.blocking:
# this also overrides default timeout
self.socket.setblocking(1)
else:
self.socket.setblocking(0)
helpers.nick(self, self.nick)
helpers.user(self, self.nick, self.real_name)
if self.connect_cb:
self.connect_cb(self)
buffer = bytes()
while not self._end:
try:
buffer += self.socket.recv(1024)
except socket.error as e:
try: # a little dance of compatibility to get the errno
errno = e.errno
except AttributeError:
errno = e[0]
if not self.blocking and errno == 11:
pass
else:
raise e
else:
data = buffer.split(bytes("\n", "ascii"))
buffer = data.pop()
for el in data:
prefix, command, args = parse_raw_irc_command(el)
try:
self.command_handler.run(command, prefix, *args)
except CommandError:
                            # error will have already been logged by the handler
pass
yield True
finally:
if self.socket:
logging.info('closing socket')
self.socket.close()
# noinspection PyPep8Naming
class IRCApp:
""" This class manages several IRCClient instances without the use of threads.
(Non-threaded) Timer functionality is also included.
"""
class _ClientDesc:
def __init__(self, **kwargs):
self.con = None
self.autoreconnect = False
self.__dict__.update(kwargs)
def __init__(self):
self._clients = {}
self._timers = []
self.running = False
self.sleep_time = 0.5
def addClient(self, client, autoreconnect=False):
""" add a client object to the application. setting autoreconnect
to true will mean the application will attempt to reconnect the client
after every disconnect. you can also set autoreconnect to a number
to specify how many reconnects should happen.
warning: if you add a client that has blocking set to true,
timers will no longer function properly """
logging.info('added client %s (ar=%s)' % (client, autoreconnect))
self._clients[client] = self._ClientDesc(autoreconnect=autoreconnect)
def addTimer(self, seconds, cb):
""" add a timed callback. accuracy is not specified, you can only
garuntee the callback will be called after seconds has passed.
( the only advantage to these timers is they dont use threads )
"""
assert callable(cb)
logging.info('added timer to call %s in %ss' % (cb, seconds))
self._timers.append((time.time() + seconds, cb))
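    # Usage sketch: fire a callback roughly 60 seconds from now (any
    # zero-argument callable works; `some_callback` is hypothetical):
    #   app.addTimer(60, some_callback)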
def run(self):
""" run the application. this will block until stop() is called """
# TODO: convert this to use generators too?
self.running = True
while self.running:
found_one_alive = False
for client, clientdesc in self._clients.items():
if clientdesc.con is None:
clientdesc.con = client.connect()
try:
clientdesc.con.next()
except Exception as e:
logging.error('client error %s' % e)
logging.error(traceback.format_exc())
if clientdesc.autoreconnect:
clientdesc.con = None
if isinstance(clientdesc.autoreconnect, (int, float)):
clientdesc.autoreconnect -= 1
found_one_alive = True
else:
clientdesc.con = False
else:
found_one_alive = True
if not found_one_alive:
                logging.info('nothing left alive... quitting')
self.stop()
now = time.time()
timers = self._timers[:]
self._timers = []
for target_time, cb in timers:
if now > target_time:
logging.info('calling timer cb %s' % cb)
cb()
else:
self._timers.append((target_time, cb))
time.sleep(self.sleep_time)
def stop(self):
""" stop the application """
self.running = False
| FrodeSolheim/fs-uae-launcher | oyoyo/client.py | Python | gpl-2.0 | 10,090 |
from crispy_forms.helper import FormHelper
from crispy_forms.layout import *
from crispy_forms.bootstrap import *
from crispy_forms.layout import Layout, Submit, Reset, Div
from django import forms
from django.contrib.auth.forms import UserChangeForm, UserCreationForm
from silo.models import TolaUser
from django.contrib.auth.models import User
class RegistrationForm(UserChangeForm):
"""
    Form for updating an existing user account.
"""
def __init__(self, *args, **kwargs):
user = kwargs.pop('initial')
super(RegistrationForm, self).__init__(*args, **kwargs)
del self.fields['password']
print user['username'].is_superuser
# allow country access change for now until we know how we will use this GWL 012617
# if they aren't a super user or User Admin don't let them change countries form field
# if 'User Admin' not in user['username'].groups.values_list('name', flat=True) and not user['username'].is_superuser:
# self.fields['country'].widget.attrs['disabled'] = "disabled"
self.fields['created'].widget.attrs['disabled'] = "disabled"
class Meta:
model = TolaUser
fields = '__all__'
helper = FormHelper()
helper.form_method = 'post'
helper.form_class = 'form-horizontal'
helper.label_class = 'col-sm-2'
helper.field_class = 'col-sm-6'
helper.form_error_title = 'Form Errors'
helper.error_text_inline = True
helper.help_text_inline = True
helper.html5_required = True
helper.layout = Layout(Fieldset('','title', 'name',
'country'),
Submit('submit', 'Submit', css_class='btn-default'),
Reset('reset', 'Reset', css_class='btn-warning'))
class NewUserRegistrationForm(UserCreationForm):
"""
Form for registering a new account.
"""
class Meta:
model = User
fields = ['first_name', 'last_name','email','username']
def __init__(self, *args, **kwargs):
super(NewUserRegistrationForm, self).__init__(*args, **kwargs)
helper = FormHelper()
helper.form_method = 'post'
helper.form_class = 'form-horizontal'
helper.label_class = 'col-sm-2'
helper.field_class = 'col-sm-6'
helper.form_error_title = 'Form Errors'
helper.error_text_inline = True
helper.help_text_inline = True
helper.html5_required = True
helper.form_tag = False
class NewTolaUserRegistrationForm(forms.ModelForm):
"""
Form for registering a new account.
"""
class Meta:
model = TolaUser
fields = ['title', 'country', 'privacy_disclaimer_accepted']
def __init__(self, *args, **kwargs):
super(NewTolaUserRegistrationForm, self).__init__(*args, **kwargs)
helper = FormHelper()
helper.form_method = 'post'
helper.form_class = 'form-horizontal'
helper.label_class = 'col-sm-2'
helper.field_class = 'col-sm-6'
helper.form_error_title = 'Form Errors'
helper.error_text_inline = True
helper.help_text_inline = True
helper.html5_required = True
helper.form_tag = False
helper.layout = Layout(
Fieldset('Information','title', 'country'),
Fieldset('Privacy Statement','privacy_disclaimer_accepted',),
)
| mercycorps/TolaTables | tola/forms.py | Python | gpl-2.0 | 3,291 |
import search_duplicated_task
| 3dfxsoftware/cbss-addons | duplicated_tasks/wizard/__init__.py | Python | gpl-2.0 | 30 |
import Globals
from Products.ZenModel.ZenPack import ZenPack as ZenPackBase
from Products.ZenUtils.Utils import unused, zenPath
import os
unused(Globals)
_plugins = [
'rig_host_app_transform1.py',
'copy_server_config_file.sh',
]
class ZenPack(ZenPackBase):
def install(self, app):
super(ZenPack, self).install(app)
self.symlink_plugins()
def symlink_plugins(self):
libexec = os.path.join(os.environ.get('ZENHOME'), 'libexec')
if not os.path.isdir(libexec):
            # Stack installs might not have a $ZENHOME/libexec directory.
os.mkdir(libexec)
# Now get the path to the file in the ZenPack's libexec directory
filepath = __file__ # Get path to this file
(zpdir, tail) = os.path.split(filepath)
zp_libexec_dir = os.path.join(zpdir,'libexec')
for plugin in _plugins:
plugin_path = zenPath('libexec', plugin)
zp_plugin_path = os.path.join(zp_libexec_dir, plugin)
#os.system('ln -sf "%s" "%s"' % (self.path(plugin), plugin_path))
os.system('ln -sf "%s" "%s"' % (zp_plugin_path, plugin_path))
os.system('chmod 0755 %s' % plugin_path)
def remove_plugin_symlinks(self):
for plugin in _plugins:
os.system('rm -f "%s"' % zenPath('libexec', plugin))
def remove(self, app, leaveObjects=False):
if not leaveObjects:
self.remove_plugin_symlinks()
super(ZenPack, self).remove(app, leaveObjects=leaveObjects)
| jcurry/ZenPacks.Markit.RigHost | ZenPacks/Markit/RigHost/__init__.py | Python | gpl-2.0 | 1,531 |
#!/usr/bin/python
import piksemel
import os
def updateGConf (filepath, remove=False):
parse = piksemel.parse (filepath)
schemaList = list()
for xmlfile in parse.tags ("File"):
path = xmlfile.getTagData ("Path")
# Only interested in /etc/gconf/schemas
if "etc/gconf/schemas" in path:
schemaList.append ("/%s" % path)
if len(schemaList) > 0:
os.environ['GCONF_CONFIG_SOURCE'] = 'xml:merged:/etc/gconf/gconf.xml.defaults'
operation = "--makefile-uninstall-rule" if remove else "--makefile-install-rule"
cmd = "/usr/bin/gconftool-2 %s %s" % (operation, " ".join(schemaList))
os.system (cmd)
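        # e.g. for a package shipping foo.schemas (hypothetical) this runs:
        #   gconftool-2 --makefile-install-rule /etc/gconf/schemas/foo.schemas
        # (or --makefile-uninstall-rule when remove=True), with
        # GCONF_CONFIG_SOURCE pointing at the merged XML defaults tree.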
def setupPackage (metapath, filepath):
updateGConf (filepath)
def postCleanupPackage (metapath, filepath):
updateGConf (filepath)
| richard-fisher/repository | desktop/gnome/core/gconf/comar/packageHandler.py | Python | gpl-2.0 | 816 |
__author__ = 'Stephanie'
from ODMconnection import dbconnection
from readSensors import readSensors
from updateSensors import updateSensors
from createSensors import createSensors
from deleteSensors import deleteSensors
__all__ = [
'readSensors',
'updateSensors',
'createSensors',
'deleteSensors',
] | Castronova/EMIT | api_old/ODM2/Sensors/services/__init__.py | Python | gpl-2.0 | 319 |
"""In the case that the Setup.py file fails to execute, please manually install the following packages,
or execute the requirements.sh script."""
# Installing Requirements: #
# pip install git+https://github.com/pwaller/pyfiglet #
# pip install colorama #
# pip install termcolor #
# pip install blessings #
from distutils.core import setup
setup(name='DPS East Hackathon Rule booklet.',
version='1.0',
description='DPS East Hackathon Rule booklet.',
author='thel3l',
author_email='[email protected]',
url='https://www.github.com/thel3l/hackathon-dpse',
packages=['distutils', 'distutils.command', 'pyfiglet', 'colorama', 'termcolor', 'blessings'],
)
| xxHACKYHACKYXx/hackathon-dpse | Rule booklet in Python/Setup.py | Python | gpl-2.0 | 875 |
# -*- coding: utf-8 -*-
#
# This tool helps you rebase your package to the latest version
# Copyright (C) 2013-2019 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors: Petr Hráček <[email protected]>
# Tomáš Hozza <[email protected]>
# Nikola Forró <[email protected]>
# František Nečas <[email protected]>
import logging
from typing import cast
from rebasehelper.logger import CustomLogger
logger: CustomLogger = cast(CustomLogger, logging.getLogger(__name__))
class InputHelper:
"""Class for command line interaction with the user."""
@staticmethod
def strtobool(message):
"""Converts a user message to a corresponding truth value.
This method is a replacement for deprecated strtobool from distutils,
its behaviour remains the same.
Args:
message (str): Message to evaluate.
Returns:
bool: True on 'y', 'yes', 't', 'true', 'on' and '1'.
False on 'n', 'no', 'f', 'false', 'off' and '0'.
Raises:
ValueError: On any other value.
"""
message = message.lower()
if message in ('y', 'yes', 't', 'true', 'on', '1'):
return True
elif message in ('n', 'no', 'f', 'false', 'off', '0'):
return False
raise ValueError('No conversion to truth value for "{}"'.format(message))
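    # Example (sketch): InputHelper.strtobool('Yes') -> True,
    # InputHelper.strtobool('off') -> False, and anything else
    # (e.g. 'maybe') raises ValueError.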
@classmethod
def get_message(cls, message, default_yes=True, any_input=False):
"""Prompts a user with yes/no message and gets the response.
Args:
message (str): Prompt string.
default_yes (bool): If the default value should be YES.
any_input (bool): Whether to return default value regardless of input.
Returns:
bool: True or False, based on user's input.
"""
if default_yes:
choice = '[Y/n]'
else:
choice = '[y/N]'
if any_input:
msg = '{0} '.format(message)
else:
msg = '{0} {1}? '.format(message, choice)
while True:
user_input = input(msg).lower()
if not user_input or any_input:
return True if default_yes else False
try:
user_input = cls.strtobool(user_input)
except ValueError:
logger.error('You have to type y(es) or n(o).')
continue
if any_input:
return True
else:
return bool(user_input)
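    # Minimal usage sketch (interactive; prompt text is hypothetical):
    #   if InputHelper.get_message('Apply the patch'):
    #       ...  # plain Enter accepts the default YES; 'n' returns False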
| rebase-helper/rebase-helper | rebasehelper/helpers/input_helper.py | Python | gpl-2.0 | 3,222 |
#parser_testing.py
import os, sys, re, StringIO
sys.path.append('/Users/Jason/Dropbox/JournalMap/scripts/GeoParsers')
#from jmap_geoparser_re import *
from jmap_geoparser import *
#def test_parsing():
test = "blah blah blah 45º 23' 12'', 123º 23' 56'' and blah blah blah 32º21'59''N, 115º 23' 14''W blah blah blah"
coords = coordinateParser.searchString(test)
for coord in coords:
assert coordinate(coord).calcDD(), "Coordinate Transform Error for "+str(coord)
test = "45.234º, 123.43º"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 45.234, 'longitude': 123.43}
test = "-45º 23' 12'', -123º 23' 56''"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': -45.38667, 'longitude': 123.39889}
test = "32º21'59''N, 115º 23' 14''W"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 32.36639, 'longitude': -115.38722}
test = "12 43 56 North, 23 56 12 East"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 12.73222, 'longitude': 23.93667}
test = "52 15 10N, 0 01 54W"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 52.25278, 'longitude': -0.03167}
test = "52 35 31N, 1 28 05E"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 52.59194, 'longitude': 1.46806}
test = "30° 47' N, 34° 46' E"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 30.78333, 'longitude': 34.76667}
'''
test = "AT; 1 spm, CN 3-41, 21°00′ N, 112°30′ E"
for result, start, end in coordinateParser.scanString(test):
assert coordinate(result).calcDD() == {'latitude': 21.0, 'longitude': 112.5}
test = '27°43.886, 34°15.663'
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 27.73143, 'longitude': 34.26105}
test = '49°17’13”N, 13°40’18”E'
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 49.28694, 'longitude': 13.67167}
test = '45.9215º; -76.6219º'
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': -45.9215, 'longitude': 76.6219}
test = "latitude 32°47′47″ S and longitude 26°50′56″ E"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': -32.79639, 'longitude': 26.84889}
test = "N15°46′ W87°00'"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 15.76667, 'longitude': -87.0}
test = "latitude of 35°13', longitude of 4°11'"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 35.21667, 'longitude': 4.18333}
test = "expects to find coordinates: 52 degrees, 42 minutes north, 124 degrees, 50 minutes west"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 52.7, 'longitude': -124.83333}
# Should return an exception, but instead calculates latitude as 6º 10'
#test = "expects to find coordinates: 5°70'N, 73°46'W" # Minutes greater than 60
#test = "expects not to find: 4.5–5.0 "
''' | JournalMap/GeoParsers | pyparser_geoparser_testing.py | Python | gpl-2.0 | 3,064 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 21 15:53:15 2016
@author: agiovann
"""
from __future__ import division
from __future__ import print_function
#%%
from builtins import zip
from builtins import str
from builtins import map
from builtins import range
from past.utils import old_div
import cv2
try:
cv2.setNumThreads(1)
except:
print('Open CV is naturally single threaded')
try:
if __IPYTHON__:
print((1))
# this is used for debugging purposes only. allows to reload classes when changed
get_ipython().magic('load_ext autoreload')
get_ipython().magic('autoreload 2')
except NameError:
print('Not launched under iPython')
import caiman as cm
import numpy as np
import time
import pylab as pl
import psutil
import sys
import os
from ipyparallel import Client
from skimage.external.tifffile import TiffFile
# , motion_correction_piecewise
from caiman.motion_correction import tile_and_correct
#%% in parallel
def tile_and_correct_wrapper(params):
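    # One worker unit: read this chunk's frames from the tiff, correct
    # them against `template` with tile_and_correct, and (if out_fname is
    # set) write the corrected frames into the shared memmap.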
from skimage.external.tifffile import imread
import numpy as np
import cv2
try:
cv2.setNumThreads(1)
except:
        pass  # OpenCV is naturally single threaded
from caiman.motion_correction import tile_and_correct
img_name, out_fname, idxs, shape_mov, template, strides, overlaps, max_shifts,\
add_to_movie, max_deviation_rigid, upsample_factor_grid, newoverlaps, newstrides, shifts_opencv = params
imgs = imread(img_name, key=idxs)
mc = np.zeros(imgs.shape, dtype=np.float32)
shift_info = []
for count, img in enumerate(imgs):
if count % 10 == 0:
print(count)
mc[count], total_shift, start_step, xy_grid = tile_and_correct(img, template, strides, overlaps, max_shifts, add_to_movie=add_to_movie, newoverlaps=newoverlaps, newstrides=newstrides,
upsample_factor_grid=upsample_factor_grid, upsample_factor_fft=10, show_movie=False, max_deviation_rigid=max_deviation_rigid, shifts_opencv=shifts_opencv)
shift_info.append([total_shift, start_step, xy_grid])
if out_fname is not None:
outv = np.memmap(out_fname, mode='r+', dtype=np.float32,
shape=shape_mov, order='F')
outv[:, idxs] = np.reshape(
mc.astype(np.float32), (len(imgs), -1), order='F').T
return shift_info, idxs, np.nanmean(mc, 0)
#%%
def motion_correction_piecewise(fname, splits, strides, overlaps, add_to_movie=0, template=None, max_shifts=(12, 12), max_deviation_rigid=3, newoverlaps=None, newstrides=None,
upsample_factor_grid=4, order='F', dview=None, save_movie=True, base_name='none', num_splits=None, shifts_opencv=False):
    '''
    Piecewise motion correction of the movie in fname: split the frames
    into chunks across time, correct each chunk against template with
    tile_and_correct (in parallel when dview is given), and optionally
    write the result to a memmapped file. Returns the memmap file name
    (or None) and the per-chunk (shift_info, frame indices, mean frame).
    '''
with TiffFile(fname) as tf:
d1, d2 = tf[0].shape
T = len(tf)
if type(splits) is int:
idxs = np.array_split(list(range(T)), splits)
else:
idxs = splits
save_movie = False
if template is None:
raise Exception('Not implemented')
shape_mov = (d1 * d2, T)
dims = d1, d2
if num_splits is not None:
idxs = np.array(idxs)[np.random.randint(0, len(idxs), num_splits)]
save_movie = False
print('**** MOVIE NOT SAVED BECAUSE num_splits is not None ****')
if save_movie:
if base_name is None:
base_name = fname[:-4]
fname_tot = base_name + '_d1_' + str(dims[0]) + '_d2_' + str(dims[1]) + '_d3_' + str(
1 if len(dims) == 2 else dims[2]) + '_order_' + str(order) + '_frames_' + str(T) + '_.mmap'
fname_tot = os.path.join(os.path.split(fname)[0], fname_tot)
np.memmap(fname_tot, mode='w+', dtype=np.float32,
shape=shape_mov, order=order)
else:
fname_tot = None
pars = []
for idx in idxs:
pars.append([fname, fname_tot, idx, shape_mov, template, strides, overlaps, max_shifts, np.array(
add_to_movie, dtype=np.float32), max_deviation_rigid, upsample_factor_grid, newoverlaps, newstrides, shifts_opencv])
t1 = time.time()
if dview is not None:
res = dview.map_sync(tile_and_correct_wrapper, pars)
else:
res = list(map(tile_and_correct_wrapper, pars))
print((time.time() - t1))
return fname_tot, res
#%%
# backend='SLURM'
backend = 'local'
if backend == 'SLURM':
n_processes = np.int(os.environ.get('SLURM_NPROCS'))
else:
# roughly number of cores on your machine minus 1
n_processes = np.maximum(np.int(psutil.cpu_count()), 1)
print(('using ' + str(n_processes) + ' processes'))
#%% start cluster for efficient computation
single_thread = False
if single_thread:
dview = None
else:
try:
c.close()
except:
        print('c did not exist, creating one')
print("Stopping cluster to avoid unnencessary use of memory....")
sys.stdout.flush()
if backend == 'SLURM':
try:
cm.stop_server(is_slurm=True)
except:
print('Nothing to stop')
slurm_script = '/mnt/xfs1/home/agiovann/SOFTWARE/Constrained_NMF/SLURM/slurmStart.sh'
cm.start_server(slurm_script=slurm_script)
pdir, profile = os.environ['IPPPDIR'], os.environ['IPPPROFILE']
c = Client(ipython_dir=pdir, profile=profile)
else:
cm.stop_server()
cm.start_server()
c = Client()
print(('Using ' + str(len(c)) + ' processes'))
dview = c[:len(c)]
#%% set parameters and create template by rigid motion correction
t1 = time.time()
#fname = 'k56_20160608_RSM_125um_41mW_zoom2p2_00001_00034.tif'
#fname = 'Sue_1000.tif'
fname = 'Sue_2000.tif'
max_shifts = (12, 12)
# splits = 56 # for parallelization split the movies in num_splits chunks across time
#num_splits_to_process = 28
#fname = 'M_FLUO_t_1000.tif'
#max_shifts = (10,10)
splits = 56  # for parallelization split the movies in num_splits chunks across time
num_splits_to_process = 28
#fname = 'M_FLUO_4.tif'
m = cm.load(fname, subindices=slice(0, 500, None))
template = cm.motion_correction.bin_median(m[100:400].copy().motion_correct(
max_shifts[0], max_shifts[1], template=None)[0])
print(time.time() - t1)
#%
# pl.imshow(template)
#%
shifts_opencv = False
new_templ = template
add_to_movie = -np.min(template)
save_movie = False
num_iter = 1
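# Iterative template refinement: correct against the current template,
# rebuild the template as the median of the per-chunk mean frames, and
# repeat; the relative template change is printed after each pass.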
for iter_ in range(num_iter):
print(iter_)
old_templ = new_templ.copy()
if iter_ == num_iter - 1:
save_movie = True
print('saving!')
num_splits_to_process = None
# templ_to_save = old_templ
fname_tot, res = motion_correction_piecewise(fname, splits, None, None,
add_to_movie=add_to_movie, template=old_templ, max_shifts=max_shifts, max_deviation_rigid=0,
newoverlaps=None, newstrides=None,
upsample_factor_grid=4, order='F', dview=dview, save_movie=save_movie, base_name=fname[:-4] + '_rig_', num_splits=num_splits_to_process, shifts_opencv=shifts_opencv)
new_templ = np.nanmedian(np.dstack([r[-1] for r in res]), -1)
print((old_div(np.linalg.norm(new_templ - old_templ), np.linalg.norm(old_templ))))
t2 = time.time() - t1
print(t2)
pl.imshow(new_templ, cmap='gray', vmax=np.percentile(new_templ, 95))
#%%
import scipy
np.save(fname[:-4] + '_templ_rigid.npy', new_templ)
#scipy.io.savemat('/mnt/xfs1/home/agiovann/dropbox/Python_progress/' + str(np.shape(m)[-1])+'_templ_rigid.mat',{'template':new_templ})
#%%
template = new_templ
#%%
mr = cm.load(fname_tot)
#%% online does not seem to work!
#overlaps = (16,16)
# if template.shape == (512,512):
# strides = (128,128)# 512 512
# #strides = (48,48)# 128 64
# elif template.shape == (64,128):
# strides = (48,48)# 512 512
# else:
# raise Exception('Unknown size, set manually')
#upsample_factor_grid = 4
#
#T = m.shape[0]
#idxs_outer = np.array_split(range(T),T/1000)
# for iddx in idxs_outer:
# num_fr = len(iddx)
# splits = np.array_split(iddx,num_fr/n_processes)
# print (splits[0][0]),(splits[-1][-1])
# fname_tot, res = motion_correction_piecewise(fname,splits, strides, overlaps,\
# add_to_movie=add_to_movie, template = template, max_shifts = (12,12),max_deviation_rigid = 3,\
# upsample_factor_grid = upsample_factor_grid,dview = dview)
#%%
# for 512 512 this seems good
t1 = time.time()
if template.shape == (512, 512):
strides = (128, 128) # 512 512
overlaps = (32, 32)
# strides = (16,16)# 512 512
newoverlaps = None
newstrides = None
# strides = (48,48)# 128 64
elif template.shape == (64, 128):
strides = (32, 32)
overlaps = (16, 16)
newoverlaps = None
newstrides = None
else:
raise Exception('Unknown size, set manually')
splits = 56
num_splits_to_process = 28
upsample_factor_grid = 4
max_deviation_rigid = 3
new_templ = template
add_to_movie = -np.min(m)
num_iter = 2
save_movie = False
for iter_ in range(num_iter):
print(iter_)
old_templ = new_templ.copy()
if iter_ == num_iter - 1:
save_movie = True
num_splits_to_process = None
print('saving!')
fname_tot, res = motion_correction_piecewise(fname, splits, strides, overlaps,
add_to_movie=add_to_movie, template=old_templ, max_shifts=max_shifts, max_deviation_rigid=max_deviation_rigid,
newoverlaps=newoverlaps, newstrides=newstrides,
upsample_factor_grid=upsample_factor_grid, order='F', dview=dview, save_movie=save_movie, base_name=fname[:-4] + '_els_opencv_', num_splits=num_splits_to_process, shifts_opencv=shifts_opencv)
new_templ = np.nanmedian(np.dstack([r[-1] for r in res]), -1)
# print((old_div(np.linalg.norm(new_templ-old_templ),np.linalg.norm(old_templ))))
# pl.imshow(new_templ,cmap = 'gray',vmax = np.percentile(new_templ,99))
# pl.pause(.1)
t2 = time.time() - t1
print(t2)
mc = cm.load(fname_tot)
#%%
pl.imshow(new_templ, cmap='gray', vmax=np.percentile(new_templ, 95))
#%%
np.save(fname[:-4] + '_templ_pw_rigid.npy', new_templ)
#scipy.io.savemat('/mnt/xfs1/home/agiovann/dropbox/Python_progress/' + str(np.shape(m)[-1])+'_templ_pw_rigid.mat',{'template':templ_to_save})
#%%
#%%
def compute_metrics_motion_correction(fname, final_size_x, final_size_y, swap_dim, pyr_scale=.5, levels=3, winsize=100, iterations=15, poly_n=5, poly_sigma=1.2 / 5, flags=0,
play_flow=False, resize_fact_flow=.2, template=None):
# cv2.OPTFLOW_FARNEBACK_GAUSSIAN
import scipy
vmin, vmax = -1, 1
m = cm.load(fname)
max_shft_x = np.int(np.ceil((np.shape(m)[1] - final_size_x) / 2))
max_shft_y = np.int(np.ceil((np.shape(m)[2] - final_size_y) / 2))
max_shft_x_1 = - ((np.shape(m)[1] - max_shft_x) - (final_size_x))
max_shft_y_1 = - ((np.shape(m)[2] - max_shft_y) - (final_size_y))
if max_shft_x_1 == 0:
max_shft_x_1 = None
if max_shft_y_1 == 0:
max_shft_y_1 = None
# print ([max_shft_x,max_shft_x_1,max_shft_y,max_shft_y_1])
m = m[:, max_shft_x:max_shft_x_1, max_shft_y:max_shft_y_1]
print('Local correlations..')
img_corr = m.local_correlations(eight_neighbours=True, swap_dim=swap_dim)
print(m.shape)
if template is None:
tmpl = cm.motion_correction.bin_median(m)
else:
tmpl = template
# tmpl = tmpl[max_shft_x:-max_shft_x,max_shft_y:-max_shft_y]
print('Compute Smoothness.. ')
smoothness = np.sqrt(
np.sum(np.sum(np.array(np.gradient(np.mean(m, 0)))**2, 0)))
smoothness_corr = np.sqrt(
np.sum(np.sum(np.array(np.gradient(img_corr))**2, 0)))
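    # Both "smoothness" values are the Frobenius norm of the spatial
    # gradient field, sqrt(sum_px (df/dx)^2 + (df/dy)^2), of the mean
    # image and of the correlation image respectively; a sharper image
    # yields larger gradient energy.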
print('Compute correlations.. ')
correlations = []
count = 0
for fr in m:
if count % 100 == 0:
print(count)
count += 1
correlations.append(scipy.stats.pearsonr(
fr.flatten(), tmpl.flatten())[0])
print('Compute optical flow .. ')
m = m.resize(1, 1, resize_fact_flow)
norms = []
flows = []
count = 0
for fr in m:
if count % 100 == 0:
print(count)
count += 1
flow = cv2.calcOpticalFlowFarneback(
tmpl, fr, None, pyr_scale, levels, winsize, iterations, poly_n, poly_sigma, flags)
if play_flow:
pl.subplot(1, 3, 1)
pl.cla()
pl.imshow(fr, vmin=0, vmax=300, cmap='gray')
pl.title('movie')
pl.subplot(1, 3, 3)
pl.cla()
pl.imshow(flow[:, :, 1], vmin=vmin, vmax=vmax)
pl.title('y_flow')
pl.subplot(1, 3, 2)
pl.cla()
pl.imshow(flow[:, :, 0], vmin=vmin, vmax=vmax)
pl.title('x_flow')
pl.pause(.05)
n = np.linalg.norm(flow)
flows.append(flow)
norms.append(n)
np.savez(fname[:-4] + '_metrics', flows=flows, norms=norms, correlations=correlations,
smoothness=smoothness, tmpl=tmpl, smoothness_corr=smoothness_corr, img_corr=img_corr)
return tmpl, correlations, flows, norms, smoothness
#%% run comparisons MLK
m_res = glob.glob('MKL*hdf5')
final_size = (512 - 24, 512 - 24)
winsize = 100
swap_dim = False
resize_fact_flow = .2
for mv in m_res:
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
mv, final_size[0], final_size[1], swap_dim, winsize=winsize, play_flow=False, resize_fact_flow=resize_fact_flow)
#%% run comparisons NORMCORRE
m_fluos = glob.glob('M_FLUO*.mmap') + glob.glob('M_FLUO*.tif')
final_size = (64 - 20, 128 - 20)
winsize = 32
resize_fact_flow = 1
for mv in m_fluos:
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
mv, final_size[0], final_size[1], winsize=winsize, play_flow=False, resize_fact_flow=resize_fact_flow)
#% run comparisons resonant
m_res = glob.glob('Sue*mmap') + glob.glob('Sue*.tif')
final_size = (512 - 24, 512 - 24)
winsize = 100
swap_dim = False
resize_fact_flow = .2
for mv in m_res:
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
mv, final_size[0], final_size[1], swap_dim, winsize=winsize, play_flow=False, resize_fact_flow=resize_fact_flow)
#%% run comparisons SIMA
m_fluos = glob.glob('plane*.tif') + glob.glob('row*.tif')
final_size = (64 - 20, 128 - 20)
winsize = 32
resize_fact_flow = 1
for mv in m_fluos:
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
mv, final_size[0], final_size[1], winsize=winsize, play_flow=False, resize_fact_flow=resize_fact_flow)
#% run comparisons resonant
m_res = glob.glob('Sue*.tif')
final_size = (512 - 24, 512 - 24)
winsize = 100
resize_fact_flow = .2
for mv in m_res:
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
mv, final_size[0], final_size[1], winsize=winsize, play_flow=False, resize_fact_flow=resize_fact_flow)
#%% run comparisons SUITE2P
for mvs in glob.glob('Sue*2000*16*.mat'):
print(mvs)
cm.movie(scipy.io.loadmat(mvs)['data'].transpose(
[2, 0, 1])).save(mvs[:-3] + '.hdf5')
#%%
m_fluos = glob.glob('M_FLUO*.hdf5')
final_size = (64 - 20, 128 - 20)
winsize = 32
resize_fact_flow = 1
for mv in m_fluos:
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
mv, final_size[0], final_size[1], winsize=winsize, play_flow=False, resize_fact_flow=resize_fact_flow)
#% run comparisons resonant
m_res = glob.glob('Sue_2000*16*.hdf5')
final_size = (512 - 24, 512 - 24)
winsize = 100
resize_fact_flow = .2
for mv in m_res:
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
mv, final_size[0], final_size[1], winsize=winsize, play_flow=False, resize_fact_flow=resize_fact_flow)
#%% plot the results
files_img = [u'/mnt/xfs1/home/agiovann/DataForPublications/Piecewise-Rigid-Analysis-paper/NORM_CORRE_OPENCV/Sue_2000_els_opencv__d1_512_d2_512_d3_1_order_F_frames_2000_._metrics.npz',
u'/mnt/xfs1/home/agiovann/DataForPublications/Piecewise-Rigid-Analysis-paper/NORMCORRE_EFF/Sue_2000_els__d1_512_d2_512_d3_1_order_F_frames_2000_._metrics.npz',
# u'/mnt/xfs1/home/agiovann/DataForPublications/Piecewise-Rigid-Analysis-paper/MLK/Sue_2000_MLK_metrics.npz',
# u'/mnt/xfs1/home/agiovann/DataForPublications/Piecewise-Rigid-Analysis-paper/SIMA_RESULTS/Sue_1000_T.tifrow1_example_sima_Trow1_example_sima_metrics.npz',
# u'/mnt/xfs1/home/agiovann/DataForPublications/Piecewise-Rigid-Analysis-paper/SUITE_2P_RES/Sue_2000_t_NB_16.._metrics.npz',
u'/mnt/xfs1/home/agiovann/DataForPublications/Piecewise-Rigid-Analysis-paper/MLK/MKL16T._metrics.npz']
# for fl in glob.glob('*.npz'):
for fl in files_img:
with np.load(fl) as ld:
print(ld.keys())
pl.figure()
print(fl + ':' + str(np.mean(ld['norms'])) + '+/-' + str(np.std(ld['norms'])) + ' ; ' + str(np.mean(ld['correlations'])
) + '+/-' + str(np.std(ld['correlations'])) + ' ; ' + str(ld['smoothness']) + ' ; ' + str(ld['smoothness_corr']))
pl.subplot(1, 2, 1)
try:
mean_img = np.mean(cm.load(fl[:-12] + 'mmap'), 0)[12:-12, 12:-12]
except:
try:
mean_img = np.mean(
cm.load(fl[:-12] + '.tif'), 0)[12:-12, 12:-12]
except:
mean_img = np.mean(
cm.load(fl[:-12] + 'hdf5'), 0)[12:-12, 12:-12]
# lq,hq = np.nanpercentile(mean_img,[.1,99.9])
lq, hq = 13.3, 318.01
pl.imshow(mean_img, vmin=lq, vmax=hq)
pl.colorbar()
# pl.plot(ld['correlations'])
pl.subplot(1, 2, 2)
pl.imshow(ld['img_corr'], vmin=0, vmax=.5)
pl.colorbar()
#%%
for fl in glob.glob('Mf*.npz'):
with np.load(fl) as ld:
print(ld.keys())
pl.figure()
print(fl + ':' + str(np.mean(ld['norms'])) + '+/-' + str(np.std(ld['norms'])) + ' ; ' + str(np.mean(ld['correlations'])
) + '+/-' + str(np.std(ld['correlations'])) + ' ; ' + str(ld['smoothness']) + ' ; ' + str(ld['smoothness_corr']))
#%%
#%
#total_shifts = []
#start_steps = []
#xy_grids = []
#mc = np.zeros(m.shape)
# for count,img in enumerate(np.array(m)):
# if count % 10 == 0:
# print(count)
# mc[count],total_shift,start_step,xy_grid = tile_and_correct(img, template, strides, overlaps,(12,12), newoverlaps = None, \
# newstrides = newstrides, upsample_factor_grid=upsample_factor_grid,\
# upsample_factor_fft=10,show_movie=False,max_deviation_rigid=2,add_to_movie=add_to_movie)
#
# total_shifts.append(total_shift)
# start_steps.append(start_step)
# xy_grids.append(xy_grid)
#mc = cm.load('M_FLUO_4_d1_64_d2_128_d3_1_order_F_frames_4620_.mmap')
#mc = cm.load('M_FLUO_t_d1_64_d2_128_d3_1_order_F_frames_6764_.mmap')
#%%
mc.resize(1, 1, .1).play(gain=10., fr=30, offset=100, magnification=1.)
#%%
m.resize(1, 1, .2).play(gain=10, fr=30, offset=0, magnification=1.)
#%%
cm.concatenate([mr.resize(1, 1, .5), mc.resize(1, 1, .5)], axis=1).play(
gain=10, fr=100, offset=300, magnification=1.)
#%%
import h5py
with h5py.File('sueann_pw_rigid_movie.mat') as f:
mef = np.array(f['M2'])
mef = cm.movie(mef.transpose([0, 2, 1]))
#%%
cm.concatenate([mef.resize(1, 1, .15), mc.resize(1, 1, .15)], axis=1).play(
gain=30, fr=40, offset=300, magnification=1.)
#%%
(mef - mc).resize(1, 1, .1).play(gain=50, fr=20, offset=0, magnification=1.)
#%%
(mc - mef).resize(1, 1, .1).play(gain=50, fr=20, offset=0, magnification=1.)
#%%
T, d1, d2 = np.shape(m)
shape_mov = (d1 * d2, m.shape[0])
Y = np.memmap('M_FLUO_4_d1_64_d2_128_d3_1_order_F_frames_4620_.mmap',
mode='r', dtype=np.float32, shape=shape_mov, order='F')
mc = cm.movie(np.reshape(Y, (d2, d1, T), order='F').transpose([2, 1, 0]))
mc.resize(1, 1, .25).play(gain=10., fr=50)
#%%
total_shifts = [r[0][0][0] for r in res]
pl.plot(np.reshape(np.array(total_shifts), (len(total_shifts), -1)))
#%%
#m_raw = cm.motion_correction.bin_median(m,exclude_nans=True)
#m_rig = cm.motion_correction.bin_median(mr,exclude_nans=True)
#m_el = cm.motion_correction.bin_median(mc,exclude_nans=True)
m_raw = np.nanmean(m, 0)
m_rig = np.nanmean(mr, 0)
m_el = np.nanmean(mc, 0)
m_ef = np.nanmean(mef, 0)
#%%
import scipy
r_raw = []
r_rig = []
r_el = []
r_ef = []
max_shft_x, max_shft_y = max_shifts
for fr_id in range(m.shape[0]):
fr = m[fr_id].copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
templ_ = m_raw.copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
r_raw.append(scipy.stats.pearsonr(fr.flatten(), templ_.flatten())[0])
fr = mr[fr_id].copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
templ_ = m_rig.copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
r_rig.append(scipy.stats.pearsonr(fr.flatten(), templ_.flatten())[0])
fr = mc[fr_id].copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
templ_ = m_el.copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
r_el.append(scipy.stats.pearsonr(fr.flatten(), templ_.flatten())[0])
if 1:
fr = mef[fr_id].copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
templ_ = m_ef.copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
r_ef.append(scipy.stats.pearsonr(fr.flatten(), templ_.flatten())[0])
r_raw = np.array(r_raw)
r_rig = np.array(r_rig)
r_el = np.array(r_el)
r_ef = np.array(r_ef)
#%%
#r_ef = scipy.io.loadmat('sueann.mat')['cM2'].squeeze()
#r_efr = scipy.io.loadmat('sueann.mat')['cY'].squeeze()
# pl.close()
#%%
pl.plot(r_raw)
pl.plot(r_rig)
pl.plot(r_el)
# pl.plot(r_ef)
#%%
pl.scatter(r_el, r_ef)
pl.plot([0, 1], [0, 1], 'r--')
#%%
pl.plot(old_div((r_ef - r_el), np.abs(r_el)))
#%%
import pylab as pl
vmax = -100
max_shft = 3
#%
pl.subplot(3, 3, 1)
pl.imshow(np.nanmean(m, 0)[max_shft:-max_shft, max_shft:-
max_shft], cmap='gray', vmax=vmax, interpolation='none')
pl.title('raw')
pl.axis('off')
pl.xlim([0, 100])
pl.ylim([220, 320])
pl.axis('off')
pl.subplot(3, 3, 2)
pl.title('rigid mean')
pl.imshow(np.nanmean(mr, 0)[max_shft:-max_shft, max_shft:-
max_shft], cmap='gray', vmax=vmax, interpolation='none')
pl.xlim([0, 100])
pl.ylim([220, 320])
pl.axis('off')
pl.subplot(3, 3, 3)
pl.imshow(np.nanmean(mc, 0)[max_shft:-max_shft, max_shft:-
max_shft], cmap='gray', vmax=vmax, interpolation='none')
pl.title('pw-rigid mean')
pl.axis('off')
pl.xlim([0, 100])
pl.ylim([220, 320])
pl.axis('off')
pl.subplot(3, 3, 5)
pl.scatter(r_raw, r_rig)
pl.plot([0, 1], [0, 1], 'r--')
pl.xlabel('raw')
pl.ylabel('rigid')
pl.xlim([0, 1])
pl.ylim([0, 1])
pl.subplot(3, 3, 6)
pl.scatter(r_rig, r_el)
pl.plot([0, 1], [0, 1], 'r--')
pl.ylabel('pw-rigid')
pl.xlabel('rigid')
pl.xlim([0, 1])
pl.ylim([0, 1])
if 0:
pl.subplot(2, 3, 3)
pl.scatter(r_el, r_ef)
pl.plot([0, 1], [0, 1], 'r--')
pl.ylabel('pw-rigid')
pl.xlabel('pw-rigid eft')
pl.xlim([0, 1])
pl.ylim([0, 1])
pl.subplot(2, 3, 6)
pl.imshow(np.nanmean(mef, 0)[max_shft:-max_shft, max_shft:-
max_shft], cmap='gray', vmax=vmax, interpolation='none')
pl.title('pw-rigid eft mean')
pl.axis('off')
#%%
pl.plot(r_ef)
#%%
mc = cm.movie(mc)
mc[np.isnan(mc)] = 0
#%% play movie
(mc + add_to_movie).resize(1, 1, .25).play(gain=10., fr=50)
#%% compute correlation images
ccimage = m.local_correlations(eight_neighbours=True, swap_dim=False)
ccimage_rig = mr.local_correlations(eight_neighbours=True, swap_dim=False)
ccimage_els = mc.local_correlations(eight_neighbours=True, swap_dim=False)
ccimage_ef = mef.local_correlations(eight_neighbours=True, swap_dim=False)
#%% check correlation images
pl.subplot(2, 2, 1)
pl.imshow(ccimage, vmin=0, vmax=0.4, interpolation='none')
pl.subplot(2, 2, 2)
pl.imshow(ccimage_rig, vmin=0, vmax=0.4, interpolation='none')
pl.subplot(2, 2, 3)
pl.imshow(ccimage_els, vmin=0, vmax=0.4, interpolation='none')
pl.subplot(2, 2, 4)
pl.imshow(ccimage_ef, vmin=0, vmax=0.4, interpolation='none')
#%%
all_mags = []
all_mags_eig = []
for chunk in res:
for frame in chunk[0]:
shifts, pos, init = frame
x_sh = np.zeros(np.add(init[-1], 1))
y_sh = np.zeros(np.add(init[-1], 1))
for nt, sh in zip(init, shifts):
x_sh[nt] = sh[0]
y_sh[nt] = sh[1]
jac_xx = x_sh[1:, :] - x_sh[:-1, :]
jac_yx = y_sh[1:, :] - y_sh[:-1, :]
jac_xy = x_sh[:, 1:] - x_sh[:, :-1]
jac_yy = y_sh[:, 1:] - y_sh[:, :-1]
mag_norm = np.sqrt(jac_xx[:, :-1]**2 + jac_yx[:, :-1]
** 2 + jac_xy[:-1, :]**2 + jac_yy[:-1, :]**2)
all_mags.append(mag_norm)
# pl.cla()
# pl.imshow(mag_norm,vmin=0,vmax =1,interpolation = 'none')
# pl.pause(.1)
#%%
mam = cm.movie(np.dstack(all_mags)).transpose([2, 0, 1])
#mam.play(magnification=10,gain = 5.)
#%%
pl.imshow(np.max(mam, 0), interpolation='none')
#%%
m = cm.load('rig_sue__d1_512_d2_512_d3_1_order_F_frames_3000_.mmap')
m1 = cm.load('els_sue__d1_512_d2_512_d3_1_order_F_frames_3000_.mmap')
m0 = cm.load('k56_20160608_RSM_125um_41mW_zoom2p2_00001_00034.tif')
tmpl = cm.motion_correction.bin_median(m)
tmpl1 = cm.motion_correction.bin_median(m1)
tmpl0 = cm.motion_correction.bin_median(m0)
#%%
vmin, vmax = -1, 1
count = 0
pyr_scale = .5
levels = 3
winsize = 100
iterations = 15
poly_n = 5
poly_sigma = old_div(1.2, 5)
flags = 0 # cv2.OPTFLOW_FARNEBACK_GAUSSIAN
norms = []
flows = []
for fr, fr1, fr0 in zip(m.resize(1, 1, .2), m1.resize(1, 1, .2), m0.resize(1, 1, .2)):
count += 1
print(count)
flow1 = cv2.calcOpticalFlowFarneback(tmpl1[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y], fr1[max_shft_x:-
max_shft_x, max_shft_y:-max_shft_y], None, pyr_scale, levels, winsize, iterations, poly_n, poly_sigma, flags)
flow = cv2.calcOpticalFlowFarneback(tmpl[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y], fr[max_shft_x:-
max_shft_x, max_shft_y:-max_shft_y], None, pyr_scale, levels, winsize, iterations, poly_n, poly_sigma, flags)
flow0 = cv2.calcOpticalFlowFarneback(tmpl0[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y], fr0[max_shft_x:-
max_shft_x, max_shft_y:-max_shft_y], None, pyr_scale, levels, winsize, iterations, poly_n, poly_sigma, flags)
#
# pl.subplot(2,3,1)
# pl.cla()
# pl.imshow(flow1[:,:,1],vmin=vmin,vmax=vmax)
# pl.subplot(2,3,2)
# pl.cla()
# pl.imshow(flow[:,:,1],vmin=vmin,vmax=vmax)
# pl.subplot(2,3,3)
# pl.cla()
# pl.imshow(flow0[:,:,1],vmin=vmin,vmax=vmax)
#
# pl.subplot(2,3,4)
# pl.cla()
# pl.imshow(flow1[:,:,0],vmin=vmin,vmax=vmax)
# pl.subplot(2,3,5)
# pl.cla()
# pl.imshow(flow[:,:,0],vmin=vmin,vmax=vmax)
# pl.subplot(2,3,6)
# pl.cla()
# pl.imshow(flow0[:,:,0],vmin=vmin,vmax=vmax)
# pl.pause(.1)
n1, n, n0 = np.linalg.norm(flow1), np.linalg.norm(
flow), np.linalg.norm(flow0)
flows.append([flow1, flow, flow0])
norms.append([n1, n, n0])
#%%
flm1_x = cm.movie(np.dstack([fl[0][:, :, 0]
for fl in flows])).transpose([2, 0, 1])
flm_x = cm.movie(np.dstack([fl[1][:, :, 0]
for fl in flows])).transpose([2, 0, 1])
flm0_x = cm.movie(np.dstack([fl[2][:, :, 0]
for fl in flows])).transpose([2, 0, 1])
flm1_y = cm.movie(np.dstack([fl[0][:, :, 1]
for fl in flows])).transpose([2, 0, 1])
flm_y = cm.movie(np.dstack([fl[1][:, :, 1]
for fl in flows])).transpose([2, 0, 1])
flm0_y = cm.movie(np.dstack([fl[2][:, :, 1]
for fl in flows])).transpose([2, 0, 1])
#%%
pl.figure()
pl.subplot(2, 1, 1)
pl.plot(norms)
pl.subplot(2, 1, 2)
pl.plot(np.arange(0, 3000 * .2, 0.2), r_el)
pl.plot(np.arange(0, 3000 * .2, 0.2), r_rig)
pl.plot(np.arange(0, 3000 * .2, 0.2), r_raw)
#%%
#%% compare to optical flow
pl.figure()
vmin = -.5
vmax = .5
cmap = 'hot'
pl.subplot(2, 3, 1)
pl.imshow(np.mean(np.abs(flm1_x), 0), vmin=vmin, vmax=vmax, cmap=cmap)
pl.title('PW-RIGID')
pl.ylabel('optical flow x')
pl.colorbar()
pl.subplot(2, 3, 2)
pl.title('RIGID')
pl.imshow(np.mean(np.abs(flm_x), 0), vmin=vmin, vmax=vmax, cmap=cmap)
pl.colorbar()
pl.subplot(2, 3, 3)
pl.imshow(np.mean(np.abs(flm0_x), 0), vmin=vmin * 4, vmax=vmax * 4, cmap=cmap)
pl.title('RAW')
pl.colorbar()
pl.subplot(2, 3, 4)
pl.imshow(np.mean(np.abs(flm1_y), 0), vmin=vmin, vmax=vmax, cmap=cmap)
pl.ylabel('optical flow y')
pl.colorbar()
pl.subplot(2, 3, 5)
pl.imshow(np.mean(np.abs(flm_y), 0), vmin=vmin, vmax=vmax, cmap=cmap)
pl.colorbar()
pl.subplot(2, 3, 6)
pl.imshow(np.mean(np.abs(flm0_y), 0), vmin=vmin * 4, vmax=vmax * 4, cmap=cmap)
pl.colorbar()
#%%
fl_rig = [n[1] / 1000 for n in norms]
fl_raw = [n[2] / 1000 for n in norms]
fl_el = [n[0] / 1000 for n in norms]
#%%
font = {'family': 'Myriad Pro',
'weight': 'regular',
'size': 15}
pl.rc('font', **font)
vmax = -100
max_shft = 3
pl.subplot(4, 3, 1)
pl.imshow(np.nanmean(m, 0)[max_shft:-max_shft, max_shft:-
max_shft], cmap='gray', vmax=vmax, interpolation='none')
pl.title('raw')
pl.axis('off')
pl.xlim([0, 100])
pl.ylim([220, 320])
pl.axis('off')
pl.subplot(4, 3, 2)
pl.title('rigid mean')
pl.imshow(np.nanmean(mr, 0)[max_shft:-max_shft, max_shft:-
max_shft], cmap='gray', vmax=vmax, interpolation='none')
pl.xlim([0, 100])
pl.ylim([220, 320])
pl.axis('off')
pl.subplot(4, 3, 3)
pl.imshow(np.nanmean(mc, 0)[max_shft:-max_shft, max_shft:-
max_shft], cmap='gray', vmax=vmax, interpolation='none')
pl.title('pw-rigid mean')
pl.axis('off')
pl.xlim([0, 100])
pl.ylim([220, 320])
pl.axis('off')
pl.subplot(4, 3, 5)
pl.scatter(r_raw, r_rig, s=50, c='red')
pl.axis('tight')
pl.plot([0, 1], [0, 1], 'k--')
pl.xlabel('raw')
pl.ylabel('rigid')
pl.xlim([0.2, .45])
pl.ylim([.2, .45])
pl.locator_params(nbins=4)
pl.subplot(4, 3, 6)
pl.scatter(r_rig, r_el, s=50, c='red')
pl.plot([0, 1], [0, 1], 'k--')
pl.ylabel('pw-rigid')
pl.xlabel('rigid')
pl.xlim([0.3, .45])
pl.ylim([.3, .45])
pl.locator_params(nbins=4)
pl.subplot(4, 3, 4)
pl.plot(np.arange(0, 3000 * .2, 0.2), r_el)
pl.plot(np.arange(0, 3000 * .2, 0.2), r_rig)
pl.plot(np.arange(0, 3000 * .2, 0.2), r_raw)
pl.xlim([220, 320])
pl.ylabel('correlation')
pl.locator_params(nbins=4)
pl.subplot(4, 3, 7)
pl.plot(norms)
pl.xlim([220, 320])
pl.ylabel('norm of optical flow')
pl.xlabel('frames')
pl.locator_params(nbins=4)
pl.subplot(4, 3, 8)
pl.scatter(fl_raw, fl_rig, s=50, c='red')
pl.axis('tight')
pl.plot([0, 3000], [0, 3000], 'k--')
pl.xlabel('raw')
pl.ylabel('rigid')
pl.xlim([0, 3])
pl.ylim([0, 3])
pl.locator_params(nbins=4)
pl.subplot(4, 3, 9)
pl.scatter(fl_rig, fl_el, s=50, c='red')
pl.plot([0, 1000], [0, 1000], 'k--')
pl.ylabel('pw-rigid')
pl.xlabel('rigid')
pl.xlim([0, 1])
pl.ylim([0, 1])
pl.locator_params(nbins=4)
ofl_mod_rig = np.mean(np.sqrt(flm_x**2 + flm_y**2), 0)
ofl_mod_el = np.mean(np.sqrt(flm1_x**2 + flm1_y**2), 0)
pl.subplot(4, 3, 10)
pl.imshow(ofl_mod_el, cmap='hot', vmin=0, vmax=1, interpolation='none')
pl.axis('off')
pl.colorbar()
pl.subplot(4, 3, 11)
pl.imshow(ofl_mod_rig, cmap='hot', vmin=0, vmax=1, interpolation='none')
pl.axis('off')
# pl.xlim([0,100])
# pl.ylim([220,320])
pl.axis('off')
pl.subplot(4, 3, 12)
pl.imshow(ofl_mod_el, cmap='hot', vmin=0, vmax=1, interpolation='none')
pl.axis('off')
# pl.xlim([0,100])
# pl.ylim([220,320])
pl.axis('off')
# font = {'family' : 'Myriad Pro',
# 'weight' : 'regular',
# 'size' : 15}
#
#pl.rc('font', **font)
pl.rcParams['pdf.fonttype'] = 42
#%% test against SIMA
import sima
import sima.motion
from sima.motion import HiddenMarkov2D
#fname_gr = 'M_FLUO_t.tif'
#fname_gr = 'Sue_1000.tif'
#fname_gr = 'Sue_2000.tif'
fname_gr = 'Sue_1000_T.tif'
fname_gr = 'Sue_1000_T.tifrow1_example_sima_T.tif'
sequences = [sima.Sequence.create('TIFF', fname_gr)]
dataset = sima.ImagingDataset(sequences, fname_gr)
#%%
import time
t1 = time.time()
granularity = 'row'
gran_n = 1
mc_approach = sima.motion.HiddenMarkov2D(granularity=(
granularity, gran_n), max_displacement=max_shifts, verbose=True, n_processes=14)
new_dataset = mc_approach.correct(dataset, None)
t2 = time.time() - t1
print(t2)
#%%
new_dataset.export_frames(
[[[fname_gr[:-4] + granularity + str(gran_n) + '_example_sima.tif']]], fmt='TIFF16')
#%%
m_s = cm.load(granularity + str(gran_n) + '_example_sima.tif')
m_s_row = cm.load('example_sima.tif')
#%%
def compute_jacobians(res):
all_mags = []
all_mags_eig = []
for chunk in res:
for frame in chunk[0]:
shifts, pos, init = frame
x_sh = np.zeros(np.add(init[-1], 1))
y_sh = np.zeros(np.add(init[-1], 1))
for nt, sh in zip(init, shifts):
x_sh[nt] = sh[0]
y_sh[nt] = sh[1]
jac_xx = x_sh[1:, :] - x_sh[:-1, :]
jac_yx = y_sh[1:, :] - y_sh[:-1, :]
jac_xy = x_sh[:, 1:] - x_sh[:, :-1]
jac_yy = y_sh[:, 1:] - y_sh[:, :-1]
mag_norm = np.sqrt(
jac_xx[:, :-1]**2 + jac_yx[:, :-1]**2 + jac_xy[:-1, :]**2 + jac_yy[:-1, :]**2)
            # per-pixel 2x2 Jacobians of the shift field, cropped so all four
            # components align on the same (H-1, W-1) grid used for mag_norm
            for a, b, c, d in zip(jac_xx[:, :-1].ravel(), jac_xy[:-1, :].ravel(),
                                  jac_yx[:, :-1].ravel(), jac_yy[:-1, :].ravel()):
                jc = np.array([[a, b], [c, d]])
                w, vl, vr = scipy.linalg.eig(jc, left=True, right=True)
                mag_eig = np.max(np.abs(w))  # largest eigenvalue magnitude
                all_mags_eig.append(mag_eig)
            all_mags.append(mag_norm)
    return all_mags, all_mags_eig
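# Illustrative usage of compute_jacobians (a sketch; assumes `res` holds the
# same per-chunk pw-rigid shift records unpacked in the loop further above):
#   all_mags, all_mags_eig = compute_jacobians(res)
#   mam = cm.movie(np.dstack(all_mags)).transpose([2, 0, 1])
#   pl.imshow(np.max(mam, 0), interpolation='none')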
# %%
#m = cm.load('M_FLUO_t_1000.tif')
#tmpl, correlations, flows_rig, norms = compute_metrics_motion_correction('M_FLUO_t_1000_rig__d1_64_d2_128_d3_1_order_F_frames_1000_.mmap',10,10,winsize=32, play_flow=False, resize_fact_flow=1)
#tmpl, correlations, flows_els, norms = compute_metrics_motion_correction('M_FLUO_t_1000_els__d1_64_d2_128_d3_1_order_F_frames_1000_.mmap',10,10,winsize=32, play_flow=False, resize_fact_flow=1)
#tmpl, correlations, flows_orig, norms = compute_metrics_motion_correction('M_FLUO_t_1000.tif',10,10,winsize=32, play_flow=False, resize_fact_flow=1)
#mfl_orig = cm.movie(np.concatenate([np.sqrt(np.sum(ff**2,-1))[np.newaxis,:,:] for ff in flows_orig],axis=0))
#mfl_rig = cm.movie(np.concatenate([np.sqrt(np.sum(ff**2,-1))[np.newaxis,:,:] for ff in flows_rig],axis=0))
#mfl_els = cm.movie(np.concatenate([np.sqrt(np.sum(ff**2,-1))[np.newaxis,:,:] for ff in flows_els],axis=0))
# %%
#cm.concatenate([mfl_orig/5.,mfl_rig,mfl_els],axis = 1).zproject(vmax = .5)
# %%
#cm.concatenate([m[:,10:-10,10:-10]/500,mfl_orig,mfl_rig,mfl_els],axis = 1).play(magnification = 5,gain = 5)
#%% TEST OPT FLOW
nmf = 'M_FLUO_t_shifted_flow.tif'
m = cm.load('M_FLUO_t_1000_els__d1_64_d2_128_d3_1_order_F_frames_1000_.mmap')
#shfts = [(a,b) for a,b in zip(np.random.randint(-2,3,m.shape[0]),np.random.randint(-2,3,m.shape[0]))]
shfts = [(a, b) for a, b in zip(np.random.randn(
m.shape[0]), np.random.randn(m.shape[0]))]
msh = m.copy().apply_shifts(shfts)
msh[:, 10:-10, 10:-10].save(nmf)
template = np.nanmean(m[:, 10:-10, 10:-10], 0)
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
'M_FLUO_t_shifted_flow.tif', template.shape[0], template.shape[1], winsize=32, play_flow=False, resize_fact_flow=1, template=template)
with np.load('M_FLUO_t_shifted_flow_metrics.npz') as ld:
flows = ld['flows']
ff_1 = [np.nanmean(f[:, :, 1]) for f in flows]
ff_0 = [np.nanmean(f[:, :, 0]) for f in flows]
pl.subplot(2, 1, 1)
pl.plot(np.array(shfts)[:, 1])
pl.plot(np.array(ff_0))
pl.legend(['shifts', 'optical flow'])
pl.xlim([400, 600])
pl.ylabel('x shifts')
pl.subplot(2, 1, 2)
pl.plot(np.array(shfts)[:, 0])
pl.plot(np.array(ff_1))
pl.xlim([400, 600])
pl.xlabel('frames (15 Hz)')
pl.ylabel('y shifts')
| agiovann/Constrained_NMF | use_cases/motion_correction_paper/demo_motion_correction_nonrigid.py | Python | gpl-2.0 | 36,419 |
from turtlelsystem.TurtleSVGMachine import TurtleSVGMachine
from nose.tools import assert_almost_equal
def test_forward():
turtle = TurtleSVGMachine(width = 20, height = 20)
turtle.do_command("FORWARD 10")
assert_almost_equal(turtle.x, 20.0)
def test_backward():
turtle = TurtleSVGMachine(width = 20, height = 20)
turtle.do_command("BACKWARD 10")
assert_almost_equal(turtle.x, 0.0)
def test_left():
turtle = TurtleSVGMachine()
turtle.do_command("LEFT 30")
assert_almost_equal(turtle.theta, 30.0)
def test_right():
turtle = TurtleSVGMachine()
turtle.do_command("RIGHT 30")
assert_almost_equal(turtle.theta, 330.0)
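# Sketch of a combined-command check (assumes the turtle starts at
# x = width / 2 with theta = 0, as implied by test_forward above, so a
# 90-degree turn leaves x unchanged by a subsequent FORWARD):
def test_left_then_forward_keeps_x():
    turtle = TurtleSVGMachine(width = 20, height = 20)
    turtle.do_command("LEFT 90")
    turtle.do_command("FORWARD 10")
    assert_almost_equal(turtle.x, 10.0)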
| ptrgags/turtle-fractals | turtlelsystem/tests/test_TurtleSVGMachine.py | Python | gpl-2.0 | 665 |
#
#
# Copyright (C) 2006, 2007, 2011, 2012, 2014 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Module for the unix socket protocol
This module implements the local unix socket protocol. You only need
this module and the opcodes module in the client program in order to
communicate with the master.
The module is also used by the master daemon.
"""
import socket
import collections
import time
import errno
import logging
from ganeti import serializer
from ganeti import constants
from ganeti import errors
from ganeti import utils
from ganeti import objects
from ganeti import pathutils
KEY_METHOD = constants.LUXI_KEY_METHOD
KEY_ARGS = constants.LUXI_KEY_ARGS
KEY_SUCCESS = constants.LUXI_KEY_SUCCESS
KEY_RESULT = constants.LUXI_KEY_RESULT
KEY_VERSION = constants.LUXI_KEY_VERSION
REQ_SUBMIT_JOB = constants.LUXI_REQ_SUBMIT_JOB
REQ_SUBMIT_JOB_TO_DRAINED_QUEUE = constants.LUXI_REQ_SUBMIT_JOB_TO_DRAINED_QUEUE
REQ_SUBMIT_MANY_JOBS = constants.LUXI_REQ_SUBMIT_MANY_JOBS
REQ_WAIT_FOR_JOB_CHANGE = constants.LUXI_REQ_WAIT_FOR_JOB_CHANGE
REQ_CANCEL_JOB = constants.LUXI_REQ_CANCEL_JOB
REQ_ARCHIVE_JOB = constants.LUXI_REQ_ARCHIVE_JOB
REQ_CHANGE_JOB_PRIORITY = constants.LUXI_REQ_CHANGE_JOB_PRIORITY
REQ_AUTO_ARCHIVE_JOBS = constants.LUXI_REQ_AUTO_ARCHIVE_JOBS
REQ_QUERY = constants.LUXI_REQ_QUERY
REQ_QUERY_FIELDS = constants.LUXI_REQ_QUERY_FIELDS
REQ_QUERY_JOBS = constants.LUXI_REQ_QUERY_JOBS
REQ_QUERY_INSTANCES = constants.LUXI_REQ_QUERY_INSTANCES
REQ_QUERY_NODES = constants.LUXI_REQ_QUERY_NODES
REQ_QUERY_GROUPS = constants.LUXI_REQ_QUERY_GROUPS
REQ_QUERY_NETWORKS = constants.LUXI_REQ_QUERY_NETWORKS
REQ_QUERY_EXPORTS = constants.LUXI_REQ_QUERY_EXPORTS
REQ_QUERY_CONFIG_VALUES = constants.LUXI_REQ_QUERY_CONFIG_VALUES
REQ_QUERY_CLUSTER_INFO = constants.LUXI_REQ_QUERY_CLUSTER_INFO
REQ_QUERY_TAGS = constants.LUXI_REQ_QUERY_TAGS
REQ_SET_DRAIN_FLAG = constants.LUXI_REQ_SET_DRAIN_FLAG
REQ_SET_WATCHER_PAUSE = constants.LUXI_REQ_SET_WATCHER_PAUSE
REQ_ALL = constants.LUXI_REQ_ALL
DEF_CTMO = constants.LUXI_DEF_CTMO
DEF_RWTO = constants.LUXI_DEF_RWTO
WFJC_TIMEOUT = constants.LUXI_WFJC_TIMEOUT
class ProtocolError(errors.LuxiError):
"""Denotes an error in the LUXI protocol."""
class ConnectionClosedError(ProtocolError):
"""Connection closed error."""
class TimeoutError(ProtocolError):
"""Operation timeout error."""
class RequestError(ProtocolError):
"""Error on request.
This signifies an error in the request format or request handling,
but not (e.g.) an error in starting up an instance.
Some common conditions that can trigger this exception:
- job submission failed because the job data was wrong
- query failed because required fields were missing
"""
class NoMasterError(ProtocolError):
"""The master cannot be reached.
This means that the master daemon is not running or the socket has
been removed.
"""
class PermissionError(ProtocolError):
"""Permission denied while connecting to the master socket.
This means the user doesn't have the proper rights.
"""
class Transport:
"""Low-level transport class.
This is used on the client side.
This could be replace by any other class that provides the same
semantics to the Client. This means:
- can send messages and receive messages
- safe for multithreading
"""
def __init__(self, address, timeouts=None):
"""Constructor for the Client class.
Arguments:
    - address: a valid address for the used transport class
    - timeouts: a list of timeouts, to be used on connect and read/write
    There are two timeouts used since we might want to wait for a long
    time for a response, but the connect timeout should be lower.
    If not passed, we use defaults of 10 and 60 seconds respectively.
    Note that on reading data, since the timeout applies to an
    individual receive, the total duration might be longer
    than the timeout value passed (we enforce a hard limit of twice the
    read timeout).
"""
self.address = address
if timeouts is None:
self._ctimeout, self._rwtimeout = DEF_CTMO, DEF_RWTO
else:
self._ctimeout, self._rwtimeout = timeouts
self.socket = None
self._buffer = ""
self._msgs = collections.deque()
try:
self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
# Try to connect
try:
utils.Retry(self._Connect, 1.0, self._ctimeout,
args=(self.socket, address, self._ctimeout))
except utils.RetryTimeout:
raise TimeoutError("Connect timed out")
self.socket.settimeout(self._rwtimeout)
except (socket.error, NoMasterError):
if self.socket is not None:
self.socket.close()
self.socket = None
raise
@staticmethod
def _Connect(sock, address, timeout):
sock.settimeout(timeout)
try:
sock.connect(address)
except socket.timeout, err:
raise TimeoutError("Connect timed out: %s" % str(err))
except socket.error, err:
error_code = err.args[0]
if error_code in (errno.ENOENT, errno.ECONNREFUSED):
raise NoMasterError(address)
elif error_code in (errno.EPERM, errno.EACCES):
raise PermissionError(address)
elif error_code == errno.EAGAIN:
# Server's socket backlog is full at the moment
raise utils.RetryAgain()
raise
def _CheckSocket(self):
"""Make sure we are connected.
"""
if self.socket is None:
raise ProtocolError("Connection is closed")
def Send(self, msg):
"""Send a message.
This just sends a message and doesn't wait for the response.
"""
if constants.LUXI_EOM in msg:
raise ProtocolError("Message terminator found in payload")
self._CheckSocket()
try:
      # sendall either transmits the complete message or raises an exception
self.socket.sendall(msg + constants.LUXI_EOM)
except socket.timeout, err:
raise TimeoutError("Sending timeout: %s" % str(err))
def Recv(self):
"""Try to receive a message from the socket.
    In case we already have messages queued, we just return from the
    queue. Otherwise, we try to read data with a _rwtimeout network
    timeout, making sure we don't go over 2 x _rwtimeout as a global
    limit.
"""
self._CheckSocket()
etime = time.time() + self._rwtimeout
while not self._msgs:
if time.time() > etime:
raise TimeoutError("Extended receive timeout")
while True:
try:
data = self.socket.recv(4096)
except socket.timeout, err:
raise TimeoutError("Receive timeout: %s" % str(err))
except socket.error, err:
if err.args and err.args[0] == errno.EAGAIN:
continue
raise
break
if not data:
raise ConnectionClosedError("Connection closed while reading")
new_msgs = (self._buffer + data).split(constants.LUXI_EOM)
self._buffer = new_msgs.pop()
self._msgs.extend(new_msgs)
return self._msgs.popleft()
def Call(self, msg):
"""Send a message and wait for the response.
This is just a wrapper over Send and Recv.
"""
self.Send(msg)
return self.Recv()
def Close(self):
"""Close the socket"""
if self.socket is not None:
self.socket.close()
self.socket = None
def ParseRequest(msg):
"""Parses a LUXI request message.
"""
try:
request = serializer.LoadJson(msg)
except ValueError, err:
raise ProtocolError("Invalid LUXI request (parsing error): %s" % err)
logging.debug("LUXI request: %s", request)
if not isinstance(request, dict):
logging.error("LUXI request not a dict: %r", msg)
raise ProtocolError("Invalid LUXI request (not a dict)")
method = request.get(KEY_METHOD, None) # pylint: disable=E1103
args = request.get(KEY_ARGS, None) # pylint: disable=E1103
version = request.get(KEY_VERSION, None) # pylint: disable=E1103
if method is None or args is None:
logging.error("LUXI request missing method or arguments: %r", msg)
raise ProtocolError(("Invalid LUXI request (no method or arguments"
" in request): %r") % msg)
return (method, args, version)
def ParseResponse(msg):
"""Parses a LUXI response message.
"""
# Parse the result
try:
data = serializer.LoadJson(msg)
except KeyboardInterrupt:
raise
except Exception, err:
raise ProtocolError("Error while deserializing response: %s" % str(err))
# Validate response
if not (isinstance(data, dict) and
KEY_SUCCESS in data and
KEY_RESULT in data):
raise ProtocolError("Invalid response from server: %r" % data)
return (data[KEY_SUCCESS], data[KEY_RESULT],
data.get(KEY_VERSION, None)) # pylint: disable=E1103
def FormatResponse(success, result, version=None):
"""Formats a LUXI response message.
"""
response = {
KEY_SUCCESS: success,
KEY_RESULT: result,
}
if version is not None:
response[KEY_VERSION] = version
logging.debug("LUXI response: %s", response)
return serializer.DumpJson(response)
def FormatRequest(method, args, version=None):
"""Formats a LUXI request message.
"""
# Build request
request = {
KEY_METHOD: method,
KEY_ARGS: args,
}
if version is not None:
request[KEY_VERSION] = version
# Serialize the request
return serializer.DumpJson(request)
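# Illustrative wire format (a sketch, assuming the LUXI_KEY_* constants map to
# the literal keys "method", "args" and "version"):
#   FormatRequest("QueryClusterInfo", [], version=constants.LUXI_VERSION)
#   -> '{"method": "QueryClusterInfo", "args": [], "version": <version>}'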
def CallLuxiMethod(transport_cb, method, args, version=None):
"""Send a LUXI request via a transport and return the response.
"""
assert callable(transport_cb)
request_msg = FormatRequest(method, args, version=version)
# Send request and wait for response
response_msg = transport_cb(request_msg)
(success, result, resp_version) = ParseResponse(response_msg)
# Verify version if there was one in the response
if resp_version is not None and resp_version != version:
raise errors.LuxiError("LUXI version mismatch, client %s, response %s" %
(version, resp_version))
if success:
return result
errors.MaybeRaise(result)
raise RequestError(result)
class Client(object):
"""High-level client implementation.
This uses a backing Transport-like class on top of which it
implements data serialization/deserialization.
"""
def __init__(self, address=None, timeouts=None, transport=Transport):
"""Constructor for the Client class.
Arguments:
    - address: a valid address for the used transport class
    - timeouts: a list of timeouts, to be used on connect and read/write
    - transport: a Transport-like class
    If timeouts is not passed, the default timeouts of the transport
    class are used.
"""
if address is None:
address = pathutils.MASTER_SOCKET
self.address = address
self.timeouts = timeouts
self.transport_class = transport
self.transport = None
self._InitTransport()
def _InitTransport(self):
"""(Re)initialize the transport if needed.
"""
if self.transport is None:
self.transport = self.transport_class(self.address,
timeouts=self.timeouts)
def _CloseTransport(self):
"""Close the transport, ignoring errors.
"""
if self.transport is None:
return
try:
old_transp = self.transport
self.transport = None
old_transp.Close()
except Exception: # pylint: disable=W0703
pass
def _SendMethodCall(self, data):
# Send request and wait for response
try:
self._InitTransport()
return self.transport.Call(data)
except Exception:
self._CloseTransport()
raise
def Close(self):
"""Close the underlying connection.
"""
self._CloseTransport()
def CallMethod(self, method, args):
"""Send a generic request and return the response.
"""
if not isinstance(args, (list, tuple)):
raise errors.ProgrammerError("Invalid parameter passed to CallMethod:"
" expected list, got %s" % type(args))
return CallLuxiMethod(self._SendMethodCall, method, args,
version=constants.LUXI_VERSION)
def SetQueueDrainFlag(self, drain_flag):
return self.CallMethod(REQ_SET_DRAIN_FLAG, (drain_flag, ))
def SetWatcherPause(self, until):
return self.CallMethod(REQ_SET_WATCHER_PAUSE, (until, ))
def SubmitJob(self, ops):
ops_state = map(lambda op: op.__getstate__(), ops)
return self.CallMethod(REQ_SUBMIT_JOB, (ops_state, ))
def SubmitJobToDrainedQueue(self, ops):
ops_state = map(lambda op: op.__getstate__(), ops)
return self.CallMethod(REQ_SUBMIT_JOB_TO_DRAINED_QUEUE, (ops_state, ))
def SubmitManyJobs(self, jobs):
jobs_state = []
for ops in jobs:
jobs_state.append([op.__getstate__() for op in ops])
return self.CallMethod(REQ_SUBMIT_MANY_JOBS, (jobs_state, ))
@staticmethod
def _PrepareJobId(request_name, job_id):
try:
return int(job_id)
except ValueError:
raise RequestError("Invalid parameter passed to %s as job id: "
" expected integer, got value %s" %
(request_name, job_id))
def CancelJob(self, job_id):
job_id = Client._PrepareJobId(REQ_CANCEL_JOB, job_id)
return self.CallMethod(REQ_CANCEL_JOB, (job_id, ))
def ArchiveJob(self, job_id):
job_id = Client._PrepareJobId(REQ_ARCHIVE_JOB, job_id)
return self.CallMethod(REQ_ARCHIVE_JOB, (job_id, ))
def ChangeJobPriority(self, job_id, priority):
job_id = Client._PrepareJobId(REQ_CHANGE_JOB_PRIORITY, job_id)
return self.CallMethod(REQ_CHANGE_JOB_PRIORITY, (job_id, priority))
def AutoArchiveJobs(self, age):
timeout = (DEF_RWTO - 1) / 2
return self.CallMethod(REQ_AUTO_ARCHIVE_JOBS, (age, timeout))
def WaitForJobChangeOnce(self, job_id, fields,
prev_job_info, prev_log_serial,
timeout=WFJC_TIMEOUT):
"""Waits for changes on a job.
@param job_id: Job ID
@type fields: list
@param fields: List of field names to be observed
@type prev_job_info: None or list
@param prev_job_info: Previously received job information
@type prev_log_serial: None or int/long
@param prev_log_serial: Highest log serial number previously received
@type timeout: int/float
@param timeout: Timeout in seconds (values larger than L{WFJC_TIMEOUT} will
be capped to that value)
"""
assert timeout >= 0, "Timeout can not be negative"
return self.CallMethod(REQ_WAIT_FOR_JOB_CHANGE,
(job_id, fields, prev_job_info,
prev_log_serial,
min(WFJC_TIMEOUT, timeout)))
def WaitForJobChange(self, job_id, fields, prev_job_info, prev_log_serial):
job_id = Client._PrepareJobId(REQ_WAIT_FOR_JOB_CHANGE, job_id)
while True:
result = self.WaitForJobChangeOnce(job_id, fields,
prev_job_info, prev_log_serial)
if result != constants.JOB_NOTCHANGED:
break
return result
def Query(self, what, fields, qfilter):
"""Query for resources/items.
@param what: One of L{constants.QR_VIA_LUXI}
@type fields: List of strings
@param fields: List of requested fields
@type qfilter: None or list
@param qfilter: Query filter
@rtype: L{objects.QueryResponse}
"""
result = self.CallMethod(REQ_QUERY, (what, fields, qfilter))
return objects.QueryResponse.FromDict(result)
def QueryFields(self, what, fields):
"""Query for available fields.
@param what: One of L{constants.QR_VIA_LUXI}
@type fields: None or list of strings
@param fields: List of requested fields
@rtype: L{objects.QueryFieldsResponse}
"""
result = self.CallMethod(REQ_QUERY_FIELDS, (what, fields))
return objects.QueryFieldsResponse.FromDict(result)
def QueryJobs(self, job_ids, fields):
return self.CallMethod(REQ_QUERY_JOBS, (job_ids, fields))
def QueryInstances(self, names, fields, use_locking):
return self.CallMethod(REQ_QUERY_INSTANCES, (names, fields, use_locking))
def QueryNodes(self, names, fields, use_locking):
return self.CallMethod(REQ_QUERY_NODES, (names, fields, use_locking))
def QueryGroups(self, names, fields, use_locking):
return self.CallMethod(REQ_QUERY_GROUPS, (names, fields, use_locking))
def QueryNetworks(self, names, fields, use_locking):
return self.CallMethod(REQ_QUERY_NETWORKS, (names, fields, use_locking))
def QueryExports(self, nodes, use_locking):
return self.CallMethod(REQ_QUERY_EXPORTS, (nodes, use_locking))
def QueryClusterInfo(self):
return self.CallMethod(REQ_QUERY_CLUSTER_INFO, ())
def QueryConfigValues(self, fields):
return self.CallMethod(REQ_QUERY_CONFIG_VALUES, (fields, ))
def QueryTags(self, kind, name):
return self.CallMethod(REQ_QUERY_TAGS, (kind, name))
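# Illustrative usage of Client (a sketch; assumes a running master daemon
# listening on pathutils.MASTER_SOCKET; the arguments are examples only):
#   client = Client()
#   info = client.QueryClusterInfo()
#   jobs = client.QueryJobs(None, ["id", "status"])
#   client.Close()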
| vladimir-ipatov/ganeti | lib/luxi.py | Python | gpl-2.0 | 17,598 |
__author__ = 'Marco Maio'
import time
class Handler():
def __init__(self, stocks_today=None, investments_by_name=None, investments_by_availability=None):
# input data assessment
if stocks_today is None:
raise ValueError('Stocks_today container not specified!')
elif investments_by_name is None:
raise ValueError('Investments_by_name container not specified!')
elif investments_by_availability is None:
raise ValueError('Investments_by_availability container not specified!')
self.__stocks_today = stocks_today
self.__investments_by_name = investments_by_name
self.__investments_by_availability = investments_by_availability
def get_amount_by_stock_name(self, stock_name):
if stock_name is None or len(stock_name) == 0:
raise ValueError('Stock name not specified!')
return self.__stocks_today[stock_name]["EUR"] *\
self.__stocks_today[stock_name]["Numbers of parts"]
def get_amount_total_investment(self):
tot = 0
for i in self.__stocks_today:
tot += self.get_amount_by_stock_name(i)
return tot
def get_total_amount_by_date(self, date=None, stock_name="", closest_availability_only=False):
if date is None or len(date) == 0:
raise ValueError('Date not specified!')
dates = [d for d in self.__investments_by_availability.keys() if len(d) > 0]
eligible_dates =[]
for d in dates:
if time.strptime(date, "%d/%m/%Y") >= time.strptime(d, "%d/%m/%Y"):
if not closest_availability_only or date.split('/')[2] == d.split('/')[2]:
eligible_dates.append(d)
if len(eligible_dates)== 0:
            raise ValueError('No funds available by ' + date)
tot = 0
stocks = set()
for ed in eligible_dates:
for k, v in self.__investments_by_availability[ed].items():
if stock_name in k:
stocks.add(k)
tot += self.__stocks_today[k]["EUR"] * v
return tot, stocks
def get_paid_by_stock_name(self, stock_name=None):
if stock_name is None or len(stock_name) == 0:
raise ValueError('Stock name not specified!')
if stock_name not in self.__stocks_today:
raise ValueError('Please provide a valid stock name!')
tot = 0.0
for k, v in self.__investments_by_name[stock_name].items():
tot += v['Number of actions bought'] * v['Purchase value']
return tot
def get_total_gain(self):
tot_paid = 0.0
for stock_name in self.__investments_by_name:
tot_paid += self.get_paid_by_stock_name(stock_name)
tot = self.get_amount_total_investment()
gain = tot - tot_paid
percentage_gain = (tot/tot_paid - 1)*100
return gain, percentage_gain
def get_gain_by_stock_name(self, stock_name):
if stock_name is None or len(stock_name) == 0:
raise ValueError('Stock name not specified!')
if stock_name not in self.__stocks_today:
raise ValueError('Please provide a valid stock name!')
tot_paid = self.get_paid_by_stock_name(stock_name)
tot = self.get_amount_by_stock_name(stock_name)
gain = tot - tot_paid
percentage_gain = (tot/tot_paid - 1)*100
return gain, percentage_gain
def get_next_available_amount(self):
dates = [d for d in self.__investments_by_availability.keys() if len(d) > 0]
min_date = None
min_date_str = ""
for d in dates:
current_date = time.strptime(d, "%d/%m/%Y")
if min_date is None or min_date > current_date:
min_date = current_date
min_date_str = d
return min_date_str, self.get_total_amount_by_date(min_date_str)
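# Illustrative usage (a sketch; the container layouts are inferred from the
# lookups above, e.g. stocks_today[name]["EUR"] and ["Numbers of parts"]):
#   handler = Handler(stocks_today=stocks,
#                     investments_by_name=by_name,
#                     investments_by_availability=by_availability)
#   total = handler.get_amount_total_investment()
#   gain, percentage = handler.get_total_gain()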
| marcomaio/NatixisUnofficialAPI | natixis/handler.py | Python | gpl-2.0 | 4,027 |
# Copyright 2004-2010 PyTom <[email protected]>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import renpy
import codecs
import os
import os.path
import time
image_prefixes = None
filenames = None
# Things to check in lint.
#
# Image files exist, and are of the right case.
# Jump/Call targets defined.
# Say whos can evaluate.
# Call followed by say.
# Show/Scene valid.
# At valid.
# With valid.
# Hide maybe valid.
# Expressions can compile.
# The node the report will be about:
report_node = None
# Reports a message to the user.
def report(msg, *args):
if report_node:
out = u"%s:%d " % (renpy.parser.unicode_filename(report_node.filename), report_node.linenumber)
else:
out = ""
out += msg % args
print
print out.encode('utf-8')
added = { }
# Reports additional information about a message, the first time it
# occurs.
def add(msg):
    if msg not in added:
added[msg] = True
print unicode(msg).encode('utf-8')
# Tries to evaluate an expression, announcing an error if it fails.
def try_eval(where, expr, additional=None):
try:
renpy.python.py_eval(expr)
except:
        report("Could not evaluate '%s' in %s.", expr, where)
if additional:
add(additional)
# Returns True if the expression can be compiled as python, False
# otherwise.
def try_compile(where, expr):
try:
renpy.python.py_compile_eval_bytecode(expr)
except:
report("'%s' could not be compiled as a python expression, %s.", expr, where)
# This reports an error if we're sure that the image with the given name
# does not exist.
def image_exists(name, expression, tag):
# Add the tag to the set of known tags.
tag = tag or name[0]
image_prefixes[tag] = True
if expression:
return
name = list(name)
names = " ".join(name)
while name:
if tuple(name) in renpy.exports.images:
return
name.pop()
report("The image named '%s' was not declared.", names)
# Only check each file once.
check_file_cache = { }
def check_file(what, fn):
present = check_file_cache.get(fn, None)
if present is True:
return
if present is False:
report("%s uses file '%s', which is not loadable.", what.capitalize(), fn)
return
if not renpy.loader.loadable(fn):
report("%s uses file '%s', which is not loadable.", what.capitalize(), fn)
check_file_cache[fn] = False
return
check_file_cache[fn] = True
try:
renpy.loader.transfn(fn)
except:
return
if renpy.loader.transfn(fn) and \
fn.lower() in filenames and \
fn != filenames[fn.lower()]:
report("Filename case mismatch for %s. '%s' was used in the script, but '%s' was found on disk.", what, fn, filenames[fn.lower()])
        add("Case mismatches can lead to problems on Mac, Linux/Unix, and when archiving images. To fix them, either rename the file on disk, or change the filename used in the script.")
def check_displayable(what, d):
files = [ ]
def files_callback(img):
files.extend(img.predict_files())
d.predict(files_callback)
for fn in files:
check_file(what, fn)
# Lints ast.Image nodes.
def check_image(node):
name = " ".join(node.imgname)
check_displayable('image %s' % name, renpy.exports.images[node.imgname])
def imspec(t):
    # Normalize imspec tuples to the full 7-tuple
    # (name, expression, tag, at_list, layer, zorder, behind).
    if len(t) == 3:
        return t[0], None, None, t[1], t[2], 0, None
if len(t) == 6:
return t[0], t[1], t[2], t[3], t[4], t[5], None
else:
return t
# Lints ast.Show and ast.Scene nodets.
def check_show(node):
# A Scene may have an empty imspec.
if not node.imspec:
return
name, expression, tag, at_list, layer, zorder, behind = imspec(node.imspec)
if layer not in renpy.config.layers and layer not in renpy.config.top_layers:
report("Uses layer '%s', which is not in config.layers.", layer)
image_exists(name, expression, tag)
for i in at_list:
        try_eval("the at list of a scene or show statement", i, "Perhaps you forgot to declare, or misspelled, a position?")
# Lints ast.Hide.
def check_hide(node):
name, expression, tag, at_list, layer, zorder, behind = imspec(node.imspec)
tag = tag or name[0]
if layer not in renpy.config.layers and layer not in renpy.config.top_layers:
report("Uses layer '%s', which is not in config.layers.", layer)
if tag not in image_prefixes:
report("The image tag '%s' is not the prefix of a declared image, nor was it used in a show statement before this hide statement.", tag)
# for i in at_list:
#        try_eval(node, "at list of hide statement", i)
def check_with(node):
try_eval("a with statement or clause", node.expr, "Perhaps you forgot to declare, or misspelled, a transition?")
def check_user(node):
def error(msg):
report("%s", msg)
renpy.exports.push_error_handler(error)
try:
node.call("lint")
finally:
renpy.exports.pop_error_handler()
try:
node.get_next()
except:
report("Didn't properly report what the next statement should be.")
check_text_tags = renpy.display.text.check_text_tags
def text_checks(s):
msg = renpy.display.text.check_text_tags(s)
if msg:
report("%s (in %s)", msg, repr(s)[1:])
if "%" in s:
state = 0
pos = 0
fmt = ""
while pos < len(s):
c = s[pos]
pos += 1
# Not in a format.
if state == 0:
if c == "%":
state = 1
fmt = "%"
# In a format.
elif state == 1:
fmt += c
if c == "(":
state = 2
                elif c in "#0123456789- +hlL":
state = 1
elif c in "diouxXeEfFgGcrs%":
state = 0
else:
report("Unknown string format code '%s' (in %s)", fmt, repr(s)[1:])
state = 0
# In a mapping key.
elif state == 2:
fmt += c
if c == ")":
state = 1
if state != 0:
report("Unterminated string format code '%s' (in %s)", fmt, repr(s)[1:])
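        # For example (illustrative): "Score: %(points)d" walks state 0 -> 1
        # at '%', 1 -> 2 at '(', 2 -> 1 at ')', then 'd' returns to state 0,
        # so nothing is reported; a truncated "Score: %(points" ends in
        # state 2 and triggers the unterminated-format report above.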
def check_say(node):
if node.who:
try_eval("the who part of a say statement", node.who, "Perhaps you forgot to declare a character?")
if node.with_:
try_eval("the with clause of a say statement", node.with_, "Perhaps you forgot to declare, or misspelled, a transition?")
text_checks(node.what)
def check_menu(node):
if node.with_:
try_eval("the with clause of a menu statement", node.with_, "Perhaps you forgot to declare, or misspelled, a transition?")
if not [ (l, c, b) for l, c, b in node.items if b ]:
report("The menu does not contain any selectable choices.")
for l, c, b in node.items:
if c:
try_compile("in the if clause of a menuitem", c)
text_checks(l)
def check_jump(node):
if node.expression:
return
if not renpy.game.script.has_label(node.target):
report("The jump is to nonexistent label '%s'.", node.target)
def check_call(node):
# if not isinstance(node.next.name, basestring):
# report(node, "The call does not have a from clause associated with it.")
# add("You can add from clauses to calls automatically by running the add_from program.")
# add("This is necessary to ensure saves can be loaded even when the script changes.")
if node.expression:
return
if not renpy.game.script.has_label(node.label):
report("The call is to nonexistent label '%s'.", node.label)
def check_while(node):
try_compile("in the condition of the while statement", node.condition)
def check_if(node):
for condition, block in node.entries:
try_compile("in a condition of the if statement", condition)
def check_style(name, s):
if s.indexed:
for i in s.indexed:
            check_style(name + "[%r]" % (i,), s.indexed[i])
for p in s.properties:
for k, v in p.iteritems():
kname = name + "." + k
# Treat font specially.
if k.endswith("font"):
check_file(name, v)
e = renpy.style.expansions[k]
# We only need to check the first function.
for prio, propn, func in e:
if func:
v = func(v)
break
if isinstance(v, renpy.display.core.Displayable):
check_displayable(kname, v)
def check_styles():
for name, s in renpy.style.style_map.iteritems():
check_style("Style property style." + name, s)
def lint():
"""
The master lint function, that's responsible for staging all of the
other checks.
"""
renpy.game.lint = True
print codecs.BOM_UTF8
print unicode(renpy.version + " lint report, generated at: " + time.ctime()).encode("utf-8")
# This is used to support the check_image.
global filenames
filenames = { }
for d in renpy.config.searchpath:
for fn in os.listdir(os.path.join(renpy.config.basedir, d)):
filenames[fn.lower()] = fn
# This supports check_hide.
global image_prefixes
image_prefixes = { }
for k in renpy.exports.images:
image_prefixes[k[0]] = True
# Iterate through every statement in the program, processing
# them. We sort them in filename, linenumber order.
all_stmts = [ (i.filename, i.linenumber, i) for i in renpy.game.script.all_stmts ]
all_stmts.sort()
say_words = 0
say_count = 0
menu_count = 0
global report_node
for fn, ln, node in all_stmts:
report_node = node
if isinstance(node, renpy.ast.Image):
check_image(node)
elif isinstance(node, renpy.ast.Show):
check_show(node)
elif isinstance(node, renpy.ast.Scene):
check_show(node)
elif isinstance(node, renpy.ast.Hide):
check_hide(node)
elif isinstance(node, renpy.ast.With):
check_with(node)
elif isinstance(node, renpy.ast.Say):
check_say(node)
say_count += 1
say_words += len(node.what.split())
elif isinstance(node, renpy.ast.Menu):
check_menu(node)
menu_count += 1
elif isinstance(node, renpy.ast.Jump):
check_jump(node)
elif isinstance(node, renpy.ast.Call):
check_call(node)
elif isinstance(node, renpy.ast.While):
check_while(node)
elif isinstance(node, renpy.ast.If):
check_if(node)
elif isinstance(node, renpy.ast.UserStatement):
check_user(node)
report_node = None
check_styles()
for f in renpy.config.lint_hooks:
f()
print
print
print "Statistics:"
print
print "The game contains", say_count, "screens of dialogue."
print "These screens contain a total of", say_words, "words,"
if say_count > 0:
print "for an average of %.1f words per screen." % (1.0 * say_words / say_count)
print "The game contains", menu_count, "menus."
print
if renpy.config.developer:
print "Remember to set config.developer to False before releasing."
print
print "Lint is not a substitute for thorough testing. Remember to update Ren'Py"
print "before releasing. New releases fix bugs and improve compatibility."
| MSEMJEJME/ReAlistair | renpy/lint.py | Python | gpl-2.0 | 13,359 |
# coding=utf-8
from datetime import datetime
from euphorie.client import model
from euphorie.client.tests.utils import addAccount
from euphorie.client.tests.utils import addSurvey
from euphorie.content.tests.utils import BASIC_SURVEY
from euphorie.testing import EuphorieIntegrationTestCase
from lxml import html
from plone import api
from Products.Five.browser.metaconfigure import ViewNotCallableError
from time import sleep
from zope.event import notify
from zope.lifecycleevent import ObjectModifiedEvent
class TestSurveyViews(EuphorieIntegrationTestCase):
def test_survey_publication_date_views(self):
"""We have some views to display and set the published column
for a survey session
"""
with api.env.adopt_user("admin"):
survey = addSurvey(self.portal, BASIC_SURVEY)
account = addAccount(password="secret")
survey_session = model.SurveySession(
id=123,
title=u"Dummy session",
created=datetime(2012, 4, 22, 23, 5, 12),
modified=datetime(2012, 4, 23, 11, 50, 30),
zodb_path="nl/ict/software-development",
account=account,
company=model.Company(country="nl", employees="1-9", referer="other"),
)
model.Session.add(survey_session)
survey = self.portal.client.nl.ict["software-development"]
session_id = "++session++%d" % survey_session.id
traversed_survey_session = survey.restrictedTraverse(session_id)
with api.env.adopt_user(user=survey_session.account):
with self._get_view(
"publication_date", traversed_survey_session, survey_session
) as view:
# The view is not callable but
# has traversable allowed attributes
self.assertRaises(ViewNotCallableError, view)
# We have some default values that will be changed
# when publishing/unpublishing the session
self.assertEqual(survey_session.last_publisher, None)
self.assertEqual(survey_session.published, None)
self.assertEqual(survey_session.last_modifier, None)
self.assertEqual(survey_session.review_state, "private")
# Calling set_date will result in having this session published
# and the publication time and the publisher will be recorded
# If no referer is set,
# the methods will redirect to the context url
self.assertEqual(
view.set_date(),
"{url}/{session_id}".format(
url=survey.absolute_url(), session_id=session_id
),
)
self.assertEqual(survey_session.last_publisher, survey_session.account)
self.assertIsInstance(survey_session.published, datetime)
self.assertEqual(survey_session.review_state, "published")
old_modified = survey_session.modified
old_published = survey_session.published
old_modifier = survey_session.last_modifier
# Changing the HTTP_REFERER will redirect there
# and calling reset_date will update the published date
view.request.set("HTTP_REFERER", "foo")
# We need to wait at least one second because the datetime
# is stored with that accuracy
sleep(1)
self.assertEqual(view.reset_date(), "foo")
self.assertEqual(survey_session.last_publisher, survey_session.account)
# The publisher and publication dates are set. The modification date
# is not touched.
self.assertEqual(survey_session.modified, old_modified)
self.assertEqual(survey_session.last_modifier, old_modifier)
self.assertTrue(survey_session.published > old_published)
# Calling unset_date will restore the publication info
self.assertEqual(view.unset_date(), "foo")
self.assertEqual(survey_session.last_publisher, None)
self.assertEqual(survey_session.published, None)
self.assertEqual(survey_session.review_state, "private")
# We also have a menu view
with self._get_view(
"publication_menu", traversed_survey_session, survey_session
) as view:
soup = html.fromstring(view())
self.assertListEqual(
["publication_date/set_date#content"],
[
el.attrib["action"].rpartition("@@")[-1]
for el in soup.cssselect("form")
],
)
                # Mark the session as published
survey_session.published = "foo"
soup = html.fromstring(view())
self.assertListEqual(
[
"publication_date/unset_date#content",
"publication_date/reset_date#content",
],
[
el.attrib["action"].rpartition("@@")[-1]
for el in soup.cssselect("form")
],
)
def test_modify_updates_last_modifier(self):
account = addAccount(password="secret")
survey_session = model.SurveySession(
title=u"Dummy session", account=account, zodb_path=""
)
self.assertEqual(survey_session.modified, None)
self.assertEqual(survey_session.last_modifier, None)
with api.env.adopt_user(user=account):
notify(ObjectModifiedEvent(survey_session))
self.assertIsInstance(survey_session.modified, datetime)
self.assertEqual(survey_session.last_modifier, account)
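        # A further sketch of the same pattern (hypothetical, with an assumed
        # second account): a later modification by another user should
        # overwrite last_modifier, e.g.
        #   other = addAccount(password="secret2")
        #   with api.env.adopt_user(user=other):
        #       notify(ObjectModifiedEvent(survey_session))
        #   self.assertEqual(survey_session.last_modifier, other)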
| euphorie/Euphorie | src/euphorie/client/tests/test_survey_integration.py | Python | gpl-2.0 | 5,946 |
from flask import Response
from flask.views import View
from bson import json_util
from mcp import mongo
class Map(View):
def dispatch_request(self, komuna, viti):
json = mongo.db.procurements.aggregate([
{
"$match": {
"komuna.slug": komuna,
"viti": viti,
"kompania.selia.slug": {'$ne': ''}
}
},
{
"$group": {
"_id": {
"selia": "$kompania.selia.slug",
"emri": "$kompania.selia.emri",
"gjeresi": "$kompania.selia.kordinatat.gjeresi",
"gjatesi": "$kompania.selia.kordinatat.gjatesi",
},
"cmimi": {
"$sum": "$kontrata.qmimi"
},
"vlera": {
"$sum": "$kontrata.vlera"
},
"numriKontratave": {
"$sum": 1
}
}
},
{
"$sort": {
"_id.selia": 1
}
},
{
"$project": {
"selia": "$_id.selia",
"emri": "$_id.emri",
"gjeresia": "$_id.gjeresi",
"gjatesia": "$_id.gjatesi",
"cmimi": "$cmimi",
"vlera": "$vlera",
"numriKontratave": "$numriKontratave",
"_id": 0
}
}
])
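        # Each element of json['result'] is shaped roughly like this
        # (values are hypothetical; keys follow the $project stage above):
        #   {"selia": "prishtine", "emri": "Prishtine", "gjeresia": 42.66,
        #    "gjatesia": 21.16, "cmimi": 120000.0, "vlera": 150000.0,
        #    "numriKontratave": 7}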
json_min_max = mongo.db.procurements.aggregate([
{
"$match": {
"komuna.slug": komuna,
"viti": viti,
"kompania.selia.slug": {'$ne': ''}
}
},
{
"$group": {
"_id": {
"selia": "$kompania.selia.slug",
"gjeresi": "$kompania.selia.kordinatat.gjeresi",
"gjatesi": "$kompania.selia.kordinatat.gjatesi",
},
"sumCmimi": {
"$sum": "$kontrata.qmimi"
},
"sumVlera": {
"$sum": "$kontrata.vlera"
},
"sumNumriKontratave": {
"$sum": 1
}
}
},
{
"$group": {
"_id": {},
"maxCmimi": {
"$max": "$sumCmimi"
},
"maxVlera": {
"$max": "$sumVlera"
},
"maxNumriKontratave": {
"$max": "$sumNumriKontratave"
},
"minCmimi": {
"$min": "$sumCmimi"
},
"minVlera": {
"$min": "$sumVlera"
},
"minNumriKontratave": {
"$min": "$sumNumriKontratave"
},
}
},
{
"$project": {
"_id": 0,
"vlera": {
"min": "$minVlera",
"max": "$maxVlera",
},
"cmimi": {
"min": "$minCmimi",
"max": "$maxCmimi",
},
"numriKontratave": {
"min": "$minNumriKontratave",
"max": "$maxNumriKontratave",
}
}
}
])
        # store the returned response, converted to JSON via
        # json_util.dumps(), in resp
        result_json = {}
result_json['bounds'] = json_min_max['result'][0]
result_json['result'] = json['result']
resp = Response(
response=json_util.dumps(result_json),
mimetype='application/json')
return resp
| opendatakosovo/municipality-procurement-api | mcp/views/map.py | Python | gpl-2.0 | 4,266 |
# -*- coding: utf-8 -*-
#
# M4Baker
# Copyright (C) 2010 Kilian Lackhove
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
Module implementing MainWindow.
"""
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from Ui_mainWindow import Ui_MainWindow
from baseclasses import *
from splitDialog import splitDialog
from aboutDialog import aboutDialog
TITLE, CHAPTER, TRACK, DURATION, STARTTIME, FILENAME, ENDTIME = range(7)
def makeClickable(widget):
class clickFilter(QObject):
clicked = pyqtSignal()
def eventFilter(self, obj, event):
if obj == widget:
if event.type() == QEvent.MouseButtonRelease:
self.clicked.emit()
return True
return False
filter = clickFilter(widget)
widget.installEventFilter(filter)
return filter.clicked
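# Illustrative usage (mirrors the call in MainWindow.__init__ below): wrap any
# plain QWidget, e.g. a QLabel, to get a clickable signal without subclassing:
#   makeClickable(self.coverLabel).connect(self.on_coverLabel_clicked)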
class MainWindow(QMainWindow, Ui_MainWindow):
"""
Class documentation goes here.
"""
def __init__(self, parent = None):
"""
Constructor
"""
class delkeyFilter(QObject):
delkeyPressed = pyqtSignal()
def eventFilter(self, obj, event):
if event.type() == QEvent.KeyPress:
if event.key() == Qt.Key_Delete:
self.delkeyPressed.emit()
return True
return False
class returnkeyFilter(QObject):
def eventFilter(self, obj, event):
if event.type() == QEvent.KeyPress:
if event.key() == Qt.Key_Return:
current = obj.currentIndex()
current = obj.indexBelow(current)
obj.setCurrentIndex(current)
return False
self.audiobookList = audiobookContainer()
self.currentDir = os.getcwd()
QMainWindow.__init__(self, parent)
self.setupUi(self)
self.stackedWidget.setCurrentWidget(self.infoPage)
makeClickable(self.coverLabel).connect(self.on_coverLabel_clicked)
self.model = audiobookTreeModel()
self.dataTreeView.setModel(self.model)
self.progessDelegate = progressBarDelegate()
self.dataTreeView.setItemDelegateForColumn(1, self.progessDelegate)
self.connect(self.dataTreeView.selectionModel(),
SIGNAL('currentChanged(QModelIndex, QModelIndex)'),
self.on_dataTreeView_currentItemChanged)
self.connect(self.model, SIGNAL('dataChanged(QModelIndex,QModelIndex)'), self.dataChanged)
self.connect(self.model, SIGNAL('expand(QModelIndex)'), self.dataTreeView.expand)
#trying the new style of connecting signals
self.model.processingDone.connect(self.on_processingDone)
self.delfilter = delkeyFilter()
self.dataTreeView.installEventFilter(self.delfilter)
self.connect(self.delfilter, SIGNAL('delkeyPressed()'),
self.on_actionRemove_triggered)
self.returnFilter = returnkeyFilter()
self.dataTreeView.installEventFilter(self.returnFilter)
#allow only numbers in yearEdit
self.yearEdit.setValidator(QRegExpValidator(QRegExp(r'\d*'), self))
#set icons
self.actionMoveDown.setIcon(QIcon.fromTheme('go-down'))
self.actionMoveUp_2.setIcon(QIcon.fromTheme('go-up'))
#TODO: clean the name of this action
self.actionRemove.setIcon(QIcon.fromTheme('edit-delete'))
self.actionAddAudiobook.setIcon(QIcon.fromTheme('address-book-new'))
self.actionAddChapter.setIcon(QIcon.fromTheme('document-new'))
self.action_About.setIcon(QIcon.fromTheme('help-about'))
self.action_help.setIcon(QIcon.fromTheme('help-browser'))
self.actionExit.setIcon(QIcon.fromTheme('application-exit'))
self.actionProcess.setIcon(QIcon.fromTheme('system-run'))
self.chapterFileButton.setIcon(QIcon.fromTheme('document-open'))
self.outfileButton.setIcon(QIcon.fromTheme('document-open'))
self.updateTree()
def okToQuit(self):
        reply = QMessageBox.question(self, "M4Baker - really quit?",
                                     "Really quit?", QMessageBox.Yes | QMessageBox.Cancel)
if reply == QMessageBox.Cancel:
return False
elif reply == QMessageBox.Yes:
return True
def closeEvent(self, event):
if not self.okToQuit():
event.ignore()
@pyqtSignature("")
def on_actionAddAudiobook_triggered(self):
"""
        Prompt for audio files and add them to the list as a new audiobook.
"""
current = self.dataTreeView.currentIndex()
formats = ["*%s" % format for format in supportedInputFiles]
fnames = QFileDialog.getOpenFileNames(
self,
"Choose audio files to create audiobook from",
self.currentDir,
'audio files (%s)' % " ".join(formats))
if fnames:
#fnames = [unicode(element) for element in fnames]
self.currentDir = fnames[-1].section(os.sep,0,-2)
newbook = audiobook([chapter(element) for element in fnames])
self.model.addAudiobooks(newbook, current)
self.updateTree()
@pyqtSignature("")
def on_actionMoveDown_triggered(self):
"""
        Move the selected audiobooks or chapters one position down.
"""
indexes = self.dataTreeView.selectionModel().selectedIndexes()
#clean indexes list from double entries
cleanIndexes = []
for index in indexes:
if index.column() == 0:
cleanIndexes.append(index)
indexes = cleanIndexes
self.model.move(indexes, 'down')
@pyqtSignature("")
def on_actionRemove_triggered(self):
"""
        Remove the selected audiobooks or chapters from the list.
"""
current = self.dataTreeView.currentIndex()
indexes = self.dataTreeView.selectionModel().selectedIndexes()
#clean indexes list from double entries
cleanIndexes = []
for index in indexes:
if index.column() == 0:
cleanIndexes.append(index)
indexes = cleanIndexes
self.model.remove(indexes)
self.updateTree()
@pyqtSignature("")
def on_actionAddChapter_triggered(self):
"""
        Prompt for audio files and append them as chapters to the current audiobook.
"""
formats = ["*%s" % format for format in supportedInputFiles]
fnames = QFileDialog.getOpenFileNames(
self,
"Choose audio files to append to audiobook",
self.currentDir,
'audio files (%s)' % " ".join(formats))
if fnames:
self.currentDir = fnames[-1].section(os.sep,0,-2)
#fnames = [unicode(element) for element in fnames]
chaplist = [chapter(element) for element in fnames]
current = self.dataTreeView.currentIndex()
self.model.addChapters(chaplist, current)
self.updateTree()
#TODO: maybe it is smarter to add the chapter after current item?
@pyqtSignature("")
def on_actionSortByFilename_triggered(self):
"""
        Sort the chapters of the current audiobook by filename.
"""
current = self.dataTreeView.currentIndex()
self.model.sort(current, 'filename')
self.updateTree()
@pyqtSignature("")
def on_actionSortByTracknumber_triggered(self):
"""
        Sort the chapters of the current audiobook by track number.
"""
current = self.dataTreeView.currentIndex()
self.model.sort(current, 'trackNumber')
self.updateTree()
@pyqtSignature("")
def on_actionProcess_triggered(self):
"""
        Lock the UI and start processing all queued audiobooks.
"""
uiElements = (self.actionAddChapter, self.actionMoveDown,
self.actionMoveUp_2, self.actionProcess, self.actionRemove, self.actionSortByFilename,
self.actionSortByTracknumber, self.actionSplit, self.actionAddAudiobook)
for element in uiElements:
element.setEnabled(False)
#switch to about docker to prevent data from being changed
self.stackedWidget.setCurrentWidget(self.infoPage)
#disable treeview
self.dataTreeView.setEnabled(False)
self.model.process()
@pyqtSignature("")
def on_actionMoveUp_2_triggered(self):
"""
        Move the selected audiobooks or chapters one position up.
"""
indexes = self.dataTreeView.selectionModel().selectedIndexes()
#clean indexes list from double entries
cleanIndexes = []
for index in indexes:
if index.column() == 0:
cleanIndexes.append(index)
indexes = cleanIndexes
self.model.move(indexes, 'up')
def populateChapterProperties(self):
#current must be a chapter, otherwise this method wont be called
current = self.dataTreeView.currentIndex()
title = self.model.data(self.model.index(current.row(), TITLE, current.parent()),
Qt.DisplayRole).toString()
startTime = self.model.data(self.model.index(current.row(), STARTTIME, current.parent()),
Qt.DisplayRole).toString()
duration = self.model.data(self.model.index(current.row(), DURATION, current.parent()),
Qt.DisplayRole).toString()
filename = self.model.data(self.model.index(current.row(), FILENAME, current.parent()),
Qt.DisplayRole).toString()
        endTime = self.model.data(self.model.index(current.row(), TITLE, current.parent()),
            Qt.UserRole)['endTime']
endTime = u'%.2d:%.2d:%#06.3f' % secConverter(endTime)
self.chapterTitleEdit.setText(title)
self.startTimeEdit.setText(startTime)
self.durationEdit.setText(duration)
self.chapterFileEdit.setText(filename)
self.endTimeEdit.setText(endTime)
def populateAudiobookProperties(self):
current = self.dataTreeView.currentIndex()
title = self.model.data(self.model.index(current.row(), TITLE, current.parent()),
Qt.UserRole)['title']
booknum = self.model.data(self.model.index(current.row(), TITLE, current.parent()),
Qt.UserRole)['booknum']
author = self.model.data(self.model.index(current.row(), TITLE, current.parent()),
Qt.UserRole)['author']
encodeString = self.model.data(self.model.index(current.row(), TITLE, current.parent()),
Qt.UserRole)['encodeString']
outfileName = self.model.data(self.model.index(current.row(), TITLE, current.parent()),
Qt.UserRole)['outfileName']
year = self.model.data(self.model.index(current.row(), TITLE, current.parent()),
Qt.UserRole)['year']
self.authorEdit.setText(author)
self.titleEdit.setText(title)
self.yearEdit.setText(year)
self.faacEdit.setText(encodeString)
self.outfileEdit.setText(outfileName)
pixmap = self.model.data(self.model.index(current.row(), 0, current.parent()), Qt.UserRole).get('cover')
if pixmap:
pixmap = self.model.data(self.model.index(current.row(), 0, current.parent()), Qt.UserRole)['cover']
width = self.coverLabel.size().width()
pixmap = pixmap.scaledToWidth(width)
self.coverLabel.setPixmap(pixmap)
else:
self.coverLabel.setText('(click to change)')
@pyqtSignature("QModelIndex*, QModelIndex*")
def on_dataTreeView_currentItemChanged(self, current, previous):
"""
        Update enabled actions and the properties page for the newly selected item.
"""
uiElements = (self.actionAddChapter, self.actionMoveDown,
self.actionMoveUp_2, self.actionProcess, self.actionRemove, self.actionSortByFilename,
self.actionSortByTracknumber, self.actionSplit)
if not current.isValid():
#current is rootItem
for element in uiElements:
element.setDisabled(True)
return
else:
for element in uiElements:
element.setEnabled(True)
if not current.parent().isValid():
#current is audiobook
self.stackedWidget.setCurrentWidget(self.audiobookPropertiesPage)
self.populateAudiobookProperties()
if current.row() == 0:
#current is first audiobook
self.actionMoveUp_2.setEnabled(False)
if current.row() == self.model.rowCount(current.parent()) -1:
#current is last audiobook
self.actionMoveDown.setEnabled(False)
else:
#current is chapter
self.stackedWidget.setCurrentWidget(self.chapterPropertiesPage)
self.populateChapterProperties()
if current.row() == 0:
#current is the first chapter of its book
if current.parent().row() == 0:
#current is the first chapter of the first book
self.actionMoveUp_2.setEnabled(False)
if current.row() == self.model.rowCount(current.parent()) -1:
#current is the last chapter of its book
if current.parent().row() == self.model.rowCount(current.parent().parent()) -1:
#current is the last chapter of the last book
self.actionMoveDown.setEnabled(False)
@pyqtSignature("")
def on_chapterFileButton_clicked(self):
"""
Slot documentation goes here.
"""
current = self.dataTreeView.currentIndex()
formats = ["*%s" % format for format in supportedInputFiles]
fname = QFileDialog.getOpenFileName(
self,
"change chapter source file",
self.currentDir,
'audio files (%s)' % " ".join(formats))
if not fname.isEmpty():
self.currentDir = fname.section(os.sep,0,-2)
self.model.setData(self.model.index(current.row(), FILENAME, current.parent()), QVariant(fname))
self.populateChapterProperties()
@pyqtSignature("")
def on_outfileButton_clicked(self):
"""
Slot documentation goes here.
"""
current = self.dataTreeView.currentIndex()
fname = QFileDialog.getSaveFileName(
self,
'choose audiobook output file',
self.currentDir,
"Audiobook files (*.m4b)")
if not fname.isEmpty():
self.currentDir = fname.section(os.sep,0,-2)
if not fname.endsWith('.m4b'):
fname += ".m4b"
self.model.setData(self.model.index(current.row(), FILENAME, current.parent()), QVariant(fname))
self.populateAudiobookProperties()
@pyqtSignature("")
def on_action_About_triggered(self):
dialog = aboutDialog()
if dialog.exec_():
pass
@pyqtSignature("")
def on_actionSplit_triggered(self):
"""
Slot documentation goes here.
"""
current = self.dataTreeView.currentIndex()
if not current.parent().isValid():
#audiobook
pass
else:
#chapter
current = current.parent()
minSplitDuration = self.model.data(current, Qt.UserRole)['minSplitDuration']
hours, minutes, seconds = secConverter(minSplitDuration)
minSplitDuration = QTime(hours, minutes, seconds+1)
dialog = splitDialog(minSplitDuration)
if dialog.exec_():
maxSplitDuration = dialog.getMaxSplitDuration()
self.model.split(current, maxSplitDuration)
self.updateTree()
@pyqtSignature("")
def on_coverLabel_clicked(self):
current = self.dataTreeView.currentIndex()
fname = QFileDialog.getOpenFileName(
self,
"Choose a cover file",
self.currentDir,
"image files (*.png *.jpg *.jpeg *.bmp *.gif *.pbm *.pgm *ppm *xpm *xpm)",
"cover.png"
)
if not fname.isEmpty():
self.currentDir = fname.section(os.sep,0,-2)
self.model.setData(self.model.index(current.row(), 0, current.parent()),
{'cover':QPixmap(fname)}, Qt.UserRole)
self.populateAudiobookProperties()
def updateTree(self):
for i in range(6):
self.dataTreeView.resizeColumnToContents(i)
def dataChanged(self, topLeft, bottomRight):
current = self.dataTreeView.currentIndex()
if not current.parent().isValid():
#audiobook
self.populateAudiobookProperties()
else:
#chapter
self.populateChapterProperties()
def on_processingDone(self):
self.actionProcess.setEnabled(True)
self.actionAddAudiobook.setEnabled(True)
self.dataTreeView.setEnabled(True)
self.dataTreeView.reset()
@pyqtSignature("")
def on_chapterTitleEdit_editingFinished(self):
"""
Slot documentation goes here.
"""
current = self.dataTreeView.currentIndex()
text = self.chapterTitleEdit.text()
self.model.setData(self.model.index(current.row(), TITLE, current.parent()), QVariant(text))
@pyqtSignature("")
def on_faacEdit_editingFinished(self):
"""
Slot documentation goes here.
"""
text = self.faacEdit.text()
current = self.dataTreeView.currentIndex()
value = {'encodeString':QVariant(text)}
self.model.setData(self.model.index(current.row(), 0, QModelIndex()), value, Qt.UserRole)
@pyqtSignature("")
def on_titleEdit_editingFinished(self):
"""
Slot documentation goes here.
"""
text = self.titleEdit.text()
current = self.dataTreeView.currentIndex()
self.model.setData(self.model.index(current.row(), TITLE, QModelIndex()), QVariant(text))
@pyqtSignature("")
    def on_yearEdit_editingFinished(self):
        """
        Store the edited year on the current audiobook.
        """
        text = self.yearEdit.text()
        current = self.dataTreeView.currentIndex()
        #year is kept in the UserRole dict, like author and encodeString
        value = {'year':QVariant(text)}
        self.model.setData(self.model.index(current.row(), 0, QModelIndex()), value, Qt.UserRole)
@pyqtSignature("")
def on_authorEdit_editingFinished(self):
"""
Slot documentation goes here.
"""
text = self.authorEdit.text()
current = self.dataTreeView.currentIndex()
value = {'author':QVariant(text)}
self.model.setData(self.model.index(current.row(), 0, QModelIndex()), value, Qt.UserRole)
@pyqtSignature("")
def on_action_help_triggered(self):
"""
Slot documentation goes here.
"""
self.stackedWidget.setCurrentWidget(self.infoPage)
| crabmanX/m4baker | src/mainWindow.py | Python | gpl-2.0 | 21,163 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.auth import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', 'lite_note.views.home', name='home'),
    url(r'^test', 'lite_note.views.new_home', name='new_home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^login/', views.login, name='login'),
url(r'^logout/', views.logout, {'next_page': 'home'}, name='logout'),
url(r'^register/', 'regsiter.views.registration', name='registration_register'),
url(r'^create/', 'lite_note.views.create_note', name='create_note'),
url(r'^unknown/', 'lite_note.views.enter_anonymous_user', name='enter_anonymous'),
url(r'^note/(?P<id>[0-9]+)/', 'lite_note.views.note', name='note'),
    url(r'^delete/(?P<id>[0-9]+)', 'lite_note.tools.delet_note'),
    url(r'^private/(?P<id>[0-9]+)', 'lite_note.tools.make_private_note'),
    url(r'^public/(?P<id>[0-9]+)', 'lite_note.tools.make_public_note'),
    url(r'^favorite/(?P<id>[0-9]+)', 'lite_note.tools.make_favorite_note'),
    url(r'^unfavorite/(?P<id>[0-9]+)', 'lite_note.tools.make_usual_note'),
    url(r'^get_login', 'regsiter.views.request_login'),
    url(r'^get_notes', 'lite_note.views.new_note', name='new_note')
)
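# With the patterns above, e.g. reverse('note', args=[42]) resolves to
# '/note/42/' (a usage sketch, not part of the original file).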
| Shiwin/LiteNote | noties/urls.py | Python | gpl-2.0 | 1,648 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('usermodule', '0002_auto_20151108_2019'),
]
operations = [
migrations.CreateModel(
name='Period',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=10)),
('start_date', models.DateField()),
('end_date', models.DateField()),
('professor', models.ForeignKey(to='usermodule.Professor')),
],
),
]
| Sezzh/tifis_platform | tifis_platform/groupmodule/migrations/0001_initial.py | Python | gpl-2.0 | 711 |
#
# calculator.py : A calculator module for the deskbar applet.
#
# Copyright (C) 2008 by Johannes Buchner
# Copyright (C) 2007 by Michael Hofmann
# Copyright (C) 2006 by Callum McKenzie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Authors:
# Callum McKenzie <[email protected]> - Original author
# Michael Hofmann <[email protected]> - compatibility changes for deskbar 2.20
# Johannes Buchner <[email protected]> - Made externally usable
#
# This version of calculator can be used with converter
# read how at http://twoday.tuwien.ac.at/jo/search?q=calculator+converter+deskbar
#
from __future__ import division
from deskbar.handlers.actions.CopyToClipboardAction import CopyToClipboardAction
from deskbar.defs import VERSION
from gettext import gettext as _
import deskbar.core.Utils
import deskbar.interfaces.Match
import deskbar.interfaces.Module
import logging
import math
import re
LOGGER = logging.getLogger(__name__)
HANDLERS = ["CalculatorModule"]
def bin (n):
"""A local binary equivalent of the hex and oct builtins."""
if (n == 0):
return "0b0"
s = ""
if (n < 0):
while n != -1:
s = str (n & 1) + s
n >>= 1
return "0b" + "...111" + s
else:
while n != 0:
s = str (n & 1) + s
n >>= 1
return "0b" + s
# These next three make sure {hex, oct, bin} can handle floating point,
# by rounding. This makes sure things like hex(255/2) behave as a
# programmer would expect while allowing 255/2 to equal 127.5 for normal
# people. Abstracting out the body of these into a single function which
# takes hex, oct or bin as an argument seems to run into problems with
# those functions not being defined correctly in the restricted eval (?).
def lenient_hex (c):
try:
return hex (c)
except TypeError:
return hex (int (c))
def lenient_oct (c):
try:
return oct (c)
except TypeError:
return oct (int (c))
def lenient_bin (c):
try:
return bin (c)
except TypeError:
return bin (int (c))
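# For example, with true division active (see the __future__ import above),
# 255/2 evaluates to 127.5; plain hex(127.5) raises TypeError, while
# lenient_hex(255/2) falls back to hex(int(127.5)) == '0x7f'.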
class CalculatorAction (CopyToClipboardAction):
def __init__ (self, text, answer):
CopyToClipboardAction.__init__ (self, answer, answer)
self.text = text
def get_verb(self):
return _("Copy <b>%(origtext)s = %(name)s</b> to clipboard")
def get_name(self, text = None):
"""Because the text variable for history entries contains the text
typed for the history search (and not the text of the orginal action),
we store the original text seperately."""
result = CopyToClipboardAction.get_name (self, text)
result["origtext"] = self.text
return result
def get_tooltip(self, text=None):
return self._name
class CalculatorMatch (deskbar.interfaces.Match):
def __init__ (self, text, answer, **kwargs):
deskbar.interfaces.Match.__init__ (self, name = text,
icon = "gtk-add", category = "calculator", **kwargs)
self.answer = str (answer)
self.add_action (CalculatorAction (text, self.answer))
def get_hash (self):
return self.answer
class CalculatorModule (deskbar.interfaces.Module):
INFOS = {"icon": deskbar.core.Utils.load_icon ("gtk-add"),
"name": _("Calculator"),
"description": _("Calculate simple equations"),
"version" : VERSION,
"categories" : { "calculator" : { "name" : _("Calculator") }}}
def __init__ (self):
deskbar.interfaces.Module.__init__ (self)
self.hexre = re.compile ("0[Xx][0-9a-fA-F_]*[0-9a-fA-F]")
self.binre = re.compile ("0[bB][01_]*[01]")
def _number_parser (self, match, base):
"""A generic number parser, regardless of base. It also ignores the
'_' character so it can be used as a separator. Note how we skip
the first two characters since we assume it is something like '0x'
or '0b' and identifies the base."""
table = { '0' : 0, '1' : 1, '2' : 2, '3' : 3, '4' : 4,
'5' : 5, '6' : 6, '7' : 7, '8' : 8, '9' : 9,
'a' : 10, 'b' : 11, 'c' : 12, 'd' : 13,
'e' : 14, 'f' : 15 }
d = 0
for c in match.group()[2:]:
if c != "_":
d = d * base + table[c]
return str (d)
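    # For instance, a match object over "0xff" parsed with base 16 yields
    # "255", and "0b1_01" with base 2 yields "5" (underscores are skipped).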
def _binsub (self, match):
"""Because python doesn't handle binary literals, we parse it
ourselves and replace it with a decimal representation."""
return self._number_parser (match, 2)
def _hexsub (self, match):
"""Parse the hex literal ourselves. We could let python do it, but
since we have a generic parser we use that instead."""
return self._number_parser (match, 16)
def run_query (self, query):
"""We evaluate the equation by first replacing hex and binary literals
with their decimal representation. (We need to check hex, so we can
distinguish 0x10b1 as a hex number, not 0x1 followed by 0b1.) We
severely restrict the eval environment. Any errors are ignored."""
restricted_dictionary = { "__builtins__" : None, "abs" : abs,
"acos" : math.acos, "asin" : math.asin,
"atan" : math.atan, "atan2" : math.atan2,
"bin" : lenient_bin,"ceil" : math.ceil,
"cos" : math.cos, "cosh" : math.cosh,
"degrees" : math.degrees,
"exp" : math.exp, "floor" : math.floor,
"hex" : lenient_hex, "int" : int,
"log" : math.log, "pow" : math.pow,
"log10" : math.log10, "oct" : lenient_oct,
"pi" : math.pi, "radians" : math.radians,
"round": round, "sin" : math.sin,
"sinh" : math.sinh, "sqrt" : math.sqrt,
"tan" : math.tan, "tanh" : math.tanh}
try:
scrubbedquery = query.lower()
scrubbedquery = self.hexre.sub (self._hexsub, scrubbedquery)
scrubbedquery = self.binre.sub (self._binsub, scrubbedquery)
for (c1, c2) in (("[", "("), ("{", "("), ("]", ")"), ("}", ")")):
scrubbedquery = scrubbedquery.replace (c1, c2)
answer = eval (scrubbedquery, restricted_dictionary)
# Try and avoid echoing back simple numbers. Note that this
# doesn't work well for floating point, e.g. '3.' behaves badly.
if str (answer) == query:
return None
# We need this check because the eval can return function objects
# when we are halfway through typing the expression.
if isinstance (answer, (float, int, long, str)):
return answer
else:
return None
except Exception, e:
LOGGER.debug (str(e))
return None
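    # Usage sketch (assuming a constructed CalculatorModule instance):
    # run_query("0x10 + 0b10") rewrites the literals, evals "16 + 2" and
    # returns 18; run_query("sqrt(2)") returns 1.4142135623730951.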
def query (self, query):
answer = self.run_query(query)
if answer != None:
result = [CalculatorMatch (query, answer)]
self._emit_query_ready (query, result)
return answer
else:
return []
| benpicco/mate-deskbar-applet | deskbar/handlers/calculator.py | Python | gpl-2.0 | 8,080 |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'openPathTool.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(457, 95)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.formLayout = QtGui.QFormLayout(self.centralwidget)
self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout.setObjectName(_fromUtf8("formLayout"))
self.pathInLineEdit = QtGui.QLineEdit(self.centralwidget)
self.pathInLineEdit.setObjectName(_fromUtf8("pathInLineEdit"))
self.formLayout.setWidget(0, QtGui.QFormLayout.SpanningRole, self.pathInLineEdit)
self.pathOutLineEdit = QtGui.QLineEdit(self.centralwidget)
self.pathOutLineEdit.setReadOnly(True)
self.pathOutLineEdit.setObjectName(_fromUtf8("pathOutLineEdit"))
self.formLayout.setWidget(1, QtGui.QFormLayout.SpanningRole, self.pathOutLineEdit)
self.buttonLayout = QtGui.QHBoxLayout()
self.buttonLayout.setObjectName(_fromUtf8("buttonLayout"))
self.explorerButton = QtGui.QPushButton(self.centralwidget)
self.explorerButton.setObjectName(_fromUtf8("explorerButton"))
self.buttonLayout.addWidget(self.explorerButton)
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.buttonLayout.addItem(spacerItem)
self.convertButton = QtGui.QPushButton(self.centralwidget)
self.convertButton.setObjectName(_fromUtf8("convertButton"))
self.buttonLayout.addWidget(self.convertButton)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.buttonLayout.addItem(spacerItem1)
self.closeButton = QtGui.QPushButton(self.centralwidget)
self.closeButton.setObjectName(_fromUtf8("closeButton"))
self.buttonLayout.addWidget(self.closeButton)
self.formLayout.setLayout(2, QtGui.QFormLayout.SpanningRole, self.buttonLayout)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow", None))
self.pathInLineEdit.setPlaceholderText(_translate("MainWindow", "Input Path", None))
self.pathOutLineEdit.setPlaceholderText(_translate("MainWindow", "Output Path", None))
self.explorerButton.setText(_translate("MainWindow", "Open In Explorer", None))
self.convertButton.setText(_translate("MainWindow", "Convert", None))
self.closeButton.setText(_translate("MainWindow", "Close", None))
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
MainWindow = QtGui.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
| david2777/DavidsTools | Standalone/openPathTool/UI_openPathTool.py | Python | gpl-2.0 | 3,656 |
# Copyright 2009-2010 by Peter Cock. All rights reserved.
# Based on code contributed and copyright 2009 by Jose Blanca (COMAV-UPV).
#
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Bio.SeqIO support for the binary Standard Flowgram Format (SFF) file format.
SFF was designed by 454 Life Sciences (Roche), the Whitehead Institute for
Biomedical Research and the Wellcome Trust Sanger Institute. You are expected
to use this module via the Bio.SeqIO functions under the format name "sff" (or
"sff-trim" as described below).
For example, to iterate over the records in an SFF file,
>>> from Bio import SeqIO
>>> for record in SeqIO.parse("Roche/E3MFGYR02_random_10_reads.sff", "sff"):
... print record.id, len(record), record.seq[:20]+"..."
E3MFGYR02JWQ7T 265 tcagGGTCTACATGTTGGTT...
E3MFGYR02JA6IL 271 tcagTTTTTTTTGGAAAGGA...
E3MFGYR02JHD4H 310 tcagAAAGACAAGTGGTATC...
E3MFGYR02GFKUC 299 tcagCGGCCGGGCCTCTCAT...
E3MFGYR02FTGED 281 tcagTGGTAATGGGGGGAAA...
E3MFGYR02FR9G7 261 tcagCTCCGTAAGAAGGTGC...
E3MFGYR02GAZMS 278 tcagAAAGAAGTAAGGTAAA...
E3MFGYR02HHZ8O 221 tcagACTTTCTTCTTTACCG...
E3MFGYR02GPGB1 269 tcagAAGCAGTGGTATCAAC...
E3MFGYR02F7Z7G 219 tcagAATCATCCACTTTTTA...
Each SeqRecord object will contain all the annotation from the SFF file,
including the PHRED quality scores.
>>> print record.id, len(record)
E3MFGYR02F7Z7G 219
>>> print record.seq[:10], "..."
tcagAATCAT ...
>>> print record.letter_annotations["phred_quality"][:10], "..."
[22, 21, 23, 28, 26, 15, 12, 21, 28, 21] ...
Notice that the sequence is given in mixed case, the central upper case region
corresponds to the trimmed sequence. This matches the output of the Roche
tools (and the 3rd party tool sff_extract) for SFF to FASTA.
>>> print record.annotations["clip_qual_left"]
4
>>> print record.annotations["clip_qual_right"]
134
>>> print record.seq[:4]
tcag
>>> print record.seq[4:20], "...", record.seq[120:134]
AATCATCCACTTTTTA ... CAAAACACAAACAG
>>> print record.seq[134:]
atcttatcaacaaaactcaaagttcctaactgagacacgcaacaggggataagacaaggcacacaggggataggnnnnnnnnnnn
The annotations dictionary also contains any adapter clip positions
(usually zero), and information about the flows. e.g.
>>> print record.annotations["flow_key"]
TCAG
>>> print record.annotations["flow_values"][:10], "..."
(83, 1, 128, 7, 4, 84, 6, 106, 3, 172) ...
>>> print len(record.annotations["flow_values"])
400
>>> print record.annotations["flow_index"][:10], "..."
(1, 2, 3, 2, 2, 0, 3, 2, 3, 3) ...
>>> print len(record.annotations["flow_index"])
219
As a convenience method, you can read the file with SeqIO format name "sff-trim"
instead of "sff" to get just the trimmed sequences (without any annotation
except for the PHRED quality scores):
>>> from Bio import SeqIO
>>> for record in SeqIO.parse("Roche/E3MFGYR02_random_10_reads.sff", "sff-trim"):
... print record.id, len(record), record.seq[:20]+"..."
E3MFGYR02JWQ7T 260 GGTCTACATGTTGGTTAACC...
E3MFGYR02JA6IL 265 TTTTTTTTGGAAAGGAAAAC...
E3MFGYR02JHD4H 292 AAAGACAAGTGGTATCAACG...
E3MFGYR02GFKUC 295 CGGCCGGGCCTCTCATCGGT...
E3MFGYR02FTGED 277 TGGTAATGGGGGGAAATTTA...
E3MFGYR02FR9G7 256 CTCCGTAAGAAGGTGCTGCC...
E3MFGYR02GAZMS 271 AAAGAAGTAAGGTAAATAAC...
E3MFGYR02HHZ8O 150 ACTTTCTTCTTTACCGTAAC...
E3MFGYR02GPGB1 221 AAGCAGTGGTATCAACGCAG...
E3MFGYR02F7Z7G 130 AATCATCCACTTTTTAACGT...
Looking at the final record in more detail, note how this differs to the
example above:
>>> print record.id, len(record)
E3MFGYR02F7Z7G 130
>>> print record.seq[:10], "..."
AATCATCCAC ...
>>> print record.letter_annotations["phred_quality"][:10], "..."
[26, 15, 12, 21, 28, 21, 36, 28, 27, 27] ...
>>> print record.annotations
{}
You might use the Bio.SeqIO.convert() function to convert the (trimmed) SFF
reads into a FASTQ file (or a FASTA file and a QUAL file), e.g.
>>> from Bio import SeqIO
>>> from StringIO import StringIO
>>> out_handle = StringIO()
>>> count = SeqIO.convert("Roche/E3MFGYR02_random_10_reads.sff", "sff",
... out_handle, "fastq")
>>> print "Converted %i records" % count
Converted 10 records
The output FASTQ file would start like this:
>>> print "%s..." % out_handle.getvalue()[:50]
@E3MFGYR02JWQ7T
tcagGGTCTACATGTTGGTTAACCCGTACTGATT...
Bio.SeqIO.index() provides memory efficient random access to the reads in an
SFF file by name. SFF files can include an index within the file, which can
be read in making this very fast. If the index is missing (or in a format not
yet supported in Biopython) the file is indexed by scanning all the reads -
which is a little slower. For example,
>>> from Bio import SeqIO
>>> reads = SeqIO.index("Roche/E3MFGYR02_random_10_reads.sff", "sff")
>>> record = reads["E3MFGYR02JHD4H"]
>>> print record.id, len(record), record.seq[:20]+"..."
E3MFGYR02JHD4H 310 tcagAAAGACAAGTGGTATC...
Or, using the trimmed reads:
>>> from Bio import SeqIO
>>> reads = SeqIO.index("Roche/E3MFGYR02_random_10_reads.sff", "sff-trim")
>>> record = reads["E3MFGYR02JHD4H"]
>>> print record.id, len(record), record.seq[:20]+"..."
E3MFGYR02JHD4H 292 AAAGACAAGTGGTATCAACG...
You can also use the Bio.SeqIO.write() function with the "sff" format. Note
that this requires all the flow information etc, and thus is probably only
useful for SeqRecord objects originally from reading another SFF file (and
not the trimmed SeqRecord objects from parsing an SFF file as "sff-trim").
As an example, let's pretend this example SFF file represents some DNA which
was pre-amplified with a PCR primers AAAGANNNNN. The following script would
produce a sub-file containing all those reads whose post-quality clipping
region (i.e. the sequence after trimming) starts with AAAGA exactly (the non-
degenerate bit of this pretend primer):
>>> from Bio import SeqIO
>>> records = (record for record in
... SeqIO.parse("Roche/E3MFGYR02_random_10_reads.sff","sff")
... if record.seq[record.annotations["clip_qual_left"]:].startswith("AAAGA"))
>>> count = SeqIO.write(records, "temp_filtered.sff", "sff")
>>> print "Selected %i records" % count
Selected 2 records
Of course, for an assembly you would probably want to remove these primers.
If you want FASTA or FASTQ output, you could just slice the SeqRecord. However,
if you want SFF output we have to preserve all the flow information - the trick
is just to adjust the left clip position!
>>> from Bio import SeqIO
>>> def filter_and_trim(records, primer):
... for record in records:
... if record.seq[record.annotations["clip_qual_left"]:].startswith(primer):
... record.annotations["clip_qual_left"] += len(primer)
... yield record
>>> records = SeqIO.parse("Roche/E3MFGYR02_random_10_reads.sff", "sff")
>>> count = SeqIO.write(filter_and_trim(records,"AAAGA"),
... "temp_filtered.sff", "sff")
>>> print "Selected %i records" % count
Selected 2 records
We can check the results, note the lower case clipped region now includes the "AAAGA"
sequence:
>>> for record in SeqIO.parse("temp_filtered.sff", "sff"):
... print record.id, len(record), record.seq[:20]+"..."
E3MFGYR02JHD4H 310 tcagaaagaCAAGTGGTATC...
E3MFGYR02GAZMS 278 tcagaaagaAGTAAGGTAAA...
>>> for record in SeqIO.parse("temp_filtered.sff", "sff-trim"):
... print record.id, len(record), record.seq[:20]+"..."
E3MFGYR02JHD4H 287 CAAGTGGTATCAACGCAGAG...
E3MFGYR02GAZMS 266 AGTAAGGTAAATAACAAACG...
>>> import os
>>> os.remove("temp_filtered.sff")
For a description of the file format, please see the Roche manuals and:
http://www.ncbi.nlm.nih.gov/Traces/trace.cgi?cmd=show&f=formats&m=doc&s=formats
"""
from Bio.SeqIO.Interfaces import SequenceWriter
from Bio import Alphabet
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
import struct
import sys
from Bio._py3k import _bytes_to_string, _as_bytes
_null = _as_bytes("\0")
_sff = _as_bytes(".sff")
_hsh = _as_bytes(".hsh")
_srt = _as_bytes(".srt")
_mft = _as_bytes(".mft")
#This is a hack because char 255 is special in unicode:
try:
#This works on Python 2.6+ or Python 3.0
_flag = eval(r'b"\xff"')
except SyntaxError:
#Must be on Python 2.4 or 2.5
_flag = "\xff" #Char 255
def _sff_file_header(handle):
"""Read in an SFF file header (PRIVATE).
Assumes the handle is at the start of the file, will read forwards
    through the header and leave the handle pointing at the first record.
Returns a tuple of values from the header (header_length, index_offset,
index_length, number_of_reads, flows_per_read, flow_chars, key_sequence)
>>> handle = open("Roche/greek.sff", "rb")
>>> values = _sff_file_header(handle)
>>> print values[0]
840
>>> print values[1]
65040
>>> print values[2]
256
>>> print values[3]
24
>>> print values[4]
800
>>> values[-1]
'TCAG'
"""
if hasattr(handle,"mode") and "U" in handle.mode.upper():
raise ValueError("SFF files must NOT be opened in universal new "
"lines mode. Binary mode is recommended (although "
"on Unix the default mode is also fine).")
elif hasattr(handle,"mode") and "B" not in handle.mode.upper() \
and sys.platform == "win32":
raise ValueError("SFF files must be opened in binary mode on Windows")
#file header (part one)
    #use big endian encoding >
#magic_number I
#version 4B
#index_offset Q
#index_length I
#number_of_reads I
#header_length H
#key_length H
#number_of_flows_per_read H
#flowgram_format_code B
#[rest of file header depends on the number of flows and how many keys]
fmt = '>4s4BQIIHHHB'
assert 31 == struct.calcsize(fmt)
data = handle.read(31)
if not data:
raise ValueError("Empty file.")
elif len(data) < 13:
raise ValueError("File too small to hold a valid SFF header.")
magic_number, ver0, ver1, ver2, ver3, index_offset, index_length, \
number_of_reads, header_length, key_length, number_of_flows_per_read, \
flowgram_format = struct.unpack(fmt, data)
if magic_number in [_hsh, _srt, _mft]:
#Probably user error, calling Bio.SeqIO.parse() twice!
raise ValueError("Handle seems to be at SFF index block, not start")
if magic_number != _sff: # 779314790
raise ValueError("SFF file did not start '.sff', but %s" \
% repr(magic_number))
if (ver0, ver1, ver2, ver3) != (0, 0, 0, 1):
raise ValueError("Unsupported SFF version in header, %i.%i.%i.%i" \
% (ver0, ver1, ver2, ver3))
if flowgram_format != 1:
raise ValueError("Flowgram format code %i not supported" \
% flowgram_format)
if (index_offset!=0) ^ (index_length!=0):
raise ValueError("Index offset %i but index length %i" \
% (index_offset, index_length))
flow_chars = _bytes_to_string(handle.read(number_of_flows_per_read))
key_sequence = _bytes_to_string(handle.read(key_length))
#According to the spec, the header_length field should be the total number
#of bytes required by this set of header fields, and should be equal to
#"31 + number_of_flows_per_read + key_length" rounded up to the next value
#divisible by 8.
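    #(e.g. for the greek.sff doctest above: 31 + 800 + 4 = 835, rounded up
    #to the header_length of 840)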
assert header_length % 8 == 0
padding = header_length - number_of_flows_per_read - key_length - 31
assert 0 <= padding < 8, padding
if handle.read(padding).count(_null) != padding:
raise ValueError("Post header %i byte padding region contained data" \
% padding)
return header_length, index_offset, index_length, \
number_of_reads, number_of_flows_per_read, \
flow_chars, key_sequence
#This is a generator function!
def _sff_do_slow_index(handle):
"""Generates an index by scanning though all the reads in an SFF file (PRIVATE).
This is a slow but generic approach if we can't parse the provided index
(if present).
Will use the handle seek/tell functions.
"""
handle.seek(0)
header_length, index_offset, index_length, number_of_reads, \
number_of_flows_per_read, flow_chars, key_sequence \
= _sff_file_header(handle)
#Now on to the reads...
read_header_fmt = '>2HI4H'
read_header_size = struct.calcsize(read_header_fmt)
#NOTE - assuming flowgram_format==1, which means struct type H
read_flow_fmt = ">%iH" % number_of_flows_per_read
read_flow_size = struct.calcsize(read_flow_fmt)
assert 1 == struct.calcsize(">B")
assert 1 == struct.calcsize(">s")
assert 1 == struct.calcsize(">c")
assert read_header_size % 8 == 0 #Important for padding calc later!
for read in range(number_of_reads):
record_offset = handle.tell()
if record_offset == index_offset:
#Found index block within reads, ignore it:
offset = index_offset + index_length
if offset % 8:
offset += 8 - (offset % 8)
assert offset % 8 == 0
handle.seek(offset)
record_offset = offset
#assert record_offset%8 == 0 #Worth checking, but slow
#First the fixed header
data = handle.read(read_header_size)
read_header_length, name_length, seq_len, clip_qual_left, \
clip_qual_right, clip_adapter_left, clip_adapter_right \
= struct.unpack(read_header_fmt, data)
if read_header_length < 10 or read_header_length % 8 != 0:
raise ValueError("Malformed read header, says length is %i:\n%s" \
% (read_header_length, repr(data)))
#now the name and any padding (remainder of header)
name = _bytes_to_string(handle.read(name_length))
padding = read_header_length - read_header_size - name_length
if handle.read(padding).count(_null) != padding:
raise ValueError("Post name %i byte padding region contained data" \
% padding)
assert record_offset + read_header_length == handle.tell()
#now the flowgram values, flowgram index, bases and qualities
size = read_flow_size + 3*seq_len
handle.seek(size, 1)
#now any padding...
padding = size % 8
if padding:
padding = 8 - padding
if handle.read(padding).count(_null) != padding:
raise ValueError("Post quality %i byte padding region contained data" \
% padding)
#print read, name, record_offset
yield name, record_offset
if handle.tell() % 8 != 0:
raise ValueError("After scanning reads, did not end on a multiple of 8")
def _sff_find_roche_index(handle):
"""Locate any existing Roche style XML meta data and read index (PRIVATE).
Makes a number of hard coded assumptions based on reverse engineered SFF
files from Roche 454 machines.
Returns a tuple of read count, SFF "index" offset and size, XML offset
and size, and the actual read index offset and size.
Raises a ValueError for unsupported or non-Roche index blocks.
"""
handle.seek(0)
header_length, index_offset, index_length, number_of_reads, \
number_of_flows_per_read, flow_chars, key_sequence \
= _sff_file_header(handle)
assert handle.tell() == header_length
    if not index_offset or not index_length:
raise ValueError("No index present in this SFF file")
#Now jump to the header...
handle.seek(index_offset)
fmt = ">4s4B"
fmt_size = struct.calcsize(fmt)
data = handle.read(fmt_size)
if not data:
raise ValueError("Premature end of file? Expected index of size %i at offest %i, found nothing" \
% (index_length, index_offset))
if len(data) < fmt_size:
raise ValueError("Premature end of file? Expected index of size %i at offest %i, found %s" \
% (index_length, index_offset, repr(data)))
magic_number, ver0, ver1, ver2, ver3 = struct.unpack(fmt, data)
if magic_number == _mft: # 778921588
#Roche 454 manifest index
#This is typical from raw Roche 454 SFF files (2009), and includes
#both an XML manifest and the sorted index.
if (ver0, ver1, ver2, ver3) != (49, 46, 48, 48):
#This is "1.00" as a string
raise ValueError("Unsupported version in .mft index header, %i.%i.%i.%i" \
% (ver0, ver1, ver2, ver3))
fmt2 = ">LL"
fmt2_size = struct.calcsize(fmt2)
xml_size, data_size = struct.unpack(fmt2, handle.read(fmt2_size))
if index_length != fmt_size + fmt2_size + xml_size + data_size:
raise ValueError("Problem understanding .mft index header, %i != %i + %i + %i + %i" \
% (index_length, fmt_size, fmt2_size, xml_size, data_size))
return number_of_reads, header_length, \
index_offset, index_length, \
index_offset + fmt_size + fmt2_size, xml_size, \
index_offset + fmt_size + fmt2_size + xml_size, data_size
elif magic_number == _srt: #779317876
#Roche 454 sorted index
#I've had this from Roche tool sfffile when the read identifiers
#had nonstandard lengths and there was no XML manifest.
if (ver0, ver1, ver2, ver3) != (49, 46, 48, 48):
#This is "1.00" as a string
raise ValueError("Unsupported version in .srt index header, %i.%i.%i.%i" \
% (ver0, ver1, ver2, ver3))
data = handle.read(4)
if data != _null*4:
raise ValueError("Did not find expected null four bytes in .srt index")
return number_of_reads, header_length, \
index_offset, index_length, \
0, 0, \
index_offset + fmt_size + 4, index_length - fmt_size - 4
elif magic_number == _hsh:
raise ValueError("Hash table style indexes (.hsh) in SFF files are "
"not (yet) supported")
else:
raise ValueError("Unknown magic number %s in SFF index header:\n%s" \
% (repr(magic_number), repr(data)))
def _sff_read_roche_index_xml(handle):
"""Reads any existing Roche style XML manifest data in the SFF "index" (PRIVATE, DEPRECATED).
Will use the handle seek/tell functions. Returns a string.
This has been replaced by ReadRocheXmlManifest. We would normally just
delete an old private function without warning, but I believe some people
are using this so we'll handle this with a deprecation warning.
"""
import warnings
warnings.warn("Private function _sff_read_roche_index_xml is deprecated. "
"Use new public function ReadRocheXmlManifest instead",
DeprecationWarning)
return ReadRocheXmlManifest(handle)
def ReadRocheXmlManifest(handle):
"""Reads any Roche style XML manifest data in the SFF "index".
The SFF file format allows for multiple different index blocks, and Roche
    took advantage of this to define their own index block which also embeds
    an XML manifest string. This is not a publicly documented extension to
the SFF file format, this was reverse engineered.
The handle should be to an SFF file opened in binary mode. This function
will use the handle seek/tell functions and leave the handle in an
arbitrary location.
Any XML manifest found is returned as a Python string, which you can then
parse as appropriate, or reuse when writing out SFF files with the
SffWriter class.
    Returns a string, or raises a ValueError if a Roche manifest could not be
found.
"""
number_of_reads, header_length, index_offset, index_length, xml_offset, \
xml_size, read_index_offset, read_index_size = _sff_find_roche_index(handle)
if not xml_offset or not xml_size:
raise ValueError("No XML manifest found")
handle.seek(xml_offset)
return _bytes_to_string(handle.read(xml_size))
#This is a generator function!
def _sff_read_roche_index(handle):
"""Reads any existing Roche style read index provided in the SFF file (PRIVATE).
Will use the handle seek/tell functions.
This works on ".srt1.00" and ".mft1.00" style Roche SFF index blocks.
    Roche SFF indices use base 255 not 256, meaning we see bytes in the
range 0 to 254 only. This appears to be so that byte 0xFF (character 255)
can be used as a marker character to separate entries (required if the
read name lengths vary).
Note that since only four bytes are used for the read offset, this is
limited to 255^4 bytes (nearly 4GB). If you try to use the Roche sfffile
    tool to combine SFF files beyond this limit, it issues a warning and
omit the index (and manifest).
"""
number_of_reads, header_length, index_offset, index_length, xml_offset, \
xml_size, read_index_offset, read_index_size = _sff_find_roche_index(handle)
#Now parse the read index...
handle.seek(read_index_offset)
fmt = ">5B"
for read in range(number_of_reads):
#TODO - Be more aware of when the index should end?
data = handle.read(6)
while True:
more = handle.read(1)
if not more:
raise ValueError("Premature end of file!")
data += more
if more == _flag: break
assert data[-1:] == _flag, data[-1:]
name = _bytes_to_string(data[:-6])
off4, off3, off2, off1, off0 = struct.unpack(fmt, data[-6:-1])
offset = off0 + 255*off1 + 65025*off2 + 16581375*off3
if off4:
#Could in theory be used as a fifth piece of offset information,
            #i.e. offset += 4228250625L*off4, but testing the Roche tools this
            #is not the case. They simply don't support such large indexes.
raise ValueError("Expected a null terminator to the read name.")
yield name, offset
if handle.tell() != read_index_offset + read_index_size:
raise ValueError("Problem with index length? %i vs %i" \
% (handle.tell(), read_index_offset + read_index_size))
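#Illustrative decode example (a sketch, not part of the original module):
#an index entry whose five offset bytes are (0, 0, 1, 2, 1) decodes to
#offset = 1 + 2*255 + 1*65025 + 0*16581375 = 65536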
def _sff_read_seq_record(handle, number_of_flows_per_read, flow_chars,
key_sequence, alphabet, trim=False):
"""Parse the next read in the file, return data as a SeqRecord (PRIVATE)."""
#Now on to the reads...
#the read header format (fixed part):
#read_header_length H
#name_length H
#seq_len I
#clip_qual_left H
#clip_qual_right H
#clip_adapter_left H
#clip_adapter_right H
#[rest of read header depends on the name length etc]
read_header_fmt = '>2HI4H'
read_header_size = struct.calcsize(read_header_fmt)
read_flow_fmt = ">%iH" % number_of_flows_per_read
read_flow_size = struct.calcsize(read_flow_fmt)
read_header_length, name_length, seq_len, clip_qual_left, \
clip_qual_right, clip_adapter_left, clip_adapter_right \
= struct.unpack(read_header_fmt, handle.read(read_header_size))
if clip_qual_left:
clip_qual_left -= 1 #python counting
if clip_adapter_left:
clip_adapter_left -= 1 #python counting
if read_header_length < 10 or read_header_length % 8 != 0:
raise ValueError("Malformed read header, says length is %i" \
% read_header_length)
#now the name and any padding (remainder of header)
name = _bytes_to_string(handle.read(name_length))
padding = read_header_length - read_header_size - name_length
if handle.read(padding).count(_null) != padding:
raise ValueError("Post name %i byte padding region contained data" \
% padding)
#now the flowgram values, flowgram index, bases and qualities
#NOTE - assuming flowgram_format==1, which means struct type H
flow_values = handle.read(read_flow_size) #unpack later if needed
temp_fmt = ">%iB" % seq_len # used for flow index and quals
flow_index = handle.read(seq_len) #unpack later if needed
seq = _bytes_to_string(handle.read(seq_len)) #TODO - Use bytes in Seq?
quals = list(struct.unpack(temp_fmt, handle.read(seq_len)))
#now any padding...
padding = (read_flow_size + seq_len*3)%8
if padding:
padding = 8 - padding
if handle.read(padding).count(_null) != padding:
raise ValueError("Post quality %i byte padding region contained data" \
% padding)
#Now build a SeqRecord
if trim:
seq = seq[clip_qual_left:clip_qual_right].upper()
quals = quals[clip_qual_left:clip_qual_right]
#Don't record the clipping values, flow etc, they make no sense now:
annotations = {}
else:
#This use of mixed case mimics the Roche SFF tool's FASTA output
seq = seq[:clip_qual_left].lower() + \
seq[clip_qual_left:clip_qual_right].upper() + \
seq[clip_qual_right:].lower()
annotations = {"flow_values":struct.unpack(read_flow_fmt, flow_values),
"flow_index":struct.unpack(temp_fmt, flow_index),
"flow_chars":flow_chars,
"flow_key":key_sequence,
"clip_qual_left":clip_qual_left,
"clip_qual_right":clip_qual_right,
"clip_adapter_left":clip_adapter_left,
"clip_adapter_right":clip_adapter_right}
record = SeqRecord(Seq(seq, alphabet),
id=name,
name=name,
description="",
annotations=annotations)
#Dirty trick to speed up this line:
#record.letter_annotations["phred_quality"] = quals
dict.__setitem__(record._per_letter_annotations,
"phred_quality", quals)
#TODO - adaptor clipping
#Return the record and then continue...
return record
#This is a generator function!
def SffIterator(handle, alphabet=Alphabet.generic_dna, trim=False):
"""Iterate over Standard Flowgram Format (SFF) reads (as SeqRecord objects).
handle - input file, an SFF file, e.g. from Roche 454 sequencing.
This must NOT be opened in universal read lines mode!
alphabet - optional alphabet, defaults to generic DNA.
trim - should the sequences be trimmed?
The resulting SeqRecord objects should match those from a paired FASTA
and QUAL file converted from the SFF file using the Roche 454 tool
    sffinfo, i.e. the sequence will be mixed case, with the trim regions
shown in lower case.
This function is used internally via the Bio.SeqIO functions:
>>> from Bio import SeqIO
>>> handle = open("Roche/E3MFGYR02_random_10_reads.sff", "rb")
>>> for record in SeqIO.parse(handle, "sff"):
... print record.id, len(record)
E3MFGYR02JWQ7T 265
E3MFGYR02JA6IL 271
E3MFGYR02JHD4H 310
E3MFGYR02GFKUC 299
E3MFGYR02FTGED 281
E3MFGYR02FR9G7 261
E3MFGYR02GAZMS 278
E3MFGYR02HHZ8O 221
E3MFGYR02GPGB1 269
E3MFGYR02F7Z7G 219
>>> handle.close()
You can also call it directly:
>>> handle = open("Roche/E3MFGYR02_random_10_reads.sff", "rb")
>>> for record in SffIterator(handle):
... print record.id, len(record)
E3MFGYR02JWQ7T 265
E3MFGYR02JA6IL 271
E3MFGYR02JHD4H 310
E3MFGYR02GFKUC 299
E3MFGYR02FTGED 281
E3MFGYR02FR9G7 261
E3MFGYR02GAZMS 278
E3MFGYR02HHZ8O 221
E3MFGYR02GPGB1 269
E3MFGYR02F7Z7G 219
>>> handle.close()
Or, with the trim option:
>>> handle = open("Roche/E3MFGYR02_random_10_reads.sff", "rb")
>>> for record in SffIterator(handle, trim=True):
... print record.id, len(record)
E3MFGYR02JWQ7T 260
E3MFGYR02JA6IL 265
E3MFGYR02JHD4H 292
E3MFGYR02GFKUC 295
E3MFGYR02FTGED 277
E3MFGYR02FR9G7 256
E3MFGYR02GAZMS 271
E3MFGYR02HHZ8O 150
E3MFGYR02GPGB1 221
E3MFGYR02F7Z7G 130
>>> handle.close()
"""
if isinstance(Alphabet._get_base_alphabet(alphabet),
Alphabet.ProteinAlphabet):
raise ValueError("Invalid alphabet, SFF files do not hold proteins.")
if isinstance(Alphabet._get_base_alphabet(alphabet),
Alphabet.RNAAlphabet):
raise ValueError("Invalid alphabet, SFF files do not hold RNA.")
header_length, index_offset, index_length, number_of_reads, \
number_of_flows_per_read, flow_chars, key_sequence \
= _sff_file_header(handle)
#Now on to the reads...
#the read header format (fixed part):
#read_header_length H
#name_length H
#seq_len I
#clip_qual_left H
#clip_qual_right H
#clip_adapter_left H
#clip_adapter_right H
#[rest of read header depends on the name length etc]
read_header_fmt = '>2HI4H'
read_header_size = struct.calcsize(read_header_fmt)
read_flow_fmt = ">%iH" % number_of_flows_per_read
read_flow_size = struct.calcsize(read_flow_fmt)
assert 1 == struct.calcsize(">B")
assert 1 == struct.calcsize(">s")
assert 1 == struct.calcsize(">c")
assert read_header_size % 8 == 0 #Important for padding calc later!
#The spec allows for the index block to be before or even in the middle
#of the reads. We can check that if we keep track of our position
#in the file...
for read in range(number_of_reads):
if index_offset and handle.tell() == index_offset:
offset = index_offset + index_length
if offset % 8:
offset += 8 - (offset % 8)
assert offset % 8 == 0
handle.seek(offset)
#Now that we've done this, we don't need to do it again. Clear
#the index_offset so we can skip extra handle.tell() calls:
index_offset = 0
yield _sff_read_seq_record(handle,
number_of_flows_per_read,
flow_chars,
key_sequence,
alphabet,
trim)
#The following is not essential, but avoids confusing error messages
#for the user if they try and re-parse the same handle.
if index_offset and handle.tell() == index_offset:
offset = index_offset + index_length
if offset % 8:
offset += 8 - (offset % 8)
assert offset % 8 == 0
handle.seek(offset)
#Should now be at the end of the file...
if handle.read(1):
raise ValueError("Additional data at end of SFF file")
#This is a generator function!
def _SffTrimIterator(handle, alphabet=Alphabet.generic_dna):
"""Iterate over SFF reads (as SeqRecord objects) with trimming (PRIVATE)."""
return SffIterator(handle, alphabet, trim=True)
class SffWriter(SequenceWriter):
"""SFF file writer."""
def __init__(self, handle, index=True, xml=None):
"""Creates the writer object.
handle - Output handle, ideally in binary write mode.
index - Boolean argument, should we try and write an index?
xml - Optional string argument, xml manifest to be recorded in the index
block (see function ReadRocheXmlManifest for reading this data).
"""
if hasattr(handle,"mode") and "U" in handle.mode.upper():
raise ValueError("SFF files must NOT be opened in universal new "
"lines mode. Binary mode is required")
elif hasattr(handle,"mode") and "B" not in handle.mode.upper():
raise ValueError("SFF files must be opened in binary mode")
self.handle = handle
self._xml = xml
if index:
self._index = []
else:
self._index = None
def write_file(self, records):
"""Use this to write an entire file containing the given records."""
try:
self._number_of_reads = len(records)
except TypeError:
self._number_of_reads = 0 #dummy value
if not hasattr(self.handle, "seek") \
or not hasattr(self.handle, "tell"):
raise ValueError("A handle with a seek/tell methods is "
"required in order to record the total "
"record count in the file header (once it "
"is known at the end).")
if self._index is not None and \
not (hasattr(self.handle, "seek") and hasattr(self.handle, "tell")):
import warnings
warnings.warn("A handle with a seek/tell methods is required in "
"order to record an SFF index.")
self._index = None
self._index_start = 0
self._index_length = 0
if not hasattr(records, "next"):
records = iter(records)
#Get the first record in order to find the flow information
#we will need for the header.
try:
record = records.next()
except StopIteration:
record = None
if record is None:
#No records -> empty SFF file (or an error)?
#We can't write a header without the flow information.
#return 0
raise ValueError("Need at least one record for SFF output")
try:
self._key_sequence = _as_bytes(record.annotations["flow_key"])
self._flow_chars = _as_bytes(record.annotations["flow_chars"])
self._number_of_flows_per_read = len(self._flow_chars)
except KeyError:
raise ValueError("Missing SFF flow information")
self.write_header()
self.write_record(record)
count = 1
for record in records:
self.write_record(record)
count += 1
if self._number_of_reads == 0:
#Must go back and record the record count...
offset = self.handle.tell()
self.handle.seek(0)
self._number_of_reads = count
self.write_header()
self.handle.seek(offset) #not essential?
else:
assert count == self._number_of_reads
if self._index is not None:
self._write_index()
return count
def _write_index(self):
assert len(self._index)==self._number_of_reads
handle = self.handle
self._index.sort()
self._index_start = handle.tell() #need for header
#XML...
if self._xml is not None:
xml = _as_bytes(self._xml)
else:
from Bio import __version__
xml = "<!-- This file was output with Biopython %s -->\n" % __version__
xml += "<!-- This XML and index block attempts to mimic Roche SFF files -->\n"
xml += "<!-- This file may be a combination of multiple SFF files etc -->\n"
xml = _as_bytes(xml)
xml_len = len(xml)
#Write to the file...
fmt = ">I4BLL"
fmt_size = struct.calcsize(fmt)
handle.write(_null*fmt_size + xml) #will come back later to fill this
fmt2 = ">6B"
assert 6 == struct.calcsize(fmt2)
index_len = 0 #don't know yet!
for name, offset in self._index:
#Roche files record the offsets using base 255 not 256.
#See comments for parsing the index block. There may be a faster
#way to code this, but we can't easily use shifts due to odd base
off3 = offset
off0 = off3 % 255
off3 -= off0
off1 = off3 % 65025
off3 -= off1
off2 = off3 % 16581375
off3 -= off2
assert offset == off0 + off1 + off2 + off3, \
"%i -> %i %i %i %i" % (offset, off0, off1, off2, off3)
off3, off2, off1, off0 = off3//16581375, off2//65025, \
off1//255, off0
assert off0 < 255 and off1 < 255 and off2 < 255 and off3 < 255, \
"%i -> %i %i %i %i" % (offset, off0, off1, off2, off3)
handle.write(name + struct.pack(fmt2, 0, \
off3, off2, off1, off0, 255))
index_len += len(name) + 6
#Note any padding in not included:
self._index_length = fmt_size + xml_len + index_len #need for header
#Pad out to an 8 byte boundary (although I have noticed some
        #real Roche SFF files neglect to do this despite their manual
#suggesting this padding should be there):
if self._index_length % 8:
padding = 8 - (self._index_length%8)
handle.write(_null*padding)
else:
padding = 0
offset = handle.tell()
assert offset == self._index_start + self._index_length + padding, \
"%i vs %i + %i + %i" % (offset, self._index_start, \
self._index_length, padding)
#Must now go back and update the index header with index size...
handle.seek(self._index_start)
handle.write(struct.pack(fmt, 778921588, #magic number
49,46,48,48, #Roche index version, "1.00"
xml_len, index_len) + xml)
#Must now go back and update the header...
handle.seek(0)
self.write_header()
handle.seek(offset) #not essential?
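    #Worked example for the base-255 encoding in _write_index above
    #(illustrative only): offset 65536 encodes as off3=0, off2=1, off1=2,
    #off0=1, since 1 + 2*255 + 1*65025 + 0*16581375 == 65536; the bytes
    #written after each name are then (0, off3, off2, off1, off0, 255).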
def write_header(self):
#Do header...
key_length = len(self._key_sequence)
#file header (part one)
        #use big endian encoding >
#magic_number I
#version 4B
#index_offset Q
#index_length I
#number_of_reads I
#header_length H
#key_length H
#number_of_flows_per_read H
#flowgram_format_code B
#[rest of file header depends on the number of flows and how many keys]
fmt = '>I4BQIIHHHB%is%is' % (self._number_of_flows_per_read, key_length)
#According to the spec, the header_length field should be the total
#number of bytes required by this set of header fields, and should be
#equal to "31 + number_of_flows_per_read + key_length" rounded up to
#the next value divisible by 8.
if struct.calcsize(fmt) % 8 == 0:
padding = 0
else:
padding = 8 - (struct.calcsize(fmt) % 8)
header_length = struct.calcsize(fmt) + padding
assert header_length % 8 == 0
header = struct.pack(fmt, 779314790, #magic number 0x2E736666
0, 0, 0, 1, #version
self._index_start, self._index_length,
self._number_of_reads,
header_length, key_length,
self._number_of_flows_per_read,
1, #the only flowgram format code we support
self._flow_chars, self._key_sequence)
self.handle.write(header + _null*padding)
def write_record(self, record):
"""Write a single additional record to the output file.
This assumes the header has been done.
"""
#Basics
name = _as_bytes(record.id)
name_len = len(name)
seq = _as_bytes(str(record.seq).upper())
seq_len = len(seq)
#Qualities
try:
quals = record.letter_annotations["phred_quality"]
except KeyError:
raise ValueError("Missing PHRED qualities information")
#Flow
try:
flow_values = record.annotations["flow_values"]
flow_index = record.annotations["flow_index"]
if self._key_sequence != _as_bytes(record.annotations["flow_key"]) \
or self._flow_chars != _as_bytes(record.annotations["flow_chars"]):
raise ValueError("Records have inconsistent SFF flow data")
except KeyError:
raise ValueError("Missing SFF flow information")
except AttributeError:
raise ValueError("Header not written yet?")
#Clipping
try:
clip_qual_left = record.annotations["clip_qual_left"]
if clip_qual_left:
clip_qual_left += 1
clip_qual_right = record.annotations["clip_qual_right"]
clip_adapter_left = record.annotations["clip_adapter_left"]
if clip_adapter_left:
clip_adapter_left += 1
clip_adapter_right = record.annotations["clip_adapter_right"]
except KeyError:
raise ValueError("Missing SFF clipping information")
#Capture information for index
if self._index is not None:
offset = self.handle.tell()
#Check the position of the final record (before sort by name)
#See comments earlier about how base 255 seems to be used.
#This means the limit is 255**4 + 255**3 +255**2 + 255**1
if offset > 4244897280:
import warnings
warnings.warn("Read %s has file offset %i, which is too large "
"to store in the Roche SFF index structure. No "
"index block will be recorded." % (name, offset))
                #No point recording the offsets now
self._index = None
else:
self._index.append((name, self.handle.tell()))
#the read header format (fixed part):
#read_header_length H
#name_length H
#seq_len I
#clip_qual_left H
#clip_qual_right H
#clip_adapter_left H
#clip_adapter_right H
#[rest of read header depends on the name length etc]
#name
#flow values
#flow index
#sequence
#padding
read_header_fmt = '>2HI4H%is' % name_len
if struct.calcsize(read_header_fmt) % 8 == 0:
padding = 0
else:
padding = 8 - (struct.calcsize(read_header_fmt) % 8)
read_header_length = struct.calcsize(read_header_fmt) + padding
assert read_header_length % 8 == 0
data = struct.pack(read_header_fmt,
read_header_length,
name_len, seq_len,
clip_qual_left, clip_qual_right,
clip_adapter_left, clip_adapter_right,
name) + _null*padding
assert len(data) == read_header_length
#now the flowgram values, flowgram index, bases and qualities
#NOTE - assuming flowgram_format==1, which means struct type H
read_flow_fmt = ">%iH" % self._number_of_flows_per_read
read_flow_size = struct.calcsize(read_flow_fmt)
temp_fmt = ">%iB" % seq_len # used for flow index and quals
data += struct.pack(read_flow_fmt, *flow_values) \
+ struct.pack(temp_fmt, *flow_index) \
+ seq \
+ struct.pack(temp_fmt, *quals)
#now any final padding...
padding = (read_flow_size + seq_len*3)%8
if padding:
padding = 8 - padding
self.handle.write(data + _null*padding)
if __name__ == "__main__":
print "Running quick self test"
filename = "../../Tests/Roche/E3MFGYR02_random_10_reads.sff"
metadata = ReadRocheXmlManifest(open(filename, "rb"))
index1 = sorted(_sff_read_roche_index(open(filename, "rb")))
index2 = sorted(_sff_do_slow_index(open(filename, "rb")))
assert index1 == index2
assert len(index1) == len(list(SffIterator(open(filename, "rb"))))
from StringIO import StringIO
try:
#This is in Python 2.6+, and is essential on Python 3
from io import BytesIO
except ImportError:
BytesIO = StringIO
assert len(index1) == len(list(SffIterator(BytesIO(open(filename,"rb").read()))))
if sys.platform != "win32":
assert len(index1) == len(list(SffIterator(open(filename, "r"))))
index2 = sorted(_sff_read_roche_index(open(filename)))
assert index1 == index2
index2 = sorted(_sff_do_slow_index(open(filename)))
assert index1 == index2
assert len(index1) == len(list(SffIterator(open(filename))))
assert len(index1) == len(list(SffIterator(BytesIO(open(filename,"r").read()))))
assert len(index1) == len(list(SffIterator(BytesIO(open(filename).read()))))
sff = list(SffIterator(open(filename, "rb")))
sff2 = list(SffIterator(open("../../Tests/Roche/E3MFGYR02_alt_index_at_end.sff", "rb")))
assert len(sff) == len(sff2)
for old, new in zip(sff, sff2):
assert old.id == new.id
assert str(old.seq) == str(new.seq)
sff2 = list(SffIterator(open("../../Tests/Roche/E3MFGYR02_alt_index_at_start.sff", "rb")))
assert len(sff) == len(sff2)
for old, new in zip(sff, sff2):
assert old.id == new.id
assert str(old.seq) == str(new.seq)
sff2 = list(SffIterator(open("../../Tests/Roche/E3MFGYR02_alt_index_in_middle.sff", "rb")))
assert len(sff) == len(sff2)
for old, new in zip(sff, sff2):
assert old.id == new.id
assert str(old.seq) == str(new.seq)
sff2 = list(SffIterator(open("../../Tests/Roche/E3MFGYR02_index_at_start.sff", "rb")))
assert len(sff) == len(sff2)
for old, new in zip(sff, sff2):
assert old.id == new.id
assert str(old.seq) == str(new.seq)
sff2 = list(SffIterator(open("../../Tests/Roche/E3MFGYR02_index_in_middle.sff", "rb")))
assert len(sff) == len(sff2)
for old, new in zip(sff, sff2):
assert old.id == new.id
assert str(old.seq) == str(new.seq)
sff_trim = list(SffIterator(open(filename, "rb"), trim=True))
print ReadRocheXmlManifest(open(filename, "rb"))
from Bio import SeqIO
filename = "../../Tests/Roche/E3MFGYR02_random_10_reads_no_trim.fasta"
fasta_no_trim = list(SeqIO.parse(open(filename,"rU"), "fasta"))
filename = "../../Tests/Roche/E3MFGYR02_random_10_reads_no_trim.qual"
qual_no_trim = list(SeqIO.parse(open(filename,"rU"), "qual"))
filename = "../../Tests/Roche/E3MFGYR02_random_10_reads.fasta"
fasta_trim = list(SeqIO.parse(open(filename,"rU"), "fasta"))
filename = "../../Tests/Roche/E3MFGYR02_random_10_reads.qual"
qual_trim = list(SeqIO.parse(open(filename,"rU"), "qual"))
for s, sT, f, q, fT, qT in zip(sff, sff_trim, fasta_no_trim,
qual_no_trim, fasta_trim, qual_trim):
#print
print s.id
#print s.seq
#print s.letter_annotations["phred_quality"]
assert s.id == f.id == q.id
assert str(s.seq) == str(f.seq)
assert s.letter_annotations["phred_quality"] == q.letter_annotations["phred_quality"]
assert s.id == sT.id == fT.id == qT.id
assert str(sT.seq) == str(fT.seq)
assert sT.letter_annotations["phred_quality"] == qT.letter_annotations["phred_quality"]
print "Writing with a list of SeqRecords..."
handle = StringIO()
w = SffWriter(handle, xml=metadata)
w.write_file(sff) #list
data = handle.getvalue()
print "And again with an iterator..."
handle = StringIO()
w = SffWriter(handle, xml=metadata)
w.write_file(iter(sff))
assert data == handle.getvalue()
#Check 100% identical to the original:
filename = "../../Tests/Roche/E3MFGYR02_random_10_reads.sff"
original = open(filename,"rb").read()
assert len(data) == len(original)
assert data == original
del data
handle.close()
print "-"*50
filename = "../../Tests/Roche/greek.sff"
for record in SffIterator(open(filename,"rb")):
print record.id
index1 = sorted(_sff_read_roche_index(open(filename, "rb")))
index2 = sorted(_sff_do_slow_index(open(filename, "rb")))
assert index1 == index2
try:
print ReadRocheXmlManifest(open(filename, "rb"))
assert False, "Should fail!"
except ValueError:
pass
handle = open(filename, "rb")
for record in SffIterator(handle):
pass
try:
for record in SffIterator(handle):
print record.id
assert False, "Should have failed"
except ValueError, err:
print "Checking what happens on re-reading a handle:"
print err
"""
#Ugly code to make test files...
index = ".diy1.00This is a fake index block (DIY = Do It Yourself), which is allowed under the SFF standard.\0"
padding = len(index)%8
if padding:
padding = 8 - padding
index += chr(0)*padding
assert len(index)%8 == 0
#Ugly bit of code to make a fake index at start
records = list(SffIterator(open("../../Tests/Roche/E3MFGYR02_random_10_reads.sff", "rb")))
out_handle = open("../../Tests/Roche/E3MFGYR02_alt_index_at_start.sff", "w")
index = ".diy1.00This is a fake index block (DIY = Do It Yourself), which is allowed under the SFF standard.\0"
padding = len(index)%8
if padding:
padding = 8 - padding
index += chr(0)*padding
w = SffWriter(out_handle, index=False, xml=None)
#Fake the header...
w._number_of_reads = len(records)
w._index_start = 0
w._index_length = 0
w._key_sequence = records[0].annotations["flow_key"]
w._flow_chars = records[0].annotations["flow_chars"]
w._number_of_flows_per_read = len(w._flow_chars)
w.write_header()
w._index_start = out_handle.tell()
w._index_length = len(index)
out_handle.seek(0)
w.write_header() #this time with index info
w.handle.write(index)
for record in records:
w.write_record(record)
out_handle.close()
records2 = list(SffIterator(open("../../Tests/Roche/E3MFGYR02_alt_index_at_start.sff", "rb")))
for old, new in zip(records, records2):
assert str(old.seq)==str(new.seq)
i = list(_sff_do_slow_index(open("../../Tests/Roche/E3MFGYR02_alt_index_at_start.sff", "rb")))
#Ugly bit of code to make a fake index in middle
records = list(SffIterator(open("../../Tests/Roche/E3MFGYR02_random_10_reads.sff", "rb")))
out_handle = open("../../Tests/Roche/E3MFGYR02_alt_index_in_middle.sff", "w")
index = ".diy1.00This is a fake index block (DIY = Do It Yourself), which is allowed under the SFF standard.\0"
padding = len(index)%8
if padding:
padding = 8 - padding
index += chr(0)*padding
w = SffWriter(out_handle, index=False, xml=None)
#Fake the header...
w._number_of_reads = len(records)
w._index_start = 0
w._index_length = 0
w._key_sequence = records[0].annotations["flow_key"]
w._flow_chars = records[0].annotations["flow_chars"]
w._number_of_flows_per_read = len(w._flow_chars)
w.write_header()
for record in records[:5]:
w.write_record(record)
w._index_start = out_handle.tell()
w._index_length = len(index)
w.handle.write(index)
for record in records[5:]:
w.write_record(record)
out_handle.seek(0)
w.write_header() #this time with index info
out_handle.close()
records2 = list(SffIterator(open("../../Tests/Roche/E3MFGYR02_alt_index_in_middle.sff", "rb")))
for old, new in zip(records, records2):
assert str(old.seq)==str(new.seq)
j = list(_sff_do_slow_index(open("../../Tests/Roche/E3MFGYR02_alt_index_in_middle.sff", "rb")))
#Ugly bit of code to make a fake index at end
records = list(SffIterator(open("../../Tests/Roche/E3MFGYR02_random_10_reads.sff", "rb")))
out_handle = open("../../Tests/Roche/E3MFGYR02_alt_index_at_end.sff", "w")
w = SffWriter(out_handle, index=False, xml=None)
#Fake the header...
w._number_of_reads = len(records)
w._index_start = 0
w._index_length = 0
w._key_sequence = records[0].annotations["flow_key"]
w._flow_chars = records[0].annotations["flow_chars"]
w._number_of_flows_per_read = len(w._flow_chars)
w.write_header()
for record in records:
w.write_record(record)
w._index_start = out_handle.tell()
w._index_length = len(index)
out_handle.write(index)
out_handle.seek(0)
w.write_header() #this time with index info
out_handle.close()
records2 = list(SffIterator(open("../../Tests/Roche/E3MFGYR02_alt_index_at_end.sff", "rb")))
for old, new in zip(records, records2):
assert str(old.seq)==str(new.seq)
try:
print ReadRocheXmlManifest(open("../../Tests/Roche/E3MFGYR02_alt_index_at_end.sff", "rb"))
assert False, "Should fail!"
except ValueError:
pass
k = list(_sff_do_slow_index(open("../../Tests/Roche/E3MFGYR02_alt_index_at_end.sff", "rb")))
"""
print "Done"
| BlogomaticProject/Blogomatic | opt/blog-o-matic/usr/lib/python/Bio/SeqIO/SffIO.py | Python | gpl-2.0 | 53,635 |
#!/usr/bin/env python
import subprocess
import os
class MakeException(Exception):
pass
def swapExt(path, current, replacement):
path, ext = os.path.splitext(path)
if ext == current:
path += replacement
return path
else:
raise MakeException(
"swapExt: expected file name ending in %s, got file name ending in %s" % \
            (current, ext))
headerFiles = [
'benc.h',
'bencode.h',
]
codeFiles = [
'benc_int.c',
'benc_bstr.c',
'benc_list.c',
'benc_dict.c',
'bencode.c',
'bcopy.c',
]
cflags = ['-g']
programFile = 'bcopy'
def gcc(*packedArgs):
args = []
for arg in packedArgs:
if isinstance(arg, list):
args += arg
elif isinstance(arg, tuple):
args += list(arg)
else:
args.append(arg)
subprocess.check_call(['gcc'] + args)
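# illustrative example of gcc()'s argument flattening (not called anywhere):
#   gcc(['-g'], '-c', ('-o', 'benc_int.o'), 'benc_int.c')
# runs: gcc -g -c -o benc_int.o benc_int.c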
def compile(codeFile, cflags=[]):
objectFile = swapExt(codeFile, '.c', '.o')
gcc(cflags, '-c', ('-o', objectFile), codeFile)
return objectFile
def link(programFile, objectFiles, cflags=[]):
gcc(cflags, ('-o', programFile), objectFiles)
if __name__ == '__main__':
objectFiles = [compile(codeFile, cflags) for codeFile in codeFiles]
link(programFile, objectFiles, cflags)
| HarryR/ffff-dnsp2p | libbenc/make.py | Python | gpl-2.0 | 1,298 |
#!/usr/bin/python
##
## Copyright 2008, Various
## Adrian Likins <[email protected]>
##
## This software may be freely redistributed under the terms of the GNU
## general public license.
##
import os
import socket
import subprocess
import time
import unittest
import simplejson
import func.utils
from func import yaml
from func import jobthing
def structToYaml(data):
# takes a data structure, serializes it to
# yaml
buf = yaml.dump(data)
return buf
def structToJSON(data):
    #Takes a data structure for the test
    #and serializes it using JSON
    serialized = simplejson.dumps(data)
return serialized
class BaseTest(object):
# assume we are talking to localhost
# th = socket.gethostname()
th = socket.getfqdn()
nforks=1
async=False
ft_cmd = "func-transmit"
# just so we can change it easy later
def _serialize(self, data):
raise NotImplementedError
def _deserialize(self, buf):
raise NotImplementedError
def _call_async(self, data):
data['async'] = True
data['nforks'] = 4
job_id = self._call(data)
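        # async flow: func-transmit returns a job id immediately; poll
        # job_status until jobthing reports JOB_ID_FINISHED, then unpack
        # the real result from the (status, result) tuple below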
no_answer = True
while (no_answer):
out = self._call({'clients': '*',
'method':'job_status',
'parameters': job_id})
if out[0] == jobthing.JOB_ID_FINISHED:
no_answer = False
else:
time.sleep(.25)
result = out[1]
return result
def _call(self, data):
f = self._serialize(data)
p = subprocess.Popen(self.ft_cmd, shell=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
output = p.communicate(input=f)
return self._deserialize(output[0])
def call(self, data):
if self.async:
return self._call_async(data)
return self._call(data)
def __init__(self):
pass
# we do this all over the place...
def assert_on_fault(self, result):
assert func.utils.is_error(result[self.th]) == False
# assert type(result[self.th]) != xmlrpclib.Fault
class YamlBaseTest(BaseTest):
# i'd add the "yaml" attr here for nosetest to find, but it doesnt
# seem to find it unless the class is a test class directly
ft_cmd = "func-transmit --yaml"
def _serialize(self, data):
buf = yaml.dump(data)
return buf
def _deserialize(self, buf):
data = yaml.load(buf).next()
return data
class JSONBaseTest(BaseTest):
ft_cmd = "func-transmit --json"
def _serialize(self, data):
buf = simplejson.dumps(data)
return buf
def _deserialize(self, buf):
data = simplejson.loads(buf)
return data
class ListMinion(object):
def test_list_minions(self):
out = self.call({'clients': '*',
'method': 'list_minions'})
def test_list_minions_no_match(self):
out = self.call({'clients': 'somerandom-name-that-shouldnt-be-a_real_host_name',
'method': 'list_minions'})
assert out == []
def test_list_minions_group_name(self):
out = self.call({'clients': '@test',
'method': 'list_minions'})
def test_list_minions_no_clients(self):
out = self.call({'method': 'list_minions'})
class ListMinionAsync(ListMinion):
async = True
class TestListMinionYaml(YamlBaseTest, ListMinion):
yaml = True
def __init__(self):
super(TestListMinionYaml, self).__init__()
class TestListMinionJSON(JSONBaseTest, ListMinion):
json = True
def __init__(self):
super(TestListMinionJSON, self).__init__()
# list_minions is a convenience call for func_transmit, and doesn't
# really make any sense to call async
#class TestListMinionYamlAsync(YamlBaseTest, ListMinionAsync):
# yaml = True
# async = True
# def __init__(self):
# super(TestListMinionYamlAsync, self).__init__()
#class TestListMinionJSONAsync(JSONBaseTest, ListMinionAsync):
# json = True
# async = True
# def __init__(self):
# super(TestListMinionJSONAsync, self).__init__()
class ClientGlob(object):
def _test_add(self, client):
result = self.call({'clients': client,
'method': 'add',
'module': 'test',
'parameters': [1,2]})
self.assert_on_fault(result)
return result
def test_single_client(self):
result = self._test_add(self.th)
def test_glob_client(self):
result = self._test_add("*")
def test_glob_list(self):
result = self._test_add([self.th, self.th])
def test_glob_string_list(self):
result = self._test_add("%s;*" % self.th)
# note, needs a /etc/func/group setup with the proper groups defined
# need to figure out a good way to test this... -akl
def test_group(self):
result = self._test_add("@test")
# def test_group_and_glob(self):
# result = self._test_add("@test;*")
# def test_list_of_groups(self):
# result = self._test_add(["@test", "@test2"])
# def test_string_list_of_groups(self):
# result = self._test_add("@test;@test2")
# run all the same tests, but run then
class ClientGlobAsync(ClientGlob):
async = True
class TestClientGlobYaml(YamlBaseTest, ClientGlob):
yaml = True
def __init__(self):
super(TestClientGlobYaml, self).__init__()
class TestClientGlobJSON(JSONBaseTest, ClientGlob):
json = True
def __init__(self):
super(TestClientGlobJSON, self).__init__()
class TestClientGlobYamlAsync(YamlBaseTest, ClientGlobAsync):
yaml = True
async = True
def __init__(self):
super(TestClientGlobYamlAsync, self).__init__()
class TestClientGlobJSONAsync(JSONBaseTest, ClientGlobAsync):
json = True
async = True
def __init__(self):
super(TestClientGlobJSONAsync, self).__init__()
# why the weird T_est name? because nosetests doesn't seem to reliably
# respect the __test__ attribute, and these modules aren't meant to be
# invoked as test classes themselves, only as bases for other tests
class T_estTest(object):
__test__ = False
def _echo_test(self, data):
result = self.call({'clients':'*',
'method': 'echo',
'module': 'test',
'parameters': [data]})
self.assert_on_fault(result)
assert result[self.th] == data
def test_add(self):
result = self.call({'clients':'*',
'method': 'add',
'module': 'test',
'parameters': [1,2]})
assert result[self.th] == 3
def test_echo_int(self):
self._echo_test(37)
def test_echo_array(self):
self._echo_test([1,2,"three", "fore", "V"])
def test_echo_hash(self):
self._echo_test({'one':1, 'two':2, 'three': 3, 'four':"IV"})
def test_echo_float(self):
self._echo_test(1.0)
# NOTE/FIXME: the big float tests fail for yaml and json
def test_echo_big_float(self):
self._echo_test(123121232.23)
def test_echo_bigger_float(self):
self._echo_test(234234234234234234234.234234234234234)
def test_echo_little_float(self):
self._echo_test(0.0000000000000000000000000000000000037)
# Note/FIXME: these test currently fail for YAML
def test_echo_boolean_true(self):
self._echo_test(True)
def test_echo_boolean_false(self):
self._echo_test(False)
class T_estTestAsync(T_estTest):
__test__ = False
async = True
class TestTestYaml(YamlBaseTest, T_estTest):
yaml = True
def __init__(self):
super(YamlBaseTest, self).__init__()
class TestTestJSON(JSONBaseTest, T_estTest):
json = True
def __init__(self):
super(JSONBaseTest,self).__init__()
class TestTestAsyncJSON(JSONBaseTest, T_estTestAsync):
json = True
async = True
def __init__(self):
super(JSONBaseTest,self).__init__()
class TestTestAsyncYaml(YamlBaseTest, T_estTestAsync):
yaml = True
async = True
def __init__(self):
super(YamlBaseTest,self).__init__()
| pombredanne/func | test/unittest/test_func_transmit.py | Python | gpl-2.0 | 8,415 |
__author__ = 'ryanplyler'
def sayhi(config):
error = None
try:
server_output = "Executing action 'sayhi()'"
response = "HI THERE!"
except:
error = 1
return server_output, response, error
| grplyler/netcmd | netcmd_actions.py | Python | gpl-2.0 | 231 |
##
# Copyright 2009-2020 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for building and installing MRtrix, implemented as an easyblock
"""
import glob
import os
from distutils.version import LooseVersion
import easybuild.tools.environment as env
from easybuild.framework.easyblock import EasyBlock
from easybuild.tools.filetools import copy, symlink
from easybuild.tools.run import run_cmd
from easybuild.tools.systemtools import get_shared_lib_ext
class EB_MRtrix(EasyBlock):
"""Support for building/installing MRtrix."""
def __init__(self, *args, **kwargs):
"""Initialize easyblock, enable build-in-installdir based on version."""
super(EB_MRtrix, self).__init__(*args, **kwargs)
if LooseVersion(self.version) >= LooseVersion('0.3') and LooseVersion(self.version) < LooseVersion('0.3.14'):
self.build_in_installdir = True
self.log.debug("Enabled build-in-installdir for version %s", self.version)
def extract_step(self):
"""Extract MRtrix sources."""
# strip off 'mrtrix*' part to avoid having everything in a 'mrtrix*' subdirectory
if LooseVersion(self.version) >= LooseVersion('0.3'):
self.cfg.update('unpack_options', '--strip-components=1')
super(EB_MRtrix, self).extract_step()
def configure_step(self):
"""No configuration step for MRtrix."""
if LooseVersion(self.version) >= LooseVersion('0.3'):
if LooseVersion(self.version) < LooseVersion('0.3.13'):
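                # LDFLAGS/OBJECTS/EXECUTABLE/LIB are placeholder tokens that
                # MRtrix's own 'build' script substitutes at link time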
env.setvar('LD', "%s LDFLAGS OBJECTS -o EXECUTABLE" % os.getenv('CXX'))
env.setvar('LDLIB', "%s -shared LDLIB_FLAGS OBJECTS -o LIB" % os.getenv('CXX'))
env.setvar('QMAKE_CXX', os.getenv('CXX'))
cmd = "python configure -verbose"
run_cmd(cmd, log_all=True, simple=True, log_ok=True)
def build_step(self):
"""Custom build procedure for MRtrix."""
cmd = "python build -verbose"
run_cmd(cmd, log_all=True, simple=True, log_ok=True)
def install_step(self):
"""Custom install procedure for MRtrix."""
if LooseVersion(self.version) < LooseVersion('0.3'):
cmd = "python build -verbose install=%s linkto=" % self.installdir
run_cmd(cmd, log_all=True, simple=True, log_ok=True)
elif LooseVersion(self.version) >= LooseVersion('3.0'):
copy(os.path.join(self.builddir, 'bin'), self.installdir)
copy(os.path.join(self.builddir, 'lib'), self.installdir)
elif LooseVersion(self.version) >= LooseVersion('0.3.14'):
copy(glob.glob(os.path.join(self.builddir, 'release', '*')), self.installdir)
copy(os.path.join(self.builddir, 'scripts'), self.installdir)
# some scripts expect 'release/bin' to be there, so we put a symlink in place
symlink(self.installdir, os.path.join(self.installdir, 'release'))
def make_module_req_guess(self):
"""
Return list of subdirectories to consider to update environment variables;
also consider 'scripts' subdirectory for $PATH
"""
guesses = super(EB_MRtrix, self).make_module_req_guess()
guesses['PATH'].append('scripts')
if LooseVersion(self.version) >= LooseVersion('3.0'):
guesses.setdefault('PYTHONPATH', []).append('lib')
return guesses
def sanity_check_step(self):
"""Custom sanity check for MRtrix."""
shlib_ext = get_shared_lib_ext()
if LooseVersion(self.version) >= LooseVersion('0.3'):
libso = 'libmrtrix.%s' % shlib_ext
else:
libso = 'libmrtrix-%s.%s' % ('_'.join(self.version.split('.')), shlib_ext)
custom_paths = {
'files': [os.path.join('lib', libso)],
'dirs': ['bin'],
}
custom_commands = []
if LooseVersion(self.version) >= LooseVersion('3.0'):
custom_commands.append("python -c 'import mrtrix3'")
super(EB_MRtrix, self).sanity_check_step(custom_paths=custom_paths, custom_commands=custom_commands)
| pescobar/easybuild-easyblocks | easybuild/easyblocks/m/mrtrix.py | Python | gpl-2.0 | 5,093 |
"""
Virtualization test - Virtual disk related utility functions
:copyright: Red Hat Inc.
"""
import os
import glob
import shutil
import stat
import tempfile
import logging
import re
try:
import configparser as ConfigParser
except ImportError:
import ConfigParser
from avocado.core import exceptions
from avocado.utils import process
from avocado.utils.service import SpecificServiceManager
from virttest import error_context
from virttest.compat_52lts import decode_to_text
# Whether to print all shell commands called
DEBUG = False
def copytree(src, dst, overwrite=True, ignore=''):
"""
Copy dirs from source to target.
:param src: source directory
:param dst: destination directory
:param overwrite: overwrite file if exist or not
    :param ignore: glob pattern of files to ignore
"""
ignore = glob.glob(os.path.join(src, ignore))
for root, dirs, files in os.walk(src):
dst_dir = root.replace(src, dst)
if not os.path.exists(dst_dir):
os.makedirs(dst_dir)
for _ in files:
if _ in ignore:
continue
src_file = os.path.join(root, _)
dst_file = os.path.join(dst_dir, _)
if os.path.exists(dst_file):
if overwrite:
os.remove(dst_file)
else:
continue
shutil.copy(src_file, dst_dir)
def is_mount(src, dst=None, fstype=None, options=None, verbose=False,
session=None):
"""
    Check if src or dst is mounted.
    :param src: source device or directory
    :param dst: mountpoint; if None, this check is skipped
    :param fstype: file system type; if None, this check is skipped
    :param options: mount options, separated by ","
:param session: check within the session if given
:return: True if mounted, else return False
"""
mount_str = "%s %s %s" % (src, dst, fstype)
mount_str = mount_str.replace('None', '').strip()
mount_list_cmd = 'cat /proc/mounts'
if session:
mount_result = session.cmd_output_safe(mount_list_cmd)
else:
mount_result = decode_to_text(process.system_output(mount_list_cmd, shell=True))
if verbose:
logging.debug("/proc/mounts contents:\n%s", mount_result)
for result in mount_result.splitlines():
if mount_str in result:
if options:
options = options.split(",")
options_result = result.split()[3].split(",")
for op in options:
if op not in options_result:
if verbose:
logging.info("%s is not mounted with given"
" option %s", src, op)
return False
if verbose:
logging.info("%s is mounted", src)
return True
if verbose:
logging.info("%s is not mounted", src)
return False
def mount(src, dst, fstype=None, options=None, verbose=False, session=None):
"""
    Mount src on dst; if it is already mounted, remount with the given options.
    :param src: source device or directory
    :param dst: mountpoint
    :param fstype: filesystem type to mount
:param options: mount options
:param session: mount within the session if given
:return: if mounted return True else return False
"""
options = (options and [options] or [''])[0]
if is_mount(src, dst, fstype, options, verbose, session):
if 'remount' not in options:
options = 'remount,%s' % options
cmd = ['mount']
if fstype:
cmd.extend(['-t', fstype])
if options:
cmd.extend(['-o', options])
cmd.extend([src, dst])
cmd = ' '.join(cmd)
if session:
return session.cmd_status(cmd, safe=True) == 0
return process.system(cmd, verbose=verbose) == 0
def umount(src, dst, fstype=None, verbose=False, session=None):
"""
    Umount src from dst, if src is really mounted under dst.
:param src: source device or directory
:param dst: mountpoint
:param fstype: fstype used to check if mounted as expected
:param session: umount within the session if given
:return: if unmounted return True else return False
"""
mounted = is_mount(src, dst, fstype, verbose=verbose, session=session)
if mounted:
from . import utils_package
package = "psmisc"
# check package is available, if not try installing it
if not utils_package.package_install(package):
logging.error("%s is not available/installed for fuser", package)
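        # fuser -km: kill (-k) every process using the mountpoint (-m) so
        # the umount below does not fail with EBUSY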
fuser_cmd = "fuser -km %s" % dst
umount_cmd = "umount %s" % dst
if session:
session.cmd_output_safe(fuser_cmd)
return session.cmd_status(umount_cmd, safe=True) == 0
process.system(fuser_cmd, ignore_status=True, verbose=True, shell=True)
return process.system(umount_cmd, ignore_status=True, verbose=True) == 0
return True
@error_context.context_aware
def cleanup(folder):
"""
If folder is a mountpoint, do what is possible to unmount it. Afterwards,
try to remove it.
:param folder: Directory to be cleaned up.
"""
error_context.context(
"cleaning up unattended install directory %s" % folder)
umount(None, folder)
if os.path.isdir(folder):
shutil.rmtree(folder)
@error_context.context_aware
def clean_old_image(image):
"""
Clean a leftover image file from previous processes. If it contains a
mounted file system, do the proper cleanup procedures.
:param image: Path to image to be cleaned up.
"""
error_context.context("cleaning up old leftover image %s" % image)
if os.path.exists(image):
umount(image, None)
os.remove(image)
class Disk(object):
"""
Abstract class for Disk objects, with the common methods implemented.
"""
def __init__(self):
self.path = None
def get_answer_file_path(self, filename):
return os.path.join(self.mount, filename)
def copy_to(self, src):
logging.debug("Copying %s to disk image mount", src)
dst = os.path.join(self.mount, os.path.basename(src))
if os.path.isdir(src):
shutil.copytree(src, dst)
elif os.path.isfile(src):
shutil.copyfile(src, dst)
def close(self):
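        # chmod to 0755 (owner rwx, group/other r-x) so the finished image
        # is readable outside the test user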
os.chmod(self.path, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP |
stat.S_IROTH | stat.S_IXOTH)
cleanup(self.mount)
logging.debug("Disk %s successfully set", self.path)
class FloppyDisk(Disk):
"""
Represents a floppy disk. We can copy files to it, and setup it in
convenient ways.
"""
@error_context.context_aware
def __init__(self, path, qemu_img_binary, tmpdir, vfd_size):
error_context.context(
"Creating unattended install floppy image %s" % path)
self.mount = tempfile.mkdtemp(prefix='floppy_virttest_', dir=tmpdir)
self.path = path
self.vfd_size = vfd_size
clean_old_image(path)
try:
c_cmd = '%s create -f raw %s %s' % (qemu_img_binary, path,
self.vfd_size)
process.run(c_cmd, verbose=DEBUG)
f_cmd = 'mkfs.msdos -s 1 %s' % path
process.run(f_cmd, verbose=DEBUG)
except process.CmdError as e:
logging.error("Error during floppy initialization: %s" % e)
cleanup(self.mount)
raise
def close(self):
"""
Copy everything that is in the mountpoint to the floppy.
"""
pwd = os.getcwd()
try:
os.chdir(self.mount)
path_list = glob.glob('*')
for path in path_list:
self.copy_to(path)
finally:
os.chdir(pwd)
cleanup(self.mount)
def copy_to(self, src):
logging.debug("Copying %s to floppy image", src)
mcopy_cmd = "mcopy -s -o -n -i %s %s ::/" % (self.path, src)
process.run(mcopy_cmd, verbose=DEBUG)
def _copy_virtio_drivers(self, virtio_floppy):
"""
Copy the virtio drivers on the virtio floppy to the install floppy.
        1) Read the floppy containing the viostor drivers via mtools (mcopy)
2) Copy its contents to the root of the install floppy
"""
pwd = os.getcwd()
try:
m_cmd = 'mcopy -s -o -n -i %s ::/* %s' % (
virtio_floppy, self.mount)
process.run(m_cmd, verbose=DEBUG)
finally:
os.chdir(pwd)
def setup_virtio_win2003(self, virtio_floppy, virtio_oemsetup_id):
"""
Setup the install floppy with the virtio storage drivers, win2003 style.
        Win2003 and WinXP depend on the txtsetup.oem file (an .ini-style
        file) to install the virtio drivers from the floppy.
Process:
1) Copy the virtio drivers on the virtio floppy to the install floppy
2) Parse the ini file with config parser
3) Modify the identifier of the default session that is going to be
executed on the config parser object
4) Re-write the config file to the disk
"""
self._copy_virtio_drivers(virtio_floppy)
txtsetup_oem = os.path.join(self.mount, 'txtsetup.oem')
if not os.path.isfile(txtsetup_oem):
raise IOError('File txtsetup.oem not found on the install '
'floppy. Please verify if your floppy virtio '
'driver image has this file')
parser = ConfigParser.ConfigParser()
parser.read(txtsetup_oem)
if not parser.has_section('Defaults'):
raise ValueError('File txtsetup.oem does not have the session '
'"Defaults". Please check txtsetup.oem')
default_driver = parser.get('Defaults', 'SCSI')
if default_driver != virtio_oemsetup_id:
parser.set('Defaults', 'SCSI', virtio_oemsetup_id)
fp = open(txtsetup_oem, 'w')
parser.write(fp)
fp.close()
def setup_virtio_win2008(self, virtio_floppy):
"""
Setup the install floppy with the virtio storage drivers, win2008 style.
        Win2008, Vista and 7 require the path to the drivers to be specified
        in the unattended file, so we just need to copy the drivers to the
driver floppy disk. Important to note that it's possible to specify
drivers from a CDROM, so the floppy driver copy is optional.
Process:
1) Copy the virtio drivers on the virtio floppy to the install floppy,
if there is one available
"""
if os.path.isfile(virtio_floppy):
self._copy_virtio_drivers(virtio_floppy)
else:
logging.debug(
"No virtio floppy present, not needed for this OS anyway")
class CdromDisk(Disk):
"""
Represents a CDROM disk that we can master according to our needs.
"""
def __init__(self, path, tmpdir):
self.mount = tempfile.mkdtemp(prefix='cdrom_virttest_', dir=tmpdir)
self.tmpdir = tmpdir
self.path = path
clean_old_image(path)
if not os.path.isdir(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
def _copy_virtio_drivers(self, virtio_floppy, cdrom_virtio):
"""
Copy the virtio drivers from floppy and cdrom to install cdrom.
1) Mount the floppy and cdrom containing the virtio drivers
2) Copy its contents to the root of the install cdrom
"""
pwd = os.getcwd()
mnt_pnt = tempfile.mkdtemp(prefix='cdrom_virtio_', dir=self.tmpdir)
mount(cdrom_virtio, mnt_pnt, options='loop,ro', verbose=DEBUG)
try:
copytree(mnt_pnt, self.mount, ignore='*.vfd')
cmd = 'mcopy -s -o -n -i %s ::/* %s' % (virtio_floppy, self.mount)
process.run(cmd, verbose=DEBUG)
finally:
os.chdir(pwd)
umount(None, mnt_pnt, verbose=DEBUG)
os.rmdir(mnt_pnt)
def setup_virtio_win2008(self, virtio_floppy, cdrom_virtio):
"""
Setup the install cdrom with the virtio storage drivers, win2008 style.
        Win2008, Vista and 7 require the path to the drivers to be specified
        in the unattended file, so we just need to copy the drivers to the
extra cdrom disk. Important to note that it's possible to specify
drivers from a CDROM, so the floppy driver copy is optional.
Process:
1) Copy the virtio drivers on the virtio floppy to the install cdrom,
if there is one available
"""
if os.path.isfile(virtio_floppy):
self._copy_virtio_drivers(virtio_floppy, cdrom_virtio)
else:
logging.debug(
"No virtio floppy present, not needed for this OS anyway")
@error_context.context_aware
def close(self):
error_context.context(
"Creating unattended install CD image %s" % self.path)
g_cmd = ('mkisofs -o %s -max-iso9660-filenames '
'-relaxed-filenames -D --input-charset iso8859-1 '
'%s' % (self.path, self.mount))
process.run(g_cmd, verbose=DEBUG)
os.chmod(self.path, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP |
stat.S_IROTH | stat.S_IXOTH)
cleanup(self.mount)
logging.debug("unattended install CD image %s successfully created",
self.path)
class CdromInstallDisk(Disk):
"""
Represents a install CDROM disk that we can master according to our needs.
"""
def __init__(self, path, tmpdir, source_cdrom, extra_params):
self.mount = tempfile.mkdtemp(prefix='cdrom_unattended_', dir=tmpdir)
self.path = path
self.extra_params = extra_params
self.source_cdrom = source_cdrom
cleanup(path)
if not os.path.isdir(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
cp_cmd = ('cp -r %s/isolinux/ %s/' % (source_cdrom, self.mount))
listdir = os.listdir(self.source_cdrom)
for i in listdir:
if i == 'isolinux':
continue
os.symlink(os.path.join(self.source_cdrom, i),
os.path.join(self.mount, i))
process.run(cp_cmd)
def get_answer_file_path(self, filename):
return os.path.join(self.mount, 'isolinux', filename)
@error_context.context_aware
def close(self):
error_context.context(
"Creating unattended install CD image %s" % self.path)
if os.path.exists(os.path.join(self.mount, 'isolinux')):
# bootable cdrom
f = open(os.path.join(self.mount, 'isolinux', 'isolinux.cfg'), 'w')
f.write('default /isolinux/vmlinuz append initrd=/isolinux/'
'initrd.img %s\n' % self.extra_params)
f.close()
boot = '-b isolinux/isolinux.bin'
else:
# Not a bootable CDROM, using -kernel instead (eg.: arm64)
boot = ''
m_cmd = ('mkisofs -o %s %s -c isolinux/boot.cat -no-emul-boot '
'-boot-load-size 4 -boot-info-table -f -R -J -V -T %s'
% (self.path, boot, self.mount))
process.run(m_cmd)
os.chmod(self.path, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP |
stat.S_IROTH | stat.S_IXOTH)
cleanup(self.mount)
cleanup(self.source_cdrom)
logging.debug("unattended install CD image %s successfully created",
self.path)
class GuestFSModiDisk(object):
"""
    Class for a guest disk that uses the guestfs library to perform
    operations (like read/write) on the guest disk.
"""
def __init__(self, disk, backend='direct'):
"""
        :param disk: target disk image.
        :param backend: let libguestfs create/connect to a backend daemon
by starting qemu directly, or using libvirt to manage
an appliance, running User-Mode Linux, or connecting
to an already running daemon.
'direct', 'appliance', 'libvirt', 'libvirt:null',
'libvirt:URI', 'uml', 'unix:path'.
"""
try:
import guestfs
except ImportError:
install_cmd = "yum -y install python-libguestfs"
try:
process.run(install_cmd)
import guestfs
except Exception:
raise exceptions.TestSkipError('We need python-libguestfs (or '
'the equivalent for your '
'distro) for this particular '
'feature (modifying guest '
'files with libguestfs)')
self.g = guestfs.GuestFS()
self.disk = disk
self.g.add_drive(disk)
self.g.set_backend(backend)
libvirtd = SpecificServiceManager("libvirtd")
libvirtd_status = libvirtd.status()
if libvirtd_status is None:
raise exceptions.TestError('libvirtd: service not found')
if (not libvirtd_status) and (not libvirtd.start()):
raise exceptions.TestError('libvirtd: failed to start')
logging.debug("Launch the disk %s, wait..." % self.disk)
self.g.launch()
def os_inspects(self):
self.roots = self.g.inspect_os()
if self.roots:
return self.roots
else:
return None
def mounts(self):
return self.g.mounts()
def mount_all(self):
        def mount_len(mp_dev):
            # sort key: length of the mountpoint path, so parents ("/")
            # are mounted before their children (e.g. "/home")
            return len(mp_dev[0])
        roots = self.os_inspects()
        if roots:
            for root in roots:
                mps = self.g.inspect_get_mountpoints(root)
                mps.sort(key=mount_len)
for mp_dev in mps:
try:
msg = "Mount dev '%s' partitions '%s' to '%s'"
logging.info(msg % (root, mp_dev[1], mp_dev[0]))
self.g.mount(mp_dev[1], mp_dev[0])
except RuntimeError as err_msg:
logging.info("%s (ignored)" % err_msg)
else:
raise exceptions.TestError(
"inspect_vm: no operating systems found")
def umount_all(self):
logging.debug("Umount all device partitions")
if self.mounts():
self.g.umount_all()
def read_file(self, file_name):
"""
read file from the guest disk, return the content of the file
:param file_name: the file you want to read.
"""
try:
self.mount_all()
o = self.g.cat(file_name)
if o:
return o
else:
err_msg = "Can't read file '%s', check is it exist?"
raise exceptions.TestError(err_msg % file_name)
finally:
self.umount_all()
def write_to_image_file(self, file_name, content, w_append=False):
"""
Write content to the file on the guest disk.
        By default all the original content of the file will be overwritten;
        set ``w_append=True`` if you want to append instead.
:param file_name: the file you want to write
:param content: the content you want to write.
:param w_append: append the content or override
"""
try:
try:
self.mount_all()
if w_append:
self.g.write_append(file_name, content)
else:
self.g.write(file_name, content)
except Exception:
raise exceptions.TestError("write '%s' to file '%s' error!"
% (content, file_name))
finally:
self.umount_all()
def replace_image_file_content(self, file_name, find_con, rep_con):
"""
        Replace the parts of the file content that match find_con with
        rep_con. Regular expressions are supported.
:param file_name: the file you want to replace
:param find_con: the original content you want to replace.
:param rep_con: the replace content you want.
"""
try:
self.mount_all()
file_content = self.g.cat(file_name)
if file_content:
file_content_after_replace = re.sub(find_con, rep_con,
file_content)
if file_content != file_content_after_replace:
self.g.write(file_name, file_content_after_replace)
else:
err_msg = "Can't read file '%s', check is it exist?"
raise exceptions.TestError(err_msg % file_name)
finally:
self.umount_all()
def close(self):
"""
Explicitly close the guestfs handle.
"""
if self.g:
self.g.close()
| lmr/avocado-vt | virttest/utils_disk.py | Python | gpl-2.0 | 21,332 |
# This file is part of the Enkel web programming library.
#
# Copyright (C) 2007 Espen Angell Kristiansen ([email protected])
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from unittest import TestCase
from cStringIO import StringIO
from sys import exc_info
from enkel.wansgli.apprunner import run_app, AppError, Response
from enkel.wansgli.testhelpers import unit_case_suite, run_suite
HEAD = "HTTP/1.1 200 OK\r\ncontent-type: text/plain\r\n"
ERRHEAD = "HTTP/1.1 500 ERROR\r\ncontent-type: text/plain\r\n"
def only_header_app(env, start_response):
start_response("200 OK", [("Content-type", "text/plain")])
return list() # return empty list
def simple_app(env, start_response):
start_response("200 OK", [("Content-type", "text/plain")])
return ["Simple app"]
def using_write_app(env, start_response):
""" WSGI app for testing of the write function. """
write = start_response("200 OK", [("Content-type", "text/plain")])
write("Using write")
return []
def mixing_write_app(env, start_response):
""" WSGI app for tesing of mixing using the write function and iterator. """
write = start_response("200 OK", [("Content-type", "text/plain")])
write("Mixing write... ")
return [" ...and iterator."]
def double_response_error_app(env, start_response):
""" WSGI app for testing the situation when an error occurs BEFORE
HTTP headers are sent to the browser and a traceback IS NOT supplied.
This should produce an error, and the same will happen if start_response
is called after HTTP headers are sent. """
start_response("200 OK", [("Content-type", "text/plain")])
start_response("500 ERROR", [("Content-type", "text/plain")])
return list() # return empty list
def double_response_ok_app(env, start_response):
""" WSGI app for testing the situation when an error occurs BEFORE
HTTP headers are sent to the browser and a traceback is supplied.
Should work.
"""
start_response("200 OK", [("Content-type", "text/plain")])
try:
int("jeje")
except ValueError:
start_response("500 ERROR", [("Content-type", "text/plain")],
exc_info())
return list() # return empty list
class DoubleResponseErrInResponse(object):
""" WSGI app for testing the situation when an error occurs AFTER
HTTP headers are sent to the browser and a traceback is supplied.
Should re-raise the ValueError raised when "four" is sent to the
int function.
"""
def __init__(self, env, start_response):
start_response("200 OK", [("Content-type", "text/plain")])
self.it = [1, "2", 3, "four", 5, "6"].__iter__()
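        # "four" is the poison value: int("four") raises ValueError only
        # after earlier items were yielded, i.e. after headers went out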
self.start_response = start_response
def __iter__(self):
for d in self.it:
try:
yield str(int(d)) # will fail on "four"
except ValueError:
self.start_response("500 ERROR",
[("Content-type", "text/plain")],
exc_info())
def noiter_app(env, start_response):
""" An app that does not return an iterator. This is an error,
and should raise AppError. """
start_response("200 OK", [("Content-type", "text/plain")])
return 10
def override_defaultheader(env, start_response):
""" An app that overrides the default HTTP header "server".
This should result in only one "server" header with the new value.
"""
start_response("200 OK", [
("Content-type", "text/plain"),
("Server", "xxx")
])
return []
class TestApprunner(TestCase):
""" Tests the entire apprunner module. """
def setUp(self):
self.buf = StringIO()
self.env = dict(SERVER_PROTOCOL="HTTP/1.1")
self.sr = Response(self.buf, self.env)
def test_only_header(self):
run_app(only_header_app, self.sr)
b = self.buf.getvalue()
self.assert_(b.startswith(HEAD))
def test_simple(self):
run_app(simple_app, self.sr)
b = self.buf.getvalue()
self.assert_(b.startswith(HEAD))
self.assert_(b.endswith("Simple app"))
def test_using_write(self):
run_app(using_write_app, self.sr)
b = self.buf.getvalue()
self.assert_(b.startswith(HEAD))
self.assert_(b.endswith("Using write"))
def test_mixing_write(self):
run_app(mixing_write_app, self.sr)
b = self.buf.getvalue()
self.assert_(b.startswith(HEAD))
self.assert_(b.endswith("Mixing write... ...and iterator."))
def test_double_response_error(self):
self.assertRaises(AppError, run_app,
double_response_error_app, self.sr)
def test_double_response_ok(self):
run_app(double_response_ok_app, self.sr)
b = self.buf.getvalue()
self.assert_(b.startswith(ERRHEAD))
def testDoubleResponseErrInResponse(self):
self.assertRaises(ValueError, run_app,
DoubleResponseErrInResponse, self.sr)
def test_noiter(self):
self.assertRaises(AppError, run_app,
noiter_app, self.sr)
def suite():
return unit_case_suite(TestApprunner)
if __name__ == '__main__':
run_suite(suite())
| espenak/enkel | testsuite/wansgli/apprunner.py | Python | gpl-2.0 | 5,360 |
#!/usr/bin/env python2
from gimpfu import *
import time
import re
def preview (image, delay, loops, force_delay, ignore_hidden, restore_hide):
if not image:
raise "No image given."
layers = image.layers
nlayers = len (layers)
visible = []
length = []
i = 0
while i < nlayers:
visible += [pdb.gimp_item_get_visible (layers [i])]
if visible [i]:
pdb.gimp_item_set_visible (layers [i], False)
name = pdb.gimp_item_get_name (layers [i])
l = None
if not force_delay:
l = re.search ("\([0-9]+ms\)", name)
if l:
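                # shift the match span by (+1, -3) to strip the literal
                # "(" and "ms)" wrappers, keeping only the digits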
l = tuple (map (sum, zip (l.span (), tuple ([+1, -3]))))
l = name [slice (*l)]
if not l:
l = delay
length += [float (l) / 1000.0]
i += 1
j = 0
while j < loops:
while i > 0:
i -= 1
if (not ignore_hidden) or visible [i]:
pdb.gimp_item_set_visible (layers [i], True)
pdb.gimp_displays_flush ()
time.sleep (length [i])
j += 1
        # hide all frames again before the next loop (frames stack up, as
        # in optimized GIFs)
if j < loops:
while i < nlayers:
if (not ignore_hidden) or visible [i]:
pdb.gimp_item_set_visible (layers [i], False)
i += 1
else:
i = nlayers
i = nlayers
if restore_hide:
while i > 0:
i -= 1
if visible [i]:
pdb.gimp_item_set_visible (layers [i], True)
register(
"preview",
"preview",
"Preview the animation of a gif",
"Roger Bongers",
"Roger Bongers",
"2016",
"Preview...",
"*",
[
(PF_IMAGE, "image", "The image to modify", None),
(PF_INT32, "delay", "The default length in ms of each frame", 100),
(PF_INT32, "loops", "The number of times to loop the animation", 1),
(PF_BOOL, "force-delay", "Force the default length on every frame", 0),
(PF_BOOL, "ignore-hidden", "Ignore currently hidden items", 0),
(PF_BOOL, "restore-hide", "Restore the hidden status after preview", 0),
],
[],
preview,
menu = "<Image>/Filters/Animation")
main()
| rbong/gimptools | preview.py | Python | gpl-2.0 | 2,246 |
from __future__ import print_function
from Components.Task import PythonTask, Task, Job, job_manager as JobManager
from Tools.Directories import fileExists
from enigma import eTimer
from os import path
from shutil import rmtree, copy2, move
class DeleteFolderTask(PythonTask):
def openFiles(self, fileList):
self.fileList = fileList
def work(self):
print("[DeleteFolderTask] files ", self.fileList)
errors = []
try:
rmtree(self.fileList)
except Exception as e:
errors.append(e)
if errors:
raise errors[0]
class CopyFileJob(Job):
def __init__(self, srcfile, destfile, name):
Job.__init__(self, _("Copying files"))
cmdline = 'cp -Rf "%s" "%s"' % (srcfile, destfile)
AddFileProcessTask(self, cmdline, srcfile, destfile, name)
class MoveFileJob(Job):
def __init__(self, srcfile, destfile, name):
Job.__init__(self, _("Moving files"))
cmdline = 'mv -f "%s" "%s"' % (srcfile, destfile)
AddFileProcessTask(self, cmdline, srcfile, destfile, name)
class AddFileProcessTask(Task):
def __init__(self, job, cmdline, srcfile, destfile, name):
Task.__init__(self, job, name)
self.setCmdline(cmdline)
self.srcfile = srcfile
self.destfile = destfile
self.ProgressTimer = eTimer()
self.ProgressTimer.callback.append(self.ProgressUpdate)
def ProgressUpdate(self):
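		# poll the destination file size every 5 s and report it as a
		# percentage of the source size; the early return below, which
		# does not restart the timer, is what ends the polling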
if self.srcsize <= 0 or not fileExists(self.destfile, 'r'):
return
self.setProgress(int((path.getsize(self.destfile)/float(self.srcsize))*100))
self.ProgressTimer.start(5000, True)
def prepare(self):
if fileExists(self.srcfile, 'r'):
self.srcsize = path.getsize(self.srcfile)
self.ProgressTimer.start(5000, True)
def afterRun(self):
self.setProgress(100)
self.ProgressTimer.stop()
def copyFiles(fileList, name):
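	# directories and files over ~100 MB go through the JobManager so the
	# user gets an async job with progress; smaller files are copied inline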
for src, dst in fileList:
if path.isdir(src) or int(path.getsize(src))/1000/1000 > 100:
JobManager.AddJob(CopyFileJob(src, dst, name))
else:
copy2(src, dst)
def moveFiles(fileList, name):
for src, dst in fileList:
if path.isdir(src) or int(path.getsize(src))/1000/1000 > 100:
JobManager.AddJob(MoveFileJob(src, dst, name))
else:
move(src, dst)
def deleteFiles(fileList, name):
job = Job(_("Deleting files"))
task = DeleteFolderTask(job, name)
task.openFiles(fileList)
JobManager.AddJob(job)
| atvcaptain/enigma2 | lib/python/Tools/CopyFiles.py | Python | gpl-2.0 | 2,263 |
#! /usr/bin/env python3
import sys
from cx_Freeze import setup, Executable
# Dependencies are automatically detected, but it might need fine tuning.
includefiles = ['windows/libusb-1.0.dll',
('icons/buzzer.png', 'icons/buzzer.png'),
'README.md',
'LICENSE',
'C:\\Windows\\SysWOW64\\msvcr110.dll']
excludes = []
packages = []
buildOptions = {'packages': packages,
'excludes': excludes,
'include_files':includefiles
}
# GUI applications require a different base on Windows (the default is for a
# console application).
base = None
if sys.platform == "win32":
base = "Win32GUI"
executables = [
Executable('pyPardy.py', base=base),
Executable('pyPardyEdit.py', base=base)
]
setup(
name='pyPardy',
#long_description='',
keywords='game jeopardy',
version='0.2',
author='Christian Wichmann',
author_email='[email protected]',
packages=['data', 'gui'],
url='',
license='LICENSE',
description='Jeopardy(tm) game system',
platforms=['any'],
classifiers=[
'Intended Audience :: End Users/Desktop',
'Development Status :: 4 - Beta',
'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',
'Operating System :: OS Independent',
'Natural Language :: English',
'Natural Language :: German',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Games/Entertainment',
],
options=dict(build_exe=buildOptions),
executables=executables, requires=['PyQt4', 'libusb1'],
#data_files=[('libs', 'windows/libusb-1.0.dll'),
# ('icons', 'icons/buzzer.png')],
)
| wichmann/pyPardy | setup.py | Python | gpl-2.0 | 1,810 |
#!/usr/bin/env python3
# Copyright (c) 2008-9 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License, or
# version 3 of the License, or (at your option) any later version. It is
# provided for educational purposes and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
# the GNU General Public License for more details.
from PyQt4.QtCore import (QDate, QString, Qt, SIGNAL, pyqtSignature)
from PyQt4.QtGui import (QApplication, QDialog, QDialogButtonBox)
import moviedata_ans as moviedata
import ui_addeditmoviedlg_ans as ui_addeditmoviedlg
class AddEditMovieDlg(QDialog,
ui_addeditmoviedlg.Ui_AddEditMovieDlg):
def __init__(self, movies, movie=None, parent=None):
super(AddEditMovieDlg, self).__init__(parent)
self.setupUi(self)
self.movies = movies
self.movie = movie
self.acquiredDateEdit.setDisplayFormat(moviedata.DATEFORMAT)
if movie is not None:
self.titleLineEdit.setText(movie.title)
self.yearSpinBox.setValue(movie.year)
self.minutesSpinBox.setValue(movie.minutes)
self.acquiredDateEdit.setDate(movie.acquired)
self.acquiredDateEdit.setEnabled(False)
self.locationLineEdit.setText(movie.location)
self.notesTextEdit.setPlainText(movie.notes)
self.notesTextEdit.setFocus()
self.buttonBox.button(QDialogButtonBox.Ok).setText(
"&Accept")
self.setWindowTitle("My Movies - Edit Movie")
else:
today = QDate.currentDate()
self.acquiredDateEdit.setDateRange(today.addDays(-5),
today)
self.acquiredDateEdit.setDate(today)
self.titleLineEdit.setFocus()
self.on_titleLineEdit_textEdited(QString())
@pyqtSignature("QString")
def on_titleLineEdit_textEdited(self, text):
self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(
not self.titleLineEdit.text().isEmpty())
def accept(self):
title = self.titleLineEdit.text()
year = self.yearSpinBox.value()
minutes = self.minutesSpinBox.value()
location = self.locationLineEdit.text()
notes = self.notesTextEdit.toPlainText()
if self.movie is None:
acquired = self.acquiredDateEdit.date()
self.movie = moviedata.Movie(title, year, minutes,
acquired, location, notes)
self.movies.add(self.movie)
else:
self.movies.updateMovie(self.movie, title, year,
minutes, location, notes)
QDialog.accept(self)
if __name__ == "__main__":
import sys
app = QApplication(sys.argv)
form = AddEditMovieDlg(0)
form.show()
app.exec_()
| paradiseOffice/Bash_and_Cplus-plus | CPP/full_examples/pyqt/chap08/addeditmoviedlg_ans.py | Python | gpl-2.0 | 3,155 |
# -*- coding: UTF-8 -*-
"""
Copyright (C) 2014 smokdpi
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
from t0mm0.common.net import Net
from lib import jsunpack
from urlresolver import common
from urlresolver.plugnplay.interfaces import UrlResolver
from urlresolver.plugnplay.interfaces import PluginSettings
from urlresolver.plugnplay import Plugin
class UsersCloudResolver(Plugin, UrlResolver, PluginSettings):
implements = [UrlResolver, PluginSettings]
name = "userscloud"
domains = ["userscloud.com"]
pattern = '(?://|\.)(userscloud\.com)/(?:embed-)?([0-9a-zA-Z/]+)'
def __init__(self):
p = self.get_setting('priority') or 100
self.priority = int(p)
self.net = Net()
self.user_agent = common.IE_USER_AGENT
self.net.set_user_agent(self.user_agent)
self.headers = {'User-Agent': self.user_agent}
def get_media_url(self, host, media_id):
web_url = self.get_url(host, media_id)
stream_url = None
self.headers['Referer'] = web_url
html = self.net.http_GET(web_url, headers=self.headers).content
r = re.search('>(eval\(function\(p,a,c,k,e,d\).+?)</script>', html, re.DOTALL)
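        # the page wraps its player setup in Dean Edwards' p,a,c,k,e,d
        # JavaScript packer; jsunpack.unpack() reverses it so the stream
        # URLs can be grepped out of the decoded source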
if r:
js_data = jsunpack.unpack(r.group(1))
stream_url = re.findall('<param\s+name="src"\s*value="([^"]+)', js_data)
stream_url += re.findall('file\s*:\s*[\'|\"](.+?)[\'|\"]', js_data)
stream_url = [i for i in stream_url if not i.endswith('.srt')]
if stream_url:
return stream_url[0]
raise UrlResolver.ResolverError('File not found')
def get_url(self, host, media_id):
return 'https://%s/%s' % (host, media_id)
def get_host_and_id(self, url):
r = re.search(self.pattern, url)
if r:
return r.groups()
else:
return False
def valid_url(self, url, host):
return re.search(self.pattern, url) or self.name in host
| igor-rangel7l/igorrangelteste.repository | script.module.urlresolver/lib/urlresolver/plugins/userscloud.py | Python | gpl-2.0 | 2,595 |
# -*- coding: utf-8 -*-
'''
test_qgsatlascomposition.py
--------------------------------------
Date : Oct 2012
Copyright : (C) 2012 by Dr. Hugo Mercier
email : hugo dot mercier at oslandia dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
'''
import qgis # NOQA
import os
import glob
import shutil
import tempfile
from qgis.testing import start_app, unittest
from utilities import unitTestDataPath
from qgis.PyQt.QtCore import QFileInfo, QRectF, qWarning
from qgis.core import (
QgsCategorizedSymbolRenderer,
QgsComposerLabel,
QgsComposerLegend,
QgsComposerMap,
QgsComposition,
QgsCoordinateReferenceSystem,
QgsFeature,
QgsFillSymbol,
QgsFontUtils,
QgsGeometry,
QgsMarkerSymbol,
QgsPointXY,
QgsProject,
QgsRectangle,
QgsRendererCategory,
QgsSingleSymbolRenderer,
QgsVectorLayer,
)
from qgscompositionchecker import QgsCompositionChecker
start_app()
class TestQgsAtlasComposition(unittest.TestCase):
def testCase(self):
self.TEST_DATA_DIR = unitTestDataPath()
tmppath = tempfile.mkdtemp()
for file in glob.glob(os.path.join(self.TEST_DATA_DIR, 'france_parts.*')):
shutil.copy(os.path.join(self.TEST_DATA_DIR, file), tmppath)
vectorFileInfo = QFileInfo(tmppath + "/france_parts.shp")
mVectorLayer = QgsVectorLayer(vectorFileInfo.filePath(), vectorFileInfo.completeBaseName(), "ogr")
QgsProject.instance().addMapLayers([mVectorLayer])
self.layers = [mVectorLayer]
# create composition with composer map
# select epsg:2154
crs = QgsCoordinateReferenceSystem()
crs.createFromSrid(2154)
QgsProject.instance().setCrs(crs)
self.mComposition = QgsComposition(QgsProject.instance())
self.mComposition.setPaperSize(297, 210)
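        # 297 x 210 mm: A4 landscape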
# fix the renderer, fill with green
props = {"color": "0,127,0"}
fillSymbol = QgsFillSymbol.createSimple(props)
renderer = QgsSingleSymbolRenderer(fillSymbol)
mVectorLayer.setRenderer(renderer)
# the atlas map
self.mAtlasMap = QgsComposerMap(self.mComposition, 20, 20, 130, 130)
self.mAtlasMap.setFrameEnabled(True)
self.mAtlasMap.setLayers([mVectorLayer])
self.mComposition.addComposerMap(self.mAtlasMap)
# the atlas
self.mAtlas = self.mComposition.atlasComposition()
self.mAtlas.setCoverageLayer(mVectorLayer)
self.mAtlas.setEnabled(True)
self.mComposition.setAtlasMode(QgsComposition.ExportAtlas)
# an overview
self.mOverview = QgsComposerMap(self.mComposition, 180, 20, 50, 50)
self.mOverview.setFrameEnabled(True)
self.mOverview.overview().setFrameMap(self.mAtlasMap.id())
self.mOverview.setLayers([mVectorLayer])
self.mComposition.addComposerMap(self.mOverview)
nextent = QgsRectangle(49670.718, 6415139.086, 699672.519, 7065140.887)
self.mOverview.setNewExtent(nextent)
# set the fill symbol of the overview map
props2 = {"color": "127,0,0,127"}
fillSymbol2 = QgsFillSymbol.createSimple(props2)
self.mOverview.overview().setFrameSymbol(fillSymbol2)
# header label
self.mLabel1 = QgsComposerLabel(self.mComposition)
self.mComposition.addComposerLabel(self.mLabel1)
self.mLabel1.setText("[% \"NAME_1\" %] area")
self.mLabel1.setFont(QgsFontUtils.getStandardTestFont())
self.mLabel1.adjustSizeToText()
self.mLabel1.setSceneRect(QRectF(150, 5, 60, 15))
qWarning(
"header label font: %s exactMatch:%s" % (self.mLabel1.font().toString(), self.mLabel1.font().exactMatch()))
# feature number label
self.mLabel2 = QgsComposerLabel(self.mComposition)
self.mComposition.addComposerLabel(self.mLabel2)
self.mLabel2.setText("# [%@atlas_featurenumber || ' / ' || @atlas_totalfeatures%]")
self.mLabel2.setFont(QgsFontUtils.getStandardTestFont())
self.mLabel2.adjustSizeToText()
self.mLabel2.setSceneRect(QRectF(150, 200, 60, 15))
qWarning("feature number label font: %s exactMatch:%s" % (
self.mLabel2.font().toString(), self.mLabel2.font().exactMatch()))
self.filename_test()
self.autoscale_render_test()
self.fixedscale_render_test()
self.predefinedscales_render_test()
self.hidden_render_test()
self.legend_test()
self.rotation_test()
shutil.rmtree(tmppath, True)
def filename_test(self):
self.mAtlas.setFilenamePattern("'output_' || @atlas_featurenumber")
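        # @atlas_featurenumber is 1-based, hence the expected name below is
        # "output_%d" % (i + 1)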
self.mAtlas.beginRender()
for i in range(0, self.mAtlas.numFeatures()):
self.mAtlas.prepareForFeature(i)
expected = "output_%d" % (i + 1)
self.assertEqual(self.mAtlas.currentFilename(), expected)
self.mAtlas.endRender()
def autoscale_render_test(self):
self.mAtlasMap.setAtlasDriven(True)
self.mAtlasMap.setAtlasScalingMode(QgsComposerMap.Auto)
self.mAtlasMap.setAtlasMargin(0.10)
self.mAtlas.beginRender()
for i in range(0, 2):
self.mAtlas.prepareForFeature(i)
self.mLabel1.adjustSizeToText()
checker = QgsCompositionChecker('atlas_autoscale%d' % (i + 1), self.mComposition)
checker.setControlPathPrefix("atlas")
myTestResult, myMessage = checker.testComposition(0, 200)
assert myTestResult
self.mAtlas.endRender()
self.mAtlasMap.setAtlasDriven(False)
self.mAtlasMap.setAtlasScalingMode(QgsComposerMap.Fixed)
self.mAtlasMap.setAtlasMargin(0)
def fixedscale_render_test(self):
self.mAtlasMap.setNewExtent(QgsRectangle(209838.166, 6528781.020, 610491.166, 6920530.620))
self.mAtlasMap.setAtlasDriven(True)
self.mAtlasMap.setAtlasScalingMode(QgsComposerMap.Fixed)
self.mAtlas.beginRender()
for i in range(0, 2):
self.mAtlas.prepareForFeature(i)
self.mLabel1.adjustSizeToText()
checker = QgsCompositionChecker('atlas_fixedscale%d' % (i + 1), self.mComposition)
checker.setControlPathPrefix("atlas")
myTestResult, myMessage = checker.testComposition(0, 200)
assert myTestResult
self.mAtlas.endRender()
def predefinedscales_render_test(self):
self.mAtlasMap.setNewExtent(QgsRectangle(209838.166, 6528781.020, 610491.166, 6920530.620))
self.mAtlasMap.setAtlasDriven(True)
self.mAtlasMap.setAtlasScalingMode(QgsComposerMap.Predefined)
scales = [1800000, 5000000]
self.mAtlas.setPredefinedScales(scales)
for i, s in enumerate(self.mAtlas.predefinedScales()):
assert s == scales[i]
self.mAtlas.beginRender()
for i in range(0, 2):
self.mAtlas.prepareForFeature(i)
self.mLabel1.adjustSizeToText()
checker = QgsCompositionChecker('atlas_predefinedscales%d' % (i + 1), self.mComposition)
checker.setControlPathPrefix("atlas")
myTestResult, myMessage = checker.testComposition(0, 200)
assert myTestResult
self.mAtlas.endRender()
def hidden_render_test(self):
self.mAtlasMap.setNewExtent(QgsRectangle(209838.166, 6528781.020, 610491.166, 6920530.620))
self.mAtlasMap.setAtlasScalingMode(QgsComposerMap.Fixed)
self.mAtlas.setHideCoverage(True)
self.mAtlas.beginRender()
for i in range(0, 2):
self.mAtlas.prepareForFeature(i)
self.mLabel1.adjustSizeToText()
checker = QgsCompositionChecker('atlas_hiding%d' % (i + 1), self.mComposition)
checker.setControlPathPrefix("atlas")
myTestResult, myMessage = checker.testComposition(0, 200)
            assert myTestResult, myMessage
self.mAtlas.endRender()
self.mAtlas.setHideCoverage(False)
def sorting_render_test(self):
self.mAtlasMap.setNewExtent(QgsRectangle(209838.166, 6528781.020, 610491.166, 6920530.620))
self.mAtlasMap.setAtlasScalingMode(QgsComposerMap.Fixed)
self.mAtlas.setHideCoverage(False)
self.mAtlas.setSortFeatures(True)
self.mAtlas.setSortKeyAttributeIndex(4) # departement name
self.mAtlas.setSortAscending(False)
self.mAtlas.beginRender()
for i in range(0, 2):
self.mAtlas.prepareForFeature(i)
self.mLabel1.adjustSizeToText()
checker = QgsCompositionChecker('atlas_sorting%d' % (i + 1), self.mComposition)
checker.setControlPathPrefix("atlas")
myTestResult, myMessage = checker.testComposition(0, 200)
            assert myTestResult, myMessage
self.mAtlas.endRender()
def filtering_render_test(self):
self.mAtlasMap.setNewExtent(QgsRectangle(209838.166, 6528781.020, 610491.166, 6920530.620))
self.mAtlasMap.setAtlasScalingMode(QgsComposerMap.Fixed)
self.mAtlas.setHideCoverage(False)
self.mAtlas.setSortFeatures(False)
self.mAtlas.setFilterFeatures(True)
        self.mAtlas.setFeatureFilter("substr(NAME_1,1,1)='P'") # select only 'Pays de la Loire'
self.mAtlas.beginRender()
for i in range(0, 1):
self.mAtlas.prepareForFeature(i)
self.mLabel1.adjustSizeToText()
checker = QgsCompositionChecker('atlas_filtering%d' % (i + 1), self.mComposition)
checker.setControlPathPrefix("atlas")
myTestResult, myMessage = checker.testComposition(0, 200)
            assert myTestResult, myMessage
self.mAtlas.endRender()
def legend_test(self):
self.mAtlasMap.setAtlasDriven(True)
self.mAtlasMap.setAtlasScalingMode(QgsComposerMap.Auto)
self.mAtlasMap.setAtlasMargin(0.10)
# add a point layer
ptLayer = QgsVectorLayer("Point?crs=epsg:4326&field=attr:int(1)&field=label:string(20)", "points", "memory")
pr = ptLayer.dataProvider()
f1 = QgsFeature(1)
f1.initAttributes(2)
f1.setAttribute(0, 1)
f1.setAttribute(1, "Test label 1")
f1.setGeometry(QgsGeometry.fromPoint(QgsPointXY(-0.638, 48.954)))
f2 = QgsFeature(2)
f2.initAttributes(2)
f2.setAttribute(0, 2)
f2.setAttribute(1, "Test label 2")
f2.setGeometry(QgsGeometry.fromPoint(QgsPointXY(-1.682, 48.550)))
pr.addFeatures([f1, f2])
# categorized symbology
r = QgsCategorizedSymbolRenderer("attr", [QgsRendererCategory(1, QgsMarkerSymbol.createSimple({"color": "255,0,0"}), "red"),
QgsRendererCategory(2, QgsMarkerSymbol.createSimple({"color": "0,0,255"}), "blue")])
ptLayer.setRenderer(r)
QgsProject.instance().addMapLayer(ptLayer)
# add the point layer to the map settings
layers = self.layers
layers = [ptLayer] + layers
self.mAtlasMap.setLayers(layers)
self.mOverview.setLayers(layers)
# add a legend
legend = QgsComposerLegend(self.mComposition)
legend.moveBy(200, 100)
# sets the legend filter parameter
legend.setComposerMap(self.mAtlasMap)
legend.setLegendFilterOutAtlas(True)
self.mComposition.addComposerLegend(legend)
self.mAtlas.beginRender()
self.mAtlas.prepareForFeature(0)
self.mLabel1.adjustSizeToText()
checker = QgsCompositionChecker('atlas_legend', self.mComposition)
myTestResult, myMessage = checker.testComposition()
        assert myTestResult, myMessage
self.mAtlas.endRender()
# restore state
self.mAtlasMap.setLayers([layers[1]])
self.mComposition.removeComposerItem(legend)
QgsProject.instance().removeMapLayer(ptLayer.id())
def rotation_test(self):
# We will create a polygon layer with a rotated rectangle.
# Then we will make it the object layer for the atlas,
# rotate the map and test that the bounding rectangle
# is smaller than the bounds without rotation.
polygonLayer = QgsVectorLayer('Polygon', 'test_polygon', 'memory')
poly = QgsFeature(polygonLayer.pendingFields())
points = [(10, 15), (15, 10), (45, 40), (40, 45)]
poly.setGeometry(QgsGeometry.fromPolygon([[QgsPointXY(x[0], x[1]) for x in points]]))
polygonLayer.dataProvider().addFeatures([poly])
QgsProject.instance().addMapLayer(polygonLayer)
# Recreating the composer locally
composition = QgsComposition(QgsProject.instance())
composition.setPaperSize(297, 210)
# the atlas map
atlasMap = QgsComposerMap(composition, 20, 20, 130, 130)
atlasMap.setFrameEnabled(True)
atlasMap.setLayers([polygonLayer])
atlasMap.setNewExtent(QgsRectangle(0, 0, 100, 50))
composition.addComposerMap(atlasMap)
# the atlas
atlas = composition.atlasComposition()
atlas.setCoverageLayer(polygonLayer)
atlas.setEnabled(True)
composition.setAtlasMode(QgsComposition.ExportAtlas)
atlasMap.setAtlasDriven(True)
atlasMap.setAtlasScalingMode(QgsComposerMap.Auto)
atlasMap.setAtlasMargin(0.0)
# Testing
atlasMap.setMapRotation(0.0)
atlas.firstFeature()
nonRotatedExtent = QgsRectangle(atlasMap.currentMapExtent())
atlasMap.setMapRotation(45.0)
atlas.firstFeature()
rotatedExtent = QgsRectangle(atlasMap.currentMapExtent())
assert rotatedExtent.width() < nonRotatedExtent.width() * 0.9
assert rotatedExtent.height() < nonRotatedExtent.height() * 0.9
QgsProject.instance().removeMapLayer(polygonLayer)
if __name__ == '__main__':
unittest.main()
| GeoCat/QGIS | tests/src/python/test_qgsatlascomposition.py | Python | gpl-2.0 | 14,506 |
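# Codeforces "Fox and Snake": print an a-by-b picture of a snake, where every
# even row is solid '#' and each odd row carries the turn, alternating
# between the right and the left edge.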
a, b = map(int, raw_input().split())
for i in range(a):
    if i % 2 == 0:
        # solid body row
        print '#' * b
    elif (i / 2) % 2 == 0:
        # turn at the right edge
        print '.' * (b - 1) + '#'
    else:
        # turn at the left edge
        print '#' + '.' * (b - 1)
| Sarthak30/Codeforces | fox_and_snake.py | Python | gpl-2.0 | 361 |
# program template for Spaceship
import simplegui
import math
import random
# globals for user interface
WIDTH = 800
HEIGHT = 600
score = 0
lives = 3
time = 0
game_mode = 0 # 0 = splash screen, 1 = game mode, 2 = game over
ANGULAR_ACCEL_SCALAR = math.pi / 800.0
ANGULAR_FRICTION = 0.95
LINEAR_ACCEL_SCALAR = 0.25
LINEAR_FRICTION = 0.99
RANDOM_VEL_MAX = 4.0
RANDOM_VEL_MIN = 0.5
RANDOM_ANG_MAX = math.pi / 100.0
BULLET_VEL = 10
SMALL_ROCK_SPEED = 3
class ImageInfo:
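    """Static metadata for a sprite image: drawing center and size, collision
    radius, lifespan in frames, and whether the image is an animation strip."""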
def __init__(self, center, size, radius = 0, lifespan = None, animated = False):
self.center = center
self.size = size
self.radius = radius
if lifespan:
self.lifespan = lifespan
else:
self.lifespan = float('inf')
self.animated = animated
def get_center(self):
return self.center
def get_size(self):
return self.size
def get_radius(self):
return self.radius
def get_lifespan(self):
return self.lifespan
def get_animated(self):
return self.animated
# art assets created by Kim Lathrop, may be freely re-used in non-commercial projects, please credit Kim
# debris images - debris1_brown.png, debris2_brown.png, debris3_brown.png, debris4_brown.png
# debris1_blue.png, debris2_blue.png, debris3_blue.png, debris4_blue.png, debris_blend.png
debris_info = ImageInfo([320, 240], [640, 480])
debris_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/debris2_blue.png")
# nebula images - nebula_brown.png, nebula_blue.png
nebula_info = ImageInfo([400, 300], [800, 600])
nebula_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/nebula_blue.f2014.png")
# splash image
splash_info = ImageInfo([200, 150], [400, 300])
splash_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/splash.png")
# ship image
ship_info = ImageInfo([45, 45], [90, 90], 35)
ship_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/double_ship.png")
# missile image - shot1.png, shot2.png, shot3.png
missile_info = ImageInfo([5,5], [10, 10], 3, 75)
missile_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/shot2.png")
# asteroid images - asteroid_blue.png, asteroid_brown.png, asteroid_blend.png
asteroid_info = ImageInfo([45, 45], [90, 90], 40)
asteroid_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/asteroid_blue.png")
# animated explosion - explosion_orange.png, explosion_blue.png, explosion_blue2.png, explosion_alpha.png
explosion_info = ImageInfo([64, 64], [128, 128], 17, 24, True)
explosion_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/explosion_alpha.png")
# sound assets purchased from sounddogs.com, please do not redistribute
soundtrack = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/soundtrack.mp3")
missile_sound = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/missile.mp3")
missile_sound.set_volume(.5)
ship_thrust_sound = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/thrust.mp3")
explosion_sound = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/explosion.mp3")
## CC 3.0 sound file by Jesus Lastra, http://opengameart.org/content/8-bit-pickup-1
extra_life_sound = simplegui.load_sound("http://mwales.net/junk/SFX_Pickup_44.mp3")
# helper functions to handle transformations
def angle_to_vector(ang):
return [math.cos(ang), math.sin(ang)]
def vector_to_angle(v):
    # inverse of angle_to_vector: atan2 expects (y, x)
    return math.atan2(v[1], v[0])
def vector_scale(vec, scale):
return [vec[0] * scale, vec[1] * scale]
def vector_add(vec1, vec2):
return [vec1[0] + vec2[0], vec1[1] + vec2[1]]
def dist(p,q):
return math.sqrt((p[0] - q[0]) ** 2+(p[1] - q[1]) ** 2)
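# Quick sanity checks for the helpers above (the values follow directly from
# the definitions):
#   angle_to_vector(0)         -> [1.0, 0.0]
#   vector_scale([1, 2], 3)    -> [3, 6]
#   vector_add([1, 2], [3, 4]) -> [4, 6]
#   dist([0, 0], [3, 4])       -> 5.0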
def smallRockExplode(rockInstance):
# Return an explosion sprite
explodeObj = Sprite(rockInstance.get_position(),
(0,0),
random.random() * 2 * math.pi,
0,
explosion_image,
explosion_info,
explosion_sound,
relSize = 0.3)
return explodeObj
def rockExplode(rockInstance, deathBullet):
# Return an explosion sprite
explodeObj = Sprite(rockInstance.get_position(),
(0,0),
random.random() * 2 * math.pi,
0,
explosion_image,
explosion_info,
explosion_sound)
# Create 4 smaller rocks that explode away based on angle bullet came in at
bulletAngle = vector_to_angle(deathBullet.get_velocity())
smallRockAngle = bulletAngle + 45.0 / 360.0 * math.pi * 2.0
for i in range(0,4):
smallRockAngle += math.pi / 2.0
smallRockVel = angle_to_vector(smallRockAngle)
smallRockVel = vector_scale(smallRockVel, SMALL_ROCK_SPEED)
smallRockVel = vector_add(smallRockVel, rockInstance.get_velocity())
randomAngVel = random.random() * RANDOM_ANG_MAX * 4.0 - RANDOM_ANG_MAX
smallRock = Sprite(rockInstance.get_position(),
smallRockVel,
random.random() * 2 * math.pi,
randomAngVel,
asteroid_image,
asteroid_info,
relSize = 0.5)
smallRockList.append(smallRock)
return explodeObj
# Ship class
class Ship:
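    """The player's ship: integrates thrust and rotation physics, owns its
    bullets (three selectable weapon types) and handles collision, scoring
    and respawn bookkeeping."""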
def __init__(self, pos, vel, angle, image, info, bulletTimer):
self.pos = [pos[0],pos[1]]
self.vel = [vel[0],vel[1]]
self.thrust = False
self.angle = angle
self.angle_vel = 0
self.angle_acc = 0
self.image = image
self.image_center = info.get_center()
self.image_size = info.get_size()
self.radius = info.get_radius()
self.bullet_timer = bulletTimer
self.spawn_bullets = False
self.bullets = []
self.bullet_type = 0
self.weapon_name = {}
self.weapon_name[0] = "Speed Shot"
self.weapon_name[1] = "Spread Shot"
self.weapon_name[2] = "Power Shot"
def get_weapon_name(self):
return self.weapon_name[self.bullet_type]
def draw(self,canvas):
if self.thrust:
canvas.draw_image(self.image,
(self.image_center[0] + self.image_size[0], self.image_center[1]),
self.image_size,
self.pos,
self.image_size,
self.angle)
else:
canvas.draw_image(self.image,
self.image_center,
self.image_size,
self.pos,
self.image_size,
self.angle)
for singleBullets in self.bullets:
singleBullets.draw(canvas)
def update(self):
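        # Integrate position (wrapping at the screen edges), apply thrust and
        # friction, integrate the angle, then age out expired bullets.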
self.pos = vector_add(self.pos, self.vel)
# Position should wrap around the screen
self.pos = [self.pos[0] % WIDTH, self.pos[1] % HEIGHT]
# Handle ship thrust
if self.thrust:
accel = angle_to_vector(self.angle)
accel = vector_scale(accel, LINEAR_ACCEL_SCALAR)
self.vel = vector_add(self.vel, accel)
# Friction against motion
self.vel = vector_scale(self.vel, LINEAR_FRICTION)
self.angle = self.angle + self.angle_vel
self.angle_vel = self.angle_vel + self.angle_acc
self.angle_vel = self.angle_vel * ANGULAR_FRICTION
oldBullets = []
for singleBullets in self.bullets:
if singleBullets.update():
oldBullets.append(singleBullets)
for bulletToDelete in oldBullets:
self.bullets.remove(bulletToDelete)
def process_collisions(self, rockList, smallRockList, explosionList):
global score, lives, extra_life_sound
# Don't change containers while looping through them
shipExplodes = False
        rockListCopy = list(rockList)
        bulletListCopy = list(self.bullets)
for singleRock in rockListCopy:
for singleBullet in bulletListCopy:
# Collisions of bullets and rocks
if singleBullet.collide(singleRock):
# delete the bullet
self.bullets.remove(singleBullet)
# delete and explode the rock
if singleRock in rockList:
rockList.remove(singleRock)
explosionList.append(rockExplode(singleRock, singleBullet))
print "Rock goes boom"
# increase score , 1-up consideration
self.scorePoint()
# Collisions of rock and ship
if singleRock.collide(self):
#print "Ship goes boom"
shipExplodes = True
        smallRockListCopy = list(smallRockList)
        bulletListCopy = list(self.bullets)
for singleSmallRock in smallRockListCopy:
for singleBullet in bulletListCopy:
if singleBullet.collide(singleSmallRock):
# delete the bullet
self.bullets.remove(singleBullet)
# delete and explode the rock
if singleSmallRock in smallRockList:
smallRockList.remove(singleSmallRock)
explosionList.append(smallRockExplode(singleSmallRock))
print "Small Rock goes boom"
# increase score , 1-up consideration
self.scorePoint()
# Collisions of rock and ship
if singleSmallRock.collide(self):
#print "Ship goes boom"
shipExplodes = True
if shipExplodes:
self.attemptRespawn(rockList, explosionList)
def scorePoint(self):
global lives, score
score += 1
if ((score % 100) == 0):
print "1-up"
lives += 1
extra_life_sound.rewind()
extra_life_sound.play()
def attemptRespawn(self, rockList, explosionList):
global lives
lives -= 1
if (lives == 0):
game_over()
return
# Find a safe spot to respawn
bestLocation = []
bestLocationClosestRock = 0
for respawnX in range( int(WIDTH / 10), int(WIDTH * .9), 10):
for respawnY in range( int(HEIGHT / 10), int(HEIGHT * .9), 10):
closestRock = WIDTH * HEIGHT
potentialLocation = [respawnX, respawnY]
# Determine at this location how close closest rock is
for singleRock in rockList:
distFromRock = dist(potentialLocation, singleRock.get_position())
if (distFromRock < closestRock):
closestRock = distFromRock
for singleRock in smallRockList:
distFromRock = dist(potentialLocation, singleRock.get_position())
if (distFromRock < closestRock):
closestRock = distFromRock
# If the closest rock is farther away than other locations, use this location
if (closestRock > bestLocationClosestRock):
bestLocationClosestRock = closestRock
bestLocation = potentialLocation
# Move ship to new location
shipExplosion = Sprite(self.pos,
(0,0),
random.random() * 2 * math.pi,
0,
explosion_image,
explosion_info,
explosion_sound,
relSize = 3.0)
explosionList.append(shipExplosion)
self.pos = bestLocation
self.vel = [0,0]
self.angle_vel = 0
# Just pass in -1 to rotate right, +1 to rotate left
def rotate(self, angularAcceleration):
self.angle_acc = angularAcceleration * ANGULAR_ACCEL_SCALAR
#print "Alpha =" + str(self.angle_acc)
# Just pass in True to thrust, False to not thrust
def setThrust(self, thrustBool):
global ship_thrust_sound
self.thrust = thrustBool
if thrustBool:
ship_thrust_sound.rewind()
ship_thrust_sound.play()
else:
ship_thrust_sound.pause()
def startShooting(self):
        self.spawn_bullets = True
self.bullet_timer.start()
self.spawn_bullet()
def stopShooting(self):
self.spawn_bullets = False
self.bullet_timer.stop()
def change_bullet_type(self):
self.bullet_type = (self.bullet_type + 1) % 3
def set_bullet_type(self, bulletType):
self.bullet_type = bulletType % 3
def get_bullet_type(self):
return self.bullet_type
def spawn_bullet(self):
if (self.bullet_type == 0):
# speed shot
self.make_bullet()
elif (self.bullet_type == 1):
# spread
self.make_bullet(relSpeed=0.5)
self.make_bullet(relAngle=-math.pi * 2 * 30.0 / 360.0,
relSpeed=0.5)
self.make_bullet(relAngle=math.pi * 2 * 30.0 / 360.0,
relSpeed=0.5)
else:
# big bullet
self.make_bullet(relSpeed=0.25,
relSize=3.0,
relLifetime=5.0)
curDirection = angle_to_vector(self.angle)
recoil = vector_scale(curDirection, -1.0)
self.vel = vector_add(self.vel, recoil)
def make_bullet(self, relAngle=0, relSpeed=1.0, relSize=1.0, relLifetime=1.0):
        global missile_sound
bulletPos = angle_to_vector(self.angle)
bulletPos = vector_scale(bulletPos, self.image_size[0] / 2)
bulletPos = vector_add(self.pos, bulletPos)
bulletVel = angle_to_vector(self.angle + relAngle)
bulletVel = vector_scale(bulletVel, BULLET_VEL * relSpeed)
bulletVel = vector_add(bulletVel, self.vel)
bulletObj = Sprite(bulletPos,
bulletVel,
self.angle,
0,
missile_image,
missile_info,
missile_sound,
relSize,
relLifetime)
self.bullets.append(bulletObj)
def get_position(self):
return self.pos
def reset(self):
self.pos = [WIDTH / 2, HEIGHT / 2]
self.vel = [0,0]
self.angle = 0
self.bullets = []
def get_radius(self):
return self.radius
def get_velocity(self):
return self.vel
# Sprite class
class Sprite:
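    """A generic moving object -- rock, bullet or explosion -- with linear and
    angular velocity, an optional spawn sound and a limited lifespan
    (update() returns True once the sprite has aged out)."""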
def __init__(self, pos, vel, ang, ang_vel, image, info, sound = None, relSize=1.0, relLifetime=1.0):
self.pos = [pos[0],pos[1]]
self.vel = [vel[0],vel[1]]
self.angle = ang
self.angle_vel = ang_vel
self.image = image
self.image_center = info.get_center()
self.image_size = info.get_size()
self.draw_size = vector_scale(self.image_size, relSize)
self.radius = info.get_radius() * relSize
self.lifespan = info.get_lifespan() * relLifetime
self.animated = info.get_animated()
self.age = 0
if sound:
sound.rewind()
sound.play()
def draw(self, canvas):
if self.animated:
frameCenter = vector_add(self.image_center, [self.image_size[0] * self.age,0])
canvas.draw_image(self.image,
frameCenter,
self.image_size,
self.pos,
self.draw_size,
self.angle)
else:
canvas.draw_image(self.image,
self.image_center,
self.image_size,
self.pos,
self.draw_size,
self.angle)
def update(self):
self.pos = vector_add(self.pos, self.vel)
# Position should wrap around the screen
self.pos = [self.pos[0] % WIDTH, self.pos[1] % HEIGHT]
self.angle = self.angle + self.angle_vel
# Age out?
self.age += 1
return (self.age > self.lifespan)
def collide(self, otherObject):
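        # Circle-vs-circle test; the 0.9 factor shrinks the combined radii a
        # little so a collision only registers on clearly visible overlap.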
currentDistOfCenters = dist(otherObject.get_position(),
self.pos)
minSafeDistance = (otherObject.get_radius() + \
self.radius) * 0.9
return (currentDistOfCenters < minSafeDistance)
def get_position(self):
return self.pos
def get_radius(self):
return self.radius
def get_velocity(self):
return self.vel
def process_sprites(canvas):
global explodeList
# draw ship and sprites
my_ship.draw(canvas)
for singleRock in rockList:
singleRock.draw(canvas)
for smallRock in smallRockList:
smallRock.draw(canvas)
# update ship and sprites
my_ship.update()
for singleRock in rockList:
singleRock.update()
for smallRock in smallRockList:
smallRock.update()
# update explosions
    splodeCopy = list(explodeList)
for singleSplosion in splodeCopy:
singleSplosion.draw(canvas)
if singleSplosion.update():
explodeList.remove(singleSplosion)
my_ship.process_collisions(rockList, smallRockList, explodeList)
def draw(canvas):
global time
    # animate background
time += 1
wtime = (time / 4) % WIDTH
center = debris_info.get_center()
size = debris_info.get_size()
canvas.draw_image(nebula_image, nebula_info.get_center(), nebula_info.get_size(), [WIDTH / 2, HEIGHT / 2], [WIDTH, HEIGHT])
canvas.draw_image(debris_image, center, size, (wtime - WIDTH / 2, HEIGHT / 2), (WIDTH, HEIGHT))
canvas.draw_image(debris_image, center, size, (wtime + WIDTH / 2, HEIGHT / 2), (WIDTH, HEIGHT))
if game_mode == 1:
process_sprites(canvas)
if ( (game_mode == 1) or (game_mode == 2) ):
canvas.draw_text("Score: " + str(score),
(WIDTH - 250,60),
30,
'White')
canvas.draw_text("Lives: " + str(lives),
(150,60),
30,
'White')
canvas.draw_text("Weapon: " + my_ship.get_weapon_name(),
(WIDTH-400, HEIGHT - 50),
25,
'White',
'monospace')
if game_mode == 0:
canvas.draw_image(splash_image,
splash_info.get_center(),
splash_info.get_size(),
[WIDTH / 2, HEIGHT / 2],
splash_info.get_size())
# timer handler that spawns a rock
def rock_spawner(recurseDepth = 10):
global rockList
if (len(rockList) > 12):
print "Too many rocks"
return
randomX = random.choice(range(0, WIDTH))
randomY = random.choice(range(0, HEIGHT))
#print "Rock + " + str(recurseDepth) + " dist = " + str(dist(my_ship.get_position(), [randomX, randomY]))
if (dist(my_ship.get_position(), [randomX, randomY]) < 150):
print "too close for a rock"
if recurseDepth == 0:
return
else:
rock_spawner(recurseDepth - 1)
return
randomVel = angle_to_vector(random.random() * math.pi * 2.0)
randomVel = vector_scale(randomVel, random.random() * (RANDOM_VEL_MAX - RANDOM_VEL_MIN) + RANDOM_VEL_MIN)
randomAngVel = random.random() * RANDOM_ANG_MAX * 2.0 - RANDOM_ANG_MAX
#print "Spawn rock: [" + str(randomX) + "," + str(randomY) + "] v=" + \
# str(randomVel) + " Alpha=" + str(randomAngVel)
spawnRock = Sprite([randomX, randomY],
randomVel,
random.random() * math.pi * 2.0,
randomAngVel,
asteroid_image,
asteroid_info)
rockList.append(spawnRock)
def bullet_spawner():
global my_ship
my_ship.spawn_bullet()
def key_down_handler(key):
global my_ship, game_mode
if (game_mode == 1):
if ( (key == simplegui.KEY_MAP['left']) or (key == simplegui.KEY_MAP['a']) ):
my_ship.rotate(-1)
elif ( (key == simplegui.KEY_MAP['right']) or (key == simplegui.KEY_MAP['d']) ):
my_ship.rotate(1)
elif ( (key == simplegui.KEY_MAP['up']) or (key == simplegui.KEY_MAP['w']) ):
my_ship.setThrust(True)
elif ( (key == simplegui.KEY_MAP['down']) or (key == simplegui.KEY_MAP['s']) ):
pass
elif (key == simplegui.KEY_MAP['space']):
my_ship.startShooting()
elif (key == simplegui.KEY_MAP['1']):
pass
elif (key == simplegui.KEY_MAP['2']):
pass
elif (key == simplegui.KEY_MAP['3']):
pass
elif (game_mode == 0):
if (key == simplegui.KEY_MAP['space']):
start_game()
else:
if (key == simplegui.KEY_MAP['space']):
game_mode = 0
def key_up_handler(key):
global my_ship
if ( (key == simplegui.KEY_MAP['left']) or (key == simplegui.KEY_MAP['a']) ):
my_ship.rotate(0)
elif ( (key == simplegui.KEY_MAP['right']) or (key == simplegui.KEY_MAP['d']) ):
my_ship.rotate(0)
elif ( (key == simplegui.KEY_MAP['up']) or (key == simplegui.KEY_MAP['w']) ):
my_ship.setThrust(False)
elif ( (key == simplegui.KEY_MAP['down']) or (key == simplegui.KEY_MAP['s']) ):
my_ship.change_bullet_type()
elif (key == simplegui.KEY_MAP['space']):
my_ship.stopShooting()
elif (key == simplegui.KEY_MAP['1']):
my_ship.set_bullet_type(0)
elif (key == simplegui.KEY_MAP['2']):
my_ship.set_bullet_type(1)
elif (key == simplegui.KEY_MAP['3']):
my_ship.set_bullet_type(2)
def game_over():
global my_ship, rockList, smallRockList, timer, game_mode, soundtrack
rockList = []
smallRockList = []
timer.stop()
game_mode = 2
soundtrack.pause()
def start_game():
global timer, game_mode, lives, score, soundtrack
my_ship.reset()
timer.start()
game_mode = 1
lives = 3
score = 0
soundtrack.rewind()
soundtrack.play()
def mouse_handler(position):
if (game_mode == 0):
start_game()
# initialize frame
frame = simplegui.create_frame("Asteroids", WIDTH, HEIGHT)
frame.set_keydown_handler(key_down_handler)
frame.set_keyup_handler(key_up_handler)
frame.set_mouseclick_handler(mouse_handler)
frame.add_label("A/D or Left/Right to rotate")
frame.add_label("W or Up to thrust")
frame.add_label("S or Down to change weapon")
frame.add_label("1,2,3 are weapon hot key")
# initialize ship and two sprites
bulletSpawnerTimer = simplegui.create_timer(200, bullet_spawner)
my_ship = Ship([WIDTH / 2, HEIGHT / 2], [0, 0], math.pi, ship_image, ship_info, bulletSpawnerTimer)
rockList = []
smallRockList = []
explodeList = []
# register handlers
frame.set_draw_handler(draw)
timer = simplegui.create_timer(1000.0, rock_spawner)
# get things rolling
frame.start()
| mwales/education | InteractivePython/asteroids.py | Python | gpl-2.0 | 25,069 |
"""
AUTHOR: Peter Collins, 2005.
This software is Copyright (C) 2004-2008 Bristol University
and is released under the GNU General Public License version 2.
MODULE: RunHill
PURPOSE:
A sample setup and configuration for the normalization algorithms.
NOTES:
See RunConfig.py for configuration options
"""
import sys
import RunConfig
degree = 6
if len(sys.argv)>1:
degree = int(sys.argv[1])
# pull things into the global context for profile
# from RunConfig import run_nf
# degree 6 runs in about 2m, 8 in 20m, 10 in 2h
config = { "tolerance" : 5.0e-14 , "degree" : degree , "system" : "Hill" ,
"do_stream" : False ,
"compute_diagonalisation" : True ,
"run_normal_form_python" : False ,
"run_normal_form_cpp" : True }
RunConfig.NfConfig(config).run_examp()
# Now do a python run if degree is < 7
config["compute_diagonalisation"] = False
config["run_normal_form_python"] = True
config["run_normal_form_cpp"] = False
if degree < 7:
RunConfig.NfConfig(config).run_examp()
| Peter-Collins/NormalForm | src/config-run/RunHill.py | Python | gpl-2.0 | 1,029 |
from Plugins.Plugin import PluginDescriptor
from Screens.PluginBrowser import *
from Screens.Ipkg import Ipkg
from Screens.HarddiskSetup import HarddiskSetup
from Components.ProgressBar import ProgressBar
from Components.SelectionList import SelectionList
from Screens.NetworkSetup import *
from enigma import *
from Screens.Standby import *
from Screens.LogManager import *
from Screens.MessageBox import MessageBox
from Plugins.SystemPlugins.SoftwareManager.Flash_online import FlashOnline
from Components.ActionMap import ActionMap, NumberActionMap, HelpableActionMap
from Screens.Screen import Screen
from Screens.TaskView import JobView
from Components.Task import Task, Job, job_manager, Condition
from GlobalActions import globalActionMap
from Screens.ChoiceBox import ChoiceBox
from Tools.BoundFunction import boundFunction
from Tools.LoadPixmap import LoadPixmap
from Tools.Directories import resolveFilename, SCOPE_CURRENT_SKIN, SCOPE_PLUGINS
from Components.MenuList import MenuList
from Components.FileList import FileList
from Components.Label import Label
from Components.ScrollLabel import ScrollLabel
from Components.Pixmap import Pixmap
from Components.config import ConfigSubsection, ConfigInteger, ConfigText, getConfigListEntry, ConfigSelection, ConfigIP, ConfigYesNo, ConfigSequence, ConfigNumber, NoSave, ConfigEnableDisable, configfile
from Components.ConfigList import ConfigListScreen, ConfigList
from Components.Sources.StaticText import StaticText
from Components.Sources.Progress import Progress
from Components.Button import Button
from Components.ActionMap import ActionMap
from Components.SystemInfo import SystemInfo
from Screens.VirtualKeyBoard import VirtualKeyBoard
from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest
from OPENDROID.OscamSmartcard import *
from enigma import eConsoleAppContainer
from Tools.Directories import fileExists
from Tools.Downloader import downloadWithProgress
from boxbranding import getBoxType, getMachineName, getMachineBrand, getBrandOEM
from enigma import getDesktop
from Screens.InputBox import PinInput
import string
from random import Random
import os
import sys
import re
font = 'Regular;16'
import ServiceReference
import time
import datetime
inOPD_panel = None
config.softcam = ConfigSubsection()
config.softcam.actCam = ConfigText(visible_width=200)
config.softcam.actCam2 = ConfigText(visible_width=200)
config.softcam.waittime = ConfigSelection([('0',_("dont wait")),('1',_("1 second")), ('5',_("5 seconds")),('10',_("10 seconds")),('15',_("15 seconds")),('20',_("20 seconds")),('30',_("30 seconds"))], default='15')
if os.path.isfile('/usr/lib/enigma2/python/Plugins/Extensions/MultiQuickButton/plugin.pyo') is True:
try:
from Plugins.Extensions.MultiQuickButton.plugin import *
except:
pass
from OPENDROID.BluePanel import *
from OPENDROID.CronManager import *
from OPENDROID.ScriptRunner import *
from OPENDROID.MountManager import *
from OPENDROID.SwapManager import Swap, SwapAutostart
from OPENDROID.SoftwarePanel import SoftwarePanel
from Plugins.SystemPlugins.SoftwareManager.BackupRestore import BackupScreen, RestoreScreen, BackupSelection, getBackupPath, getBackupFilename
import gettext
def _(txt):
t = gettext.dgettext("OPD_panel", txt)
if t == txt:
print "[OPD_panel] fallback to default translation for", txt
t = gettext.gettext(txt)
return t
def command(comandline, strip=1):
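    # Run a shell command, capturing its output through a temporary file and
    # returning it as one string; with strip=1 each output line is stripped
    # of surrounding whitespace first.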
comandline = comandline + " >/tmp/command.txt"
os.system(comandline)
text = ""
if os.path.exists("/tmp/command.txt") is True:
file = open("/tmp/command.txt", "r")
if strip == 1:
for line in file:
text = text + line.strip() + '\n'
else:
for line in file:
text = text + line
if text[-1:] != '\n': text = text + "\n"
file.close()
# if one or last line then remove linefeed
if text[-1:] == '\n': text = text[:-1]
comandline = text
os.system("rm /tmp/command.txt")
return comandline
boxversion = getBoxType()
machinename = getMachineName()
machinebrand = getMachineBrand()
OEMname = getBrandOEM()
OPD_panel_Version = 'OPD PANEL V1.4 (By OPD-Team)'
print "[OPD_panel] machinebrand: %s" % (machinebrand)
print "[OPD_panel] machinename: %s" % (machinename)
print "[OPD_panel] oem name: %s" % (OEMname)
print "[OPD_panel] boxtype: %s" % (boxversion)
panel = open("/tmp/OPD_panel.ver", "w")
panel.write(OPD_panel_Version + '\n')
panel.write("Machinebrand: %s " % (machinebrand)+ '\n')
panel.write("Machinename: %s " % (machinename)+ '\n')
panel.write("oem name: %s " % (OEMname)+ '\n')
panel.write("Boxtype: %s " % (boxversion)+ '\n')
panel.close()
ExitSave = "[Exit] = " +_("Cancel") +" [Ok] =" +_("Save")
class ConfigPORT(ConfigSequence):
def __init__(self, default):
ConfigSequence.__init__(self, seperator = ".", limits = [(1,65535)], default = default)
def main(session, **kwargs):
session.open(OPD_panel)
def Apanel(menuid, **kwargs):
if menuid == "mainmenu":
return [(_("OPD_panel"), main, "OPD_panel", 3)]
else:
return []
def Plugins(**kwargs):
return [
PluginDescriptor(name='OPD_panel', description='OPD_panel GUI 16/5/2016', where=PluginDescriptor.WHERE_MENU, fnc=Apanel),
PluginDescriptor(where=[PluginDescriptor.WHERE_SESSIONSTART, PluginDescriptor.WHERE_AUTOSTART], fnc=camstart),
PluginDescriptor(where=[PluginDescriptor.WHERE_SESSIONSTART, PluginDescriptor.WHERE_AUTOSTART], fnc=SwapAutostart),
PluginDescriptor(name='OPD_panel', description='OPD_panel GUI 16/5/2016', where=PluginDescriptor.WHERE_EXTENSIONSMENU, fnc=main)]
MENU_SKIN = '<screen position="center,center" size="950,470" title="OPD Panel - Main Menu" >\n\t<ePixmap pixmap="/usr/lib/enigma2/python/OPENDROID/icons/redlogo.png" position="0,380" size="950,84" alphatest="on" zPosition="1"/>\n\t<ePixmap pixmap="/usr/lib/enigma2/python/OPENDROID/icons/opendroid_info.png" position="510,11" size="550,354" alphatest="on" zPosition="1"/>\n\t\t<widget source="global.CurrentTime" render="Label" position="450, 340" size="500,24" font="Regular;20" foregroundColor="#FFFFFF" halign="right" transparent="1" zPosition="5">\n\t\t<convert type="ClockToText">Format:%H:%M:%S</convert>\n\t</widget>\n\t<eLabel backgroundColor="#56C856" position="0,330" size="950,1" zPosition="0" />\n <widget name="Mlist" position="70,110" size="705,260" itemHeight="50" scrollbarMode="showOnDemand" transparent="1" zPosition="0" />\n\t<widget name="label1" position="10,340" size="490,25" font="Regular;20" transparent="1" foregroundColor="#f2e000" halign="left" />\n</screen>'
CONFIG_SKIN = '<screen position="center,center" size="600,440" title="PANEL Config" >\n\t<widget name="config" position="10,10" size="580,377" enableWrapAround="1" scrollbarMode="showOnDemand" />\n\t<widget name="labelExitsave" position="90,410" size="420,25" halign="center" font="Regular;20" transparent="1" foregroundColor="#f2e000" />\n</screen>'
INFO_SKIN = '<screen name="OPD_panel" position="center,center" size="730,400" title="OPD_panel" >\n\t<widget name="label2" position="0,10" size="730,25" font="Regular;20" transparent="1" halign="center" foregroundColor="#f2e000" />\n\t<widget name="label1" position="10,45" size="710,350" font="Console;20" zPosition="1" backgroundColor="#251e1f20" transparent="1" />\n</screen>'
INFO_SKIN2 = '<screen name="OPD_panel" position="center,center" size="530,400" title="OPD_panel" backgroundColor="#251e1f20">\n\t<widget name="label1" position="10,50" size="510,340" font="Regular;15" zPosition="1" backgroundColor="#251e1f20" transparent="1" />\n</screen>'
class PanelList(MenuList):
if (getDesktop(0).size().width() == 1920):
def __init__(self, list, font0 = 38, font1 = 28, itemHeight = 60, enableWrapAround = True):
MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
self.l.setFont(0, gFont("Regular", font0))
self.l.setFont(1, gFont("Regular", font1))
self.l.setItemHeight(itemHeight)
else:
def __init__(self, list, font0 = 24, font1 = 16, itemHeight = 50, enableWrapAround = True):
MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
self.l.setFont(0, gFont("Regular", font0))
self.l.setFont(1, gFont("Regular", font1))
self.l.setItemHeight(itemHeight)
def MenuEntryItem(entry):
if (getDesktop(0).size().width() == 1920):
res = [entry]
res.append(MultiContentEntryPixmapAlphaTest(pos=(0, 10), size=(60, 60), png=entry[0]))
res.append(MultiContentEntryText(pos=(110, 5), size=(690, 50), font=0, text=entry[1]))
return res
else:
res = [entry]
res.append(MultiContentEntryPixmapAlphaTest(pos=(0, 5), size=(100, 40), png=entry[0]))
res.append(MultiContentEntryText(pos=(110, 10), size=(440, 40), font=0, text=entry[1]))
return res
from Screens.PiPSetup import PiPSetup
from Screens.InfoBarGenerics import InfoBarPiP
def InfoEntryComponent(file):
png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'icons/' + file + '.png'))
if png == None:
png = LoadPixmap('/usr/lib/enigma2/python/OPENDROID/icons/' + file + '.png')
if png == None:
png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'icons/default.png'))
if png == None:
png = LoadPixmap('/usr/lib/enigma2/python/OPENDROID/icons/default.png')
res = png
return res
class OPD_panel(Screen, InfoBarPiP):
servicelist = None
def __init__(self, session, services = None):
global menu
global inOPD_panel
global pluginlist
global INFOCONF
Screen.__init__(self, session)
self.session = session
self.skin = MENU_SKIN
self.onShown.append(self.setWindowTitle)
self.service = None
INFOCONF = 0
pluginlist = 'False'
try:
print '[OPD_panel] SHOW'
            inOPD_panel = self
except:
print '[OPD_Panel] Error Hide'
if services is not None:
self.servicelist = services
else:
self.servicelist = None
self.list = []
self['actions'] = ActionMap(['OkCancelActions', 'DirectionActions', 'ColorActions'], {'cancel': self.Exit,
'upUp': self.up,
'downUp': self.down,
'ok': self.ok}, 1)
self['label1'] = Label(OPD_panel_Version)
self.Mlist = []
self.Mlist.append(MenuEntryItem((InfoEntryComponent('ImageFlash'), _('Image-Flasher'), 'ImageFlash')))
self.Mlist.append(MenuEntryItem((InfoEntryComponent('LogManager'), _('Log-Manager'), 'LogManager')))
self.Mlist.append(MenuEntryItem((InfoEntryComponent('SoftwareManager'), _('Software-Manager'), 'software-manager')))
self.Mlist.append(MenuEntryItem((InfoEntryComponent('services'), _('services'), 'services')))
self.Mlist.append(MenuEntryItem((InfoEntryComponent('Infos'), _('Infos'), 'Infos')))
self.Mlist.append(MenuEntryItem((InfoEntryComponent('Infobar_Setup'), _('Infobar_Setup'), 'Infobar_Setup')))
self.onChangedEntry = []
self["Mlist"] = PanelList([])
self["Mlist"].l.setList(self.Mlist)
menu = 0
self['Mlist'].onSelectionChanged.append(self.selectionChanged)
def getCurrentEntry(self):
if self['Mlist'].l.getCurrentSelection():
selection = self['Mlist'].l.getCurrentSelection()[0]
if selection[0] is not None:
return selection[0]
return
def selectionChanged(self):
item = self.getCurrentEntry()
def setWindowTitle(self):
self.setTitle(_('OPD-Main Menu'))
def up(self):
pass
def down(self):
pass
def left(self):
pass
def right(self):
pass
def Red(self):
self.showExtensionSelection1(Parameter='run')
def Green(self):
pass
def yellow(self):
pass
def blue(self):
pass
def Exit(self):
global menu
global inOPD_panel
if menu == 0:
try:
self.service = self.session.nav.getCurrentlyPlayingServiceReference()
service = self.service.toCompareString()
servicename = ServiceReference.ServiceReference(service).getServiceName().replace('\xc2\x87', '').replace('\xc2\x86', '').ljust(16)
print '[OPD_panel] HIDE'
inOPD_panel = None
except:
print '[OPD_panel] Error Hide'
self.close()
elif menu == 1:
self['Mlist'].moveToIndex(0)
self['Mlist'].l.setList(self.oldmlist)
menu = 0
self['label1'].setText(OPD_panel_Version)
elif menu == 2:
self['Mlist'].moveToIndex(0)
self['Mlist'].l.setList(self.oldmlist1)
menu = 1
self['label1'].setText('Infos')
return
def ok(self):
menu = self['Mlist'].l.getCurrentSelection()[0][2]
print '[OPD_panel] MenuItem: ' + menu
if menu == 'services':
self.services()
elif menu == 'Pluginbrowser':
self.session.open(PluginBrowser)
elif menu == 'Infos':
self.Infos()
elif menu == 'Service_Team':
self.session.open(Info, 'Service_Team')
elif menu == 'Info':
self.session.open(Info, 'SystemInfo')
elif menu == 'ImageVersion':
self.session.open(Info, 'ImageVersion')
elif menu == 'FreeSpace':
self.session.open(Info, 'FreeSpace')
elif menu == 'Network':
self.session.open(Info, 'Network')
elif menu == 'Mounts':
self.session.open(Info, 'Mounts')
elif menu == 'Kernel':
self.session.open(Info, 'Kernel')
elif menu == 'Ram':
self.session.open(Info, 'Free')
elif menu == 'Cpu':
self.session.open(Info, 'Cpu')
elif menu == 'Top':
self.session.open(Info, 'Top')
elif menu == 'MemInfo':
self.session.open(Info, 'MemInfo')
elif menu == 'Module':
self.session.open(Info, 'Module')
elif menu == 'Mtd':
self.session.open(Info, 'Mtd')
elif menu == 'Partitions':
self.session.open(Info, 'Partitions')
elif menu == 'Swap':
self.session.open(Info, 'Swap')
elif menu == 'SystemInfo':
self.System()
elif menu == 'CronManager':
self.session.open(CronManager)
elif menu == 'Infobar_Setup':
from OPENDROID.GreenPanel import InfoBarSetup
self.session.open(InfoBarSetup)
elif menu == 'Decoding_Setup':
from OPENDROID.GreenPanel import DecodingSetup
self.session.open(DecodingSetup)
elif menu == 'JobManager':
self.session.open(ScriptRunner)
elif menu == 'software-manager':
self.Software_Manager()
elif menu == 'software-update':
self.session.open(SoftwarePanel)
elif menu == 'backup-settings':
self.session.openWithCallback(self.backupDone, BackupScreen, runBackup=True)
elif menu == 'restore-settings':
self.backuppath = getBackupPath()
self.backupfile = getBackupFilename()
self.fullbackupfilename = self.backuppath + '/' + self.backupfile
        if os.path.exists(self.fullbackupfilename):
self.session.openWithCallback(self.startRestore, MessageBox, _('Are you sure you want to restore your STB backup?\nSTB will restart after the restore'))
else:
self.session.open(MessageBox, _('Sorry no backups found!'), MessageBox.TYPE_INFO, timeout=10)
elif menu == 'backup-files':
self.session.openWithCallback(self.backupfiles_choosen, BackupSelection)
elif menu == 'MultiQuickButton':
self.session.open(MultiQuickButton)
elif menu == 'MountManager':
self.session.open(DeviceManager)
elif menu == 'OscamSmartcard':
self.session.open(OscamSmartcard)
elif menu == 'SwapManager':
self.session.open(Swap)
elif menu == 'RedPanel':
self.session.open(RedPanel)
elif menu == 'Yellow-Key-Action':
self.session.open(YellowPanel)
elif menu == 'LogManager':
self.session.open(LogManager)
elif menu == 'ImageFlash':
self.session.open(FlashOnline)
elif menu == 'Samba':
self.session.open(NetworkSamba)
def services(self):
global menu
menu = 1
self['label1'].setText(_('services'))
self.tlist = []
self.oldmlist = []
self.oldmlist = self.Mlist
self.tlist.append(MenuEntryItem((InfoEntryComponent('MountManager'), _('MountManager'), 'MountManager')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('CronManager'), _('CronManager'), 'CronManager')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('JobManager'), _('JobManager'), 'JobManager')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('SwapManager'), _('SwapManager'), 'SwapManager')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('OscamSmartcard'), _('OscamSmartcard'), 'OscamSmartcard')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Samba'), _('Samba'), 'Samba')))
if os.path.isfile('/usr/lib/enigma2/python/Plugins/Extensions/MultiQuickButton/plugin.pyo') is True:
self.tlist.append(MenuEntryItem((InfoEntryComponent('MultiQuickButton'), _('MultiQuickButton'), 'MultiQuickButton')))
self['Mlist'].moveToIndex(0)
self['Mlist'].l.setList(self.tlist)
def Infos(self):
global menu
menu = 1
self['label1'].setText(_('Infos'))
self.tlist = []
self.oldmlist = []
self.oldmlist1 = []
self.oldmlist = self.Mlist
self.tlist.append(MenuEntryItem((InfoEntryComponent('Service_Team'), _('Service_Team'), 'Service_Team')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('ImageVersion'), _('Image-Version'), 'ImageVersion')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('FreeSpace'), _('FreeSpace'), 'FreeSpace')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Kernel'), _('Kernel'), 'Kernel')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Mounts'), _('Mounts'), 'Mounts')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Network'), _('Network'), 'Network')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Ram'), _('Ram'), 'Ram')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('SystemInfo'), _('SystemInfo'), 'SystemInfo')))
self['Mlist'].moveToIndex(0)
self['Mlist'].l.setList(self.tlist)
self.oldmlist1 = self.tlist
def System(self):
global menu
menu = 2
self['label1'].setText(_('System Info'))
self.tlist = []
self.tlist.append(MenuEntryItem((InfoEntryComponent('Cpu'), _('Cpu'), 'Cpu')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('MemInfo'), _('MemInfo'), 'MemInfo')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Mtd'), _('Mtd'), 'Mtd')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Module'), _('Module'), 'Module')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Partitions'), _('Partitions'), 'Partitions')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Swap'), _('Swap'), 'Swap')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Top'), _('Top'), 'Top')))
self['Mlist'].moveToIndex(0)
self['Mlist'].l.setList(self.tlist)
def System_main(self):
global menu
menu = 1
self['label1'].setText(_('System'))
self.tlist = []
self.oldmlist = []
self.oldmlist = self.Mlist
self.tlist.append(MenuEntryItem((InfoEntryComponent('Info'), _('Info'), 'Info')))
self['Mlist'].moveToIndex(0)
self['Mlist'].l.setList(self.tlist)
def Software_Manager(self):
global menu
menu = 1
self['label1'].setText(_('Software Manager'))
self.tlist = []
self.oldmlist = []
self.oldmlist = self.Mlist
self.tlist.append(MenuEntryItem((InfoEntryComponent('SoftwareManager'), _('Software update'), 'software-update')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('BackupSettings'), _('Backup Settings'), 'backup-settings')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('RestoreSettings'), _('Restore Settings'), 'restore-settings')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('BackupFiles'), _('Choose backup files'), 'backup-files')))
self['Mlist'].moveToIndex(0)
self['Mlist'].l.setList(self.tlist)
def backupfiles_choosen(self, ret):
config.plugins.configurationbackup.backupdirs.save()
config.plugins.configurationbackup.save()
config.save()
def backupDone(self, retval = None):
if retval is True:
self.session.open(MessageBox, _('Backup done.'), MessageBox.TYPE_INFO, timeout=10)
else:
self.session.open(MessageBox, _('Backup failed.'), MessageBox.TYPE_INFO, timeout=10)
def startRestore(self, ret = False):
if ret == True:
self.exe = True
self.session.open(RestoreScreen, runRestore=True)
class RedPanel(ConfigListScreen, Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.session = session
self.skinName = 'Setup'
Screen.setTitle(self, _('RedPanel') + '...')
self.setup_title = _('RedPanel') + '...'
self['HelpWindow'] = Pixmap()
self['HelpWindow'].hide()
self['status'] = StaticText()
self['footnote'] = Label('')
        self['description'] = Label('')
self['labelExitsave'] = Label('[Exit] = ' + _('Cancel') + ' [Ok] =' + _('Save'))
self.onChangedEntry = []
self.list = []
ConfigListScreen.__init__(self, self.list, session=self.session, on_change=self.changedEntry)
self.createSetup()
self['actions'] = ActionMap(['SetupActions', 'ColorActions'], {'ok': self.keySave,
'cancel': self.keyCancel,
'red': self.keyCancel,
'green': self.keySave,
'menu': self.keyCancel}, -2)
self['key_red'] = StaticText(_('Cancel'))
self['key_green'] = StaticText(_('OK'))
if self.selectionChanged not in self['config'].onSelectionChanged:
self['config'].onSelectionChanged.append(self.selectionChanged)
self.selectionChanged()
def createSetup(self):
self.editListEntry = None
self.list = []
self.list.append(getConfigListEntry(_('Show OPD_panel Red-key'), config.plugins.OPD_panel_redpanel.enabled))
self.list.append(getConfigListEntry(_('Show Softcam-Panel Red-key long'), config.plugins.OPD_panel_redpanel.enabledlong))
self['config'].list = self.list
self['config'].setList(self.list)
if config.usage.sort_settings.value:
self['config'].list.sort()
return
def selectionChanged(self):
self['status'].setText(self['config'].getCurrent()[0])
def changedEntry(self):
for x in self.onChangedEntry:
x()
self.selectionChanged()
def getCurrentEntry(self):
return self['config'].getCurrent()[0]
def getCurrentValue(self):
return str(self['config'].getCurrent()[1].getText())
def getCurrentDescription(self):
return self['config'].getCurrent() and len(self['config'].getCurrent()) > 2 and self['config'].getCurrent()[2] or ''
def createSummary(self):
from Screens.Setup import SetupSummary
return SetupSummary
def saveAll(self):
for x in self['config'].list:
x[1].save()
configfile.save()
def keySave(self):
self.saveAll()
self.close()
def cancelConfirm(self, result):
if not result:
return
for x in self['config'].list:
x[1].cancel()
self.close()
def keyCancel(self):
if self['config'].isChanged():
self.session.openWithCallback(self.cancelConfirm, MessageBox, _('Really close without saving settings?'))
else:
self.close()
class YellowPanel(ConfigListScreen, Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.session = session
self.skinName = 'Setup'
Screen.setTitle(self, _('Yellow Key Action') + '...')
self.setup_title = _('Yellow Key Action') + '...'
self['HelpWindow'] = Pixmap()
self['HelpWindow'].hide()
self['status'] = StaticText()
self['footnote'] = Label('')
self['description'] = Label('')
self['labelExitsave'] = Label('[Exit] = ' + _('Cancel') + ' [Ok] =' + _('Save'))
self.onChangedEntry = []
self.list = []
ConfigListScreen.__init__(self, self.list, session=self.session, on_change=self.changedEntry)
self.createSetup()
self['actions'] = ActionMap(['SetupActions', 'ColorActions'], {'ok': self.keySave,
'cancel': self.keyCancel,
'red': self.keyCancel,
'green': self.keySave,
'menu': self.keyCancel}, -2)
self['key_red'] = StaticText(_('Cancel'))
self['key_green'] = StaticText(_('OK'))
if self.selectionChanged not in self['config'].onSelectionChanged:
self['config'].onSelectionChanged.append(self.selectionChanged)
self.selectionChanged()
def createSetup(self):
self.editListEntry = None
self.list = []
self.list.append(getConfigListEntry(_('Yellow Key Action'), config.plugins.OPD_panel_yellowkey.list))
self['config'].list = self.list
self['config'].setList(self.list)
if config.usage.sort_settings.value:
self['config'].list.sort()
return
def selectionChanged(self):
self['status'].setText(self['config'].getCurrent()[0])
def changedEntry(self):
for x in self.onChangedEntry:
x()
self.selectionChanged()
def getCurrentEntry(self):
return self['config'].getCurrent()[0]
def getCurrentValue(self):
return str(self['config'].getCurrent()[1].getText())
def getCurrentDescription(self):
return self['config'].getCurrent() and len(self['config'].getCurrent()) > 2 and self['config'].getCurrent()[2] or ''
def createSummary(self):
from Screens.Setup import SetupSummary
return SetupSummary
def saveAll(self):
for x in self['config'].list:
x[1].save()
configfile.save()
def keySave(self):
self.saveAll()
self.close()
def cancelConfirm(self, result):
if not result:
return
for x in self['config'].list:
x[1].cancel()
self.close()
def keyCancel(self):
if self['config'].isChanged():
self.session.openWithCallback(self.cancelConfirm, MessageBox, _('Really close without saving settings?'))
else:
self.close()
class Info(Screen):
def __init__(self, session, info):
self.service = None
Screen.__init__(self, session)
self.skin = INFO_SKIN
self['label2'] = Label('INFO')
self['label1'] = ScrollLabel()
if info == 'Service_Team':
self.Service_Team()
if info == 'SystemInfo':
self.SystemInfo()
elif info == 'ImageVersion':
self.ImageVersion()
elif info == 'FreeSpace':
self.FreeSpace()
elif info == 'Mounts':
self.Mounts()
elif info == 'Network':
self.Network()
elif info == 'Kernel':
self.Kernel()
elif info == 'Free':
self.Free()
elif info == 'Cpu':
self.Cpu()
elif info == 'Top':
self.Top()
elif info == 'MemInfo':
self.MemInfo()
elif info == 'Module':
self.Module()
elif info == 'Mtd':
self.Mtd()
elif info == 'Partitions':
self.Partitions()
elif info == 'Swap':
self.Swap()
self['actions'] = ActionMap(['OkCancelActions', 'DirectionActions'], {'cancel': self.Exit,
'ok': self.ok,
'up': self.Up,
'down': self.Down}, -1)
return
def Exit(self):
self.close()
def ok(self):
self.close()
def Down(self):
self['label1'].pageDown()
def Up(self):
self['label1'].pageUp()
def Service_Team(self):
try:
self['label2'].setText('INFO')
info1 = self.Do_cmd('cat', '/etc/motd', None)
if info1.find('wElc0me') > -1:
info1 = info1[info1.find('wElc0me'):len(info1)] + '\n'
info1 = info1.replace('|', '')
else:
info1 = info1[info1.find('INFO'):len(info1)] + '\n'
info2 = self.Do_cmd('cat', '/etc/image-version', None)
info3 = self.Do_cut(info1 + info2)
self['label1'].setText(info3)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def SystemInfo(self):
try:
self['label2'].setText(_('Image Info'))
info1 = self.Do_cmd('cat', '/etc/version', None)
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def ImageVersion(self):
try:
self['label2'].setText(_('Image Version'))
            now = datetime.datetime.now()
info1 = 'Date = ' + now.strftime('%d-%B-%Y') + '\n'
info2 = 'Time = ' + now.strftime('%H:%M:%S') + '\n'
info3 = self.Do_cmd('uptime', None, None)
tmp = info3.split(',')
info3 = 'Uptime = ' + tmp[0].lstrip() + '\n'
info4 = self.Do_cmd('cat', '/etc/image-version', ' | head -n 1')
info4 = info4[9:]
info4 = 'Imagetype = ' + info4 + '\n'
info5 = 'Load = ' + self.Do_cmd('cat', '/proc/loadavg', None)
info6 = self.Do_cut(info1 + info2 + info3 + info4 + info5)
self['label1'].setText(info6)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def FreeSpace(self):
try:
self['label2'].setText(_('FreeSpace'))
info1 = self.Do_cmd('df', None, '-h')
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def Mounts(self):
try:
self['label2'].setText(_('Mounts'))
info1 = self.Do_cmd('mount', None, None)
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def Network(self):
try:
self['label2'].setText(_('Network'))
info1 = self.Do_cmd('ifconfig', None, None) + '\n'
info2 = self.Do_cmd('route', None, '-n')
info3 = self.Do_cut(info1 + info2)
self['label1'].setText(info3)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def Kernel(self):
try:
self['label2'].setText(_('Kernel'))
info0 = self.Do_cmd('cat', '/proc/version', None)
info = info0.split('(')
info1 = 'Name = ' + info[0] + '\n'
info2 = 'Owner = ' + info[1].replace(')', '') + '\n'
info3 = 'Mainimage = ' + info[2][0:info[2].find(')')] + '\n'
info4 = 'Date = ' + info[3][info[3].find('SMP') + 4:len(info[3])]
info5 = self.Do_cut(info1 + info2 + info3 + info4)
self['label1'].setText(info5)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def Free(self):
try:
self['label2'].setText(_('Ram'))
info1 = self.Do_cmd('free', None, None)
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def Cpu(self):
try:
self['label2'].setText(_('Cpu'))
info1 = self.Do_cmd('cat', '/proc/cpuinfo', None, " | sed 's/\t\t/\t/'")
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def Top(self):
try:
self['label2'].setText(_('Top'))
info1 = self.Do_cmd('top', None, '-b -n1')
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def MemInfo(self):
try:
self['label2'].setText(_('MemInfo'))
info1 = self.Do_cmd('cat', '/proc/meminfo', None)
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def Module(self):
try:
self['label2'].setText(_('Module'))
info1 = self.Do_cmd('cat', '/proc/modules', None)
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def Mtd(self):
try:
self['label2'].setText(_('Mtd'))
info1 = self.Do_cmd('cat', '/proc/mtd', None)
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def Partitions(self):
try:
self['label2'].setText(_('Partitions'))
info1 = self.Do_cmd('cat', '/proc/partitions', None)
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def Swap(self):
try:
self['label2'].setText(_('Swap'))
info0 = self.Do_cmd('cat', '/proc/swaps', None, " | sed 's/\t/ /g; s/[ ]* / /g'")
info0 = info0.split('\n')
info1 = ''
for l in info0[1:]:
l1 = l.split(' ')
info1 = info1 + 'Name: ' + l1[0] + '\n'
info1 = info1 + 'Type: ' + l1[1] + '\n'
info1 = info1 + 'Size: ' + l1[2] + '\n'
info1 = info1 + 'Used: ' + l1[3] + '\n'
info1 = info1 + 'Prio: ' + l1[4] + '\n\n'
if info1[-1:] == '\n':
info1 = info1[:-1]
if info1[-1:] == '\n':
info1 = info1[:-1]
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
            self['label1'].setText(_('an internal error has occurred'))
return
def Do_find(self, text, search):
text = text + ' '
ret = ''
pos = text.find(search)
pos1 = text.find(' ', pos)
if pos > -1:
ret = text[pos + len(search):pos1]
return ret
def Do_cut(self, text):
text1 = text.split('\n')
text = ''
for line in text1:
text = text + line[:95] + '\n'
if text[-1:] == '\n':
text = text[:-1]
return text
def Do_cmd(self, cmd, file, arg, pipe = ''):
try:
if file != None:
if os.path.exists(file) is True:
o = command(cmd + ' ' + file + pipe, 0)
else:
o = 'File not found: \n' + file
elif arg == None:
o = command(cmd, 0)
else:
o = command(cmd + ' ' + arg, 0)
return o
except:
o = ''
return o
####################################################################################################################################
class FileDownloadJob(Job):
def __init__(self, url, filename, file):
        Job.__init__(self, _('Downloading %s') % file)
FileDownloadTask(self, url, filename)
class DownloaderPostcondition(Condition):
def check(self, task):
return task.returncode == 0
def getErrorMessage(self, task):
        return task.error_message
class FileDownloadTask(Task):
def __init__(self, job, url, path):
Task.__init__(self, job, _('Downloading'))
self.postconditions.append(DownloaderPostcondition())
self.job = job
self.url = url
self.path = path
self.last_recvbytes = 0
self.error_message = None
self.download = None
self.aborted = False
return
def run(self, callback):
self.callback = callback
self.download = downloadWithProgress(self.url, self.path)
self.download.addProgress(self.download_progress)
self.download.start().addCallback(self.download_finished).addErrback(self.download_failed)
print '[FileDownloadTask] downloading', self.url, 'to', self.path
def abort(self):
print '[FileDownloadTask] aborting', self.url
if self.download:
self.download.stop()
self.aborted = True
def download_progress(self, recvbytes, totalbytes):
if recvbytes - self.last_recvbytes > 10000:
self.progress = int(100 * (float(recvbytes) / float(totalbytes)))
self.name = _('Downloading') + ' ' + '%d of %d kBytes' % (recvbytes / 1024, totalbytes / 1024)
self.last_recvbytes = recvbytes
def download_failed(self, failure_instance = None, error_message = ''):
self.error_message = error_message
if error_message == '' and failure_instance is not None:
self.error_message = failure_instance.getErrorMessage()
Task.processFinished(self, 1)
return
def download_finished(self, string = ''):
if self.aborted:
self.finish(aborted=True)
else:
Task.processFinished(self, 0)
| trunca/enigma2 | lib/python/OPENDROID/OPD_panel.py | Python | gpl-2.0 | 35,438 |
import os
import sys
import tnetstring
def read_packets (filename):
try:
os.stat (filename)
except OSError:
print "No such file : %s"%filename
sys.exit (1)
pkts = open (filename).read ()
pkts = tnetstring.loads (pkts, 'iso-8859-15')
for data in pkts:
yield data
if '__main__' == __name__:
if not sys.argv [1:]:
print "Usage: %s 'file'"%sys.argv [0]
sys.exit (0)
filename = sys.argv [1]
for pkt in read_packets (filename):
print "found %d's len packet"%len (pkt)
| sipdbg/sipdbg | rtp/read_dump.py | Python | gpl-2.0 | 552 |
from polybori import BooleSet, interpolate_smallest_lex
class PartialFunction(object):
"""docstring for PartialFunction"""
def __init__(self, zeros, ones):
super(PartialFunction, self).__init__()
self.zeros = zeros.set()
self.ones = ones.set()
def interpolate_smallest_lex(self):
return interpolate_smallest_lex(self.zeros, self.ones)
def __str__(self):
return "PartialFunction(zeros=" + str(self.zeros) + ", ones=" + str(
self.ones) + ")"
def definedOn(self):
return self.zeros.union(self.ones)
def __add__(self, other):
domain = self.definedOn().intersect(other.definedOn())
zeros = self.zeros.intersect(other.zeros).union(self.ones.intersect(
other.ones))
ones = self.zeros.intersect(other.ones).union(self.ones.intersect(
other.zeros))
assert zeros.diff(domain).empty()
assert ones.diff(domain).empty()
return PartialFunction(zeros, ones)
def __repr__(self):
return str(self)
def __mul__(self, other):
zeros = self.zeros.union(other.zeros)
ones = self.ones.intersect(other.ones)
return PartialFunction(zeros, ones)
def __or__(self, other):
zeros = self.zeros.intersect(other.zeros)
ones = self.ones.union(other.ones)
return PartialFunction(zeros, ones)
def __xor__(self, other):
return self + other
def __and__(self, other):
return self * other
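# A minimal usage sketch (assumes an active PolyBoRi ring and that `zeros`
# and `ones` are BooleSet instances over that ring; the names below are
# illustrative, not taken from this file):
#
#     f = PartialFunction(zeros, ones)
#     g = PartialFunction(ones, zeros)          # the complementary function
#     h = f ^ g                                 # XOR on the common domain
#     p = f.interpolate_smallest_lex()          # polynomial agreeing with f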
| ohanar/PolyBoRi | pyroot/polybori/partial.py | Python | gpl-2.0 | 1,509 |
## This file is part of Invenio.
## Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
This is the Create_Modify_Interface function (along with its helpers).
It is used by WebSubmit for the "Modify Bibliographic Information" action.
"""
__revision__ = "$Id$"
import os
import re
import time
import pprint
import cgi
from invenio.dbquery import run_sql
from invenio.websubmit_config import InvenioWebSubmitFunctionError
from invenio.websubmit_functions.Retrieve_Data import Get_Field
from invenio.errorlib import register_exception
from invenio.htmlutils import escape_javascript_string
from invenio.messages import gettext_set_language, wash_language
def Create_Modify_Interface_getfieldval_fromfile(cur_dir, fld=""):
"""Read a field's value from its corresponding text file in 'cur_dir' (if it exists) into memory.
Delete the text file after having read-in its value.
This function is called on the reload of the modify-record page. This way, the field in question
can be populated with the value last entered by the user (before reload), instead of always being
populated with the value still found in the DB.
"""
fld_val = ""
if len(fld) > 0 and os.access("%s/%s" % (cur_dir, fld), os.R_OK|os.W_OK):
fp = open( "%s/%s" % (cur_dir, fld), "r" )
fld_val = fp.read()
fp.close()
try:
os.unlink("%s/%s"%(cur_dir, fld))
except OSError:
# Cannot unlink file - ignore, let WebSubmit main handle this
pass
fld_val = fld_val.strip()
return fld_val
def Create_Modify_Interface_getfieldval_fromDBrec(fieldcode, recid):
"""Read a field's value from the record stored in the DB.
This function is called when the Create_Modify_Interface function is called for the first time
when modifying a given record, and field values must be retrieved from the database.
"""
fld_val = ""
if fieldcode != "":
for next_field_code in [x.strip() for x in fieldcode.split(",")]:
fld_val += "%s\n" % Get_Field(next_field_code, recid)
fld_val = fld_val.rstrip('\n')
return fld_val
def Create_Modify_Interface_transform_date(fld_val):
"""Accept a field's value as a string. If the value is a date in one of the following formats:
DD Mon YYYY (e.g. 23 Apr 2005)
YYYY-MM-DD (e.g. 2005-04-23)
...transform this date value into "DD/MM/YYYY" (e.g. 23/04/2005).
"""
if re.search("^[0-9]{2} [a-z]{3} [0-9]{4}$", fld_val, re.IGNORECASE) is not None:
try:
fld_val = time.strftime("%d/%m/%Y", time.strptime(fld_val, "%d %b %Y"))
except (ValueError, TypeError):
# bad date format:
pass
elif re.search("^[0-9]{4}-[0-9]{2}-[0-9]{2}$", fld_val, re.IGNORECASE) is not None:
try:
fld_val = time.strftime("%d/%m/%Y", time.strptime(fld_val, "%Y-%m-%d"))
        except (ValueError, TypeError):
# bad date format:
pass
return fld_val
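# Illustrative examples of the conversion above (values are hypothetical,
# not taken from any record):
#
#     Create_Modify_Interface_transform_date("23 Apr 2005")  # -> "23/04/2005"
#     Create_Modify_Interface_transform_date("2005-04-23")   # -> "23/04/2005"
#     Create_Modify_Interface_transform_date("23.04.2005")   # unrecognized format -> returned unchanged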
def Create_Modify_Interface(parameters, curdir, form, user_info=None):
"""
Create an interface for the modification of a document, based on
the fields that the user has chosen to modify. This avoids having
to redefine a submission page for the modifications, but rely on
the elements already defined for the initial submission i.e. SBI
action (The only page that needs to be built for the modification
is the page letting the user specify a document to modify).
This function should be added at step 1 of your modification
workflow, after the functions that retrieves report number and
record id (Get_Report_Number, Get_Recid). Functions at step 2 are
the one executed upon successful submission of the form.
Create_Modify_Interface expects the following parameters:
* "fieldnameMBI" - the name of a text file in the submission
working directory that contains a list of the names of the
WebSubmit fields to include in the Modification interface.
      These field names are separated by "\n" or "+".
* "prefix" - some content displayed before the main
modification interface. Can contain HTML (i.e. needs to be
pre-escaped). The prefix can make use of Python string
replacement for common values (such as 'rn'). Percent signs
(%) must consequently be escaped (with %%).
* "suffix" - some content displayed after the main modification
interface. Can contain HTML (i.e. needs to be
pre-escaped). The suffix can make use of Python string
replacement for common values (such as 'rn'). Percent signs
(%) must consequently be escaped (with %%).
* "button_label" - the label for the "END" button.
* "button_prefix" - some content displayed before the button to
submit the form. Can contain HTML (i.e. needs to be
pre-escaped). The prefix can make use of Python string
replacement for common values (such as 'rn'). Percent signs
(%) must consequently be escaped (with %%).
* "dates_conversion" - by default, values interpreted as dates
are converted to their 'DD/MM/YYYY' format, whenever
possible. Set another value for a different behaviour
(eg. 'none' for no conversion)
Given the list of WebSubmit fields to be included in the
modification interface, the values for each field are retrieved
for the given record (by way of each WebSubmit field being
configured with a MARC Code in the WebSubmit database). An HTML
FORM is then created. This form allows a user to modify certain
field values for a record.
    The file referenced by 'fieldnameMBI' is usually generated from a
    multiple select form field: users can then select one or several
    fields to modify.
Note that the function will display WebSubmit Response elements,
but will not be able to set an initial value: this must be done by
    the Response element itself.
Additionally the function creates an internal field named
'Create_Modify_Interface_DONE' on the interface, that can be
retrieved in curdir after the form has been submitted.
This flag is an indicator for the function that displayed values
should not be retrieved from the database, but from the submitted
values (in case the page is reloaded). You can also rely on this
value when building your WebSubmit Response element in order to
retrieve value either from the record, or from the submission
directory.
"""
ln = wash_language(form['ln'])
_ = gettext_set_language(ln)
global sysno,rn
t = ""
# variables declaration
fieldname = parameters['fieldnameMBI']
prefix = ''
suffix = ''
end_button_label = 'END'
end_button_prefix = ''
date_conversion_setting = ''
if parameters.has_key('prefix'):
prefix = parameters['prefix']
if parameters.has_key('suffix'):
suffix = parameters['suffix']
if parameters.has_key('button_label') and parameters['button_label']:
end_button_label = parameters['button_label']
if parameters.has_key('button_prefix'):
end_button_prefix = parameters['button_prefix']
if parameters.has_key('dates_conversion'):
date_conversion_setting = parameters['dates_conversion']
# Path of file containing fields to modify
the_globals = {
'doctype' : doctype,
'action' : action,
'act' : action, ## for backward compatibility
'step' : step,
'access' : access,
'ln' : ln,
'curdir' : curdir,
'uid' : user_info['uid'],
'uid_email' : user_info['email'],
'rn' : rn,
'last_step' : last_step,
'action_score' : action_score,
'__websubmit_in_jail__' : True,
'form': form,
'sysno': sysno,
'user_info' : user_info,
'__builtins__' : globals()['__builtins__'],
'Request_Print': Request_Print
}
if os.path.exists("%s/%s" % (curdir, fieldname)):
fp = open( "%s/%s" % (curdir, fieldname), "r" )
fieldstext = fp.read()
fp.close()
fieldstext = re.sub("\+","\n", fieldstext)
fields = fieldstext.split("\n")
else:
res = run_sql("SELECT fidesc FROM sbmFIELDDESC WHERE name=%s", (fieldname,))
if len(res) == 1:
fields = res[0][0].replace(" ", "")
fields = re.findall("<optionvalue=.*>", fields)
regexp = re.compile("""<optionvalue=(?P<quote>['|"]?)(?P<value>.*?)(?P=quote)""")
fields = [regexp.search(x) for x in fields]
fields = [x.group("value") for x in fields if x is not None]
fields = [x for x in fields if x not in ("Select", "select")]
else:
raise InvenioWebSubmitFunctionError("cannot find fields to modify")
#output some text
if not prefix:
t += "<center bgcolor=\"white\">The document <b>%s</b> has been found in the database.</center><br />Please modify the following fields:<br />Then press the '%s' button at the bottom of the page<br />\n" % \
(rn, cgi.escape(_(end_button_label)))
else:
t += prefix % the_globals
for field in fields:
subfield = ""
value = ""
marccode = ""
text = ""
# retrieve and display the modification text
t = t + "<FONT color=\"darkblue\">\n"
res = run_sql("SELECT modifytext FROM sbmFIELDDESC WHERE name=%s", (field,))
if len(res)>0:
t = t + "<small>%s</small> </FONT>\n" % res[0][0]
# retrieve the marc code associated with the field
res = run_sql("SELECT marccode FROM sbmFIELDDESC WHERE name=%s", (field,))
if len(res) > 0:
marccode = res[0][0]
# then retrieve the previous value of the field
if os.path.exists("%s/%s" % (curdir, "Create_Modify_Interface_DONE")):
# Page has been reloaded - get field value from text file on server, not from DB record
value = Create_Modify_Interface_getfieldval_fromfile(curdir, field)
else:
# First call to page - get field value from DB record
value = Create_Modify_Interface_getfieldval_fromDBrec(marccode, sysno)
if date_conversion_setting != 'none':
# If field is a date value, transform date into format DD/MM/YYYY:
value = Create_Modify_Interface_transform_date(value)
res = run_sql("SELECT * FROM sbmFIELDDESC WHERE name=%s", (field,)) # kwalitee: disable=sql
if len(res) > 0:
element_type = res[0][3]
numcols = res[0][6]
numrows = res[0][5]
size = res[0][4]
maxlength = res[0][7]
val = res[0][8]
fidesc = res[0][9]
if element_type == "T":
text = "<textarea name=\"%s\" rows=%s cols=%s wrap>%s</textarea>" % (field, numrows, numcols, cgi.escape(value))
elif element_type == "F":
text = "<input type=\"file\" name=\"%s\" size=%s maxlength=\"%s\">" % (field, size, maxlength)
elif element_type == "I":
text = "<input name=\"%s\" size=%s value=\"%s\"> " % (field, size, val and escape_javascript_string(val, escape_quote_for_html=True) or '')
text = text + '''<script type="text/javascript">/*<![CDATA[*/
document.forms[0].%s.value="%s";
/*]]>*/</script>''' % (field, escape_javascript_string(value, escape_for_html=False))
elif element_type == "H":
text = "<input type=\"hidden\" name=\"%s\" value=\"%s\">" % (field, val and escape_javascript_string(val, escape_quote_for_html=True) or '')
text = text + '''<script type="text/javascript">/*<![CDATA[*/
document.forms[0].%s.value="%s";
/*]]>*/</script>''' % (field, escape_javascript_string(value, escape_for_html=False))
elif element_type == "S":
values = re.split("[\n\r]+", value)
text = fidesc
if re.search("%s\[\]" % field, fidesc):
multipletext = "[]"
else:
multipletext = ""
if len(values) > 0 and not(len(values) == 1 and values[0] == ""):
text += '<script type="text/javascript">/*<![CDATA[*/\n'
text += "var i = 0;\n"
text += "el = document.forms[0].elements['%s%s'];\n" % (field, multipletext)
text += "max = el.length;\n"
for val in values:
text += "var found = 0;\n"
text += "var i=0;\n"
text += "while (i != max) {\n"
text += " if (el.options[i].value == \"%s\" || el.options[i].text == \"%s\") {\n" % \
(escape_javascript_string(val, escape_for_html=False), escape_javascript_string(val, escape_for_html=False))
text += " el.options[i].selected = true;\n"
text += " found = 1;\n"
text += " }\n"
text += " i=i+1;\n"
text += "}\n"
#text += "if (found == 0) {\n"
#text += " el[el.length] = new Option(\"%s\", \"%s\", 1,1);\n"
#text += "}\n"
text += "/*]]>*/</script>\n"
elif element_type == "D":
text = fidesc
elif element_type == "R":
try:
co = compile(fidesc.replace("\r\n", "\n"), "<string>", "exec")
## Note this exec is safe WRT global variable because the
## Create_Modify_Interface has already been parsed by
## execfile within a protected environment.
the_globals['text'] = ''
exec co in the_globals
text = the_globals['text']
except:
msg = "Error in evaluating response element %s with globals %s" % (pprint.pformat(field), pprint.pformat(globals()))
register_exception(req=None, alert_admin=True, prefix=msg)
raise InvenioWebSubmitFunctionError(msg)
else:
text = "%s: unknown field type" % field
t = t + "<small>%s</small>" % text
# output our flag field
t += '<input type="hidden" name="Create_Modify_Interface_DONE" value="DONE\n" />'
t += '<br />'
if end_button_prefix:
t += end_button_prefix % the_globals
# output some more text
t += "<br /><CENTER><small><INPUT type=\"button\" width=400 height=50 name=\"End\" value=\"%(end_button_label)s\" onClick=\"document.forms[0].step.value = 2;user_must_confirm_before_leaving_page = false;document.forms[0].submit();\"></small></CENTER></H4>" % {'end_button_label': escape_javascript_string(_(end_button_label), escape_quote_for_html=True)}
if suffix:
t += suffix % the_globals
return t
| pamfilos/invenio | modules/websubmit/lib/functions/Create_Modify_Interface.py | Python | gpl-2.0 | 15,906 |
#!/usr/bin/env python
import glob
import os
import site
from cx_Freeze import setup, Executable
import meld.build_helpers
import meld.conf
site_dir = site.getsitepackages()[1]
include_dll_path = os.path.join(site_dir, "gnome")
missing_dll = [
'libgtk-3-0.dll',
'libgdk-3-0.dll',
'libatk-1.0-0.dll',
'libintl-8.dll',
'libzzz.dll',
'libwinpthread-1.dll',
'libcairo-gobject-2.dll',
'libgdk_pixbuf-2.0-0.dll',
'libpango-1.0-0.dll',
'libpangocairo-1.0-0.dll',
'libpangoft2-1.0-0.dll',
'libpangowin32-1.0-0.dll',
'libffi-6.dll',
'libfontconfig-1.dll',
'libfreetype-6.dll',
'libgio-2.0-0.dll',
'libglib-2.0-0.dll',
'libgmodule-2.0-0.dll',
'libgobject-2.0-0.dll',
'libgirepository-1.0-1.dll',
'libgtksourceview-3.0-1.dll',
'libjasper-1.dll',
'libjpeg-8.dll',
'libpng16-16.dll',
'libgnutls-26.dll',
'libxmlxpat.dll',
'librsvg-2-2.dll',
'libharfbuzz-gobject-0.dll',
'libwebp-5.dll',
]
gtk_libs = [
'etc/fonts',
'etc/gtk-3.0/settings.ini',
'etc/pango',
'lib/gdk-pixbuf-2.0',
'lib/girepository-1.0',
'share/fontconfig',
'share/fonts',
'share/glib-2.0',
'share/gtksourceview-3.0',
'share/icons',
]
include_files = [(os.path.join(include_dll_path, path), path) for path in
missing_dll + gtk_libs]
build_exe_options = {
"compressed": False,
"icon": "data/icons/meld.ico",
"includes": ["gi"],
"packages": ["gi", "weakref"],
"include_files": include_files,
}
# Create our registry key, and fill with install directory and exe
registry_table = [
('MeldKLM', 2, 'SOFTWARE\Meld', '*', None, 'TARGETDIR'),
('MeldInstallDir', 2, 'SOFTWARE\Meld', 'InstallDir', '[TARGETDIR]', 'TARGETDIR'),
('MeldExecutable', 2, 'SOFTWARE\Meld', 'Executable', '[TARGETDIR]Meld.exe', 'TARGETDIR'),
]
# Provide the locator and app search to give MSI the existing install directory
# for future upgrades
reg_locator_table = [
('MeldInstallDirLocate', 2, 'SOFTWARE\Meld', 'InstallDir', 0)
]
app_search_table = [('TARGETDIR', 'MeldInstallDirLocate')]
msi_data = {
'Registry': registry_table,
'RegLocator': reg_locator_table,
'AppSearch': app_search_table
}
bdist_msi_options = {
"upgrade_code": "{1d303789-b4e2-4d6e-9515-c301e155cd50}",
"data": msi_data,
}
setup(
name="Meld",
version=meld.conf.__version__,
description='Visual diff and merge tool',
author='The Meld project',
author_email='[email protected]',
maintainer='Kai Willadsen',
url='http://meldmerge.org',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: X11 Applications :: GTK',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',
'Programming Language :: Python',
'Topic :: Desktop Environment :: Gnome',
'Topic :: Software Development',
'Topic :: Software Development :: Version Control',
],
options = {
"build_exe": build_exe_options,
"bdist_msi": bdist_msi_options,
},
executables = [
Executable(
"bin/meld",
base="Win32GUI",
targetName="Meld.exe",
shortcutName="Meld",
shortcutDir="ProgramMenuFolder",
),
],
packages=[
'meld',
'meld.ui',
'meld.util',
'meld.vc',
],
package_data={
'meld': ['README', 'COPYING', 'NEWS']
},
scripts=['bin/meld'],
data_files=[
('share/man/man1',
['meld.1']
),
('share/doc/meld-' + meld.conf.__version__,
['COPYING', 'NEWS']
),
('share/meld',
['data/meld.css', 'data/meld-dark.css']
),
('share/meld/icons',
glob.glob("data/icons/*.png") +
glob.glob("data/icons/COPYING*")
),
('share/meld/ui',
glob.glob("data/ui/*.ui") + glob.glob("data/ui/*.xml")
),
],
cmdclass={
"build_i18n": meld.build_helpers.build_i18n,
"build_help": meld.build_helpers.build_help,
"build_icons": meld.build_helpers.build_icons,
"build_data": meld.build_helpers.build_data,
}
)
| culots/meld | setup_win32.py | Python | gpl-2.0 | 4,346 |
# coding: utf-8
from Sensor import Sensor
import nxt
class ColorSensor(Sensor):
name = 'color'
def Initialize(self):
#self.sensor = nxt.Light(self.robot.GetBrick(), self.port)
#self.sensor.set_illuminated(0)
self.sensor = nxt.Color20(self.robot.GetBrick(), self.port)
def Scan(self):
return self.sensor.get_sample()
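# A minimal usage sketch (hypothetical robot and port values; assumes the
# Sensor base class stores its constructor arguments as self.robot and
# self.port, as the code above implies):
#
#     sensor = ColorSensor(robot, nxt.PORT_1)
#     sensor.Initialize()
#     sample = sensor.Scan()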
| Lopt/nxt | NXT/ColorSensor.py | Python | gpl-2.0 | 372 |
"""
Implements compartmental model of a passive cable. See Neuronal Dynamics
`Chapter 3 Section 2 <http://neuronaldynamics.epfl.ch/online/Ch3.S2.html>`_
"""
# This file is part of the exercise code repository accompanying
# the book: Neuronal Dynamics (see http://neuronaldynamics.epfl.ch)
# located at http://github.com/EPFL-LCN/neuronaldynamics-exercises.
# This free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License 2.0 as published by the
# Free Software Foundation. You should have received a copy of the
# GNU General Public License along with the repository. If not,
# see http://www.gnu.org/licenses/.
# Should you reuse and publish the code for your own purposes,
# please cite the book or point to the webpage http://neuronaldynamics.epfl.ch.
# Wulfram Gerstner, Werner M. Kistler, Richard Naud, and Liam Paninski.
# Neuronal Dynamics: From Single Neurons to Networks and Models of Cognition.
# Cambridge University Press, 2014.
import brian2 as b2
from neurodynex3.tools import input_factory
import matplotlib.pyplot as plt
import numpy as np
# integration time step in milliseconds
b2.defaultclock.dt = 0.01 * b2.ms
# DEFAULT morphological and electrical parameters
CABLE_LENGTH = 500. * b2.um # length of dendrite
CABLE_DIAMETER = 2. * b2.um # diameter of dendrite
R_LONGITUDINAL = 0.5 * b2.kohm * b2.mm # Intracellular medium resistance
R_TRANSVERSAL = 1.25 * b2.Mohm * b2.mm ** 2 # cell membrane resistance (->leak current)
E_LEAK = -70. * b2.mV # reversal potential of the leak current (-> resting potential)
CAPACITANCE = 0.8 * b2.uF / b2.cm ** 2 # membrane capacitance
DEFAULT_INPUT_CURRENT = input_factory.get_step_current(2000, 3000, unit_time=b2.us, amplitude=0.2 * b2.namp)
DEFAULT_INPUT_LOCATION = [CABLE_LENGTH / 3] # provide an array of locations
# print("Membrane Timescale = {}".format(R_TRANSVERSAL*CAPACITANCE))
def simulate_passive_cable(current_injection_location=DEFAULT_INPUT_LOCATION, input_current=DEFAULT_INPUT_CURRENT,
length=CABLE_LENGTH, diameter=CABLE_DIAMETER,
r_longitudinal=R_LONGITUDINAL,
r_transversal=R_TRANSVERSAL, e_leak=E_LEAK, initial_voltage=E_LEAK,
capacitance=CAPACITANCE, nr_compartments=200, simulation_time=5 * b2.ms):
"""Builds a multicompartment cable and numerically approximates the cable equation.
Args:
current_injection_location (list): List [] of input locations (Quantity, Length): [123.*b2.um]
input_current (TimedArray): TimedArray of current amplitudes. One column per current_injection_location.
length (Quantity): Length of the cable: 0.8*b2.mm
diameter (Quantity): Diameter of the cable: 0.2*b2.um
r_longitudinal (Quantity): The longitudinal (axial) resistance of the cable: 0.5*b2.kohm*b2.mm
r_transversal (Quantity): The transversal resistance (=membrane resistance): 1.25*b2.Mohm*b2.mm**2
e_leak (Quantity): The reversal potential of the leak current (=resting potential): -70.*b2.mV
initial_voltage (Quantity): Value of the potential at t=0: -70.*b2.mV
capacitance (Quantity): Membrane capacitance: 0.8*b2.uF/b2.cm**2
nr_compartments (int): Number of compartments. Spatial discretization: 200
simulation_time (Quantity): Time for which the dynamics are simulated: 5*b2.ms
Returns:
(StateMonitor, SpatialNeuron): The state monitor contains the membrane voltage in a
Time x Location matrix. The SpatialNeuron object specifies the simulated neuron model
and gives access to the morphology. You may want to use those objects for
spatial indexing: myVoltageStateMonitor[mySpatialNeuron.morphology[0.123*b2.um]].v
"""
assert isinstance(input_current, b2.TimedArray), "input_current is not of type TimedArray"
assert input_current.values.shape[1] == len(current_injection_location),\
"number of injection_locations does not match nr of input currents"
cable_morphology = b2.Cylinder(diameter=diameter, length=length, n=nr_compartments)
# Im is transmembrane current
# Iext is injected current at a specific position on dendrite
EL = e_leak
RT = r_transversal
eqs = """
Iext = current(t, location_index): amp (point current)
location_index : integer (constant)
Im = (EL-v)/RT : amp/meter**2
"""
cable_model = b2.SpatialNeuron(morphology=cable_morphology, model=eqs, Cm=capacitance, Ri=r_longitudinal)
monitor_v = b2.StateMonitor(cable_model, "v", record=True)
# inject all input currents at the specified location:
nr_input_locations = len(current_injection_location)
input_current_0 = np.insert(input_current.values, 0, 0., axis=1) * b2.amp # insert default current: 0. [amp]
current = b2.TimedArray(input_current_0, dt=input_current.dt * b2.second)
for current_index in range(nr_input_locations):
insert_location = current_injection_location[current_index]
compartment_index = int(np.floor(insert_location / (length / nr_compartments)))
# next line: current_index+1 because 0 is the default current 0Amp
cable_model.location_index[compartment_index] = current_index + 1
# set initial values and run for 1 ms
cable_model.v = initial_voltage
b2.run(simulation_time)
return monitor_v, cable_model
def getting_started():
"""A simple code example to get started.
"""
current = input_factory.get_step_current(500, 510, unit_time=b2.us, amplitude=3. * b2.namp)
voltage_monitor, cable_model = simulate_passive_cable(
length=0.5 * b2.mm, current_injection_location=[0.1 * b2.mm], input_current=current,
nr_compartments=100, simulation_time=2 * b2.ms)
# provide a minimal plot
plt.figure()
plt.imshow(voltage_monitor.v / b2.volt)
plt.colorbar(label="voltage")
plt.xlabel("time index")
plt.ylabel("location index")
plt.title("vm at (t,x), raw data voltage_monitor.v")
plt.show()
if __name__ == "__main__":
getting_started()
| EPFL-LCN/neuronaldynamics-exercises | neurodynex3/cable_equation/passive_cable.py | Python | gpl-2.0 | 6,153 |
#/*##########################################################################
# Copyright (C) 2004-2012 European Synchrotron Radiation Facility
#
# This file is part of the PyMca X-ray Fluorescence Toolkit developed at
# the ESRF by the Software group.
#
# This file is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This file is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
#############################################################################*/
__author__ = "V.A. Sole - ESRF Data Analysis"
import os
import numpy
import time
try:
from PyMca import EdfFile
from PyMca import TiffIO
except ImportError:
print("ArraySave.py is importing EdfFile and TiffIO from local directory")
import EdfFile
import TiffIO
HDF5 = True
try:
import h5py
except ImportError:
HDF5 = False
DEBUG = 0
def getDate():
localtime = time.localtime()
gtime = time.gmtime()
#year, month, day, hour, minute, second,\
# week_day, year_day, delta = time.localtime()
year = localtime[0]
month = localtime[1]
day = localtime[2]
hour = localtime[3]
minute = localtime[4]
second = localtime[5]
#get the difference against Greenwich
delta = hour - gtime[3]
return "%4d-%02d-%02dT%02d:%02d:%02d%+02d:00" % (year, month, day, hour,
minute, second, delta)
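# Example of the string produced by getDate() above (hypothetical local time
# two hours ahead of GMT): "2012-06-15T14:30:05+2:00" -- note the "+2:00"
# produced by the "%+02d" format, which counts the sign within the width.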
def save2DArrayListAsASCII(datalist, filename,
labels=None, csv=False, csvseparator=";"):
if type(datalist) != type([]):
datalist = [datalist]
r, c = datalist[0].shape
ndata = len(datalist)
if os.path.exists(filename):
try:
os.remove(filename)
except OSError:
pass
if labels is None:
labels = []
for i in range(len(datalist)):
labels.append("Array_%d" % i)
if len(labels) != len(datalist):
raise ValueError("Incorrect number of labels")
if csv:
header = '"row"%s"column"' % csvseparator
for label in labels:
header += '%s"%s"' % (csvseparator, label)
else:
header = "row column"
for label in labels:
header += " %s" % label
filehandle = open(filename, 'w+')
filehandle.write('%s\n' % header)
fileline = ""
if csv:
for row in range(r):
for col in range(c):
fileline += "%d" % row
fileline += "%s%d" % (csvseparator, col)
for i in range(ndata):
fileline += "%s%g" % (csvseparator, datalist[i][row, col])
fileline += "\n"
filehandle.write("%s" % fileline)
fileline = ""
else:
for row in range(r):
for col in range(c):
fileline += "%d" % row
fileline += " %d" % col
for i in range(ndata):
fileline += " %g" % datalist[i][row, col]
fileline += "\n"
filehandle.write("%s" % fileline)
fileline = ""
filehandle.write("\n")
filehandle.close()
def save2DArrayListAsEDF(datalist, filename, labels=None, dtype=None):
if type(datalist) != type([]):
datalist = [datalist]
ndata = len(datalist)
if os.path.exists(filename):
try:
os.remove(filename)
except OSError:
pass
if labels is None:
labels = []
for i in range(ndata):
labels.append("Array_%d" % i)
if len(labels) != ndata:
raise ValueError("Incorrect number of labels")
edfout = EdfFile.EdfFile(filename, access="ab")
for i in range(ndata):
if dtype is None:
edfout.WriteImage({'Title': labels[i]},
datalist[i], Append=1)
else:
edfout.WriteImage({'Title': labels[i]},
datalist[i].astype(dtype),
Append=1)
del edfout # force file close
def save2DArrayListAsMonochromaticTiff(datalist, filename,
labels=None, dtype=None):
if type(datalist) != type([]):
datalist = [datalist]
ndata = len(datalist)
if dtype is None:
dtype = datalist[0].dtype
for i in range(len(datalist)):
dtypeI = datalist[i].dtype
if dtypeI in [numpy.float32, numpy.float64] or\
dtypeI.str[-2] == 'f':
dtype = numpy.float32
break
elif dtypeI != dtype:
dtype = numpy.float32
break
if os.path.exists(filename):
try:
os.remove(filename)
except OSError:
pass
if labels is None:
labels = []
for i in range(ndata):
labels.append("Array_%d" % i)
if len(labels) != ndata:
raise ValueError("Incorrect number of labels")
outfileInstance = TiffIO.TiffIO(filename, mode="wb+")
for i in range(ndata):
if i == 1:
outfileInstance = TiffIO.TiffIO(filename, mode="rb+")
if dtype is None:
data = datalist[i]
else:
data = datalist[i].astype(dtype)
outfileInstance.writeImage(data, info={'Title': labels[i]})
outfileInstance.close() # force file close
def openHDF5File(name, mode='a', **kwargs):
"""
Open an HDF5 file.
Valid modes (like Python's file() modes) are:
- r Readonly, file must exist
- r+ Read/write, file must exist
- w Create file, truncate if exists
- w- Create file, fail if exists
- a Read/write if exists, create otherwise (default)
"""
h5file = h5py.File(name, mode, **kwargs)
if h5file.mode != 'r' and len(h5file) == 0:
if 'file_name' not in h5file.attrs:
attr = 'file_name'
txt = "%s" % name
dtype = '<S%d' % len(txt)
h5file.attrs.create(attr, txt, dtype=dtype)
if 'file_time' not in h5file.attrs:
attr = 'file_time'
txt = "%s" % getDate()
dtype = '<S%d' % len(txt)
h5file.attrs.create(attr, txt, dtype=dtype)
if 'HDF5_version' not in h5file.attrs:
attr = 'HDF5_version'
txt = "%s" % h5py.version.hdf5_version
dtype = '<S%d' % len(txt)
h5file.attrs.create(attr, txt, dtype=dtype)
if 'HDF5_API_version' not in h5file.attrs:
attr = 'HDF5_API_version'
txt = "%s" % h5py.version.api_version
dtype = '<S%d' % len(txt)
h5file.attrs.create(attr, txt, dtype=dtype)
if 'h5py_version' not in h5file.attrs:
attr = 'h5py_version'
txt = "%s" % h5py.version.version
dtype = '<S%d' % len(txt)
h5file.attrs.create(attr, txt, dtype=dtype)
if 'creator' not in h5file.attrs:
attr = 'creator'
txt = "%s" % 'PyMca'
dtype = '<S%d' % len(txt)
h5file.attrs.create(attr, txt, dtype=dtype)
#if 'format_version' not in self.attrs and len(h5file) == 0:
# h5file.attrs['format_version'] = __format_version__
return h5file
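# A minimal usage sketch (hypothetical file name):
#
#     h5f = openHDF5File("stack.h5", mode="w")  # new file; PyMca bookkeeping attrs added
#     h5f.close()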
def getHDF5FileInstanceAndBuffer(filename, shape,
buffername="data",
dtype=numpy.float32,
interpretation=None,
compression=None):
if not HDF5:
raise IOError('h5py does not seem to be installed in your system')
if os.path.exists(filename):
try:
os.remove(filename)
except:
raise IOError("Cannot overwrite existing file!")
hdf = openHDF5File(filename, 'a')
entryName = "data"
#entry
nxEntry = hdf.require_group(entryName)
if 'NX_class' not in nxEntry.attrs:
nxEntry.attrs['NX_class'] = 'NXentry'.encode('utf-8')
elif nxEntry.attrs['NX_class'] != 'NXentry'.encode('utf-8'):
#should I raise an error?
pass
nxEntry['title'] = "PyMca saved 3D Array".encode('utf-8')
nxEntry['start_time'] = getDate().encode('utf-8')
nxData = nxEntry.require_group('NXdata')
if 'NX_class' not in nxData.attrs:
nxData.attrs['NX_class'] = 'NXdata'.encode('utf-8')
elif nxData.attrs['NX_class'] == 'NXdata'.encode('utf-8'):
#should I raise an error?
pass
if compression:
if DEBUG:
print("Saving compressed and chunked dataset")
chunk1 = int(shape[1] / 10)
if chunk1 == 0:
chunk1 = shape[1]
for i in [11, 10, 8, 7, 5, 4]:
if (shape[1] % i) == 0:
chunk1 = int(shape[1] / i)
break
chunk2 = int(shape[2] / 10)
if chunk2 == 0:
chunk2 = shape[2]
for i in [11, 10, 8, 7, 5, 4]:
if (shape[2] % i) == 0:
chunk2 = int(shape[2] / i)
break
data = nxData.require_dataset(buffername,
shape=shape,
dtype=dtype,
chunks=(1, chunk1, chunk2),
compression=compression)
else:
#no chunking
if DEBUG:
print("Saving not compressed and not chunked dataset")
data = nxData.require_dataset(buffername,
shape=shape,
dtype=dtype,
compression=None)
data.attrs['signal'] = numpy.int32(1)
if interpretation is not None:
data.attrs['interpretation'] = interpretation.encode('utf-8')
for i in range(len(shape)):
dim = numpy.arange(shape[i]).astype(numpy.float32)
dset = nxData.require_dataset('dim_%d' % i,
dim.shape,
dim.dtype,
dim,
chunks=dim.shape)
dset.attrs['axis'] = numpy.int32(i + 1)
nxEntry['end_time'] = getDate().encode('utf-8')
return hdf, data
def save3DArrayAsMonochromaticTiff(data, filename,
labels=None, dtype=None, mcaindex=-1):
ndata = data.shape[mcaindex]
if dtype is None:
dtype = numpy.float32
if os.path.exists(filename):
try:
os.remove(filename)
except OSError:
pass
if labels is None:
labels = []
for i in range(ndata):
labels.append("Array_%d" % i)
if len(labels) != ndata:
raise ValueError("Incorrect number of labels")
outfileInstance = TiffIO.TiffIO(filename, mode="wb+")
if mcaindex in [2, -1]:
for i in range(ndata):
if i == 1:
outfileInstance = TiffIO.TiffIO(filename, mode="rb+")
if dtype is None:
tmpData = data[:, :, i]
else:
tmpData = data[:, :, i].astype(dtype)
outfileInstance.writeImage(tmpData, info={'Title': labels[i]})
if (ndata > 10):
print("Saved image %d of %d" % (i + 1, ndata))
elif mcaindex == 1:
for i in range(ndata):
if i == 1:
outfileInstance = TiffIO.TiffIO(filename, mode="rb+")
if dtype is None:
tmpData = data[:, i, :]
else:
tmpData = data[:, i, :].astype(dtype)
outfileInstance.writeImage(tmpData, info={'Title': labels[i]})
if (ndata > 10):
print("Saved image %d of %d" % (i + 1, ndata))
else:
for i in range(ndata):
if i == 1:
outfileInstance = TiffIO.TiffIO(filename, mode="rb+")
if dtype is None:
tmpData = data[i]
else:
tmpData = data[i].astype(dtype)
outfileInstance.writeImage(tmpData, info={'Title': labels[i]})
if (ndata > 10):
print("Saved image %d of %d" % (i + 1, ndata))
outfileInstance.close() # force file close
# It should be used to name the data that, for the time being, is named 'data'.
def save3DArrayAsHDF5(data, filename, axes=None, labels=None, dtype=None, mode='nexus',
mcaindex=-1, interpretation=None, compression=None):
if not HDF5:
raise IOError('h5py does not seem to be installed in your system')
if (mcaindex == 0) and (interpretation in ["spectrum", None]):
#stack of images to be saved as stack of spectra
modify = True
shape = [data.shape[1], data.shape[2], data.shape[0]]
elif (mcaindex != 0) and (interpretation in ["image"]):
#stack of spectra to be saved as stack of images
modify = True
shape = [data.shape[2], data.shape[0], data.shape[1]]
else:
modify = False
shape = data.shape
if dtype is None:
dtype = data.dtype
if mode.lower() in ['nexus', 'nexus+']:
#raise IOError, 'NeXus data saving not implemented yet'
if os.path.exists(filename):
try:
os.remove(filename)
except:
raise IOError("Cannot overwrite existing file!")
hdf = openHDF5File(filename, 'a')
entryName = "data"
#entry
nxEntry = hdf.require_group(entryName)
if 'NX_class' not in nxEntry.attrs:
nxEntry.attrs['NX_class'] = 'NXentry'.encode('utf-8')
elif nxEntry.attrs['NX_class'] != 'NXentry'.encode('utf-8'):
#should I raise an error?
pass
nxEntry['title'] = "PyMca saved 3D Array".encode('utf-8')
nxEntry['start_time'] = getDate().encode('utf-8')
nxData = nxEntry.require_group('NXdata')
if ('NX_class' not in nxData.attrs):
nxData.attrs['NX_class'] = 'NXdata'.encode('utf-8')
elif nxData.attrs['NX_class'] != 'NXdata'.encode('utf-8'):
#should I raise an error?
pass
if modify:
if interpretation in ["image", "image".encode('utf-8')]:
if compression:
if DEBUG:
print("Saving compressed and chunked dataset")
#risk of taking a 10 % more space in disk
chunk1 = int(shape[1] / 10)
if chunk1 == 0:
chunk1 = shape[1]
for i in [11, 10, 8, 7, 5, 4]:
if (shape[1] % i) == 0:
chunk1 = int(shape[1] / i)
break
chunk2 = int(shape[2] / 10)
for i in [11, 10, 8, 7, 5, 4]:
if (shape[2] % i) == 0:
chunk2 = int(shape[2] / i)
break
dset = nxData.require_dataset('data',
shape=shape,
dtype=dtype,
chunks=(1, chunk1, chunk2),
compression=compression)
else:
if DEBUG:
print("Saving not compressed and not chunked dataset")
#print not compressed -> Not chunked
dset = nxData.require_dataset('data',
shape=shape,
dtype=dtype,
compression=None)
for i in range(data.shape[-1]):
tmp = data[:, :, i:i + 1]
tmp.shape = 1, shape[1], shape[2]
dset[i, 0:shape[1], :] = tmp
print("Saved item %d of %d" % (i + 1, data.shape[-1]))
elif 0:
#if I do not match the input and output shapes it takes ages
#to save the images as spectra. However, it is much faster
#when performing spectra operations.
dset = nxData.require_dataset('data',
shape=shape,
dtype=dtype,
chunks=(1, shape[1], shape[2]))
for i in range(data.shape[1]): # shape[0]
chunk = numpy.zeros((1, data.shape[2], data.shape[0]),
dtype)
for k in range(data.shape[0]): # shape[2]
if 0:
tmpData = data[k:k + 1]
for j in range(data.shape[2]): # shape[1]
tmpData.shape = data.shape[1], data.shape[2]
chunk[0, j, k] = tmpData[i, j]
else:
tmpData = data[k:k + 1, i, :]
tmpData.shape = -1
chunk[0, :, k] = tmpData
print("Saving item %d of %d" % (i, data.shape[1]))
dset[i, :, :] = chunk
else:
#if I do not match the input and output shapes it takes ages
#to save the images as spectra. This is a very fast saving, but
#the performance is awful when reading.
if compression:
if DEBUG:
print("Saving compressed and chunked dataset")
dset = nxData.require_dataset('data',
shape=shape,
dtype=dtype,
chunks=(shape[0], shape[1], 1),
compression=compression)
else:
if DEBUG:
print("Saving not compressed and not chunked dataset")
dset = nxData.require_dataset('data',
shape=shape,
dtype=dtype,
compression=None)
for i in range(data.shape[0]):
tmp = data[i:i + 1, :, :]
tmp.shape = shape[0], shape[1], 1
dset[:, :, i:i + 1] = tmp
else:
if compression:
if DEBUG:
print("Saving compressed and chunked dataset")
chunk1 = int(shape[1] / 10)
if chunk1 == 0:
chunk1 = shape[1]
for i in [11, 10, 8, 7, 5, 4]:
if (shape[1] % i) == 0:
chunk1 = int(shape[1] / i)
break
chunk2 = int(shape[2] / 10)
if chunk2 == 0:
chunk2 = shape[2]
for i in [11, 10, 8, 7, 5, 4]:
if (shape[2] % i) == 0:
chunk2 = int(shape[2] / i)
break
if DEBUG:
print("Used chunk size = (1, %d, %d)" % (chunk1, chunk2))
dset = nxData.require_dataset('data',
shape=shape,
dtype=dtype,
chunks=(1, chunk1, chunk2),
compression=compression)
else:
if DEBUG:
print("Saving not compressed and notchunked dataset")
dset = nxData.require_dataset('data',
shape=shape,
dtype=dtype,
compression=None)
tmpData = numpy.zeros((1, data.shape[1], data.shape[2]),
data.dtype)
for i in range(data.shape[0]):
tmpData[0:1] = data[i:i + 1]
dset[i:i + 1] = tmpData[0:1]
print("Saved item %d of %d" % (i + 1, data.shape[0]))
dset.attrs['signal'] = "1".encode('utf-8')
if interpretation is not None:
dset.attrs['interpretation'] = interpretation.encode('utf-8')
axesAttribute = []
for i in range(len(shape)):
if axes is None:
dim = numpy.arange(shape[i]).astype(numpy.float32)
dimlabel = 'dim_%d' % i
elif axes[i] is not None:
dim = axes[i]
try:
dimlabel = "%s" % labels[i]
except:
dimlabel = 'dim_%d' % i
else:
dim = numpy.arange(shape[i]).astype(numpy.float32)
dimlabel = 'dim_%d' % i
axesAttribute.append(dimlabel)
adset = nxData.require_dataset(dimlabel,
dim.shape,
dim.dtype,
compression=None)
adset[:] = dim[:]
adset.attrs['axis'] = i + 1
dset.attrs['axes'] = (":".join(axesAttribute)).encode('utf-8')
nxEntry['end_time'] = getDate().encode('utf-8')
if mode.lower() == 'nexus+':
#create link
g = h5py.h5g.open(hdf.fid, '/'.encode('utf-8'))
g.link('/data/NXdata/data'.encode('utf-8'),
'/data/data'.encode('utf-8'),
h5py.h5g.LINK_HARD)
elif mode.lower() == 'simplest':
if os.path.exists(filename):
try:
os.remove(filename)
except:
raise IOError("Cannot overwrite existing file!")
hdf = h5py.File(filename, 'a')
if compression:
hdf.require_dataset('data',
shape=shape,
dtype=dtype,
data=data,
chunks=(1, shape[1], shape[2]),
compression=compression)
else:
hdf.require_dataset('data',
shape=shape,
data=data,
dtype=dtype,
compression=None)
else:
if os.path.exists(filename):
try:
os.remove(filename)
except:
raise IOError("Cannot overwrite existing file!")
shape = data.shape
dtype = data.dtype
hdf = h5py.File(filename, 'a')
dataGroup = hdf.require_group('data')
dataGroup.require_dataset('data',
shape=shape,
dtype=dtype,
data=data,
chunks=(1, shape[1], shape[2]))
hdf.flush()
hdf.close()
def main():
a = numpy.arange(1000000.)
a.shape = 20, 50, 1000
save3DArrayAsHDF5(a, '/test.h5', mode='nexus+', interpretation='image')
getHDF5FileInstanceAndBuffer('/test2.h5', (100, 100, 100))
print("Date String = ", getDate())
if __name__ == "__main__":
main()
| tonnrueter/pymca_devel | PyMca/ArraySave.py | Python | gpl-2.0 | 23,377 |
from Screen import Screen
from Screens.ChoiceBox import ChoiceBox
class ResolutionSelection(Screen):
def __init__(self, session, infobar=None):
Screen.__init__(self, session)
self.session = session
xresString = open("/proc/stb/vmpeg/0/xres", "r").read()
yresString = open("/proc/stb/vmpeg/0/yres", "r").read()
fpsString = open("/proc/stb/vmpeg/0/framerate", "r").read()
xres = int(xresString, 16)
yres = int(yresString, 16)
fps = int(fpsString, 16)
fpsFloat = float(fps)
fpsFloat = fpsFloat/1000
selection = 0
tlist = []
tlist.append((_("Exit"), "exit"))
tlist.append((_("Auto(not available)"), "auto"))
tlist.append(("Video: " + str(xres) + "x" + str(yres) + "@" + str(fpsFloat) + "hz", ""))
tlist.append(("--", ""))
tlist.append(("576i", "576i50"))
tlist.append(("576p", "576p50"))
tlist.append(("720p", "720p50"))
tlist.append(("1080i", "1080i50"))
tlist.append(("[email protected]", "1080p23"))
tlist.append(("1080p@24hz", "1080p24"))
tlist.append(("1080p@25hz", "1080p25"))
keys = ["green", "yellow", "blue", "", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9" ]
mode = open("/proc/stb/video/videomode").read()[:-1]
print mode
for x in range(len(tlist)):
if tlist[x][1] == mode:
selection = x
self.session.openWithCallback(self.ResolutionSelected, ChoiceBox, title=_("Please select a resolution..."), list = tlist, selection = selection, keys = keys)
#return
def ResolutionSelected(self, Resolution):
if not Resolution is None:
if isinstance(Resolution[1], str):
if Resolution[1] == "exit":
self.ExGreen_toggleGreen()
elif Resolution[1] != "auto":
open("/proc/stb/video/videomode", "w").write(Resolution[1])
from enigma import gFBDC
gFBDC.getInstance().setResolution(-1, -1)
self.ExGreen_toggleGreen()
return
| OpenLD/enigma2-wetek | lib/python/Screens/ResolutionSelection.py | Python | gpl-2.0 | 1,827 |
#!/usr/bin/env python
from time import sleep
import os
import RPi.GPIO as GPIO
import subprocess
import datetime
GPIO.setmode(GPIO.BCM)
GPIO.setup(24, GPIO.IN)
count = 0
up = False
down = False
command = ""
filename = ""
index = 0
camera_pause = "500"
def takepic(imageName):
print("picture")
command = "sudo raspistill -o " + imageName + " -q 100 -t " + camera_pause
print(command)
os.system(command)
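# Example of the shell command assembled by takepic() above (hypothetical
# file name; camera_pause is "500" as set above):
#     sudo raspistill -o photo-2016-01-01_120000.jpg -q 100 -t 500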
while(True):
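    # Falling-edge trigger: 'up' holds the previous pin reading, so a picture
    # is taken only when the pin was high on the last pass and reads low now.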
if(up==True):
if(GPIO.input(24)==False):
now = datetime.datetime.now()
timeString = now.strftime("%Y-%m-%d_%H%M%S")
filename = "photo-"+timeString+".jpg"
takepic(filename)
subprocess.call(['./processImage.sh', filename, '&'])
up = GPIO.input(24)
count = count+1
sleep(.1)
print "done"
| sgonzalez/wsn-parking-project | sensor-pi/testimages.py | Python | gpl-2.0 | 764 |
class City(object):
def __init__(self, name):
self.name = name
def Name(self):
return self.name
class Home(object):
def __init__(self, name, city):
self.name = name
self.city = city
def Name(self):
return self.name
def City(self):
return self.city
class Person(object):
def __init__(self, name, home):
self.name = name
self.home = home
def Name(self):
return self.name
def Home(self):
return self.home
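# Note: the chained call person.Home().City().Name() below is exactly the
# Law of Demeter violation this "bad example" is meant to illustrate.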
city = City('Karlstad')
home = Home('Nilssons hemmet', city)
person = Person('Nils', home)
print '%s lives in %s, which is in the city of %s.' % (person.Name(), person.Home().Name(), person.Home().City().Name())
| Kolguyev/samples | law_of_demeter/law_of_demeter_bad_example.py | Python | gpl-2.0 | 738 |
import P1011
import unittest
class test_phylab(unittest.TestCase):
def testSteelWire_1(self):
m = [10.000,12.000,14.000,16.000,18.000,20.000,22.000,24.000,26.00]
C_plus = [3.50, 3.81, 4.10, 4.40, 4.69, 4.98, 5.28, 5.59, 5.89]
C_sub = [3.52, 3.80, 4.08, 4.38, 4.70, 4.99, 5.30, 5.59, 5.89]
D = [0.789, 0.788, 0.788, 0.787, 0.788]
L = 38.9
H = 77.0
b = 8.50
res = P1011.SteelWire(m, C_plus, C_sub, D, L, H, b)
self.assertEqual(res,'(1.90\\pm0.04){\\times}10^{11}',"test SteelWire fail")
def testInertia_1(self):
m = [711.77, 711.82, 1242.30, 131.76, 241.56,238.38]
d = [99.95, 99.95, 93.85, 114.60, 610.00]
T = [[4.06, 4.06, 4.07, 4.06, 4.06], [6.57, 6.57, 6.57, 6.56, 6.57],
[8.16, 8.16, 8.17, 8.17, 8.17], [7.35, 7.35, 7.33, 7.35, 7.37],
[11.40, 11.40, 11.41, 11.41, 11.41]]
l = [34.92, 6.02, 33.05]
T2 = [[13.07,13.07,13.07,13.07,13.06],[16.86,16.86,16.88,16.87,16.88],
[21.79,21.82,21.83,21.84,21.84],[27.28,27.28,27.29,27.27,27.27],
[32.96,32.96,32.96,32.97,32.96]]
res = P1011.Inertia(m, d, T, l, T2)
x = 1
if(abs(res[0] - 0.9999989) > pow(10,-7)):
x = 0
if(abs(res[1] - 610.9)/610.9 > 0.001):
x = 0
self.assertEqual(x,1,"test Inertia fail")
if __name__ =='__main__':
unittest.main()
| buaase/Phylab-Web | PythonExperimentDataHandle/test/P1011_test.py | Python | gpl-2.0 | 1,628 |
# -*- coding: utf-8 -*-
# Copyright(c) 2016-2020 Jonas Sjöberg <[email protected]>
# Source repository: https://github.com/jonasjberg/autonameow
#
# This file is part of autonameow.
#
# autonameow is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# autonameow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with autonameow. If not, see <http://www.gnu.org/licenses/>.
from .extractor_crossplatform import CrossPlatformFileSystemExtractor
from .extractor_epub import EpubMetadataExtractor
from .extractor_exiftool import ExiftoolMetadataExtractor
from .extractor_filetags import FiletagsMetadataExtractor
from .extractor_guessit import GuessitMetadataExtractor
from .extractor_jpeginfo import JpeginfoMetadataExtractor
from .extractor_pandoc import PandocMetadataExtractor
| jonasjberg/autonameow | autonameow/extractors/metadata/__init__.py | Python | gpl-2.0 | 1,177 |
#!/usr/bin/python
import unittest
class FooTest(unittest.TestCase):
'''Sample test case -- FooTest()'''
def setUp(self):
'''Set up for testing...'''
print 'FooTest:setUp_:begin'
testName = self.shortDescription()
if (testName == 'Test routine A'):
print 'setting up for test A'
elif (testName == 'Test routine B'):
print 'setting up for test B'
else:
print 'UNKNOWN TEST ROUTINE'
print 'FooTest:setUp_:end'
def testA(self):
'''Test routine A'''
print 'FooTest: running testA...'
def testB(self):
'''Test routine B'''
print 'FooTest: running testB...'
def tearDown(self):
'''Tear down from testing...'''
print 'FooTest:tearDown_:begin'
testName = self.shortDescription()
if (testName == 'Test routine A'):
print 'cleaning up after test A'
elif (testName == 'Test routine B'):
print 'cleaning up after test B'
else:
print 'UNKNOWN TEST ROUTINE'
print 'FooTest:tearDown_:end'
class BarTest(unittest.TestCase):
'''Sample test case -- BarTest()'''
def setUp(self):
'''Set up for testing...'''
print 'BarTest:setUp_:begin'
testName = self.shortDescription()
if (testName == 'Test routine A'):
print 'setting up for test A'
elif (testName == 'Test routine B'):
print 'setting up for test B'
else:
print 'UNKNOWN TEST ROUTINE'
print 'BarTest:setUp_:end'
def testA(self):
'''Test routine A'''
print 'BarTest: running testA...'
def testB(self):
'''Test routine B'''
print 'BarTest: running testB...'
def tearDown(self):
'''Tear down from testing...'''
print 'BarTest:tearDown_:begin'
testName = self.shortDescription()
if (testName == 'Test routine A'):
print 'cleaning up after test A'
elif (testName == 'Test routine B'):
print 'cleaning up after test B'
else:
print 'UNKNOWN TEST ROUTINE'
print 'BarTest:tearDown_:end'
if __name__ == '__main__':
unittest.main() | jeffwright13/unittest_tutorial | test_foo.py | Python | gpl-2.0 | 2,412 |
#!/usr/bin/env python
#coding=utf-8
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import webapp2
import time
import dbcontroller as dc
import speak
import User
import logging
class MainHandler(webapp2.RequestHandler):
def get(self):
list = dc.refresh()
lines = speak.speak(list)
import twitter
for user in User.users:
for i in lines:
str1 = i
logging.log(logging.INFO, u"twitter length is " + \
str(len(str1)))
try:
twitter.sendMessage(str1)
except:
logging.log(logging.WARNING, u"twitter send fail:" + str1)
return self.response.out.write('ok')
app = webapp2.WSGIApplication([
('/whyisme', MainHandler)
], debug=True)
| ariwaranosai/twitter_bot | twitter_bot/main.py | Python | gpl-2.0 | 1,342 |
__author__ = 'bruno'
import unittest
import algorithms.math.abacus as Abacus
class TestAbacus(unittest.TestCase):
def setUp(self):
pass
def test_abacus1(self):
abacus = Abacus.generate_abacus(0)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |'], abacus)
def test_abacus2(self):
abacus = Abacus.generate_abacus(8)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00 000*****|'], abacus)
def test_abacus3(self):
abacus = Abacus.generate_abacus(32)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000** ***|',
'|00000*** **|'], abacus)
def test_abacus4(self):
abacus = Abacus.generate_abacus(147)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000**** *|',
'|00000* ****|',
'|000 00*****|'], abacus)
def test_abacus5(self):
abacus = Abacus.generate_abacus(986)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|0 0000*****|',
'|00 000*****|',
'|0000 0*****|'], abacus)
def test_abacus6(self):
abacus = Abacus.generate_abacus(5821)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000 *****|',
'|00 000*****|',
'|00000*** **|',
'|00000**** *|'], abacus)
def test_abacus7(self):
abacus = Abacus.generate_abacus(1234)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000**** *|',
'|00000*** **|',
'|00000** ***|',
'|00000* ****|'], abacus)
def test_abacus8(self):
abacus = Abacus.generate_abacus(999)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|0 0000*****|',
'|0 0000*****|',
'|0 0000*****|'], abacus)
def test_abacus9(self):
abacus = Abacus.generate_abacus(13)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000**** *|',
'|00000** ***|'], abacus)
def test_abacus10(self):
abacus = Abacus.generate_abacus(49)
self.assertEqual(['|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000***** |',
'|00000* ****|',
'|0 0000*****|'], abacus) | bnsantos/python-junk-code | tests/math/abacusTest.py | Python | gpl-2.0 | 5,476 |
# -*- coding: utf-8 -*-
# ===========================================================================
# eXe
# Copyright 2004-2006, University of Auckland
# Copyright 2006-2008 eXe Project, http://eXeLearning.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
Package represents the collection of resources the user is editing
i.e. the "package".
"""
import datetime
import shutil
import logging
import time
import zipfile
import uuid
import re
from xml.dom import minidom
from exe.engine.path import Path, TempDirPath, toUnicode
from exe.engine.node import Node
from exe.engine.genericidevice import GenericIdevice
from exe.engine.multichoiceidevice import MultichoiceIdevice
from exe.engine.quiztestidevice import QuizTestIdevice
from exe.engine.truefalseidevice import TrueFalseIdevice
from exe.engine.wikipediaidevice import WikipediaIdevice
from exe.engine.casestudyidevice import CasestudyIdevice
from exe.engine.casopracticofpdidevice import CasopracticofpdIdevice
from exe.engine.citasparapensarfpdidevice import CitasparapensarfpdIdevice
from exe.engine.clozefpdidevice import ClozefpdIdevice
from exe.engine.clozeidevice import ClozeIdevice
from exe.engine.clozelangfpdidevice import ClozelangfpdIdevice
from exe.engine.debesconocerfpdidevice import DebesconocerfpdIdevice
from exe.engine.destacadofpdidevice import DestacadofpdIdevice
from exe.engine.ejercicioresueltofpdidevice import EjercicioresueltofpdIdevice
from exe.engine.eleccionmultiplefpdidevice import EleccionmultiplefpdIdevice
from exe.engine.freetextfpdidevice import FreeTextfpdIdevice
from exe.engine.galleryidevice import GalleryIdevice
from exe.engine.imagemagnifieridevice import ImageMagnifierIdevice
from exe.engine.listaidevice import ListaIdevice
from exe.engine.multiselectidevice import MultiSelectIdevice
from exe.engine.orientacionesalumnadofpdidevice import OrientacionesalumnadofpdIdevice
from exe.engine.orientacionestutoriafpdidevice import OrientacionestutoriafpdIdevice
from exe.engine.parasabermasfpdidevice import ParasabermasfpdIdevice
from exe.engine.recomendacionfpdidevice import RecomendacionfpdIdevice
from exe.engine.reflectionfpdidevice import ReflectionfpdIdevice
from exe.engine.reflectionfpdmodifidevice import ReflectionfpdmodifIdevice
from exe.engine.reflectionidevice import ReflectionIdevice
from exe.engine.seleccionmultiplefpdidevice import SeleccionmultiplefpdIdevice
from exe.engine.verdaderofalsofpdidevice import VerdaderofalsofpdIdevice
from exe.engine.persist import Persistable, encodeObject, decodeObjectRaw
from exe import globals as G
from exe.engine.resource import Resource
from twisted.persisted.styles import doUpgrade
from twisted.spread.jelly import Jellyable, Unjellyable
from exe.engine.beautifulsoup import BeautifulSoup
from exe.engine.field import Field, TextAreaField
from exe.engine.persistxml import encodeObjectToXML, decodeObjectFromXML
from exe.engine.lom import lomsubs
from exe.engine.checker import Checker
from exe.webui import common
log = logging.getLogger(__name__)
def clonePrototypeIdevice(title):
idevice = None
for prototype in G.application.ideviceStore.getIdevices():
if prototype.get_title() == title:
log.debug('have prototype of:' + prototype.get_title())
idevice = prototype.clone()
idevice.edit = False
break
return idevice
def burstIdevice(idev_type, i, node):
# given the iDevice type and the BeautifulSoup fragment i, burst it:
idevice = clonePrototypeIdevice(idev_type)
if idevice is None:
log.warn("unable to clone " + idev_type + " idevice")
freetext_idevice = clonePrototypeIdevice('Free Text')
if freetext_idevice is None:
log.error("unable to clone Free Text for " + idev_type
+ " idevice")
return
idevice = freetext_idevice
# For idevices such as GalleryImage, where resources are being attached,
# the idevice should already be attached to a node before bursting it open:
node.addIdevice(idevice)
idevice.burstHTML(i)
return idevice
def loadNodesIdevices(node, s):
soup = BeautifulSoup(s)
body = soup.find('body')
if body:
idevices = body.findAll(name='div',
attrs={'class' : re.compile('Idevice$') })
if len(idevices) > 0:
for i in idevices:
# WARNING: none of the idevices yet re-attach their media,
# but they do attempt to re-attach images and other links.
if i.attrMap['class']=="activityIdevice":
idevice = burstIdevice('Activity', i, node)
elif i.attrMap['class']=="objectivesIdevice":
idevice = burstIdevice('Objectives', i, node)
elif i.attrMap['class']=="preknowledgeIdevice":
idevice = burstIdevice('Preknowledge', i, node)
elif i.attrMap['class']=="readingIdevice":
idevice = burstIdevice('Reading Activity', i, node)
# the above are all Generic iDevices;
# below are all others:
elif i.attrMap['class']=="RssIdevice":
idevice = burstIdevice('RSS', i, node)
elif i.attrMap['class']=="WikipediaIdevice":
# WARNING: Wiki problems loading images with accents, etc:
idevice = burstIdevice('Wiki Article', i, node)
elif i.attrMap['class']=="ReflectionIdevice":
idevice = burstIdevice('Reflection', i, node)
elif i.attrMap['class']=="GalleryIdevice":
# WARNING: Gallery problems with the popup html:
idevice = burstIdevice('Image Gallery', i, node)
elif i.attrMap['class']=="ImageMagnifierIdevice":
# WARNING: Magnifier missing major bursting components:
idevice = burstIdevice('Image Magnifier', i, node)
elif i.attrMap['class']=="AppletIdevice":
# WARNING: Applet missing file bursting components:
idevice = burstIdevice('Java Applet', i, node)
elif i.attrMap['class']=="ExternalUrlIdevice":
idevice = burstIdevice('External Web Site', i, node)
elif i.attrMap['class']=="ClozeIdevice":
idevice = burstIdevice('Cloze Activity', i, node)
elif i.attrMap['class']=="FreeTextIdevice":
idevice = burstIdevice('Free Text', i, node)
elif i.attrMap['class']=="CasestudyIdevice":
idevice = burstIdevice('Case Study', i, node)
elif i.attrMap['class']=="MultichoiceIdevice":
idevice = burstIdevice('Multi-choice', i, node)
elif i.attrMap['class']=="MultiSelectIdevice":
idevice = burstIdevice('Multi-select', i, node)
elif i.attrMap['class']=="QuizTestIdevice":
idevice = burstIdevice('SCORM Quiz', i, node)
elif i.attrMap['class']=="TrueFalseIdevice":
idevice = burstIdevice('True-False Question', i, node)
else:
# NOTE: no custom idevices burst yet,
# nor any deprecated idevices. Just burst into a FreeText:
log.warn("unburstable idevice " + i.attrMap['class'] +
"; bursting into Free Text")
idevice = burstIdevice('Free Text', i, node)
        else:
            # no idevices listed on this page,
            # just create a free-text for the entire page,
            # using the whole body as the fragment:
            log.warn("no idevices found on this node, bursting into Free Text.")
            idevice = burstIdevice('Free Text', body, node)
else:
log.warn("unable to read the body of this node.")
def test_for_node(html_content):
# to see if this html really is an exe-generated node
exe_string = u"<!-- Created using eXe: http://exelearning.org -->"
if html_content.decode('utf-8').find(exe_string) >= 0:
return True
else:
return False
def loadNode(pass_num, resourceDir, zippedFile, node, doc, item, level):
# populate this node
# 1st pass = merely unzipping all resources such that they are available,
# 2nd pass = loading the actual node idevices.
titles = item.getElementsByTagName('title')
node.setTitle(titles[0].firstChild.data)
node_resource = item.attributes['identifierref'].value
log.debug('*' * level + ' ' + titles[0].firstChild.data + '->' + item.attributes['identifierref'].value)
for resource in doc.getElementsByTagName('resource'):
if resource.attributes['identifier'].value == node_resource:
for file in resource.childNodes:
if file.nodeName == 'file':
filename = file.attributes['href'].value
is_exe_node_html = False
if filename.endswith('.html') \
and filename != "fdl.html" \
and not filename.startswith("galleryPopup"):
# fdl.html is the wikipedia license, ignore it
# as well as any galleryPopups:
is_exe_node_html = \
test_for_node(zippedFile.read(filename))
if is_exe_node_html:
if pass_num == 1:
# 2nd pass call to actually load the nodes:
log.debug('loading idevices from node: ' + filename)
loadNodesIdevices(node, zippedFile.read(filename))
elif filename == "fdl.html" or \
filename.startswith("galleryPopup."):
# let these be re-created upon bursting.
if pass_num == 0:
# 1st pass call to unzip the resources:
log.debug('ignoring resource file: '+ filename)
else:
if pass_num == 0:
# 1st pass call to unzip the resources:
try:
zipinfo = zippedFile.getinfo(filename)
log.debug('unzipping resource file: '
+ resourceDir/filename )
outFile = open(resourceDir/filename, "wb")
outFile.write(zippedFile.read(filename))
outFile.flush()
outFile.close()
except:
log.warn('error unzipping resource file: '
+ resourceDir/filename )
##########
# WARNING: the resource is now in the resourceDir,
# BUT it is NOT YET added into any of the project,
# much less to the specific idevices or fields!
# Although they WILL be saved out with the project
# upon the next Save.
##########
break
# process this node's children
for subitem in item.childNodes:
if subitem.nodeName == 'item':
# for the first pass, of unzipping only, do not
# create any child nodes, just cruise on with this one:
next_node = node
if pass_num == 1:
# if this is actually loading the nodes:
next_node = node.createChild()
loadNode(pass_num, resourceDir, zippedFile, next_node,
doc, subitem, level+1)
def loadCC(zippedFile, filename):
"""
Load an IMS Common Cartridge or Content Package from filename
"""
package = Package(Path(filename).namebase)
xmldoc = minidom.parseString( zippedFile.read('imsmanifest.xml'))
organizations_list = xmldoc.getElementsByTagName('organizations')
level = 0
# now a two-pass system to first unzip all applicable resources:
for pass_num in range(2):
for organizations in organizations_list:
organization_list = organizations.getElementsByTagName(
'organization')
for organization in organization_list:
for item in organization.childNodes:
if item.nodeName == 'item':
loadNode(pass_num, package.resourceDir, zippedFile,
package.root, xmldoc, item, level)
return package
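# A minimal usage sketch for the Common Cartridge loader above; the file
# name is illustrative only, and a full eXe application context is assumed:
#
#     import zipfile
#     zipped = zipfile.ZipFile('course.imscc', 'r')
#     package = loadCC(zipped, 'course.imscc')
#     # package.root now holds the nodes burst from the manifest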
# ===========================================================================
class DublinCore(Jellyable, Unjellyable):
"""
Holds dublin core info
"""
def __init__(self):
self.title = ''
self.creator = ''
self.subject = ''
self.description = ''
self.publisher = ''
self.contributors = ''
self.date = ''
self.type = ''
self.format = ''
self.identifier = str(uuid.uuid4())
self.source = ''
self.language = ''
self.relation = ''
self.coverage = ''
self.rights = ''
def __setattr__(self, name, value):
self.__dict__[name] = toUnicode(value)
class Package(Persistable):
"""
Package represents the collection of resources the user is editing
i.e. the "package".
"""
persistenceVersion = 13
nonpersistant = ['resourceDir', 'filename', 'previewDir']
# Name is used in filenames and urls (saving and navigating)
_name = ''
tempFile = False # This is set when the package is saved as a temp copy file
# Title is rendered in exports
_title = ''
_author = ''
_description = ''
_backgroundImg = ''
#styledefault=u"INTEF"
# This is like a constant
defaultLevelNames = [x_(u"Topic"), x_(u"Section"), x_(u"Unit")]
def __init__(self, name):
"""
Initialize
"""
log.debug(u"init " + repr(name))
self._nextIdeviceId = 0
self._nextNodeId = 0
# For looking up nodes by ids
self._nodeIdDict = {}
self._levelNames = self.defaultLevelNames[:]
self.name = name
self._title = u''
self._backgroundImg = u''
self.backgroundImgTile = False
# Empty if never saved/loaded
self.filename = u''
self.root = Node(self, None, _(u"Home"))
self.currentNode = self.root
# self.style = u"default"
#self.styledefault=u"INTEF"
self.style = G.application.config.defaultStyle
self._isChanged = False
self.previewDir = None
self.idevices = []
self.dublinCore = DublinCore()
self._lang = G.application.config.locale.split('_')[0]
self.setLomDefaults()
self.setLomEsDefaults()
self.scolinks = False
        self.scowsinglepage = False
self.scowwebsite = False
self.exportSource = True
self.exportMetadataType = "LOMES"
self.license = u''
self.footer = ""
self._objectives = u''
self._preknowledge = u''
self._learningResourceType = u''
self._intendedEndUserRoleType = u''
self._intendedEndUserRoleGroup = False
self._intendedEndUserRoleTutor = False
self._contextPlace = u''
self._contextMode = u''
self.compatibleWithVersion9 = False
#for export to Sugar (e.g. OLPC)
self.sugaractivityname = ""
self.sugarservicename = ""
#for export to Ustad Mobile
self.mxmlprofilelist = ""
self.mxmlheight = ""
self.mxmlwidth = ""
self.mxmlforcemediaonly = False
# Temporary directory to hold resources in
self.resourceDir = TempDirPath()
self.resources = {} # Checksum-[_Resource(),..]
self._docType = G.application.config.docType
def setLomDefaults(self):
self.lom = lomsubs.lomSub.factory()
self.lom.addChilds(self.lomDefaults(self.dublinCore.identifier, 'LOMv1.0'))
def setLomEsDefaults(self):
self.lomEs = lomsubs.lomSub.factory()
self.lomEs.addChilds(self.lomDefaults(self.dublinCore.identifier, 'LOM-ESv1.0', True))
# Property Handlers
def set_docType(self,value):
self._docType = toUnicode(value)
common.setExportDocType(value)
def set_name(self, value):
self._name = toUnicode(value)
def set_title(self, value):
if self.dublinCore.title == self._title:
self.dublinCore.title = value
lang_str = self.lang.encode('utf-8')
value_str = value.encode('utf-8')
for metadata in [self.lom, self.lomEs]:
title = metadata.get_general().get_title()
if title:
found = False
for string in title.get_string():
if string.get_valueOf_() == self._title.encode('utf-8'):
found = True
if value:
string.set_language(lang_str)
string.set_valueOf_(value_str)
else:
title.string.remove(string)
if not found:
if value:
title.add_string(lomsubs.LangStringSub(lang_str, value_str))
else:
if value:
title = lomsubs.titleSub([lomsubs.LangStringSub(lang_str, value_str)])
metadata.get_general().set_title(title)
self._title = toUnicode(value)
def set_lang(self, value):
if self.dublinCore.language in [self._lang, '']:
self.dublinCore.language = value
value_str = value.encode('utf-8')
for metadata in [self.lom, self.lomEs]:
language = metadata.get_general().get_language()
if language:
for LanguageId in language:
if LanguageId.get_valueOf_() == self._lang.encode('utf-8'):
LanguageId.set_valueOf_(value_str)
else:
language = [lomsubs.LanguageIdSub(value_str)]
metadata.get_general().set_language(language)
metametadata = metadata.get_metaMetadata()
if metametadata:
language = metametadata.get_language()
if language:
if language.get_valueOf_() == self._lang.encode('utf-8'):
language.set_valueOf_(value_str)
else:
language = lomsubs.LanguageIdSub(value_str)
metametadata.set_language(language)
else:
language = lomsubs.LanguageIdSub(value_str)
metametadata = lomsubs.metaMetadataSub(language=language)
metadata.set_metaMetadata(metametadata)
educationals = metadata.get_educational()
if educationals:
for educational in educationals:
language = educational.get_language()
if language:
for LanguageId in language:
if LanguageId.get_valueOf_() == self._lang.encode('utf-8'):
LanguageId.set_valueOf_(value_str)
else:
language = lomsubs.LanguageIdSub(value_str)
educational = [lomsubs.educationalSub(language=[language])]
metadata.set_educational(educational)
self._lang = toUnicode(value)
if value in G.application.config.locales:
__builtins__['c_'] = lambda s: G.application.config.locales[value].ugettext(s) if s else s
def set_author(self, value):
if self.dublinCore.creator == self._author:
self.dublinCore.creator = value
value_str = value.encode('utf-8')
vcard = 'BEGIN:VCARD VERSION:3.0 FN:%s EMAIL;TYPE=INTERNET: ORG: END:VCARD'
for metadata, source in [(self.lom, 'LOMv1.0'), (self.lomEs, 'LOM-ESv1.0')]:
src = lomsubs.sourceValueSub()
src.set_valueOf_(source)
src.set_uniqueElementName('source')
val = lomsubs.roleValueSub()
val.set_valueOf_('author')
val.set_uniqueElementName('value')
role = lomsubs.roleSub()
role.set_source(src)
role.set_value(val)
role.set_uniqueElementName('role')
entity = lomsubs.entitySub(vcard % value_str)
dateTime = lomsubs.DateTimeValueSub()
dateTime.set_valueOf_(datetime.datetime.now().strftime('%Y-%m-%d'))
dateTime.set_uniqueElementName('dateTime')
lang_str = self.lang.encode('utf-8')
value_meta_str = c_(u'Metadata creation date').encode('utf-8')
dateDescription = lomsubs.LanguageStringSub([lomsubs.LangStringSub(lang_str, value_meta_str)])
date = lomsubs.dateSub(dateTime, dateDescription)
lifeCycle = metadata.get_lifeCycle()
if lifeCycle:
contributes = lifeCycle.get_contribute()
found = False
for contribute in contributes:
entitys = contribute.get_entity()
rol = contribute.get_role()
if rol:
rolval = rol.get_value()
if rolval:
if rolval.get_valueOf_() == 'author':
for ent in entitys:
if ent.get_valueOf_() == vcard % self.author.encode('utf-8'):
found = True
if value:
ent.set_valueOf_(vcard % value_str)
else:
contribute.entity.remove(ent)
if not contribute.entity:
contributes.remove(contribute)
if not found:
contribute = lomsubs.contributeSub(role, [entity], date)
lifeCycle.add_contribute(contribute)
else:
if value:
contribute = lomsubs.contributeSub(role, [entity], date)
lifeCycle = lomsubs.lifeCycleSub(contribute=[contribute])
metadata.set_lifeCycle(lifeCycle)
val = lomsubs.roleValueSub()
val.set_valueOf_('creator')
val.set_uniqueElementName('value')
role = lomsubs.roleSub()
role.set_source(src)
role.set_value(val)
role.set_uniqueElementName('role')
metaMetadata = metadata.get_metaMetadata()
if metaMetadata:
contributes = metaMetadata.get_contribute()
found = False
for contribute in contributes:
entitys = contribute.get_entity()
rol = contribute.get_role()
if rol:
rolval = rol.get_value()
if rolval:
if rolval.get_valueOf_() == 'creator':
for ent in entitys:
if ent.get_valueOf_() == vcard % self.author.encode('utf-8'):
found = True
if value:
ent.set_valueOf_(vcard % value_str)
else:
contribute.entity.remove(ent)
if not contribute.entity:
contributes.remove(contribute)
if not found:
contribute = lomsubs.contributeMetaSub(role, [entity], date)
metaMetadata.add_contribute(contribute)
else:
if value:
contribute = lomsubs.contributeMetaSub(role, [entity], date)
metaMetadata.set_contribute([contribute])
self._author = toUnicode(value)
def set_description(self, value):
if self.dublinCore.description == self._description:
self.dublinCore.description = value
lang_str = self.lang.encode('utf-8')
value_str = value.encode('utf-8')
for metadata in [self.lom, self.lomEs]:
description = metadata.get_general().get_description()
if description:
description_found = False
for desc in description:
for string in desc.get_string():
if string.get_valueOf_() == self._description.encode('utf-8'):
description_found = True
if value:
string.set_language(lang_str)
string.set_valueOf_(value_str)
else:
desc.string.remove(string)
description.remove(desc)
if not description_found:
if value:
description = lomsubs.descriptionSub([lomsubs.LangStringSub(lang_str, value_str)])
metadata.get_general().add_description(description)
else:
if value:
description = [lomsubs.descriptionSub([lomsubs.LangStringSub(lang_str, value_str)])]
metadata.get_general().set_description(description)
self._description = toUnicode(value)
def get_backgroundImg(self):
"""Get the background image for this package"""
if self._backgroundImg:
return "file://" + self._backgroundImg.path
else:
return ""
def set_backgroundImg(self, value):
"""Set the background image for this package"""
if self._backgroundImg:
self._backgroundImg.delete()
if value:
if value.startswith("file://"):
value = value[7:]
imgFile = Path(value)
self._backgroundImg = Resource(self, Path(imgFile))
else:
self._backgroundImg = u''
def get_level1(self):
return self.levelName(0)
def set_level1(self, value):
if value != '':
self._levelNames[0] = value
else:
self._levelNames[0] = self.defaultLevelNames[0]
def get_level2(self):
return self.levelName(1)
def set_level2(self, value):
if value != '':
self._levelNames[1] = value
else:
self._levelNames[1] = self.defaultLevelNames[1]
def get_level3(self):
return self.levelName(2)
def set_level3(self, value):
if value != '':
self._levelNames[2] = value
else:
self._levelNames[2] = self.defaultLevelNames[2]
def set_objectives(self, value):
lang_str = self.lang.encode('utf-8')
value_str = c_("Objectives").upper() + ": " + value.encode('utf-8')
for metadata in [self.lom, self.lomEs]:
educationals = metadata.get_educational()
description = lomsubs.descriptionSub([lomsubs.LangStringSub(lang_str, value_str)])
if educationals:
for educational in educationals:
descriptions = educational.get_description()
found = False
if descriptions:
for desc in descriptions:
for string in desc.get_string():
if string.get_valueOf_() == c_("Objectives").upper() + ": " + self._objectives.encode('utf-8'):
found = True
if value:
string.set_language(lang_str)
string.set_valueOf_(value_str)
else:
desc.string.remove(string)
descriptions.remove(desc)
if not found:
if value:
educational.add_description(description)
else:
if value:
educational = [lomsubs.educationalSub(description=[description])]
metadata.set_educational(educational)
self._objectives = toUnicode(value)
def set_preknowledge(self, value):
lang_str = self.lang.encode('utf-8')
value_str = c_("Preknowledge").upper() + ": " + value.encode('utf-8')
for metadata in [self.lom, self.lomEs]:
educationals = metadata.get_educational()
description = lomsubs.descriptionSub([lomsubs.LangStringSub(lang_str, value_str)])
if educationals:
for educational in educationals:
descriptions = educational.get_description()
found = False
if descriptions:
for desc in descriptions:
for string in desc.get_string():
if string.get_valueOf_() == c_("Preknowledge").upper() + ": " + self._preknowledge.encode('utf-8'):
found = True
if value:
string.set_language(lang_str)
string.set_valueOf_(value_str)
else:
desc.string.remove(string)
descriptions.remove(desc)
if not found:
if value:
educational.add_description(description)
else:
if value:
educational = [lomsubs.educationalSub(description=[description])]
metadata.set_educational(educational)
self._preknowledge = toUnicode(value)
def license_map(self, source, value):
'''From document "ANEXO XIII ANÁLISIS DE MAPEABILIDAD LOM/LOM-ES V1.0"'''
if source == 'LOM-ESv1.0':
return value
elif source == 'LOMv1.0':
if value == 'not appropriate' or value == 'public domain':
return 'no'
else:
return 'yes'
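    # Worked examples of the mapping above:
    #     license_map('LOMv1.0', 'public domain')    -> 'no'
    #     license_map('LOM-ESv1.0', 'public domain') -> 'public domain'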
def set_license(self, value):
value_str = value.rstrip(' 0123456789.').encode('utf-8')
if self.dublinCore.rights == self.license:
self.dublinCore.rights = value
for metadata, source in [(self.lom, 'LOMv1.0'), (self.lomEs, 'LOM-ESv1.0')]:
rights = metadata.get_rights()
if not rights:
metadata.set_rights(lomsubs.rightsSub())
copyrightAndOtherRestrictions = metadata.get_rights().get_copyrightAndOtherRestrictions()
if copyrightAndOtherRestrictions:
if copyrightAndOtherRestrictions.get_value().get_valueOf_() == self.license_map(source, self.license.encode('utf-8').rstrip(' 0123456789.')):
if value:
copyrightAndOtherRestrictions.get_value().set_valueOf_(self.license_map(source, value_str))
else:
metadata.get_rights().set_copyrightAndOtherRestrictions(None)
else:
if value:
src = lomsubs.sourceValueSub()
src.set_valueOf_(source)
src.set_uniqueElementName('source')
val = lomsubs.copyrightAndOtherRestrictionsValueSub()
val.set_valueOf_(self.license_map(source, value_str))
val.set_uniqueElementName('value')
copyrightAndOtherRestrictions = lomsubs.copyrightAndOtherRestrictionsSub()
copyrightAndOtherRestrictions.set_source(src)
copyrightAndOtherRestrictions.set_value(val)
copyrightAndOtherRestrictions.set_uniqueElementName('copyrightAndOtherRestrictions')
metadata.get_rights().set_copyrightAndOtherRestrictions(copyrightAndOtherRestrictions)
self.license = toUnicode(value)
def learningResourceType_map(self, source, value):
'''From document "ANEXO XIII ANÁLISIS DE MAPEABILIDAD LOM/LOM-ES V1.0"'''
if source == 'LOM-ESv1.0':
return value
elif source == 'LOMv1.0':
lomMap = {
"guided reading": "narrative text",
"master class": "lecture",
"textual-image analysis": "exercise",
"discussion activity": "problem statement",
"closed exercise or problem": "exercise",
"contextualized case problem": "exercise",
"open problem": "problem statement",
"real or virtual learning environment": "simulation",
"didactic game": "exercise",
"webquest": "problem statement",
"experiment": "experiment",
"real project": "simulation",
"simulation": "simulation",
"questionnaire": "questionnaire",
"exam": "exam",
"self assessment": "self assessment",
"": ""
}
return lomMap[value]
def set_learningResourceType(self, value):
value_str = value.encode('utf-8')
for metadata, source in [(self.lom, 'LOMv1.0'), (self.lomEs, 'LOM-ESv1.0')]:
educationals = metadata.get_educational()
src = lomsubs.sourceValueSub()
src.set_valueOf_(source)
src.set_uniqueElementName('source')
val = lomsubs.learningResourceTypeValueSub()
val.set_valueOf_(self.learningResourceType_map(source, value_str))
val.set_uniqueElementName('value')
learningResourceType = lomsubs.learningResourceTypeSub(self.learningResourceType_map(source, value_str))
learningResourceType.set_source(src)
learningResourceType.set_value(val)
if educationals:
for educational in educationals:
learningResourceTypes = educational.get_learningResourceType()
found = False
if learningResourceTypes:
for i in learningResourceTypes:
if i.get_value().get_valueOf_() == self.learningResourceType_map(source, self.learningResourceType.encode('utf-8')):
found = True
index = learningResourceTypes.index(i)
if value:
educational.insert_learningResourceType(index, learningResourceType)
else:
learningResourceTypes.pop(index)
if not found:
educational.add_learningResourceType(learningResourceType)
else:
educational = [lomsubs.educationalSub(learningResourceType=[learningResourceType])]
metadata.set_educational(educational)
self._learningResourceType = toUnicode(value)
def intendedEndUserRole_map(self, source, value):
'''From document "ANEXO XIII ANÁLISIS DE MAPEABILIDAD LOM/LOM-ES V1.0"'''
if source == 'LOM-ESv1.0':
return value
elif source == 'LOMv1.0':
if not value or value == 'tutor':
return value
else:
return 'learner'
def set_intendedEndUserRoleType(self, value):
value_str = value.encode('utf-8')
if value:
for metadata, source in [(self.lom, 'LOMv1.0'), (self.lomEs, 'LOM-ESv1.0')]:
educationals = metadata.get_educational()
src = lomsubs.sourceValueSub()
src.set_valueOf_(source)
src.set_uniqueElementName('source')
val = lomsubs.intendedEndUserRoleValueSub()
val.set_valueOf_(self.intendedEndUserRole_map(source, value_str))
val.set_uniqueElementName('value')
intendedEndUserRole = lomsubs.intendedEndUserRoleSub(self.intendedEndUserRole_map(source, value_str))
intendedEndUserRole.set_source(src)
intendedEndUserRole.set_value(val)
if educationals:
for educational in educationals:
intendedEndUserRoles = educational.get_intendedEndUserRole()
found = False
if intendedEndUserRoles:
for i in intendedEndUserRoles:
if i.get_value().get_valueOf_() == self.intendedEndUserRole_map(source, self.intendedEndUserRoleType.encode('utf-8')):
found = True
index = intendedEndUserRoles.index(i)
educational.insert_intendedEndUserRole(index, intendedEndUserRole)
if not found:
educational.add_intendedEndUserRole(intendedEndUserRole)
else:
educational = [lomsubs.educationalSub(intendedEndUserRole=[intendedEndUserRole])]
metadata.set_educational(educational)
self._intendedEndUserRoleType = toUnicode(value)
def set_intendedEndUserRole(self, value, valueOf):
for metadata, source in [(self.lom, 'LOMv1.0'), (self.lomEs, 'LOM-ESv1.0')]:
educationals = metadata.get_educational()
src = lomsubs.sourceValueSub()
src.set_valueOf_(source)
src.set_uniqueElementName('source')
val = lomsubs.intendedEndUserRoleValueSub()
mappedValueOf = self.intendedEndUserRole_map(source, valueOf)
val.set_valueOf_(mappedValueOf)
val.set_uniqueElementName('value')
intendedEndUserRole = lomsubs.intendedEndUserRoleSub(mappedValueOf)
intendedEndUserRole.set_source(src)
intendedEndUserRole.set_value(val)
if educationals:
for educational in educationals:
intendedEndUserRoles = educational.get_intendedEndUserRole()
found = False
if intendedEndUserRoles:
for i in intendedEndUserRoles:
if i.get_value().get_valueOf_() == mappedValueOf:
found = True
if value:
index = intendedEndUserRoles.index(i)
educational.insert_intendedEndUserRole(index, intendedEndUserRole)
else:
if source != 'LOMv1.0' or valueOf != 'group':
educational.intendedEndUserRole.remove(i)
if not found and value:
educational.add_intendedEndUserRole(intendedEndUserRole)
else:
if value:
educational = [lomsubs.educationalSub(intendedEndUserRole=[intendedEndUserRole])]
metadata.set_educational(educational)
def set_intendedEndUserRoleGroup(self, value):
self.set_intendedEndUserRole(value, 'group')
self._intendedEndUserRoleGroup = value
def set_intendedEndUserRoleTutor(self, value):
self.set_intendedEndUserRole(value, 'tutor')
self._intendedEndUserRoleTutor = value
def context_map(self, source, value):
'''From document "ANEXO XIII ANÁLISIS DE MAPEABILIDAD LOM/LOM-ES V1.0"'''
if source == 'LOM-ESv1.0':
return value
elif source == 'LOMv1.0':
lomMap = {
"classroom": "school",
"real environment": "training",
"face to face": "other",
"blended": "other",
"distance": "other",
"presencial": "other",
"": ""
}
return lomMap[value]
def set_context(self, value, valueOf):
value_str = value.encode('utf-8')
if value:
for metadata, source in [(self.lom, 'LOMv1.0'), (self.lomEs, 'LOM-ESv1.0')]:
educationals = metadata.get_educational()
src = lomsubs.sourceValueSub()
src.set_valueOf_(source)
src.set_uniqueElementName('source')
val = lomsubs.contextValueSub()
val.set_valueOf_(self.context_map(source, value_str))
val.set_uniqueElementName('value')
context = lomsubs.contextSub(self.context_map(source, value_str))
context.set_source(src)
context.set_value(val)
if educationals:
for educational in educationals:
contexts = educational.get_context()
found = False
if contexts:
for i in contexts:
if i.get_value().get_valueOf_() == self.context_map(source, valueOf.encode('utf-8')):
found = True
index = contexts.index(i)
educational.insert_context(index, context)
if not found:
educational.add_context(context)
else:
educational = [lomsubs.educationalSub(context=[context])]
metadata.set_educational(educational)
def set_contextPlace(self, value):
self.set_context(value, self._contextPlace)
self._contextPlace = toUnicode(value)
def set_contextMode(self, value):
self.set_context(value, self._contextMode)
self._contextMode = toUnicode(value)
def set_changed(self, changed):
self._isChanged = changed
if changed:
if hasattr(self, 'previewDir'):
if self.previewDir:
shutil.rmtree(self.previewDir, True)
self.previewDir = None
# Properties
isChanged = property(lambda self: self._isChanged, set_changed)
name = property(lambda self:self._name, set_name)
title = property(lambda self:self._title, set_title)
lang = property(lambda self: self._lang, set_lang)
author = property(lambda self:self._author, set_author)
description = property(lambda self:self._description, set_description)
newlicense = property(lambda self:self.license, set_license)
docType = property(lambda self:self._docType, set_docType)
backgroundImg = property(get_backgroundImg, set_backgroundImg)
level1 = property(get_level1, set_level1)
level2 = property(get_level2, set_level2)
level3 = property(get_level3, set_level3)
objectives = property(lambda self: self._objectives, set_objectives)
preknowledge = property(lambda self: self._preknowledge, set_preknowledge)
learningResourceType = property(lambda self: self._learningResourceType, set_learningResourceType)
intendedEndUserRoleType = property(lambda self: self._intendedEndUserRoleType, set_intendedEndUserRoleType)
intendedEndUserRoleGroup = property(lambda self: self._intendedEndUserRoleGroup, set_intendedEndUserRoleGroup)
intendedEndUserRoleTutor = property(lambda self: self._intendedEndUserRoleTutor, set_intendedEndUserRoleTutor)
contextPlace = property(lambda self: self._contextPlace, set_contextPlace)
contextMode = property(lambda self: self._contextMode, set_contextMode)
def findNode(self, nodeId):
"""
Finds a node from its nodeId
(nodeId can be a string or a list/tuple)
"""
log.debug(u"findNode" + repr(nodeId))
node = self._nodeIdDict.get(nodeId)
if node and node.package is self:
return node
else:
return None
def levelName(self, level):
"""
Return the level name
"""
if level < len(self._levelNames):
return _(self._levelNames[level])
else:
return _(u"?????")
def save(self, filename=None, tempFile=False):
"""
Save package to disk
pass an optional filename
"""
self.tempFile = tempFile
# Get the filename
if filename:
filename = Path(filename)
# If we are being given a new filename...
# Change our name to match our new filename
name = filename.splitpath()[1]
if not tempFile:
self.name = name.basename().splitext()[0]
elif self.filename:
# Otherwise use our last saved/loaded from filename
filename = Path(self.filename)
else:
# If we don't have a last saved/loaded from filename,
# raise an exception because, we need to have a new
# file passed when a brand new package is saved
raise AssertionError(u'No name passed when saving a new package')
        #JR: Convert the package name to avoid problematic names
import string
validPackagenameChars = "-_. %s%s" % (string.ascii_letters, string.digits)
self.name = ''.join(c for c in self.name if c in validPackagenameChars).replace(' ','_')
        #JR: If it somehow ends up empty, give it a default name
if self.name == "":
self.name = "invalidpackagename"
# Store our new filename for next file|save, and save the package
log.debug(u"Will save %s to: %s" % (self.name, filename))
if tempFile:
self.nonpersistant.remove('filename')
oldFilename, self.filename = self.filename, unicode(self.filename)
try:
                filename.safeSave(self.doSave, _('SAVE FAILED!\nLast successful save is %s.'))
finally:
self.nonpersistant.append('filename')
self.filename = oldFilename
else:
# Update our new filename for future saves
self.filename = filename
            filename.safeSave(self.doSave, _('SAVE FAILED!\nLast successful save is %s.'))
self.isChanged = False
self.updateRecentDocuments(filename)
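    # A sketch of the expected save/load round trip (the path is
    # illustrative; a running eXe application context is assumed):
    #
    #     package = Package(u'demo')
    #     package.save(Path(u'/tmp/demo.elp'))
    #     reloaded = Package.load(u'/tmp/demo.elp')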
def updateRecentDocuments(self, filename):
"""
Updates the list of recent documents
"""
# Don't update the list for the generic.data "package"
genericData = G.application.config.configDir/'idevices'/'generic.data'
if genericData.isfile() or genericData.islink():
if Path(filename).samefile(genericData):
return
# Save in recentDocuments list
recentProjects = G.application.config.recentProjects
if filename in recentProjects:
# If we're already number one, carry on
if recentProjects[0] == filename:
return
recentProjects.remove(filename)
recentProjects.insert(0, filename)
del recentProjects[5:] # Delete any older names from the list
G.application.config.configParser.write() # Save the settings
def doSave(self, fileObj):
"""
Actually performs the save to 'fileObj'.
"""
if self.compatibleWithVersion9:
self.downgradeToVersion9()
zippedFile = zipfile.ZipFile(fileObj, "w", zipfile.ZIP_DEFLATED)
try:
for resourceFile in self.resourceDir.files():
zippedFile.write(unicode(resourceFile.normpath()),
resourceFile.name.encode('utf8'), zipfile.ZIP_DEFLATED)
zinfo = zipfile.ZipInfo(filename='content.data',
date_time=time.localtime()[0:6])
zinfo.external_attr = 0100644<<16L
zinfo.compress_type = zipfile.ZIP_DEFLATED
zippedFile.writestr(zinfo, encodeObject(self))
zinfo2 = zipfile.ZipInfo(filename='contentv3.xml',
date_time=time.localtime()[0:6])
zinfo2.external_attr = 0100644<<16L
zinfo2.compress_type = zipfile.ZIP_DEFLATED
zippedFile.writestr(zinfo2, encodeObjectToXML(self))
zippedFile.write(G.application.config.webDir/'templates'/'content.xsd', 'content.xsd', zipfile.ZIP_DEFLATED)
finally:
zippedFile.close()
if self.compatibleWithVersion9:
self.upgradeToVersion10()
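            # restore the current persistenceVersion values that
            # downgradeToVersion9() lowered for the compatible save: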
CasestudyIdevice.persistenceVersion = 9
CasopracticofpdIdevice.persistenceVersion = 9
CitasparapensarfpdIdevice.persistenceVersion = 9
ClozefpdIdevice.persistenceVersion = 7
ClozeIdevice.persistenceVersion = 7
ClozelangfpdIdevice.persistenceVersion = 7
DebesconocerfpdIdevice.persistenceVersion = 9
DestacadofpdIdevice.persistenceVersion = 9
EjercicioresueltofpdIdevice.persistenceVersion = 10
EleccionmultiplefpdIdevice.persistenceVersion = 10
TextAreaField.persistenceVersion = 2
FreeTextfpdIdevice.persistenceVersion = 8
GalleryIdevice.persistenceVersion = 8
ImageMagnifierIdevice.persistenceVersion = 4
ListaIdevice.persistenceVersion = 5
MultichoiceIdevice.persistenceVersion = 9
GenericIdevice.persistenceVersion = 11
MultiSelectIdevice.persistenceVersion = 1
OrientacionesalumnadofpdIdevice.persistenceVersion = 9
OrientacionestutoriafpdIdevice.persistenceVersion = 9
ParasabermasfpdIdevice.persistenceVersion = 9
QuizTestIdevice.persistenceVersion = 10
RecomendacionfpdIdevice.persistenceVersion = 9
ReflectionfpdIdevice.persistenceVersion = 9
ReflectionfpdmodifIdevice.persistenceVersion = 9
ReflectionIdevice.persistenceVersion = 8
SeleccionmultiplefpdIdevice.persistenceVersion = 2
TrueFalseIdevice.persistenceVersion = 11
VerdaderofalsofpdIdevice.persistenceVersion = 12
WikipediaIdevice.persistenceVersion = 9
Package.persistenceVersion = 13
def extractNode(self):
"""
Clones and extracts the currently selected node into a new package.
"""
newPackage = Package('NoName') # Name will be set once it is saved..
newPackage.title = self.currentNode.title
newPackage.style = self.style
newPackage.author = self.author
newPackage._nextNodeId = self._nextNodeId
# Copy the nodes from the original package
# and merge into the root of the new package
self.currentNode.copyToPackage(newPackage)
return newPackage
@staticmethod
def load(filename, newLoad=True, destinationPackage=None, fromxml=None):
"""
Load package from disk, returns a package.
"""
if not zipfile.is_zipfile(filename):
return None
zippedFile = zipfile.ZipFile(filename, "r")
xml = None
try:
xml = zippedFile.read(u"contentv3.xml")
except:
pass
if not xml:
try:
# Get the jellied package data
toDecode = zippedFile.read(u"content.data")
except KeyError:
log.info("no content.data, trying Common Cartridge/Content Package")
newPackage = loadCC(zippedFile, filename)
newPackage.tempFile = False
newPackage.isChanged = False
newPackage.filename = Path(filename)
return newPackage
# Need to add a TempDirPath because it is a nonpersistant member
resourceDir = TempDirPath()
# Extract resource files from package to temporary directory
for fn in zippedFile.namelist():
if unicode(fn, 'utf8') not in [u"content.data", u"content.xml", u"contentv2.xml", u"contentv3.xml", u"content.xsd" ]:
                #JR: Do the necessary checks in case there are directories
if ("/" in fn):
dir = fn[:fn.index("/")]
Dir = Path(resourceDir/dir)
if not Dir.exists():
Dir.mkdir()
Fn = Path(resourceDir/fn)
if not Fn.isdir():
outFile = open(resourceDir/fn, "wb")
outFile.write(zippedFile.read(fn))
outFile.flush()
outFile.close()
try:
validxml = False
if fromxml:
newPackage, validxml = decodeObjectFromXML(fromxml)
elif xml:
xmlinfo = zippedFile.getinfo(u"contentv3.xml")
if u"content.data" not in zippedFile.NameToInfo:
newPackage, validxml = decodeObjectFromXML(xml)
else:
datainfo = zippedFile.getinfo(u"content.data")
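                    # prefer the XML payload when it is at least as
                    # new as the jellied content.data: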
if xmlinfo.date_time >= datainfo.date_time:
newPackage, validxml = decodeObjectFromXML(xml)
if not validxml:
toDecode = zippedFile.read(u"content.data")
newPackage = decodeObjectRaw(toDecode)
try:
lomdata = zippedFile.read(u'imslrm.xml')
if 'LOM-ES' in lomdata:
importType = 'lomEs'
else:
importType = 'lom'
setattr(newPackage, importType, lomsubs.parseString(lomdata))
except:
pass
G.application.afterUpgradeHandlers = []
newPackage.resourceDir = resourceDir
G.application.afterUpgradeZombies2Delete = []
if not validxml and (xml or fromxml or "content.xml" in zippedFile.namelist()):
for key, res in newPackage.resources.items():
if len(res) < 1:
newPackage.resources.pop(key)
else:
if (hasattr(res[0], 'testForAndDeleteZombieResources')):
res[0].testForAndDeleteZombieResources()
if newLoad:
# provide newPackage to doUpgrade's versionUpgrade() to
# correct old corrupt extracted packages by setting the
# any corrupt package references to the new package:
                #JR: Convert the package name to avoid problematic names
import string
validPackagenameChars = "-_. %s%s" % (string.ascii_letters, string.digits)
newPackage._name = ''.join(c for c in newPackage._name if c in validPackagenameChars).replace(' ','_')
                #JR: If it somehow ends up empty, give it a default name
if newPackage._name == "":
newPackage._name = "invalidpackagename"
log.debug("load() about to doUpgrade newPackage \""
+ newPackage._name + "\" " + repr(newPackage) )
if hasattr(newPackage, 'resourceDir'):
log.debug("newPackage resourceDir = "
+ newPackage.resourceDir)
else:
# even though it was just set above? should not get here:
log.error("newPackage resourceDir has NO resourceDir!")
doUpgrade(newPackage)
# after doUpgrade, compare the largest found field ID:
if G.application.maxFieldId >= Field.nextId:
Field.nextId = G.application.maxFieldId + 1
if hasattr(newPackage,'_docType'):
common.setExportDocType(newPackage.docType)
else:
newPackage.set_docType(toUnicode('XHTML'))
else:
# and when merging, automatically set package references to
# the destinationPackage, into which this is being merged:
log.debug("load() about to merge doUpgrade newPackage \""
+ newPackage._name + "\" " + repr(newPackage)
+ " INTO destinationPackage \""
+ destinationPackage._name + "\" "
+ repr(destinationPackage))
log.debug("using their resourceDirs:")
if hasattr(newPackage, 'resourceDir'):
log.debug(" newPackage resourceDir = "
+ newPackage.resourceDir)
else:
log.error("newPackage has NO resourceDir!")
if hasattr(destinationPackage, 'resourceDir'):
log.debug(" destinationPackage resourceDir = "
+ destinationPackage.resourceDir)
else:
log.error("destinationPackage has NO resourceDir!")
doUpgrade(destinationPackage,
isMerge=True, preMergePackage=newPackage)
# after doUpgrade, compare the largest found field ID:
if G.application.maxFieldId >= Field.nextId:
Field.nextId = G.application.maxFieldId + 1
except:
import traceback
traceback.print_exc()
raise
if newPackage.tempFile:
            # newPackage.filename was stored as its original filename
newPackage.tempFile = False
else:
# newPackage.filename is the name that the package was last loaded from
# or saved to
newPackage.filename = Path(filename)
checker = Checker(newPackage)
inconsistencies = checker.check()
for inconsistency in inconsistencies:
inconsistency.fix()
# Let idevices and nodes handle any resource upgrading they may need to
# Note: Package afterUpgradeHandlers *must* be done after Resources'
# and the package should be updated before everything else,
# so, prioritize with a 3-pass, 3-level calling setup
# in order of: 1) resources, 2) package, 3) anything other objects
for handler_priority in range(3):
for handler in G.application.afterUpgradeHandlers:
if handler_priority == 0 and \
repr(handler.im_class)=="<class 'exe.engine.resource.Resource'>":
# level-0 handlers: Resource
handler()
elif handler_priority == 1 and \
repr(handler.im_class)=="<class 'exe.engine.package.Package'>":
# level-1 handlers: Package (requires resources first)
if handler.im_self == newPackage:
handler()
else:
log.warn("Extra package object found, " \
+ "ignoring its afterUpgradeHandler: " \
+ repr(handler))
elif handler_priority == 2 and \
repr(handler.im_class)!="<class 'exe.engine.resource.Resource'>" \
and \
repr(handler.im_class)!="<class 'exe.engine.package.Package'>":
# level-2 handlers: all others
handler()
G.application.afterUpgradeHandlers = []
num_zombies = len(G.application.afterUpgradeZombies2Delete)
for i in range(num_zombies-1, -1, -1):
zombie = G.application.afterUpgradeZombies2Delete[i]
# now, the zombie list can contain nodes OR resources to delete.
# if zombie is a node, then also pass in a pruning parameter..
zombie_is_node = False
if isinstance(zombie, Node):
zombie_is_node = True
if zombie_is_node:
zombie.delete(pruningZombies=True)
else:
                #JR: Remove the resource from the idevice
                if hasattr(zombie._idevice, 'userResources'):
                    for i in range(len(zombie._idevice.userResources)-1, -1, -1):
                        if hasattr(zombie._idevice.userResources[i], 'storageName'):
                            if zombie._idevice.userResources[i].storageName == zombie.storageName:
                                aux = zombie._idevice.userResources[i]
                                zombie._idevice.userResources.remove(aux)
                                aux.delete()
                #Remove the resource from the system-wide resources
                #for resource in newPackage.resources.keys():
                #    if hasattr(newPackage.resources[resource][0], 'storageName'):
                #        if newPackage.resources[resource][0].storageName == zombie.storageName:
                #            del newPackage.resources[resource]
                #JR: This should no longer be needed
                #zombie.delete()
del zombie
userResourcesFiles = newPackage.getUserResourcesFiles(newPackage.root)
        #JR: Delete resources that are not being used
newPackage.cleanUpResources(userResourcesFiles)
G.application.afterUpgradeZombies2Delete = []
newPackage.updateRecentDocuments(newPackage.filename)
newPackage.isChanged = False
nstyle=Path(G.application.config.stylesDir/newPackage.style)
if not nstyle.isdir():
newPackage.style=G.application.config.defaultStyle
newPackage.lang = newPackage._lang
return newPackage
def getUserResourcesFiles(self, node):
resourceFiles = set()
for idevice in node.idevices:
if hasattr(idevice, 'userResources'):
for i in range(len(idevice.userResources) - 1, -1, -1):
if hasattr(idevice.userResources[i], 'storageName'):
resourceFiles.add(idevice.userResources[i].storageName)
for child in node.children:
resourceFiles = resourceFiles | self.getUserResourcesFiles(child)
return resourceFiles
def cleanUpResources(self, userResourcesFiles=set()):
"""
Removes duplicate resource files
"""
# Delete unused resources.
# Only really needed for upgrading to version 0.20,
# but upgrading of resources and package happens in no particular order
# and must be done after all resources have been upgraded
# some earlier .elp files appear to have been corrupted with
# two packages loaded, *possibly* from some strange extract/merge
# functionality in earlier eXe versions?
# Regardless, only the real package will have a resourceDir,
# and the other will fail.
# For now, then, put in this quick and easy safety check:
if not hasattr(self,'resourceDir'):
log.warn("cleanUpResources called on a redundant package")
return
existingFiles = set([fn.basename() for fn in self.resourceDir.files()])
#JR
usedFiles = set([])
for reses in self.resources.values():
if hasattr(reses[0], 'storageName'):
usedFiles.add(reses[0].storageName)
#usedFiles = set([reses[0].storageName for reses in self.resources.values()])
for fn in existingFiles - usedFiles - userResourcesFiles:
log.debug('Removing unused resource %s' % fn)
(self.resourceDir/fn).remove()
def findResourceByName(self, queryName):
"""
Support for merging, and anywhere else that unique names might be
checked before actually comparing against the files (as will be
done by the resource class itself in its _addOurselvesToPackage() )
"""
foundResource = None
queryResources = self.resources
for this_checksum in queryResources:
for this_resource in queryResources[this_checksum]:
if queryName == this_resource.storageName:
foundResource = this_resource
return foundResource
return foundResource
def upgradeToVersion1(self):
"""
Called to upgrade from 0.3 release
"""
self._nextNodeId = 0
self._nodeIdDict = {}
# Also upgrade all the nodes.
# This needs to be done here so that draft gets id 0
# If it's done in the nodes, the ids are assigned in reverse order
draft = getattr(self, 'draft')
draft._id = self._regNewNode(draft)
draft._package = self
setattr(self, 'editor', Node(self, None, _(u"iDevice Editor")))
# Add a default idevice to the editor
idevice = GenericIdevice("", "", "", "", "")
editor = getattr(self, 'editor')
idevice.parentNode = editor
editor.addIdevice(idevice)
def superReg(node):
"""Registers all our nodes
because in v0 they were not registered
in this way"""
node._id = self._regNewNode(node)
node._package = self
for child in node.children:
superReg(child)
superReg(self.root)
def _regNewNode(self, node):
"""
Called only by nodes,
stores the node in our id lookup dict
returns a new unique id
"""
id_ = unicode(self._nextNodeId)
self._nextNodeId += 1
self._nodeIdDict[id_] = node
return id_
def getNewIdeviceId(self):
"""
Returns an iDevice Id which is unique for this package.
"""
id_ = unicode(self._nextIdeviceId)
self._nextIdeviceId += 1
return id_
def upgradeToVersion2(self):
"""
Called to upgrade from 0.4 release
"""
getattr(self, 'draft').delete()
getattr(self, 'editor').delete()
delattr(self, 'draft')
delattr(self, 'editor')
# Need to renumber nodes because idevice node and draft nodes are gone
self._nextNodeId = 0
def renumberNode(node):
"""
Gives the old node a number
"""
node._id = self._regNewNode(node)
for child in node.children:
renumberNode(child)
renumberNode(self.root)
def upgradeToVersion3(self):
"""
Also called to upgrade from 0.4 release
"""
self._nextIdeviceId = 0
def upgradeToVersion4(self):
"""
Puts properties in their place
Also called to upgrade from 0.8 release
"""
self._name = toUnicode(self.__dict__['name'])
self._author = toUnicode(self.__dict__['author'])
self._description = toUnicode(self.__dict__['description'])
def upgradeToVersion5(self):
"""
For version 0.11
"""
self._levelNames = self.levelNames
del self.levelNames
def upgradeToVersion6(self):
"""
For version 0.14
"""
self.dublinCore = DublinCore()
# Copy some of the package properties to dublin core
self.title = self.root.title
self.dublinCore.title = self.root.title
self.dublinCore.creator = self._author
self.dublinCore.description = self._description
self.scolinks = False
def upgradeToVersion7(self):
"""
For version 0.15
"""
self._backgroundImg = ''
self.backgroundImgTile = False
def upgradeToVersion8(self):
"""
For version 0.20, alpha, for nightlies r2469
"""
self.license = 'None'
self.footer = ""
self.idevices = []
def upgradeToVersion9(self):
"""
For version >= 0.20.4
"""
if not hasattr(self, 'resources'):
# The hasattr is needed, because sometimes, Resource instances are upgraded
# first and they also set this attribute on the package
self.resources = {}
G.application.afterUpgradeHandlers.append(self.cleanUpResources)
def lomDefaults(self, entry, schema, rights=False):
defaults = {'general': {'identifier': [{'catalog': c_('My Catalog'), 'entry': entry}],
'aggregationLevel': {'source': schema, 'value': '2'}
},
'metaMetadata': {'metadataSchema': [schema]},
}
if rights:
defaults['rights'] = {'access': {'accessType': {'source': schema, 'value': 'universal'},
'description': {'string': [{'valueOf_': c_('Default'), 'language': str(self.lang)}]}}}
return defaults
oldLicenseMap = {"None": "None",
"GNU Free Documentation License": u"license GFDL",
"Creative Commons Attribution 3.0 License": u"creative commons: attribution 3.0",
"Creative Commons Attribution Share Alike 3.0 License": u"creative commons: attribution - share alike 3.0",
"Creative Commons Attribution No Derivatives 3.0 License": u"creative commons: attribution - non derived work 3.0",
"Creative Commons Attribution Non-commercial 3.0 License": u"creative commons: attribution - non commercial 3.0",
"Creative Commons Attribution Non-commercial Share Alike 3.0 License": u"creative commons: attribution - non commercial - share alike 3.0",
"Creative Commons Attribution Non-commercial No Derivatives 3.0 License": u"creative commons: attribution - non derived work - non commercial 3.0",
"Creative Commons Attribution 2.5 License": u"creative commons: attribution 2.5",
"Creative Commons Attribution-ShareAlike 2.5 License": u"creative commons: attribution - share alike 2.5",
"Creative Commons Attribution-NoDerivs 2.5 License": u"creative commons: attribution - non derived work 2.5",
"Creative Commons Attribution-NonCommercial 2.5 License": u"creative commons: attribution - non commercial 2.5",
"Creative Commons Attribution-NonCommercial-ShareAlike 2.5 License": u"creative commons: attribution - non commercial - share alike 2.5",
"Creative Commons Attribution-NonCommercial-NoDerivs 2.5 License": u"creative commons: attribution - non derived work - non commercial 2.5",
"Developing Nations 2.0": u""
}
def upgradeToVersion10(self):
"""
For version >= 2.0
"""
if not hasattr(self, 'lang'):
self._lang = G.application.config.locale.split('_')[0]
entry = str(uuid.uuid4())
if not hasattr(self, 'lomEs') or not isinstance(self.lomEs, lomsubs.lomSub):
self.lomEs = lomsubs.lomSub.factory()
self.lomEs.addChilds(self.lomDefaults(entry, 'LOM-ESv1.0', True))
if not hasattr(self, 'lom') or not isinstance(self.lom, lomsubs.lomSub):
self.lom = lomsubs.lomSub.factory()
self.lom.addChilds(self.lomDefaults(entry, 'LOMv1.0'))
if not hasattr(self, 'scowsinglepage'):
self.scowsinglepage = False
if not hasattr(self, 'scowwebsite'):
self.scowwebsite = False
if not hasattr(self, 'exportSource'):
self.exportSource = True
if not hasattr(self, 'exportMetadataType'):
self.exportMetadataType = "LOMES"
if not hasattr(self, 'objectives'):
self._objectives = u''
if not hasattr(self, 'preknowledge'):
self._preknowledge = u''
if not hasattr(self, 'learningResourceType'):
self._learningResourceType = u''
if not hasattr(self, 'intendedEndUserRoleType'):
self._intendedEndUserRoleType = u''
if not hasattr(self, 'intendedEndUserRoleGroup'):
self._intendedEndUserRoleGroup = False
if not hasattr(self, 'intendedEndUserRoleTutor'):
self._intendedEndUserRoleTutor = False
if not hasattr(self, 'contextPlace'):
self._contextPlace = u''
if not hasattr(self, 'contextMode'):
self._contextMode = u''
if hasattr(self, 'scowsource'):
del self.scowsource
try:
if not self.license in self.oldLicenseMap.values():
self.newlicense = self.oldLicenseMap[self.license]
except:
self.license = u''
if not hasattr(self, 'mxmlprofilelist'):
self.mxmlprofilelist = ""
if not hasattr(self, 'mxmlforcemediaonly'):
self.mxmlforcemediaonly = False
if not hasattr(self, 'mxmlheight'):
self.mxmlheight = ""
if not hasattr(self, 'mxmlwidth'):
self.mxmlwidth = ""
if not hasattr(self, 'compatibleWithVersion9'):
self.compatibleWithVersion9 = False
self.set_title(self._title)
self.set_author(self._author)
self.set_description(self._description)
def upgradeToVersion11(self):
pass
def upgradeToVersion12(self):
#because actually version 11 was exe-next-gen
self.upgradeToVersion9()
self.upgradeToVersion10()
def upgradeToVersion13(self):
if not hasattr(self, '_docType'):
self._docType = G.application.config.docType
def downgradeToVersion9(self):
for attr in ['lomEs', 'lom', 'scowsinglepage', 'scowwebsite',
'exportSource', 'exportMetadataType', '_lang',
'_objectives', '_preknowledge', '_learningResourceType',
'_intendedEndUserRoleType', '_intendedEndUserRoleGroup',
'_intendedEndUserRoleTutor', '_contextPlace',
'_contextMode', 'scowsource', 'mxmlprofilelist',
'mxmlforcemediaonly', 'mxmlheight', 'mxmlwidth']:
if hasattr(self, attr):
delattr(self, attr)
self.license = u''
CasestudyIdevice.persistenceVersion = 8
CasopracticofpdIdevice.persistenceVersion = 7
CitasparapensarfpdIdevice.persistenceVersion = 7
ClozefpdIdevice.persistenceVersion = 4
ClozeIdevice.persistenceVersion = 4
ClozelangfpdIdevice.persistenceVersion = 4
DebesconocerfpdIdevice.persistenceVersion = 7
DestacadofpdIdevice.persistenceVersion = 7
EjercicioresueltofpdIdevice.persistenceVersion = 8
EleccionmultiplefpdIdevice.persistenceVersion = 7
TextAreaField.persistenceVersion = 1
FreeTextfpdIdevice.persistenceVersion = 7
GalleryIdevice.persistenceVersion = 7
ImageMagnifierIdevice.persistenceVersion = 2
ListaIdevice.persistenceVersion = 4
MultichoiceIdevice.persistenceVersion = 7
GenericIdevice.persistenceVersion = 9
delattr(MultiSelectIdevice, "persistenceVersion")
OrientacionesalumnadofpdIdevice.persistenceVersion = 7
OrientacionestutoriafpdIdevice.persistenceVersion = 7
ParasabermasfpdIdevice.persistenceVersion = 7
QuizTestIdevice.persistenceVersion = 8
RecomendacionfpdIdevice.persistenceVersion = 7
ReflectionfpdIdevice.persistenceVersion = 7
ReflectionfpdmodifIdevice.persistenceVersion = 7
ReflectionIdevice.persistenceVersion = 7
delattr(SeleccionmultiplefpdIdevice, "persistenceVersion")
TrueFalseIdevice.persistenceVersion = 9
VerdaderofalsofpdIdevice.persistenceVersion = 9
WikipediaIdevice.persistenceVersion = 8
Package.persistenceVersion = 9
def getExportDocType(self):
return self._docType
def delNotes(self, node):
"""
Delete all notes
"""
for idevice in node.idevices:
if idevice.klass == 'NotaIdevice':
idevice.delete()
for child in node.children:
self.delNotes(child)
# ===========================================================================
| RichDijk/eXe | exe/engine/package.py | Python | gpl-2.0 | 77,805 |
# -*- coding: utf-8 -*-
#This is generated code - do not edit
encoding = 'utf-8'
dict = {
'&About...': '&\xd8\xb9\xd9\x86...',
'&Delete Window': '&\xd8\xa7\xd8\xad\xd8\xb0\xd9\x81 \xd8\xa7\xd9\x84\xd9\x86\xd8\xa7\xd9\x81\xd8\xb0\xd8\xa9',
'&Describe Action': '&\xd8\xa3\xd9\x88\xd8\xb5\xd9\x81 \xd8\xa7\xd9\x84\xd8\xb9\xd9\x85\xd9\x84\xd9\x8a\xd8\xa9',
'&Execute Action': '&\xd9\x86\xd9\x81\xd8\xb0 \xd8\xa7\xd9\x84\xd8\xb9\xd9\x85\xd9\x84\xd9\x8a\xd8\xa9',
'&Folding': '&\xd8\xa7\xd9\x84\xd8\xb7\xd9\x8a',
'&Help': '&\xd9\x85\xd8\xb3\xd8\xa7\xd8\xb9\xd8\xaf\xd8\xa9',
'&Line Numbers': '&\xd8\xb9\xd8\xaf\xd8\xaf \xd8\xa7\xd9\x84\xd8\xb3\xd8\xb7\xd9\x88\xd8\xb1',
'&New Window': '&\xd9\x86\xd8\xa7\xd9\x81\xd8\xb0\xd8\xa9 \xd8\xac\xd8\xaf\xd9\x8a\xd8\xaf\xd8\xa9',
'&Preferences...': '&\xd8\xa7\xd9\x84\xd8\xaa\xd9\x81\xd8\xb6\xd9\x8a\xd9\x84\xd8\xa7\xd8\xaa...',
'&Revert': '&\xd8\xa5\xd8\xb3\xd8\xaa\xd8\xb1\xd8\xac\xd8\xb9',
'&Save...': '&\xd8\xad\xd9\x81\xd8\xb8...',
'&Show Toolbars': '&\xd8\xb9\xd8\xb1\xd8\xb6 \xd8\xb4\xd8\xb1\xd9\x8a\xd8\xb7 \xd8\xa7\xd9\x84\xd8\xa3\xd8\xaf\xd9\x88\xd8\xa7\xd8\xa9',
'&Word Count': '&\xd8\xb9\xd8\xaf \xd8\xa7\xd9\x84\xd9\x83\xd9\x84\xd9\x85\xd8\xa7\xd8\xaa',
'About this program': '\xd8\xad\xd9\x88\xd9\x92\xd9\x84 \xd9\x87\xd8\xb0\xd8\xa7 \xd8\xa7\xd9\x84\xd8\xa8\xd8\xb1\xd9\x86\xd8\xa7\xd9\x85\xd8\xac',
'Actions': '\xd8\xa5\xd8\xac\xd8\xb1\xd8\xa7\xd8\xa1\xd8\xa7\xd8\xaa',
'Attributes': '\xd8\xa7\xd9\x84\xd8\xb5\xd9\x91\xd9\x81\xd8\xa7\xd8\xaa',
'Background': '\xd8\xa7\xd9\x84\xd8\xae\xd9\x84\xd9\x81\xd9\x8a\xd9\x91\xd8\xa9',
'Cancel': '\xd8\xa5\xd9\x84\xd8\xba\xd8\xa7\xef\xba\x80',
'Case': '\xd8\xa7\xd9\x84\xd8\xad\xd8\xa7\xd9\x84\xd8\xa9',
'Clear Playlist': '\xd9\x85\xd8\xb3\xd8\xad \xd9\x82\xd8\xa7\xd8\xa6\xd9\x85\xd8\xa9 \xd8\xa7\xd9\x84\xd8\xaa\xd8\xb4\xd8\xba\xd9\x8a\xd9\x84',
'Close Tab': '\xd8\xa3\xd8\xba\xd9\x84\xd9\x82 \xd8\xa7\xd9\x84\xd9\x84\xd8\xb3\xd8\xa7\xd9\x86',
'Close the current tab': '\xd8\xa3\xd8\xba\xd9\x84\xd9\x82 \xd8\xa7\xd9\x84\xd9\x84\xd8\xb3\xd8\xa7\xd9\x86 \xd8\xa7\xd9\x84\xd8\xad\xd8\xa7\xd9\x84\xd9\x8a',
'Color': '\xd8\xa7\xd9\x84\xd9\x84\xd9\x88\xd9\x86',
'Contrast': '\xd8\xa7\xd9\x84\xd8\xaa\xd8\xa8\xd8\xa7\xd9\x8a\xd9\x86',
'Copy': '\xd9\x86\xd8\xb3\xd8\xae',
'Cut': '\xd9\x82\xd8\xb5',
'Debug': '\xd8\xaa\xd9\x86\xd9\x82\xd9\x8a\xd8\xad',
'Documents': '\xd8\xa7\xd9\x84\xd9\x85\xd8\xb3\xd8\xaa\xd9\x86\xd8\xaf\xd8\xa7\xd8\xaa',
'E&xit': '&\xd8\xae\xd8\xb1\xd9\x88\xd8\xac',
}
| robmcmullen/peppy | peppy/i18n/ar.py | Python | gpl-2.0 | 2,474 |
import sys
from starstoloves.models import User as UserModel
from starstoloves import model_repository
from starstoloves.lib.track import lastfm_track_repository
from .user import User
def from_session_key(session_key):
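    # get_or_create is used for its side effect: it guarantees a backing
    # UserModel row exists for this session key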
user_model, created = UserModel.objects.get_or_create(session_key=session_key)
return User(
session_key=session_key,
repository=sys.modules[__name__],
    )
def delete(user):
try:
user_model = model_repository.from_user(user)
user_model.delete()
except UserModel.DoesNotExist:
        pass
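# Added usage sketch (the `request` object below is an assumption, e.g. taken
# from a Django view):
#
#   user = from_session_key(request.session.session_key)
#   ...
#   delete(user)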
| tdhooper/starstoloves | starstoloves/lib/user/user_repository.py | Python | gpl-2.0 | 564 |
# -*- coding: utf-8 -*-
"""
Translate a variable to and from a formatted string representation that
follows regular, parseable conventions.
for example:
    raw input:
        v={'aa': 12345, 'bbbb': [1, 2, 3, 4, {'flag': 'vvvv||||xxxxx'}, set(['y', 'x', 'z'])]}
    after `var2str.var2str(v)`:
        v_str=<aa::12345##bbbb::<1||2||3||4||<flag::vvvv|xxxxx>||<y|||x|||z>>>
    then reverse back with `var2str.str2var(v_str)`:
        v_var={'aa': '12345', 'bbbb': ['1', '2', '3', '4', {'flag': 'vvvv|xxxxx'}, set(['y', 'x', 'z'])]}
NOTES:
    1. DICT keys must be strings.
    2. SET and TUPLE are automatically transformed to LIST.
    3. INT/FLOAT/LONG etc. are automatically transformed to STRING.
    4. Separator substrings are replaced with '' inside character data.
"""
import types
# NOTE: mind the relationships between these separators ("|||" contains "||")
sep_dict = {
    "dict_sep": "##",     # separator between dict items
    "dict_k_v_sep": "::", # k::v
    "list_sep": "||",     # list separator
    "set_sep": "|||",     # set separator
    "tuple_sep": "||"     # tuple separator
}
sep_nest = ("<", ">") # better not repeated char, e.x. ("<-", "->")
# internal operations
sep_values = sep_dict.values()
def erase_sep(s):
for v in sep_values:
s = s.replace(v, "")
for v in sep_nest:
        s = s.replace(v, "")
return s
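# Added note: erase_sep guards the encoding by stripping every separator token
# from raw string data before it is embedded, e.g.
#   erase_sep("a##b||c")  ->  "abc"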
_s = sep_nest[0]
_e = sep_nest[1]
class var2str(object):
@staticmethod
def var2str(var):
if not var: return ""
if type(var) == types.DictType:
result = []
for key,value in var.items():
v_str = var2str.var2str(value)
k_str = erase_sep("{0}".format(key))
result.append("{key}{sep}{value}".format(
key=k_str,
sep=sep_dict["dict_k_v_sep"],
value=v_str))
return _s+sep_dict["dict_sep"].join(result)+_e
#return sep_dict["dict_sep"].join(result)
elif type(var) == types.ListType:
result = [var2str.var2str(v) for v in var]
return _s+sep_dict["list_sep"].join(result)+_e
#return sep_dict["list_sep"].join(result)
elif type(var) == type(set([])):
result = [var2str.var2str(v) for v in var]
return _s+sep_dict["set_sep"].join(result)+_e
#return sep_dict["set_sep"].join(result)
elif type(var) == types.TupleType:
result = [var2str.var2str(v) for v in var]
return _s+sep_dict["tuple_sep"].join(result)+_e
#return sep_dict["tuple_sep"].join(result)
elif type(var) in [types.StringType,
types.IntType,
types.LongType,
types.FloatType]:
return erase_sep("{0}".format(var))
else:
raise TypeError("Type is not supported. var: {0}, type: {1}".format(
var, type(var)))
@staticmethod
def str2var(value):
        # determine whether the outer value is a nested structure
if NestType.is_nest_type(value, _s, _e):
_var = NestType(value)
_var.replace_nest_vars()
var = _var.parse_var()
if type(var) == types.DictType:
for k, v in var.items():
if type(v)==NestType:
var[k] = var2str.str2var(str(v))
if type(var) == types.ListType:
for n, v in enumerate(var):
if type(v) == NestType:
var[n] = var2str.str2var(str(v))
if type(var) == type(set()):
            # elements of a set must be hashable, so a set cannot contain
            # nested (unhashable) structures; nothing further to parse
pass
return var
else:
return value
class NestType(object):
def __init__(self, s, s_tag=_s, e_tag=_e):
self.value = str(s)
self.s_tag = s_tag
self.e_tag = e_tag
self.replace_s = None
@staticmethod
def is_nest_type(value, s_tag, e_tag):
if (not value.startswith(s_tag) or
not value.endswith(e_tag)):
return 0
return 1
def _get_obj_str(self, var):
return "[NestType]"+str(hash(var))
def has_nest_element(self):
if self.replace_s is None:
self.replace_nest_vars()
        # a nested element exists iff the replacement pass changed the
        # tag-trimmed string
        return self.replace_s != _trim_tag(self.value, self.s_tag, self.e_tag)
def _replace_nest_var(self, s, nest_dic={}):
s_len = len(s)
tag_index = 0
s_tag_len, e_tag_len = len(self.s_tag), len(self.e_tag)
nest_index =[]
for i in range(s_len):
if s[i:i+s_tag_len] == self.s_tag:
tag_index +=1
if tag_index == 1: nest_index.append(i)
if s[i:i+e_tag_len] == self.e_tag:
tag_index -=1
if tag_index == 0: nest_index.append(i)
if len(nest_index) == 2: break
if len(nest_index) <2: return s
nest_index_s = nest_index[0]
nest_index_e = nest_index[1] + e_tag_len
nest_str = s[nest_index_s:nest_index_e]
nest_var = NestType(nest_str, s_tag=self.s_tag, e_tag = self.e_tag)
nest_var_str = self._get_obj_str(nest_var)
nest_dic[nest_var_str] = nest_var
return s[0:nest_index_s] + nest_var_str + s[nest_index_e:]
def replace_nest_vars(self):
# trim sign in start and end
nest_dic = {}
if not NestType.is_nest_type(self.value, self.s_tag, self.e_tag):
raise Exception(
"[ERROR] `{0}` does not match NestType format".format(self.value))
s = _trim_tag(self.value, self.s_tag, self.e_tag)
while 1:
replace_s = self._replace_nest_var(s,nest_dic)
if replace_s == s: break
s = replace_s
self.replace_s = replace_s
self.nest_dic = nest_dic
def parse_var(self):
"""string `replace_s` has no nestType at all"""
s = self.replace_s
var = None
dict_sep = sep_dict["dict_sep"]
dict_k_v_sep = sep_dict["dict_k_v_sep"]
list_sep = sep_dict["list_sep"]
set_sep = sep_dict["set_sep"]
if dict_k_v_sep in s: # dict
var = {}
items = s.split(dict_sep)
for item in items:
if not item: continue
k,v=item.split(dict_k_v_sep)
var[k] = self.nest_dic.get(v, v)
elif set_sep in s:
var = set([self.nest_dic.get(t, t) for t in s.split(set_sep)])
elif list_sep in s:
var = [self.nest_dic.get(t, t) for t in s.split(list_sep)]
else:
# just one string
var = s
return var
def __str__(self):
return self.value
def __unicode__(self):
return self.value
def _trim_tag(str, s, e):
"""trim the `str` off start `s` and end `e`"""
return str[len(s):(len(str)-len(e))]
def test():
a = {"aa": 12345, "bbbb":[1,2,3,4,{'flag':"vvvv||||世界是我的"},set(['x', 'y','z'])]}
#a = {}
print a
a_str = var2str.var2str(a)
print ">>", a_str
a_var = var2str.str2var(a_str)
print ">>", a_var
if __name__ == "__main__":
test()
| mavarick/spider-python | webspider/utils/var2str.py | Python | gpl-2.0 | 6,752 |
from triple_draw_poker.model.Pot import Pot
class HandDetails:
def __init__(self):
self.pot = Pot()
self.raised = 0
self.street = 0
self.number_of_streets = 4
self.in_draw = False
self.hands = []
self.dealt_cards_index = 0
def getDealtCardsIndex(self):
        return self.dealt_cards_index
def getHands(self):
return self.hands
def getPot(self):
return self.pot
def getRaised(self):
return self.raised
def getStreet(self):
return self.street
def getStreetPremium(self):
if self.street < 3:
return 2
return 1
def getNumberOfStreets(self):
return self.number_of_streets
def getInDraw(self):
return self.in_draw
def setDealtCardsIndex(self, index):
self.dealt_cards_index = index
def addHand(self, hand):
self.hands.append(hand)
def incrementRaised(self):
self.raised += 1
def incrementStreet(self):
self.street += 1
def changeInDraw(self):
self.in_draw = not self.in_draw
| zmetcalf/Triple-Draw-Deuce-to-Seven-Lowball-Limit | triple_draw_poker/model/HandDetails.py | Python | gpl-2.0 | 1,109 |
from src.tools.enum import enum
import pyxbmct.addonwindow as pyxbmct
from src.tools.dialog import dialog
EnumMode = enum(SELECT=0, ROTATE=1)
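# Added note: with EnumMode.SELECT a click opens a selection dialog listing all
# values; with EnumMode.ROTATE each click advances to the next value in
# `values`, wrapping around to the first after the last.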
class EnumButton(object):
def __init__(self, label, values, current, default, changeCallback=None, saveCallback=None, customLabels=None, mode=EnumMode.SELECT, returnValue=False, alignment=pyxbmct.ALIGN_CENTER):
self.label = label
self.values = values
self.customLabels = customLabels
self.mode = mode
self.returnValue = returnValue
self.changeCallback = changeCallback
self.saveCallback = saveCallback
self.currentValue = current
self.defaultValue = default
self.currentIndex = None
self.defaultIndex = None
self.assignedValue = False
if saveCallback is None:
self.onSave = None
if customLabels:
self._findCurrentIndex()
label = str(customLabels[self.currentIndex])
else:
label = str(current)
if alignment is not None:
self.button = pyxbmct.Button(label, alignment=alignment)
else:
self.button = pyxbmct.Button(label)
def update(self, value):
if self.currentValue != value:
self.currentValue = value
if self.customLabels:
self._findCurrentIndex()
label = str(self.customLabels[self.currentIndex])
else:
self.currentIndex = None
label = str(value)
self.button.setLabel(label)
self.assignedValue = True
def onClick(self):
if self.mode == EnumMode.SELECT:
if self.customLabels:
values = self.customLabels
else:
values = self.values
selectedIndex = dialog.select(self.label, list((str(value) for value in values)))
if selectedIndex == -1:
return
index = selectedIndex
else:
if self.currentIndex is None:
self._findCurrentIndex()
if self.currentIndex == len(self.values) - 1:
index = 0
else:
index = self.currentIndex + 1
self.assign(index)
def onDefault(self):
if self.defaultIndex is None:
self._findDefaultIndex()
self.assign(self.defaultIndex)
def onSave(self):
if self.assignedValue:
if self.returnValue:
self.saveCallback(self.currentValue)
else:
self.saveCallback(self.currentIndex)
def assign(self, index):
value = self.values[index]
self.currentIndex = index
self.currentValue = value
if self.customLabels:
label = str(self.customLabels[index])
else:
label = str(value)
self.button.setLabel(label)
self.assignedValue = True
if self.changeCallback:
if self.returnValue:
self.changeCallback(value)
else:
self.changeCallback(index)
def _findDefaultIndex(self):
for i in range(0, len(self.values)):
value = self.values[i]
if value == self.defaultValue:
self.defaultIndex = i
if self.defaultIndex is None:
raise ValueError ('Default value not found in value list')
def _findCurrentIndex(self):
for i in range(0, len(self.values)):
value = self.values[i]
if value == self.currentValue:
self.currentIndex = i
if self.currentIndex is None:
            raise ValueError ('Current value not found in value list')
| SportySpice/Collections | src/gui/EnumButton.py | Python | gpl-2.0 | 4,537 |
# -*- coding: utf-8 -*-
# @Author: Marco Benzi <[email protected]>
# @Date: 2015-06-07 19:44:12
# @Last Modified 2015-06-09
# @Last Modified time: 2015-06-09 16:07:05
# ==========================================================================
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# ==========================================================================
import math
"""
Speed of light constant
"""
c = 3E8
"""
Vacuum permittivity
"""
e0 = 8.8541E-12
"""
Vacuum permeability
"""
u0 = 4E-7*math.pi
def getEffectivePermitivity(WHratio, er):
"""
	Returns the effective permittivity for a given W/H ratio.
	This function assumes that the thickness of conductors is insignificant.
	Parameters:
	- `WHratio` : W/H ratio.
	- `er` : Relative permittivity of the dielectric.
"""
if WHratio <= 1:
return (er + 1)/2 + ((1 + 12/WHratio)**(-0.5) + 0.04*(1-WHratio)**2)*(er -1)/2
else:
return (er + 1)/2 + ((1 + 12/WHratio)**(-0.5))*(er -1)/2
def getAuxVarA(Zo,er):
"""
Returns the auxiliary variable
A = (Zo)/60 * math.sqrt((er + 1)/2) + (er-1)/(er+1)*(0.23+0.11/er)
	This function assumes that the thickness of conductors is insignificant.
	Parameters:
	- `Zo` : Real impedance of the line.
	- `er` : Relative permittivity of the dielectric.
"""
return (Zo)/60 * math.sqrt((er + 1)/2) + (er-1)/(er+1)*(0.23+0.11/er)
def getAuxVarB(Zo,er):
"""
Returns the auxiliary variable
B = (377*math.pi)/(2*Zo*math.sqrt(er))
	This function assumes that the thickness of conductors is insignificant.
	Parameters:
	- `Zo` : Real impedance of the line.
	- `er` : Relative permittivity of the dielectric.
"""
return (377*math.pi)/(2*Zo*math.sqrt(er))
def getWHRatioA(Zo,er):
"""
	Returns the W/H ratio for W/H < 2. If the result is > 2, another method
	should be used.
	This function assumes that the thickness of conductors is insignificant.
	Parameters:
	- `Zo` : Real impedance of the line.
	- `er` : Relative permittivity of the dielectric.
"""
A = getAuxVarA(Zo,er)
return (8*math.e**A)/(math.e**(2*A) - 2)
def getWHRatioB(Zo,er):
"""
	Returns the W/H ratio for W/H > 2. If the result is < 2, another method
	should be used.
	This function assumes that the thickness of conductors is insignificant.
	Parameters:
	- `Zo` : Real impedance of the line.
	- `er` : Relative permittivity of the dielectric.
"""
B = getAuxVarB(Zo,er)
return (2/math.pi)*(B-1 - math.log(2*B - 1) + (er - 1)*(math.log(B-1) + 0.39 - 0.61/er)/(2*er))
def getCharacteristicImpedance(WHratio, ef):
"""
	Returns the characteristic impedance of the medium, based on the effective
	permittivity and W/H ratio.
	This function assumes that the thickness of conductors is insignificant.
	Parameters:
	- `WHratio` : W/H ratio.
	- `ef` : Effective permittivity of the dielectric.
"""
if WHratio <= 1:
return (60/math.sqrt(ef))*math.log(8/WHratio + WHratio/4)
else:
return (120*math.pi/math.sqrt(ef))/(WHratio + 1.393 + 0.667*math.log(WHratio +1.444))
def getWHRatio(Zo,er):
"""
	Returns the W/H ratio, trying the two closed-form solutions (valid for
	W/H < 2 and W/H > 2 respectively) and iterating until one is consistent.
	This function assumes that the thickness of conductors is insignificant.
	Parameters:
	- `Zo` : Real impedance of the line.
	- `er` : Relative permittivity of the dielectric.
"""
efa = er
efb = er
Zoa = Zo
Zob = Zo
while 1:
rA = getWHRatioA(Zoa,efa)
rB = getWHRatioB(Zob,efb)
if rA < 2:
return rA
if rB > 2:
return rB
Zoa = math.sqrt(efa)*Zoa
Zob = math.sqrt(efb)*Zob
def getCorrectedWidth(W,H,t):
"""
For significant conductor thickness, this returns the corrected width.
	Parameters:
- `W` : Width
- `H` : Height
- `t` : Conductor thickness
"""
if t < H and t < W/2:
if W/H <= math.pi/2:
return W + (1 + math.log(2*H/t))*(t/math.pi)
else:
return W + (1 + math.log(4*math.pi*H/t))*(t/math.pi)
else:
print "The conductor is too thick!!"
def getConductorLoss(W,H,t,sigma,f,Zo):
"""
	Returns the conductor loss in [dB/m] (the 8.68 factor converts nepers to decibels).
Parameters:
- `W` : Width
- `H` : Height
- `t` : Conductor thickness
	- `sigma` : Conductivity of the conductor
- `f` : Operating frequency
- `Zo` : Characteristic impedance
"""
We = getCorrectedWidth(W,H,t)
P = 1 - (We/4/H)**2
Rs = math.sqrt((math.pi*f*u0)/sigma)
Q = 1 + H/We + (math.log((2*H)/t)-t/W)*H/(We*math.pi)
if W/H <= 1/(2*math.pi):
		return (1 + H/We + (math.log(4*math.pi*W/t) + t/W)*H/(math.pi*We))*(8.68*Rs*P)/(2*math.pi*Zo*H)
elif W/H <= 2:
return (8.68*Rs*P*Q)/(2*math.pi*Zo*H)
else:
return ((8.68*Rs*Q)/(Zo*H))*(We/H + (We/math.pi/H)/(We/2/H)+0.94)*((H/We + 2*math.log(We/2/H + 0.94)/math.pi)**(-2))
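# --- Added illustrative sketch, not part of the original module ---
# Shows how the synthesis helpers above are typically combined: pick a target
# impedance and substrate, derive the W/H ratio, then recompute the impedance
# as a sanity check. The 50 ohm / er = 4.4 (FR-4-like) defaults are assumed
# values for the example only.
def exampleSynthesis(Zo=50.0, er=4.4):
	"""Returns (W/H ratio, effective permittivity, recomputed impedance)."""
	ratio = getWHRatio(Zo, er)
	ef = getEffectivePermitivity(ratio, er)
	return (ratio, ef, getCharacteristicImpedance(ratio, ef))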
def getDielectricLoss(er,ef,tanD,f):
"""
Returns the dielectric loss in [dB/cm].
	Parameters:
	- `er` : Relative permittivity of the dielectric
	- `ef` : Effective permittivity
- `tanD` : tan \delta
- `f` : Operating frequency
"""
lam = c/math.sqrt(ef)/f
	return 27.3*(er*(ef-1)*tanD)/(lam*math.sqrt(er)*(er-1))
| Lisergishnu/LTXKit | uStripDesign.py | Python | gpl-2.0 | 5,581 |
#!/usr/bin/env python2.7
# -*- coding:utf-8 -*-
import sklearn.datasets as skds
import numpy as np
import random
import theano.tensor as T
import theano
import matplotlib.pyplot as plt
import math
# I don't know what 'housing.data' refers to, so I used a self-generated dataset instead
x = np.arange(-50., 50., 1)
y = np.array(map(lambda tmp: 1.0/(1 + math.exp(-3 * tmp + 5.0)), x))
noise = np.random.uniform(-0.1, .1, size=len(x))
y += noise
print x
print y
#declarations
theta = theano.shared(np.random.uniform(-0.1, 0.1))
omega = theano.shared(np.random.uniform(-0.1, 0.1))
X = T.dscalar('X')
Y = T.dscalar('Y')
#functions
prediction = 1/(1 + T.exp(-omega * X + theta))
loss1 = -Y * T.log(prediction)
loss2 = 1/2.0 * (prediction - Y) ** 2
predict = theano.function([X], prediction)
calculate_loss = theano.function([X, Y], loss2)
print predict(1.0)
#derivatives
dX = T.grad(loss2, X)
dtheta = T.grad(loss2, theta)
domega = T.grad(loss2, omega)
epsilon = .01
#gradient function
gradient_step = theano.function(
[X, Y],
updates=((omega, omega - epsilon * domega),
(theta, theta - epsilon * dtheta)))
#optimization
for i in range(100):
loss = 0
for j in range(len(x)):
gradient_step(x[j], y[j])
loss += calculate_loss(x[j], y[j])
    print 'loss after ' + str(i) + ' iterations: ' + str(loss)
print x
print y
mul = 1 - 1.0/len(x)  # float division; 1/len(x) is integer division (0) in Python 2
plt.xlim(x.min() * mul, x.max() * mul)
plt.ylim(y.min() * mul, y.max() * mul)
plt.xlabel('x')
plt.ylabel('y')
plt.title('lr test')
plt.plot(x, y, 'ro')
xx = np.arange(x.min(), x.max(), 0.1)
yy = map(lambda abc: predict(abc), xx)
plt.plot(xx, yy, 'b')
plt.show()
# vim: ts=4 sw=4 sts=4 expandtab
| AthenaYe/UFLDL_Tutorial | Chap1_Supervised_Learning_and_Optimization/logistic_regression.py | Python | gpl-2.0 | 1,696 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
This implements a redirection for CERN HR Documents in the CERN Document
Server. It's useful as a reference on how goto plugins could be implemented.
"""
import time
import re
from invenio.legacy.search_engine import perform_request_search
from invenio.legacy.bibrecord import get_fieldvalues
from invenio.legacy.bibdocfile.api import BibRecDocs
def make_cern_ssr_docname(lang, edition, modif=0):
if modif:
return "CERN_SSR_%(lang)s_ed%(edition)02d_modif%(modif)02d" % {
'lang': lang,
'edition': edition,
'modif': modif
}
else:
return "CERN_SSR_%(lang)s_ed%(edition)02d" % {
'lang': lang,
'edition': edition,
}
_RE_REVISION = re.compile(r"rev(\d\d)")
def _get_revision(docname):
"""
Return the revision in a docname. E.g.:
CERN_Circ_Op_en_02_rev01_Implementation measures.pdf -> 1
CERN_Circ_Op_en_02_rev02_Implementation measures.PDF -> 2
"""
g = _RE_REVISION.search(docname)
if g:
return int(g.group(1))
return 0
def _register_document(documents, docname, key):
"""
Register in the documents mapping the docname to key, but only if the
docname has a revision higher of the docname already associated with a key
"""
if key in documents:
if _get_revision(docname) > _get_revision(documents[key]):
documents[key] = docname
else:
documents[key] = docname
def goto(type, document='', number=0, lang='en', modif=0):
today = time.strftime('%Y-%m-%d')
if type == 'SSR':
## We would like a CERN Staff Rules and Regulations
recids = perform_request_search(cc='Staff Rules and Regulations', f="925__a:1996-01-01->%s 925__b:%s->9999-99-99" % (today, today))
recid = recids[-1]
reportnumber = get_fieldvalues(recid, '037__a')[0]
edition = int(reportnumber[-2:]) ## e.g. CERN-STAFF-RULES-ED08
return BibRecDocs(recid).get_bibdoc(make_cern_ssr_docname(lang, edition, modif)).get_file('.pdf').get_url()
elif type == "OPER-CIRC":
recids = perform_request_search(cc="Operational Circulars", p="reportnumber=\"CERN-OPER-CIRC-%s-*\"" % number, sf="925__a")
recid = recids[-1]
documents = {}
bibrecdocs = BibRecDocs(recid)
for docname in bibrecdocs.get_bibdoc_names():
ldocname = docname.lower()
if 'implementation' in ldocname:
_register_document(documents, docname, 'implementation_en')
elif 'application' in ldocname:
_register_document(documents, docname, 'implementation_fr')
elif 'archiving' in ldocname:
_register_document(documents, docname, 'archiving_en')
elif 'archivage' in ldocname:
_register_document(documents, docname, 'archiving_fr')
elif 'annexe' in ldocname or 'annexes_fr' in ldocname:
_register_document(documents, docname, 'annex_fr')
elif 'annexes_en' in ldocname or 'annex' in ldocname:
_register_document(documents, docname, 'annex_en')
elif '_en_' in ldocname or '_eng_' in ldocname or '_angl_' in ldocname:
_register_document(documents, docname, 'en')
elif '_fr_' in ldocname:
_register_document(documents, docname, 'fr')
return bibrecdocs.get_bibdoc(documents[document]).get_file('.pdf').get_url()
elif type == 'ADMIN-CIRC':
recids = perform_request_search(cc="Administrative Circulars", p="reportnumber=\"CERN-ADMIN-CIRC-%s-*\"" % number, sf="925__a")
recid = recids[-1]
documents = {}
bibrecdocs = BibRecDocs(recid)
for docname in bibrecdocs.get_bibdoc_names():
ldocname = docname.lower()
if 'implementation' in ldocname:
_register_document(documents, docname, 'implementation-en')
elif 'application' in ldocname:
_register_document(documents, docname, 'implementation-fr')
elif 'archiving' in ldocname:
_register_document(documents, docname, 'archiving-en')
elif 'archivage' in ldocname:
_register_document(documents, docname, 'archiving-fr')
elif 'annexe' in ldocname or 'annexes_fr' in ldocname:
_register_document(documents, docname, 'annex-fr')
elif 'annexes_en' in ldocname or 'annex' in ldocname:
_register_document(documents, docname, 'annex-en')
elif '_en_' in ldocname or '_eng_' in ldocname or '_angl_' in ldocname:
_register_document(documents, docname, 'en')
elif '_fr_' in ldocname:
_register_document(documents, docname, 'fr')
return bibrecdocs.get_bibdoc(documents[document]).get_file('.pdf').get_url()
def register_hr_redirections():
"""
Run this only once
"""
from invenio.modules.redirector.api import register_redirection
plugin = 'goto_plugin_cern_hr_documents'
## Staff rules and regulations
for modif in range(1, 20):
for lang in ('en', 'fr'):
register_redirection('hr-srr-modif%02d-%s' % (modif, lang), plugin, parameters={'type': 'SSR', 'lang': lang, 'modif': modif})
for lang in ('en', 'fr'):
register_redirection('hr-srr-%s' % lang, plugin, parameters={'type': 'SSR', 'lang': lang, 'modif': 0})
## Operational Circulars
for number in range(1, 10):
for lang in ('en', 'fr'):
register_redirection('hr-oper-circ-%s-%s' % (number, lang), plugin, parameters={'type': 'OPER-CIRC', 'document': lang, 'number': number})
for number, special_document in ((2, 'implementation'), (2, 'annex'), (3, 'archiving'), (3, 'annex')):
for lang in ('en', 'fr'):
register_redirection('hr-circ-%s-%s-%s' % (number, special_document, lang), plugin, parameters={'type': 'OPER-CIRC', 'document': '%s-%s' % (special_document, lang), 'number': number})
## Administrative Circulars:
for number in range(1, 32):
for lang in ('en', 'fr'):
register_redirection('hr-admin-circ-%s-%s' % (number, lang), plugin, parameters={'type': 'ADMIN-CIRC', 'document': lang, 'number': number})
if __name__ == "__main__":
register_hr_redirections()
| PXke/invenio | invenio/modules/redirector/redirect_methods/goto_plugin_cern_hr_documents.py | Python | gpl-2.0 | 7,113 |
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
# Copyright (C) 2006 Lukáš Lalinský
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import os.path
import sys
from PyQt5 import QtGui
if sys.platform == 'win32':
_search_paths = []
else:
_search_paths = [
os.path.expanduser('~/.icons'),
os.path.join(os.environ.get('XDG_DATA_DIRS', '/usr/share'), 'icons'),
'/usr/share/pixmaps',
]
_current_theme = None
if 'XDG_CURRENT_DESKTOP' in os.environ:
desktop = os.environ['XDG_CURRENT_DESKTOP'].lower()
if desktop in ('gnome', 'unity'):
_current_theme = (os.popen('gsettings get org.gnome.desktop.interface icon-theme').read().strip()[1:-1]
or None)
elif os.environ.get('KDE_FULL_SESSION'):
_current_theme = (os.popen("kreadconfig --file kdeglobals --group Icons --key Theme --default crystalsvg").read().strip()
or None)
ICON_SIZE_MENU = ('16x16',)
ICON_SIZE_TOOLBAR = ('22x22',)
ICON_SIZE_ALL = ('22x22', '16x16')
def lookup(name, size=ICON_SIZE_ALL):
icon = QtGui.QIcon()
if _current_theme:
for path in _search_paths:
for subdir in ('actions', 'places', 'devices'):
fullpath = os.path.join(path, _current_theme, size[0], subdir, name)
if os.path.exists(fullpath + '.png'):
icon.addFile(fullpath + '.png')
for s in size[1:]:
icon.addFile(os.path.join(path, _current_theme, s, subdir, name) + '.png')
return icon
for s in size:
icon.addFile('/'.join([':', 'images', s, name]) + '.png')
return icon
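# Added usage note (the icon name is an assumption; any freedesktop icon name
# should work):
#   open_icon = lookup("document-open", size=ICON_SIZE_MENU)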
| mineo/picard | picard/util/icontheme.py | Python | gpl-2.0 | 2,350 |
#
#
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""OpCodes module
This module implements the data structures which define the cluster
operations - the so-called opcodes.
Every operation which modifies the cluster state is expressed via
opcodes.
"""
# these are practically structures, so disable the message about too
# few public methods:
# pylint: disable-msg=R0903
import logging
import re
import operator
from ganeti import constants
from ganeti import errors
from ganeti import ht
# Common opcode attributes
#: output fields for a query operation
_POutputFields = ("output_fields", ht.NoDefault, ht.TListOf(ht.TNonEmptyString),
"Selected output fields")
#: the shutdown timeout
_PShutdownTimeout = \
("shutdown_timeout", constants.DEFAULT_SHUTDOWN_TIMEOUT, ht.TPositiveInt,
"How long to wait for instance to shut down")
#: the force parameter
_PForce = ("force", False, ht.TBool, "Whether to force the operation")
#: a required instance name (for single-instance LUs)
_PInstanceName = ("instance_name", ht.NoDefault, ht.TNonEmptyString,
"Instance name")
#: Whether to ignore offline nodes
_PIgnoreOfflineNodes = ("ignore_offline_nodes", False, ht.TBool,
"Whether to ignore offline nodes")
#: a required node name (for single-node LUs)
_PNodeName = ("node_name", ht.NoDefault, ht.TNonEmptyString, "Node name")
#: a required node group name (for single-group LUs)
_PGroupName = ("group_name", ht.NoDefault, ht.TNonEmptyString, "Group name")
#: Migration type (live/non-live)
_PMigrationMode = ("mode", None,
ht.TOr(ht.TNone, ht.TElemOf(constants.HT_MIGRATION_MODES)),
"Migration mode")
#: Obsolete 'live' migration mode (boolean)
_PMigrationLive = ("live", None, ht.TMaybeBool,
"Legacy setting for live migration, do not use")
#: Tag type
_PTagKind = ("kind", ht.NoDefault, ht.TElemOf(constants.VALID_TAG_TYPES), None)
#: List of tag strings
_PTags = ("tags", ht.NoDefault, ht.TListOf(ht.TNonEmptyString), None)
_PForceVariant = ("force_variant", False, ht.TBool,
"Whether to force an unknown OS variant")
_PWaitForSync = ("wait_for_sync", True, ht.TBool,
"Whether to wait for the disk to synchronize")
_PIgnoreConsistency = ("ignore_consistency", False, ht.TBool,
"Whether to ignore disk consistency")
_PStorageName = ("name", ht.NoDefault, ht.TMaybeString, "Storage name")
_PUseLocking = ("use_locking", False, ht.TBool,
"Whether to use synchronization")
_PNameCheck = ("name_check", True, ht.TBool, "Whether to check name")
_PNodeGroupAllocPolicy = \
("alloc_policy", None,
ht.TOr(ht.TNone, ht.TElemOf(constants.VALID_ALLOC_POLICIES)),
"Instance allocation policy")
_PGroupNodeParams = ("ndparams", None, ht.TMaybeDict,
"Default node parameters for group")
_PQueryWhat = ("what", ht.NoDefault, ht.TElemOf(constants.QR_VIA_OP),
"Resource(s) to query for")
_PIpCheckDoc = "Whether to ensure instance's IP address is inactive"
#: Do not remember instance state changes
_PNoRemember = ("no_remember", False, ht.TBool,
"Do not remember the state change")
#: Target node for instance migration/failover
_PMigrationTargetNode = ("target_node", None, ht.TMaybeString,
"Target node for shared-storage instances")
#: OP_ID conversion regular expression
_OPID_RE = re.compile("([a-z])([A-Z])")
#: Utility function for L{OpClusterSetParams}
_TestClusterOsList = ht.TOr(ht.TNone,
ht.TListOf(ht.TAnd(ht.TList, ht.TIsLength(2),
ht.TMap(ht.WithDesc("GetFirstItem")(operator.itemgetter(0)),
ht.TElemOf(constants.DDMS_VALUES)))))
# TODO: Generate check from constants.INIC_PARAMS_TYPES
#: Utility function for testing NIC definitions
_TestNicDef = ht.TDictOf(ht.TElemOf(constants.INIC_PARAMS),
ht.TOr(ht.TNone, ht.TNonEmptyString))
_SUMMARY_PREFIX = {
"CLUSTER_": "C_",
"GROUP_": "G_",
"NODE_": "N_",
"INSTANCE_": "I_",
}
def _NameToId(name):
"""Convert an opcode class name to an OP_ID.
@type name: string
@param name: the class name, as OpXxxYyy
@rtype: string
@return: the name in the OP_XXXX_YYYY format
"""
if not name.startswith("Op"):
return None
# Note: (?<=[a-z])(?=[A-Z]) would be ideal, since it wouldn't
# consume any input, and hence we would just have all the elements
# in the list, one by one; but it seems that split doesn't work on
# non-consuming input, hence we have to process the input string a
# bit
name = _OPID_RE.sub(r"\1,\2", name)
elems = name.split(",")
return "_".join(n.upper() for n in elems)
def RequireFileStorage():
"""Checks that file storage is enabled.
While it doesn't really fit into this module, L{utils} was deemed too large
of a dependency to be imported for just one or two functions.
@raise errors.OpPrereqError: when file storage is disabled
"""
if not constants.ENABLE_FILE_STORAGE:
raise errors.OpPrereqError("File storage disabled at configure time",
errors.ECODE_INVAL)
def RequireSharedFileStorage():
"""Checks that shared file storage is enabled.
While it doesn't really fit into this module, L{utils} was deemed too large
of a dependency to be imported for just one or two functions.
@raise errors.OpPrereqError: when shared file storage is disabled
"""
if not constants.ENABLE_SHARED_FILE_STORAGE:
raise errors.OpPrereqError("Shared file storage disabled at"
" configure time", errors.ECODE_INVAL)
@ht.WithDesc("CheckFileStorage")
def _CheckFileStorage(value):
"""Ensures file storage is enabled if used.
"""
if value == constants.DT_FILE:
RequireFileStorage()
elif value == constants.DT_SHARED_FILE:
RequireSharedFileStorage()
return True
_CheckDiskTemplate = ht.TAnd(ht.TElemOf(constants.DISK_TEMPLATES),
_CheckFileStorage)
def _CheckStorageType(storage_type):
"""Ensure a given storage type is valid.
"""
if storage_type not in constants.VALID_STORAGE_TYPES:
raise errors.OpPrereqError("Unknown storage type: %s" % storage_type,
errors.ECODE_INVAL)
if storage_type == constants.ST_FILE:
RequireFileStorage()
return True
#: Storage type parameter
_PStorageType = ("storage_type", ht.NoDefault, _CheckStorageType,
"Storage type")
class _AutoOpParamSlots(type):
"""Meta class for opcode definitions.
"""
def __new__(mcs, name, bases, attrs):
"""Called when a class should be created.
@param mcs: The meta class
@param name: Name of created class
@param bases: Base classes
@type attrs: dict
@param attrs: Class attributes
"""
assert "__slots__" not in attrs, \
"Class '%s' defines __slots__ when it should use OP_PARAMS" % name
assert "OP_ID" not in attrs, "Class '%s' defining OP_ID" % name
attrs["OP_ID"] = _NameToId(name)
# Always set OP_PARAMS to avoid duplicates in BaseOpCode.GetAllParams
params = attrs.setdefault("OP_PARAMS", [])
# Use parameter names as slots
slots = [pname for (pname, _, _, _) in params]
assert "OP_DSC_FIELD" not in attrs or attrs["OP_DSC_FIELD"] in slots, \
"Class '%s' uses unknown field in OP_DSC_FIELD" % name
attrs["__slots__"] = slots
return type.__new__(mcs, name, bases, attrs)
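# Added note: thanks to the metaclass, a definition such as
#
#   class OpNodePowercycle(OpCode):
#     OP_PARAMS = [_PNodeName, _PForce]
#
# automatically gets OP_ID = "OP_NODE_POWERCYCLE" and __slots__ derived from
# the parameter names, so instances only accept the declared attributes.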
class BaseOpCode(object):
"""A simple serializable object.
This object serves as a parent class for OpCode without any custom
field handling.
"""
# pylint: disable-msg=E1101
# as OP_ID is dynamically defined
__metaclass__ = _AutoOpParamSlots
def __init__(self, **kwargs):
"""Constructor for BaseOpCode.
The constructor takes only keyword arguments and will set
attributes on this object based on the passed arguments. As such,
it means that you should not pass arguments which are not in the
__slots__ attribute for this class.
"""
slots = self._all_slots()
for key in kwargs:
if key not in slots:
raise TypeError("Object %s doesn't support the parameter '%s'" %
(self.__class__.__name__, key))
setattr(self, key, kwargs[key])
def __getstate__(self):
"""Generic serializer.
This method just returns the contents of the instance as a
dictionary.
@rtype: C{dict}
@return: the instance attributes and their values
"""
state = {}
for name in self._all_slots():
if hasattr(self, name):
state[name] = getattr(self, name)
return state
def __setstate__(self, state):
"""Generic unserializer.
This method just restores from the serialized state the attributes
of the current instance.
@param state: the serialized opcode data
@type state: C{dict}
"""
if not isinstance(state, dict):
raise ValueError("Invalid data to __setstate__: expected dict, got %s" %
type(state))
for name in self._all_slots():
if name not in state and hasattr(self, name):
delattr(self, name)
for name in state:
setattr(self, name, state[name])
@classmethod
def _all_slots(cls):
"""Compute the list of all declared slots for a class.
"""
slots = []
for parent in cls.__mro__:
slots.extend(getattr(parent, "__slots__", []))
return slots
@classmethod
def GetAllParams(cls):
"""Compute list of all parameters for an opcode.
"""
slots = []
for parent in cls.__mro__:
slots.extend(getattr(parent, "OP_PARAMS", []))
return slots
def Validate(self, set_defaults):
"""Validate opcode parameters, optionally setting default values.
@type set_defaults: bool
@param set_defaults: Whether to set default values
@raise errors.OpPrereqError: When a parameter value doesn't match
requirements
"""
for (attr_name, default, test, _) in self.GetAllParams():
assert test == ht.NoType or callable(test)
if not hasattr(self, attr_name):
if default == ht.NoDefault:
raise errors.OpPrereqError("Required parameter '%s.%s' missing" %
(self.OP_ID, attr_name),
errors.ECODE_INVAL)
elif set_defaults:
if callable(default):
dval = default()
else:
dval = default
setattr(self, attr_name, dval)
if test == ht.NoType:
# no tests here
continue
if set_defaults or hasattr(self, attr_name):
attr_val = getattr(self, attr_name)
if not test(attr_val):
logging.error("OpCode %s, parameter %s, has invalid type %s/value %s",
self.OP_ID, attr_name, type(attr_val), attr_val)
raise errors.OpPrereqError("Parameter '%s.%s' fails validation" %
(self.OP_ID, attr_name),
errors.ECODE_INVAL)
class OpCode(BaseOpCode):
"""Abstract OpCode.
  This is the root of the actual OpCode hierarchy. All classes derived
from this class should override OP_ID.
@cvar OP_ID: The ID of this opcode. This should be unique amongst all
children of this class.
@cvar OP_DSC_FIELD: The name of a field whose value will be included in the
string returned by Summary(); see the docstring of that
method for details).
@cvar OP_PARAMS: List of opcode attributes, the default values they should
get if not already defined, and types they must match.
@cvar WITH_LU: Boolean that specifies whether this should be included in
mcpu's dispatch table
@ivar dry_run: Whether the LU should be run in dry-run mode, i.e. just
the check steps
@ivar priority: Opcode priority for queue
"""
# pylint: disable-msg=E1101
# as OP_ID is dynamically defined
WITH_LU = True
OP_PARAMS = [
("dry_run", None, ht.TMaybeBool, "Run checks only, don't execute"),
("debug_level", None, ht.TOr(ht.TNone, ht.TPositiveInt), "Debug level"),
("priority", constants.OP_PRIO_DEFAULT,
ht.TElemOf(constants.OP_PRIO_SUBMIT_VALID), "Opcode priority"),
]
def __getstate__(self):
"""Specialized getstate for opcodes.
This method adds to the state dictionary the OP_ID of the class,
so that on unload we can identify the correct class for
instantiating the opcode.
@rtype: C{dict}
@return: the state as a dictionary
"""
data = BaseOpCode.__getstate__(self)
data["OP_ID"] = self.OP_ID
return data
@classmethod
def LoadOpCode(cls, data):
"""Generic load opcode method.
The method identifies the correct opcode class from the dict-form
by looking for a OP_ID key, if this is not found, or its value is
not available in this module as a child of this class, we fail.
@type data: C{dict}
@param data: the serialized opcode
"""
if not isinstance(data, dict):
raise ValueError("Invalid data to LoadOpCode (%s)" % type(data))
if "OP_ID" not in data:
raise ValueError("Invalid data to LoadOpcode, missing OP_ID")
op_id = data["OP_ID"]
op_class = None
if op_id in OP_MAPPING:
op_class = OP_MAPPING[op_id]
else:
raise ValueError("Invalid data to LoadOpCode: OP_ID %s unsupported" %
op_id)
op = op_class()
new_data = data.copy()
del new_data["OP_ID"]
op.__setstate__(new_data)
return op
def Summary(self):
"""Generates a summary description of this opcode.
The summary is the value of the OP_ID attribute (without the "OP_"
prefix), plus the value of the OP_DSC_FIELD attribute, if one was
defined; this field should allow to easily identify the operation
(for an instance creation job, e.g., it would be the instance
name).
"""
assert self.OP_ID is not None and len(self.OP_ID) > 3
# all OP_ID start with OP_, we remove that
txt = self.OP_ID[3:]
field_name = getattr(self, "OP_DSC_FIELD", None)
if field_name:
field_value = getattr(self, field_name, None)
if isinstance(field_value, (list, tuple)):
field_value = ",".join(str(i) for i in field_value)
txt = "%s(%s)" % (txt, field_value)
return txt
def TinySummary(self):
"""Generates a compact summary description of the opcode.
"""
assert self.OP_ID.startswith("OP_")
text = self.OP_ID[3:]
for (prefix, supplement) in _SUMMARY_PREFIX.items():
if text.startswith(prefix):
return supplement + text[len(prefix):]
return text
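# Added example (sketch): opcodes serialize to plain dicts and can be rebuilt
# generically, e.g.
#
#   op = OpInstanceStartup(instance_name="inst1.example.com")
#   state = op.__getstate__()       # includes "OP_ID": "OP_INSTANCE_STARTUP"
#   op2 = OpCode.LoadOpCode(state)  # dispatched through OP_MAPPING
#   assert op2.Summary() == "INSTANCE_STARTUP(inst1.example.com)"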
# cluster opcodes
class OpClusterPostInit(OpCode):
"""Post cluster initialization.
This opcode does not touch the cluster at all. Its purpose is to run hooks
after the cluster has been initialized.
"""
class OpClusterDestroy(OpCode):
"""Destroy the cluster.
This opcode has no other parameters. All the state is irreversibly
lost after the execution of this opcode.
"""
class OpClusterQuery(OpCode):
"""Query cluster information."""
class OpClusterVerifyConfig(OpCode):
"""Verify the cluster config.
"""
OP_PARAMS = [
("verbose", False, ht.TBool, None),
("error_codes", False, ht.TBool, None),
("debug_simulate_errors", False, ht.TBool, None),
]
class OpClusterVerifyGroup(OpCode):
"""Run verify on a node group from the cluster.
@type skip_checks: C{list}
@ivar skip_checks: steps to be skipped from the verify process; this
needs to be a subset of
L{constants.VERIFY_OPTIONAL_CHECKS}; currently
only L{constants.VERIFY_NPLUSONE_MEM} can be passed
"""
OP_DSC_FIELD = "group_name"
OP_PARAMS = [
("group_name", ht.NoDefault, ht.TNonEmptyString, None),
("skip_checks", ht.EmptyList,
ht.TListOf(ht.TElemOf(constants.VERIFY_OPTIONAL_CHECKS)), None),
("verbose", False, ht.TBool, None),
("error_codes", False, ht.TBool, None),
("debug_simulate_errors", False, ht.TBool, None),
]
class OpClusterVerifyDisks(OpCode):
"""Verify the cluster disks.
Parameters: none
Result: a tuple of four elements:
- list of node names with bad data returned (unreachable, etc.)
- dict of node names with broken volume groups (values: error msg)
- list of instances with degraded disks (that should be activated)
- dict of instances with missing logical volumes (values: (node, vol)
pairs with details about the missing volumes)
In normal operation, all lists should be empty. A non-empty instance
list (3rd element of the result) is still ok (errors were fixed) but
non-empty node list means some node is down, and probably there are
unfixable drbd errors.
Note that only instances that are drbd-based are taken into
consideration. This might need to be revisited in the future.
"""
class OpClusterRepairDiskSizes(OpCode):
"""Verify the disk sizes of the instances and fixes configuration
mimatches.
Parameters: optional instances list, in case we want to restrict the
checks to only a subset of the instances.
Result: a list of tuples, (instance, disk, new-size) for changed
configurations.
In normal operation, the list should be empty.
@type instances: list
@ivar instances: the list of instances to check, or empty for all instances
"""
OP_PARAMS = [
("instances", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), None),
]
class OpClusterConfigQuery(OpCode):
"""Query cluster configuration values."""
OP_PARAMS = [
_POutputFields
]
class OpClusterRename(OpCode):
"""Rename the cluster.
@type name: C{str}
@ivar name: The new name of the cluster. The name and/or the master IP
address will be changed to match the new name and its IP
address.
"""
OP_DSC_FIELD = "name"
OP_PARAMS = [
("name", ht.NoDefault, ht.TNonEmptyString, None),
]
class OpClusterSetParams(OpCode):
"""Change the parameters of the cluster.
@type vg_name: C{str} or C{None}
@ivar vg_name: The new volume group name or None to disable LVM usage.
"""
OP_PARAMS = [
("vg_name", None, ht.TMaybeString, "Volume group name"),
("enabled_hypervisors", None,
ht.TOr(ht.TAnd(ht.TListOf(ht.TElemOf(constants.HYPER_TYPES)), ht.TTrue),
ht.TNone),
"List of enabled hypervisors"),
("hvparams", None, ht.TOr(ht.TDictOf(ht.TNonEmptyString, ht.TDict),
ht.TNone),
"Cluster-wide hypervisor parameter defaults, hypervisor-dependent"),
("beparams", None, ht.TOr(ht.TDict, ht.TNone),
"Cluster-wide backend parameter defaults"),
("os_hvp", None, ht.TOr(ht.TDictOf(ht.TNonEmptyString, ht.TDict),
ht.TNone),
"Cluster-wide per-OS hypervisor parameter defaults"),
("osparams", None, ht.TOr(ht.TDictOf(ht.TNonEmptyString, ht.TDict),
ht.TNone),
"Cluster-wide OS parameter defaults"),
("candidate_pool_size", None, ht.TOr(ht.TStrictPositiveInt, ht.TNone),
"Master candidate pool size"),
("uid_pool", None, ht.NoType,
"Set UID pool, must be list of lists describing UID ranges (two items,"
" start and end inclusive)"),
("add_uids", None, ht.NoType,
"Extend UID pool, must be list of lists describing UID ranges (two"
" items, start and end inclusive) to be added"),
("remove_uids", None, ht.NoType,
"Shrink UID pool, must be list of lists describing UID ranges (two"
" items, start and end inclusive) to be removed"),
("maintain_node_health", None, ht.TMaybeBool,
"Whether to automatically maintain node health"),
("prealloc_wipe_disks", None, ht.TMaybeBool,
"Whether to wipe disks before allocating them to instances"),
("nicparams", None, ht.TMaybeDict, "Cluster-wide NIC parameter defaults"),
("ndparams", None, ht.TMaybeDict, "Cluster-wide node parameter defaults"),
("drbd_helper", None, ht.TOr(ht.TString, ht.TNone), "DRBD helper program"),
("default_iallocator", None, ht.TOr(ht.TString, ht.TNone),
"Default iallocator for cluster"),
("master_netdev", None, ht.TOr(ht.TString, ht.TNone),
"Master network device"),
("reserved_lvs", None, ht.TOr(ht.TListOf(ht.TNonEmptyString), ht.TNone),
"List of reserved LVs"),
("hidden_os", None, _TestClusterOsList,
"Modify list of hidden operating systems. Each modification must have"
" two items, the operation and the OS name. The operation can be"
" ``%s`` or ``%s``." % (constants.DDM_ADD, constants.DDM_REMOVE)),
("blacklisted_os", None, _TestClusterOsList,
"Modify list of blacklisted operating systems. Each modification must have"
" two items, the operation and the OS name. The operation can be"
" ``%s`` or ``%s``." % (constants.DDM_ADD, constants.DDM_REMOVE)),
]
class OpClusterRedistConf(OpCode):
"""Force a full push of the cluster configuration.
"""
class OpQuery(OpCode):
"""Query for resources/items.
@ivar what: Resources to query for, must be one of L{constants.QR_VIA_OP}
@ivar fields: List of fields to retrieve
@ivar filter: Query filter
"""
OP_PARAMS = [
_PQueryWhat,
("fields", ht.NoDefault, ht.TListOf(ht.TNonEmptyString),
"Requested fields"),
("filter", None, ht.TOr(ht.TNone, ht.TListOf),
"Query filter"),
]
class OpQueryFields(OpCode):
"""Query for available resource/item fields.
@ivar what: Resources to query for, must be one of L{constants.QR_VIA_OP}
@ivar fields: List of fields to retrieve
"""
OP_PARAMS = [
_PQueryWhat,
("fields", None, ht.TOr(ht.TNone, ht.TListOf(ht.TNonEmptyString)),
"Requested fields; if not given, all are returned"),
]
class OpOobCommand(OpCode):
"""Interact with OOB."""
OP_PARAMS = [
("node_names", ht.EmptyList, ht.TListOf(ht.TNonEmptyString),
"List of nodes to run the OOB command against"),
("command", None, ht.TElemOf(constants.OOB_COMMANDS),
"OOB command to be run"),
("timeout", constants.OOB_TIMEOUT, ht.TInt,
"Timeout before the OOB helper will be terminated"),
("ignore_status", False, ht.TBool,
"Ignores the node offline status for power off"),
("power_delay", constants.OOB_POWER_DELAY, ht.TPositiveFloat,
"Time in seconds to wait between powering on nodes"),
]
# node opcodes
class OpNodeRemove(OpCode):
"""Remove a node.
@type node_name: C{str}
@ivar node_name: The name of the node to remove. If the node still has
instances on it, the operation will fail.
"""
OP_DSC_FIELD = "node_name"
OP_PARAMS = [
_PNodeName,
]
class OpNodeAdd(OpCode):
"""Add a node to the cluster.
@type node_name: C{str}
@ivar node_name: The name of the node to add. This can be a short name,
but it will be expanded to the FQDN.
@type primary_ip: IP address
@ivar primary_ip: The primary IP of the node. This will be ignored when the
opcode is submitted, but will be filled during the node
add (so it will be visible in the job query).
@type secondary_ip: IP address
@ivar secondary_ip: The secondary IP of the node. This needs to be passed
if the cluster has been initialized in 'dual-network'
mode, otherwise it must not be given.
@type readd: C{bool}
@ivar readd: Whether to re-add an existing node to the cluster. If
this is not passed, then the operation will abort if the node
name is already in the cluster; use this parameter to 'repair'
a node that had its configuration broken, or was reinstalled
without removal from the cluster.
@type group: C{str}
@ivar group: The node group to which this node will belong.
@type vm_capable: C{bool}
@ivar vm_capable: The vm_capable node attribute
@type master_capable: C{bool}
@ivar master_capable: The master_capable node attribute
"""
OP_DSC_FIELD = "node_name"
OP_PARAMS = [
_PNodeName,
("primary_ip", None, ht.NoType, "Primary IP address"),
("secondary_ip", None, ht.TMaybeString, "Secondary IP address"),
("readd", False, ht.TBool, "Whether node is re-added to cluster"),
("group", None, ht.TMaybeString, "Initial node group"),
("master_capable", None, ht.TMaybeBool,
"Whether node can become master or master candidate"),
("vm_capable", None, ht.TMaybeBool,
"Whether node can host instances"),
("ndparams", None, ht.TMaybeDict, "Node parameters"),
]
class OpNodeQuery(OpCode):
"""Compute the list of nodes."""
OP_PARAMS = [
_POutputFields,
_PUseLocking,
("names", ht.EmptyList, ht.TListOf(ht.TNonEmptyString),
"Empty list to query all nodes, node names otherwise"),
]
class OpNodeQueryvols(OpCode):
"""Get list of volumes on node."""
OP_PARAMS = [
_POutputFields,
("nodes", ht.EmptyList, ht.TListOf(ht.TNonEmptyString),
"Empty list to query all nodes, node names otherwise"),
]
class OpNodeQueryStorage(OpCode):
"""Get information on storage for node(s)."""
OP_PARAMS = [
_POutputFields,
_PStorageType,
("nodes", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), "List of nodes"),
("name", None, ht.TMaybeString, "Storage name"),
]
class OpNodeModifyStorage(OpCode):
"""Modifies the properies of a storage unit"""
OP_PARAMS = [
_PNodeName,
_PStorageType,
_PStorageName,
("changes", ht.NoDefault, ht.TDict, "Requested changes"),
]
class OpRepairNodeStorage(OpCode):
"""Repairs the volume group on a node."""
OP_DSC_FIELD = "node_name"
OP_PARAMS = [
_PNodeName,
_PStorageType,
_PStorageName,
_PIgnoreConsistency,
]
class OpNodeSetParams(OpCode):
"""Change the parameters of a node."""
OP_DSC_FIELD = "node_name"
OP_PARAMS = [
_PNodeName,
_PForce,
("master_candidate", None, ht.TMaybeBool,
"Whether the node should become a master candidate"),
("offline", None, ht.TMaybeBool,
"Whether the node should be marked as offline"),
("drained", None, ht.TMaybeBool,
"Whether the node should be marked as drained"),
("auto_promote", False, ht.TBool,
"Whether node(s) should be promoted to master candidate if necessary"),
("master_capable", None, ht.TMaybeBool,
"Denote whether node can become master or master candidate"),
("vm_capable", None, ht.TMaybeBool,
"Denote whether node can host instances"),
("secondary_ip", None, ht.TMaybeString,
"Change node's secondary IP address"),
("ndparams", None, ht.TMaybeDict, "Set node parameters"),
("powered", None, ht.TMaybeBool,
"Whether the node should be marked as powered"),
]
class OpNodePowercycle(OpCode):
"""Tries to powercycle a node."""
OP_DSC_FIELD = "node_name"
OP_PARAMS = [
_PNodeName,
_PForce,
]
class OpNodeMigrate(OpCode):
"""Migrate all instances from a node."""
OP_DSC_FIELD = "node_name"
OP_PARAMS = [
_PNodeName,
_PMigrationMode,
_PMigrationLive,
_PMigrationTargetNode,
("iallocator", None, ht.TMaybeString,
"Iallocator for deciding the target node for shared-storage instances"),
]
class OpNodeEvacStrategy(OpCode):
"""Compute the evacuation strategy for a list of nodes."""
OP_DSC_FIELD = "nodes"
OP_PARAMS = [
("nodes", ht.NoDefault, ht.TListOf(ht.TNonEmptyString), None),
("remote_node", None, ht.TMaybeString, None),
("iallocator", None, ht.TMaybeString, None),
]
# instance opcodes
class OpInstanceCreate(OpCode):
"""Create an instance.
@ivar instance_name: Instance name
@ivar mode: Instance creation mode (one of L{constants.INSTANCE_CREATE_MODES})
@ivar source_handshake: Signed handshake from source (remote import only)
@ivar source_x509_ca: Source X509 CA in PEM format (remote import only)
@ivar source_instance_name: Previous name of instance (remote import only)
@ivar source_shutdown_timeout: Shutdown timeout used for source instance
(remote import only)
"""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
_PForceVariant,
_PWaitForSync,
_PNameCheck,
("beparams", ht.EmptyDict, ht.TDict, "Backend parameters for instance"),
("disks", ht.NoDefault,
# TODO: Generate check from constants.IDISK_PARAMS_TYPES
ht.TListOf(ht.TDictOf(ht.TElemOf(constants.IDISK_PARAMS),
ht.TOr(ht.TNonEmptyString, ht.TInt))),
"Disk descriptions, for example ``[{\"%s\": 100}, {\"%s\": 5}]``;"
" each disk definition must contain a ``%s`` value and"
" can contain an optional ``%s`` value denoting the disk access mode"
" (%s)" %
(constants.IDISK_SIZE, constants.IDISK_SIZE, constants.IDISK_SIZE,
constants.IDISK_MODE,
" or ".join("``%s``" % i for i in sorted(constants.DISK_ACCESS_SET)))),
("disk_template", ht.NoDefault, _CheckDiskTemplate, "Disk template"),
("file_driver", None, ht.TOr(ht.TNone, ht.TElemOf(constants.FILE_DRIVER)),
"Driver for file-backed disks"),
("file_storage_dir", None, ht.TMaybeString,
"Directory for storing file-backed disks"),
("hvparams", ht.EmptyDict, ht.TDict,
"Hypervisor parameters for instance, hypervisor-dependent"),
("hypervisor", None, ht.TMaybeString, "Hypervisor"),
("iallocator", None, ht.TMaybeString,
"Iallocator for deciding which node(s) to use"),
("identify_defaults", False, ht.TBool,
"Reset instance parameters to default if equal"),
("ip_check", True, ht.TBool, _PIpCheckDoc),
("mode", ht.NoDefault, ht.TElemOf(constants.INSTANCE_CREATE_MODES),
"Instance creation mode"),
("nics", ht.NoDefault, ht.TListOf(_TestNicDef),
"List of NIC (network interface) definitions, for example"
" ``[{}, {}, {\"%s\": \"198.51.100.4\"}]``; each NIC definition can"
" contain the optional values %s" %
(constants.INIC_IP,
", ".join("``%s``" % i for i in sorted(constants.INIC_PARAMS)))),
("no_install", None, ht.TMaybeBool,
"Do not install the OS (will disable automatic start)"),
("osparams", ht.EmptyDict, ht.TDict, "OS parameters for instance"),
("os_type", None, ht.TMaybeString, "Operating system"),
("pnode", None, ht.TMaybeString, "Primary node"),
("snode", None, ht.TMaybeString, "Secondary node"),
("source_handshake", None, ht.TOr(ht.TList, ht.TNone),
"Signed handshake from source (remote import only)"),
("source_instance_name", None, ht.TMaybeString,
"Source instance name (remote import only)"),
("source_shutdown_timeout", constants.DEFAULT_SHUTDOWN_TIMEOUT,
ht.TPositiveInt,
"How long source instance was given to shut down (remote import only)"),
("source_x509_ca", None, ht.TMaybeString,
"Source X509 CA in PEM format (remote import only)"),
("src_node", None, ht.TMaybeString, "Source node for import"),
("src_path", None, ht.TMaybeString, "Source directory for import"),
("start", True, ht.TBool, "Whether to start instance after creation"),
("tags", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), "Instance tags"),
]
class OpInstanceReinstall(OpCode):
"""Reinstall an instance's OS."""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
_PForceVariant,
("os_type", None, ht.TMaybeString, "Instance operating system"),
("osparams", None, ht.TMaybeDict, "Temporary OS parameters"),
]
class OpInstanceRemove(OpCode):
"""Remove an instance."""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
_PShutdownTimeout,
("ignore_failures", False, ht.TBool,
"Whether to ignore failures during removal"),
]
class OpInstanceRename(OpCode):
"""Rename an instance."""
OP_PARAMS = [
_PInstanceName,
_PNameCheck,
("new_name", ht.NoDefault, ht.TNonEmptyString, "New instance name"),
("ip_check", False, ht.TBool, _PIpCheckDoc),
]
class OpInstanceStartup(OpCode):
"""Startup an instance."""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
_PForce,
_PIgnoreOfflineNodes,
("hvparams", ht.EmptyDict, ht.TDict,
"Temporary hypervisor parameters, hypervisor-dependent"),
("beparams", ht.EmptyDict, ht.TDict, "Temporary backend parameters"),
_PNoRemember,
]
class OpInstanceShutdown(OpCode):
"""Shutdown an instance."""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
_PIgnoreOfflineNodes,
("timeout", constants.DEFAULT_SHUTDOWN_TIMEOUT, ht.TPositiveInt,
"How long to wait for instance to shut down"),
_PNoRemember,
]
class OpInstanceReboot(OpCode):
"""Reboot an instance."""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
_PShutdownTimeout,
("ignore_secondaries", False, ht.TBool,
"Whether to start the instance even if secondary disks are failing"),
("reboot_type", ht.NoDefault, ht.TElemOf(constants.REBOOT_TYPES),
"How to reboot instance"),
]
class OpInstanceReplaceDisks(OpCode):
"""Replace the disks of an instance."""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
("mode", ht.NoDefault, ht.TElemOf(constants.REPLACE_MODES),
"Replacement mode"),
("disks", ht.EmptyList, ht.TListOf(ht.TPositiveInt),
"Disk indexes"),
("remote_node", None, ht.TMaybeString, "New secondary node"),
("iallocator", None, ht.TMaybeString,
"Iallocator for deciding new secondary node"),
("early_release", False, ht.TBool,
"Whether to release locks as soon as possible"),
]
class OpInstanceFailover(OpCode):
"""Failover an instance."""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
_PShutdownTimeout,
_PIgnoreConsistency,
_PMigrationTargetNode,
("iallocator", None, ht.TMaybeString,
"Iallocator for deciding the target node for shared-storage instances"),
]
class OpInstanceMigrate(OpCode):
"""Migrate an instance.
  This migrates an instance (without shutting it down) to its secondary
  node.
@ivar instance_name: the name of the instance
@ivar mode: the migration mode (live, non-live or None for auto)
"""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
_PMigrationMode,
_PMigrationLive,
_PMigrationTargetNode,
("cleanup", False, ht.TBool,
"Whether a previously failed migration should be cleaned up"),
("iallocator", None, ht.TMaybeString,
"Iallocator for deciding the target node for shared-storage instances"),
("allow_failover", False, ht.TBool,
"Whether we can fallback to failover if migration is not possible"),
]
class OpInstanceMove(OpCode):
"""Move an instance.
  This moves an instance (shutting it down and copying its data) to an
  arbitrary node.
@ivar instance_name: the name of the instance
@ivar target_node: the destination node
"""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
_PShutdownTimeout,
("target_node", ht.NoDefault, ht.TNonEmptyString, "Target node"),
_PIgnoreConsistency,
]
class OpInstanceConsole(OpCode):
"""Connect to an instance's console."""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName
]
class OpInstanceActivateDisks(OpCode):
"""Activate an instance's disks."""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
("ignore_size", False, ht.TBool, "Whether to ignore recorded size"),
]
class OpInstanceDeactivateDisks(OpCode):
"""Deactivate an instance's disks."""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
_PForce,
]
class OpInstanceRecreateDisks(OpCode):
"""Deactivate an instance's disks."""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
("disks", ht.EmptyList, ht.TListOf(ht.TPositiveInt),
"List of disk indexes"),
("nodes", ht.EmptyList, ht.TListOf(ht.TNonEmptyString),
"New instance nodes, if relocation is desired"),
]
class OpInstanceQuery(OpCode):
"""Compute the list of instances."""
OP_PARAMS = [
_POutputFields,
_PUseLocking,
("names", ht.EmptyList, ht.TListOf(ht.TNonEmptyString),
"Empty list to query all instances, instance names otherwise"),
]
class OpInstanceQueryData(OpCode):
"""Compute the run-time status of instances."""
OP_PARAMS = [
_PUseLocking,
("instances", ht.EmptyList, ht.TListOf(ht.TNonEmptyString),
"Instance names"),
("static", False, ht.TBool,
"Whether to only return configuration data without querying"
" nodes"),
]
class OpInstanceSetParams(OpCode):
"""Change the parameters of an instance."""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
_PForce,
_PForceVariant,
# TODO: Use _TestNicDef
("nics", ht.EmptyList, ht.TList,
"List of NIC changes. Each item is of the form ``(op, settings)``."
" ``op`` can be ``%s`` to add a new NIC with the specified settings,"
" ``%s`` to remove the last NIC or a number to modify the settings"
" of the NIC with that index." %
(constants.DDM_ADD, constants.DDM_REMOVE)),
("disks", ht.EmptyList, ht.TList, "List of disk changes. See ``nics``."),
("beparams", ht.EmptyDict, ht.TDict, "Per-instance backend parameters"),
("hvparams", ht.EmptyDict, ht.TDict,
"Per-instance hypervisor parameters, hypervisor-dependent"),
("disk_template", None, ht.TOr(ht.TNone, _CheckDiskTemplate),
"Disk template for instance"),
("remote_node", None, ht.TMaybeString,
"Secondary node (used when changing disk template)"),
("os_name", None, ht.TMaybeString,
"Change instance's OS name. Does not reinstall the instance."),
("osparams", None, ht.TMaybeDict, "Per-instance OS parameters"),
("wait_for_sync", True, ht.TBool,
"Whether to wait for the disk to synchronize, when changing template"),
]
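# Illustrative "nics" value for OpInstanceSetParams (added): only the
# (op, settings) shape comes from the parameter docstring above; the
# settings keys are assumptions.
#   [(constants.DDM_ADD, {"ip": "192.0.2.10"}),    # add a new NIC
#    (constants.DDM_REMOVE, {}),                   # remove the last NIC
#    (0, {"mac": "aa:bb:cc:dd:ee:ff"})]            # modify the NIC at index 0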
class OpInstanceGrowDisk(OpCode):
"""Grow a disk of an instance."""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
_PWaitForSync,
("disk", ht.NoDefault, ht.TInt, "Disk index"),
("amount", ht.NoDefault, ht.TInt,
"Amount of disk space to add (megabytes)"),
]
# Node group opcodes
class OpGroupAdd(OpCode):
"""Add a node group to the cluster."""
OP_DSC_FIELD = "group_name"
OP_PARAMS = [
_PGroupName,
_PNodeGroupAllocPolicy,
_PGroupNodeParams,
]
class OpGroupAssignNodes(OpCode):
"""Assign nodes to a node group."""
OP_DSC_FIELD = "group_name"
OP_PARAMS = [
_PGroupName,
_PForce,
("nodes", ht.NoDefault, ht.TListOf(ht.TNonEmptyString),
"List of nodes to assign"),
]
class OpGroupQuery(OpCode):
"""Compute the list of node groups."""
OP_PARAMS = [
_POutputFields,
("names", ht.EmptyList, ht.TListOf(ht.TNonEmptyString),
"Empty list to query all groups, group names otherwise"),
]
class OpGroupSetParams(OpCode):
"""Change the parameters of a node group."""
OP_DSC_FIELD = "group_name"
OP_PARAMS = [
_PGroupName,
_PNodeGroupAllocPolicy,
_PGroupNodeParams,
]
class OpGroupRemove(OpCode):
"""Remove a node group from the cluster."""
OP_DSC_FIELD = "group_name"
OP_PARAMS = [
_PGroupName,
]
class OpGroupRename(OpCode):
"""Rename a node group in the cluster."""
OP_PARAMS = [
_PGroupName,
("new_name", ht.NoDefault, ht.TNonEmptyString, "New group name"),
]
# OS opcodes
class OpOsDiagnose(OpCode):
"""Compute the list of guest operating systems."""
OP_PARAMS = [
_POutputFields,
("names", ht.EmptyList, ht.TListOf(ht.TNonEmptyString),
"Which operating systems to diagnose"),
]
# Exports opcodes
class OpBackupQuery(OpCode):
"""Compute the list of exported images."""
OP_PARAMS = [
_PUseLocking,
("nodes", ht.EmptyList, ht.TListOf(ht.TNonEmptyString),
"Empty list to query all nodes, node names otherwise"),
]
class OpBackupPrepare(OpCode):
"""Prepares an instance export.
@ivar instance_name: Instance name
@ivar mode: Export mode (one of L{constants.EXPORT_MODES})
"""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
("mode", ht.NoDefault, ht.TElemOf(constants.EXPORT_MODES),
"Export mode"),
]
class OpBackupExport(OpCode):
"""Export an instance.
For local exports, the export destination is the node name. For remote
exports, the export destination is a list of tuples, each consisting of
hostname/IP address, port, HMAC and HMAC salt. The HMAC is calculated using
the cluster domain secret over the value "${index}:${hostname}:${port}". The
destination X509 CA must be a signed certificate.
@ivar mode: Export mode (one of L{constants.EXPORT_MODES})
@ivar target_node: Export destination
@ivar x509_key_name: X509 key to use (remote export only)
@ivar destination_x509_ca: Destination X509 CA in PEM format (remote export
only)
"""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
_PShutdownTimeout,
# TODO: Rename target_node as it changes meaning for different export modes
# (e.g. "destination")
("target_node", ht.NoDefault, ht.TOr(ht.TNonEmptyString, ht.TList),
"Destination information, depends on export mode"),
("shutdown", True, ht.TBool, "Whether to shutdown instance before export"),
("remove_instance", False, ht.TBool,
"Whether to remove instance after export"),
("ignore_remove_failures", False, ht.TBool,
"Whether to ignore failures while removing instances"),
("mode", constants.EXPORT_MODE_LOCAL, ht.TElemOf(constants.EXPORT_MODES),
"Export mode"),
("x509_key_name", None, ht.TOr(ht.TList, ht.TNone),
"Name of X509 key (remote export only)"),
("destination_x509_ca", None, ht.TMaybeString,
"Destination X509 CA (remote export only)"),
]
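# Illustrative sketch (added, not part of the opcode definition): the HMAC
# described in the OpBackupExport docstring could be computed roughly as
#   hmac.new(cluster_domain_secret, "%s:%s:%s" % (index, hostname, port),
#            hashlib.sha1).hexdigest()
# the exact digest and encoding Ganeti uses are assumptions here.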
class OpBackupRemove(OpCode):
"""Remove an instance's export."""
OP_DSC_FIELD = "instance_name"
OP_PARAMS = [
_PInstanceName,
]
# Tags opcodes
class OpTagsGet(OpCode):
"""Returns the tags of the given object."""
OP_DSC_FIELD = "name"
OP_PARAMS = [
_PTagKind,
# Name is only meaningful for nodes and instances
("name", ht.NoDefault, ht.TMaybeString, None),
]
class OpTagsSearch(OpCode):
"""Searches the tags in the cluster for a given pattern."""
OP_DSC_FIELD = "pattern"
OP_PARAMS = [
("pattern", ht.NoDefault, ht.TNonEmptyString, None),
]
class OpTagsSet(OpCode):
"""Add a list of tags on a given object."""
OP_PARAMS = [
_PTagKind,
_PTags,
# Name is only meaningful for nodes and instances
("name", ht.NoDefault, ht.TMaybeString, None),
]
class OpTagsDel(OpCode):
"""Remove a list of tags from a given object."""
OP_PARAMS = [
_PTagKind,
_PTags,
# Name is only meaningful for nodes and instances
("name", ht.NoDefault, ht.TMaybeString, None),
]
# Test opcodes
class OpTestDelay(OpCode):
"""Sleeps for a configured amount of time.
This is used just for debugging and testing.
Parameters:
- duration: the time to sleep
- on_master: if true, sleep on the master
- on_nodes: list of nodes in which to sleep
If the on_master parameter is true, it will execute a sleep on the
master (before any node sleep).
If the on_nodes list is not empty, it will sleep on those nodes
(after the sleep on the master, if that is enabled).
As an additional feature, the case of duration < 0 will be reported
as an execution error, so this opcode can be used as a failure
generator. The case of duration == 0 will not be treated specially.
"""
OP_DSC_FIELD = "duration"
OP_PARAMS = [
("duration", ht.NoDefault, ht.TFloat, None),
("on_master", True, ht.TBool, None),
("on_nodes", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), None),
("repeat", 0, ht.TPositiveInt, None),
]
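# Illustrative failure generator (added): since duration < 0 is reported as
# an execution error, submitting e.g. OpTestDelay(duration=-1.0) exercises
# error-handling paths; the keyword-argument form is an assumption.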
class OpTestAllocator(OpCode):
"""Allocator framework testing.
This opcode has two modes:
- gather and return allocator input for a given mode (allocate new
or replace secondary) and a given instance definition (direction
'in')
- run a selected allocator for a given operation (as above) and
return the allocator output (direction 'out')
"""
OP_DSC_FIELD = "allocator"
OP_PARAMS = [
("direction", ht.NoDefault,
ht.TElemOf(constants.VALID_IALLOCATOR_DIRECTIONS), None),
("mode", ht.NoDefault, ht.TElemOf(constants.VALID_IALLOCATOR_MODES), None),
("name", ht.NoDefault, ht.TNonEmptyString, None),
("nics", ht.NoDefault, ht.TOr(ht.TNone, ht.TListOf(
ht.TDictOf(ht.TElemOf([constants.INIC_MAC, constants.INIC_IP, "bridge"]),
ht.TOr(ht.TNone, ht.TNonEmptyString)))), None),
("disks", ht.NoDefault, ht.TOr(ht.TNone, ht.TList), None),
("hypervisor", None, ht.TMaybeString, None),
("allocator", None, ht.TMaybeString, None),
("tags", ht.EmptyList, ht.TListOf(ht.TNonEmptyString), None),
("memory", None, ht.TOr(ht.TNone, ht.TPositiveInt), None),
("vcpus", None, ht.TOr(ht.TNone, ht.TPositiveInt), None),
("os", None, ht.TMaybeString, None),
("disk_template", None, ht.TMaybeString, None),
("evac_nodes", None, ht.TOr(ht.TNone, ht.TListOf(ht.TNonEmptyString)),
None),
("instances", None, ht.TOr(ht.TNone, ht.TListOf(ht.TNonEmptyString)),
None),
("evac_mode", None,
ht.TOr(ht.TNone, ht.TElemOf(constants.IALLOCATOR_NEVAC_MODES)), None),
("target_groups", None, ht.TOr(ht.TNone, ht.TListOf(ht.TNonEmptyString)),
None),
]
class OpTestJqueue(OpCode):
"""Utility opcode to test some aspects of the job queue.
"""
OP_PARAMS = [
("notify_waitlock", False, ht.TBool, None),
("notify_exec", False, ht.TBool, None),
("log_messages", ht.EmptyList, ht.TListOf(ht.TString), None),
("fail", False, ht.TBool, None),
]
class OpTestDummy(OpCode):
"""Utility opcode used by unittests.
"""
OP_PARAMS = [
("result", ht.NoDefault, ht.NoType, None),
("messages", ht.NoDefault, ht.NoType, None),
("fail", ht.NoDefault, ht.NoType, None),
("submit_jobs", None, ht.NoType, None),
]
WITH_LU = False
def _GetOpList():
"""Returns list of all defined opcodes.
Does not eliminate duplicates by C{OP_ID}.
"""
return [v for v in globals().values()
if (isinstance(v, type) and issubclass(v, OpCode) and
hasattr(v, "OP_ID") and v is not OpCode)]
OP_MAPPING = dict((v.OP_ID, v) for v in _GetOpList())
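# Illustrative use (added): OP_MAPPING lets deserialization code map a
# serialized opcode identifier back to its class, e.g.
#   cls = OP_MAPPING.get("OP_INSTANCE_REBOOT")  # -> OpInstanceReboot
# the OP_ID string shown is an assumption derived from the class name.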
| ekohl/ganeti | lib/opcodes.py | Python | gpl-2.0 | 47,432 |
#!/usr/bin/env python
import re
import string
import sys
import os
USAGE = 'USAGE: parse.py <player.h> <playercore_casts.i> <playercore_arraysofclasses.i> <Jplayercore> <playercore> <player.java>'
if __name__ == '__main__':
if len(sys.argv) != 7:
print USAGE
sys.exit(-1)
infilename = sys.argv[1]
outfilename = sys.argv[2]
aofcfilename = sys.argv[3]
outdir = sys.argv[4]
pcoutdir = sys.argv[5]
pcjfilename = sys.argv[6]
os.system('mkdir -p ' + outdir)
os.system('mkdir -p ' + pcoutdir)
# Read in the entire file
infile = open(infilename, 'r')
instream = infile.read()
infile.close()
outfile = open(outfilename, 'w+')
aofcfile = open(aofcfilename, 'w+')
pcjfile = open(pcoutdir + '/' + pcjfilename, 'w+')
# strip C++-style comments
pattern = re.compile('//.*')
instream = pattern.sub('', instream)
# strip C-style comments
pattern = re.compile('/\*.*?\*/', re.MULTILINE | re.DOTALL)
instream = pattern.sub('', instream)
# strip blank lines
pattern = re.compile('^\s*?\n', re.MULTILINE)
instream = pattern.sub('', instream)
# find structs
pattern = re.compile('typedef\s+struct\s+player_\w+[^}]+\}[^;]+',
re.MULTILINE)
structs = pattern.findall(instream)
print 'Found ' + `len(structs)` + ' struct(s)'
contentspattern = re.compile('.*\{\s*(.*?)\s*\}', re.MULTILINE | re.DOTALL)
declpattern = re.compile('\s*([^;]*?;)', re.MULTILINE)
typepattern = re.compile('\s*\S+')
variablepattern = re.compile('\s*([^,;]+?)\s*[,;]')
#arraypattern = re.compile('\[\s*(\w*?)\s*\]')
arraypattern = re.compile('\[(.*?)\]')
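    # Illustrative match (added; the declaration is a made-up example of
    # player.h content): "float ranges[PLAYER_SONAR_MAX_SAMPLES];" would
    # yield type "float", variable "ranges" and array size
    # "PLAYER_SONAR_MAX_SAMPLES" via the patterns above.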
outfile.write('%inline\n%{\n\n')
pcjfile.write('package net.sourceforge.playerstage.Jplayercore;\n')
pcjfile.write('public class player {\n\n')
for s in structs:
# extract type of struct
split = string.split(s)
typename = split[-1]
# pick out the contents of the struct
varpart = contentspattern.findall(s)
if len(varpart) != 1:
print 'skipping nested / empty struct ' + typename
continue
# SWIG macro that lets us access arrays of this non-primitive type
# as Java arrays
aofcfile.write('JAVA_ARRAYSOFCLASSES(' + typename +')\n')
buf_to_name = 'buf_to_' + typename
buf_from_name = typename + '_to_buf'
buf_to_Jname = 'buf_to_J' + typename
buf_from_Jname = 'J' + typename + '_to_buf'
sizeof_name = typename + '_sizeof'
# function to return the size of the underlying C structure
outfile.write('size_t ' + sizeof_name + '(void)\n')
outfile.write('{\n')
outfile.write(' return(sizeof(' + typename + '));\n')
outfile.write('}\n')
# JNI cast from a void* to a pointer to this type
outfile.write(typename + '* ' + buf_to_name + '(void* buf)\n')
outfile.write('{\n')
outfile.write(' return((' + typename + '*)(buf));\n')
outfile.write('}\n')
# JNI cast from a pointer to this type to a void*
outfile.write('void* ' + buf_from_name + '(' + typename + '* msg)\n')
outfile.write('{\n')
outfile.write(' return((void*)(msg));\n')
outfile.write('}\n')
# Equivalent non-JNI Java class
jclass = 'J' + typename
jfile = open(outdir + '/' + jclass + '.java', 'w+')
jfile.write('package net.sourceforge.playerstage.Jplayercore;\n')
jfile.write('import java.io.Serializable;\n')
jfile.write('public class ' + jclass + ' implements Serializable {\n')
jfile.write(' public final static long serialVersionUID = ' + `hash(s)` + 'L;\n')
jclass_constructor = ' public ' + jclass + '() {\n';
# Static method in class player to convert from JNI Java object to
# non-JNI java object
pcj_data_to_jdata = ''
pcj_data_to_jdata += ' public static ' + jclass + ' ' + typename + '_to_' + jclass + '(' + typename + ' data) {\n'
pcj_data_to_jdata += ' ' + jclass + ' Jdata = new ' + jclass + '();\n'
# Static method in class player to convert from non-JNI Java object to
# JNI java object
pcj_jdata_to_data = ''
pcj_jdata_to_data += ' public static ' + typename + ' ' + jclass + '_to_' + typename + '(' + jclass + ' Jdata) {\n'
pcj_jdata_to_data += ' ' + typename + ' data = new ' + typename + '();\n'
# Static method in class playercore to convert from SWIGTYPE_p_void
# to non-JNI Java object.
pcjfile.write(' public static ' + jclass + ' ' + buf_to_Jname + '(SWIGTYPE_p_void buf) {\n')
pcjfile.write(' ' + typename + ' data = playercore_java.' + buf_to_name + '(buf);\n')
pcjfile.write(' return(' + typename + '_to_' + jclass + '(data));\n')
pcjfile.write(' }\n\n')
# Static method in class playercore to convert non-JNI Java object to
# SWIGTYPE_p_void.
pcjfile.write(' public static SWIGTYPE_p_void ' + buf_from_Jname + '(' + jclass + ' Jdata) {\n')
pcjfile.write(' ' + typename + ' data = ' + jclass + '_to_' + typename + '(Jdata);\n')
pcjfile.write(' return(playercore_java.' + buf_from_name + '(data));\n')
pcjfile.write(' }\n\n')
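        # Illustrative generated Java (added; names assume a struct called
        # player_pose2d_t): the two converters written above would come out as
        #   buf_to_Jplayer_pose2d_t(SWIGTYPE_p_void buf)
        #   Jplayer_pose2d_t_to_buf(Jplayer_pose2d_t Jdata)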
# separate the variable declarations
decls = declpattern.finditer(varpart[0])
for d in decls:
# find the type and variable names in this declaration
dstring = d.string[d.start(1):d.end(1)]
type = typepattern.findall(dstring)[0]
dstring = typepattern.sub('', dstring, 1)
vars = variablepattern.finditer(dstring)
# Do some name mangling for common types
builtin_type = 1
if type == 'int64_t':
jtype = 'long'
elif type == 'uint64_t':
jtype = 'long'
elif type == 'int32_t':
jtype = 'int'
elif type == 'uint32_t':
jtype = 'long'
elif type == 'int16_t':
jtype = 'short'
elif type == 'uint16_t':
jtype = 'int'
elif type == 'int8_t':
jtype = 'byte'
elif type == 'uint8_t':
jtype = 'short'
elif type == 'char':
jtype = 'char'
elif type == 'bool_t':
jtype = 'boolean'
elif type == 'double':
jtype = 'double'
elif type == 'float':
jtype = 'float'
else:
# rely on a previous declaration of a J class for this type
jtype = 'J' + type
builtin_type = 0
# iterate through each variable
for var in vars:
varstring = var.string[var.start(1):var.end(1)]
# is it an array or a scalar?
arraysize = arraypattern.findall(varstring)
if len(arraysize) > 0:
arraysize = arraysize[0]
varstring = arraypattern.sub('', varstring)
if jtype == 'char':
jfile.write(' public String ' + varstring + ';\n')
else:
jfile.write(' public ' + jtype + '[] ' + varstring + ';\n')
#if builtin_type == 0:
if jtype != 'char':
if arraysize.isdigit():
jclass_constructor += ' ' + varstring + ' = new ' + jtype + '[' + arraysize + '];\n'
else:
jclass_constructor += ' ' + varstring + ' = new ' + jtype + '[playercore_javaConstants.' + arraysize + '];\n'
else:
arraysize = ''
jfile.write(' public ' + jtype + ' ' + varstring + ';\n')
if builtin_type == 0:
jclass_constructor += ' ' + varstring + ' = new ' + jtype + '();\n'
capvarstring = string.capitalize(varstring[0]) + varstring[1:]
if builtin_type:
pcj_data_to_jdata += ' Jdata.' + varstring + ' = data.get' + capvarstring + '();\n'
pcj_jdata_to_data += ' data.set' + capvarstring + '(Jdata.' + varstring +');\n'
else:
if arraysize == '':
pcj_data_to_jdata += ' Jdata.' + varstring + ' = ' + type + '_to_' + jtype + '(data.get' + capvarstring + '());\n'
pcj_jdata_to_data += ' data.set' + capvarstring + '(' + jtype + '_to_' + type + '(Jdata.' + varstring + '));\n'
else:
try:
asize = int(arraysize)
except:
arraysize = 'playercore_javaConstants.' + arraysize
pcj_data_to_jdata += ' {\n'
pcj_data_to_jdata += ' ' + type + ' foo[] = data.get' + capvarstring + '();\n'
pcj_data_to_jdata += ' for(int i=0;i<' + arraysize + ';i++)\n'
pcj_data_to_jdata += ' Jdata.' + varstring + '[i] = ' + type + '_to_' + jtype + '(foo[i]);\n'
pcj_data_to_jdata += ' }\n'
pcj_jdata_to_data += ' {\n'
pcj_jdata_to_data += ' ' + type + ' foo[] = new ' + type + '[' + arraysize + '];\n'
pcj_jdata_to_data += ' for(int i=0;i<' + arraysize + ';i++)\n'
pcj_jdata_to_data += ' foo[i] = ' + jtype + '_to_' + type + '(Jdata.' + varstring + '[i]);\n'
pcj_jdata_to_data += ' data.set' + capvarstring + '(foo);\n'
pcj_jdata_to_data += ' }\n'
pcj_data_to_jdata += ' return(Jdata);\n'
pcj_data_to_jdata += ' }\n\n'
pcjfile.write(pcj_data_to_jdata)
pcj_jdata_to_data += ' return(data);\n'
pcj_jdata_to_data += ' }\n\n'
pcjfile.write(pcj_jdata_to_data)
jclass_constructor += ' }\n'
jfile.write(jclass_constructor)
jfile.write('}\n')
jfile.close()
outfile.write('\n%}\n')
outfile.close()
pcjfile.write('\n}\n')
pcjfile.close()
aofcfile.close()
| uml-robotics/player-2.1.3 | libplayercore/bindings/java/parse.py | Python | gpl-2.0 | 9,344 |
#
# Copyright (c) 2008--2010 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
# Implements the errata.* functions for XMLRPC
#
# common modules imports
from spacewalk.common.rhnTranslate import _
from spacewalk.common import rhnFault, rhnFlags, log_debug, log_error
# server modules imports
from spacewalk.server.rhnLib import parseRPMName
from spacewalk.server.rhnHandler import rhnHandler
from spacewalk.server import rhnSQL, rhnCapability
class Errata(rhnHandler):
""" Errata class --- retrieve (via xmlrpc) package errata. """
def __init__(self):
rhnHandler.__init__(self)
# Exposed Errata functions:
self.functions = []
self.functions.append('GetByPackage') # Clients v1-
self.functions.append('getPackageErratum') # Clients v2+
self.functions.append('getErrataInfo') # clients v2+
def GetByPackage(self, pkg, osRel):
""" Clients v1- Get errata for a package given "n-v-r" format
IN: pkg: "n-v-r" (old client call)
or [n,v,r]
osRel: OS release
RET: a hash by errata that applies to this package
(ie, newer packages are available). We also limit the scope
for a particular osRel.
"""
if type(pkg) == type(''): # Old client support.
pkg = parseRPMName(pkg)
log_debug(1, pkg, osRel)
# Stuff the action in the headers:
transport = rhnFlags.get('outputTransportOptions')
transport['X-RHN-Action'] = 'GetByPackage'
# now look up the errata
if type(pkg[0]) != type(''):
log_error("Invalid package name: %s %s" % (type(pkg[0]), pkg[0]))
raise rhnFault(30, _("Expected a package name, not: %s") % pkg[0])
#bug#186996:adding synopsis field to advisory info
#client side changes are needed to access this data.
h = rhnSQL.prepare("""
select distinct
e.id errata_id,
e.advisory_type errata_type,
e.advisory advisory,
e.topic topic,
e.description description,
e.synopsis synopsis
from
rhnErrata e,
rhnPublicChannelFamily pcf,
rhnChannelFamilyMembers cfm,
rhnErrataPackage ep,
rhnChannelPackage cp,
rhnChannelErrata ce,
rhnDistChannelMap dcm,
rhnPackage p
where 1=1
and p.name_id = LOOKUP_PACKAGE_NAME(:name)
-- map to a channel
and p.id = cp.package_id
and cp.channel_id = dcm.channel_id
and dcm.release = :dist
-- map to an errata as well
and p.id = ep.package_id
and ep.errata_id = e.id
-- the errata and the channel have to be linked
and ce.channel_id = cp.channel_id
-- and the channel has to be public
and cp.channel_id = cfm.channel_id
and cfm.channel_family_id = pcf.channel_family_id
-- and get the erratum
and e.id = ce.errata_id
""")
h.execute(name = pkg[0], dist = str(osRel))
ret = []
# sanitize the results for display in the clients
while 1:
row = h.fetchone_dict()
if row is None:
break
for k in row.keys():
if row[k] is None:
row[k] = "N/A"
ret.append(row)
return ret
def getPackageErratum(self, system_id, pkg):
""" Clients v2+ - Get errata for a package given [n,v,r,e,a,...] format
Sing-along: You say erratum(sing), I say errata(pl)! :)
IN: pkg: [n,v,r,e,s,a,ch,...]
RET: a hash by errata that applies to this package
"""
log_debug(5, system_id, pkg)
if type(pkg) != type([]) or len(pkg) < 7:
log_error("Got invalid package specification: %s" % str(pkg))
raise rhnFault(30, _("Expected a package, not: %s") % pkg)
# Authenticate and decode server id.
self.auth_system(system_id)
# log the entry
log_debug(1, self.server_id, pkg)
# Stuff the action in the headers:
transport = rhnFlags.get('outputTransportOptions')
transport['X-RHN-Action'] = 'getPackageErratum'
name, ver, rel, epoch, arch, size, channel = pkg[:7]
if epoch in ['', 'none', 'None']:
epoch = None
# XXX: also, should arch/size/channel ever be used?
#bug#186996:adding synopsis field to errata info
#client side changes are needed to access this data.
h = rhnSQL.prepare("""
select distinct
e.id errata_id,
e.advisory_type errata_type,
e.advisory advisory,
e.topic topic,
e.description description,
e.synopsis synopsis
from
rhnServerChannel sc,
rhnChannelPackage cp,
rhnChannelErrata ce,
rhnErrata e,
rhnErrataPackage ep,
rhnPackage p
where
p.name_id = LOOKUP_PACKAGE_NAME(:name)
and p.evr_id = LOOKUP_EVR(:epoch, :ver, :rel)
-- map to a channel
and p.id = cp.package_id
-- map to an errata as well
and p.id = ep.package_id
and ep.errata_id = e.id
-- the errata and the channel have to be linked
and e.id = ce.errata_id
and ce.channel_id = cp.channel_id
-- and the server has to be subscribed to the channel
and cp.channel_id = sc.channel_id
and sc.server_id = :server_id
""") # " emacs sucks
h.execute(name = name, ver = ver, rel = rel, epoch = epoch,
server_id = str(self.server_id))
ret = []
# sanitize the results for display in the clients
while 1:
row = h.fetchone_dict()
if row is None:
break
for k in row.keys():
if row[k] is None:
row[k] = "N/A"
ret.append(row)
return ret
# I don't trust this errata_id business, but chip says "trust me"
def getErrataInfo(self, system_id, errata_id):
log_debug(5, system_id, errata_id)
# Authenticate the server certificate
self.auth_system(system_id)
# log this thing
log_debug(1, self.server_id, errata_id)
client_caps = rhnCapability.get_client_capabilities()
log_debug(3,"Client Capabilities", client_caps)
multiarch = 0
cap_info = None
if client_caps and client_caps.has_key('packages.update'):
cap_info = client_caps['packages.update']
if cap_info and cap_info['version'] > 1:
multiarch = 1
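        # Illustrative result (added; values are hypothetical): multiarch
        # clients receive entries like ["kernel", "2.6.9", "5.EL", "", "i686"];
        # older clients get the same lists with an empty arch field.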
statement = """
select distinct
pn.name,
pe.epoch,
pe.version,
pe.release,
pa.label arch
from
rhnPackageName pn,
rhnPackageEVR pe,
rhnPackage p,
rhnPackageArch pa,
rhnChannelPackage cp,
rhnServerChannel sc,
rhnErrataPackage ep
where
ep.errata_id = :errata_id
and ep.package_id = p.id
and p.name_id = pn.id
and p.evr_id = pe.id
and p.package_arch_id = pa.id
and sc.server_id = :server_id
and sc.channel_id = cp.channel_id
and cp.package_id = p.id
"""
h = rhnSQL.prepare(statement)
h.execute(errata_id = errata_id, server_id = self.server_id)
packages = h.fetchall_dict()
ret = []
if not packages:
return []
for package in packages:
if package['name'] is not None:
if package['epoch'] is None:
package['epoch'] = ""
pkg_arch = ''
if multiarch:
pkg_arch = package['arch'] or ''
ret.append([package['name'],
package['version'],
package['release'],
package['epoch'],
pkg_arch])
return ret
#-----------------------------------------------------------------------------
if __name__ == "__main__":
print "You can not run this module by itself"
import sys; sys.exit(-1)
#-----------------------------------------------------------------------------
| colloquium/spacewalk | backend/server/handlers/xmlrpc/errata.py | Python | gpl-2.0 | 9,420 |
from django import forms
from django.contrib.auth.models import User
from django.forms import ModelForm
from django.db import models
# Create your models here.
#EDITING THE USER MODEL
User.add_to_class('usuario_sico', models.CharField(max_length=10, null=False, blank=False))
User.add_to_class('contrasenia_sico', models.CharField(max_length=10, null=False, blank=False))
#User.add_to_class('amigos', models.ManyToManyField('self', symmetrical=True, blank=True))
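# Illustrative effect of the add_to_class() calls above (added; values are
# hypothetical):
#   u = User(username="jdoe", usuario_sico="JD01", contrasenia_sico="s3cret")
# the two extra columns behave like ordinary model fields on every User.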
#FORMS
class SignUpForm(ModelForm):
class Meta:
model = User
fields = ['username', 'password', 'email', 'first_name', 'last_name', 'usuario_sico', 'contrasenia_sico']
widgets = {
'password': forms.PasswordInput(),
'contrasenia_sico': forms.PasswordInput(),
        }
| Titulacion-Sistemas/PracticasDjango | usuarios_logueados/usuarios/models.py | Python | gpl-2.0 | 791 |
import sys
import time
from naoqi import ALProxy
IP = "nao.local"
PORT = 9559
if (len(sys.argv) < 2):
print "Usage: 'python RecordAudio.py nume'"
sys.exit(1)
fileName = "/home/nao/" + sys.argv[1] + ".wav"
aur = ALProxy("ALAudioRecorder", IP, PORT)
channels = [0,0,1,0]
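# Note (added): "channels" holds one on/off flag per microphone, so [0,0,1,0]
# records a single mic; which physical mic each slot selects depends on the
# NAOqi version, so treat the ordering as an assumption. Typical NAOqi sample
# rates are 16000 or 48000 Hz, so the 160000 below may be a typo for 16000.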
aur.startMicrophonesRecording(fileName, "wav", 160000, channels)
c=raw_input("Sfarsit?")
aur.stopMicrophonesRecording()
c=raw_input("play?")
aup = ALProxy("ALAudioPlayer", IP, PORT)
#Launchs the playing of a file
aup.playFile(fileName,0.5,-1.0)
c=raw_input("gata?")
#Launchs the playing of a file
#aup.playFile("/usr/share/naoqi/wav/random.wav")
#Launchs the playing of a file on the left speaker to a volume of 50%
#aup.playFile("/usr/share/naoqi/wav/random.wav",0.5,-1.0)
| ioanaantoche/muhaha | ioana/RecordAudio.py | Python | gpl-2.0 | 757 |
#!/usr/bin/env python3
# Copyright (c) 2008-9 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License, or
# version 3 of the License, or (at your option) any later version. It is
# provided for educational purposes and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
# the GNU General Public License for more details.
import re
from PyQt4.QtCore import (Qt, SIGNAL, pyqtSignature)
from PyQt4.QtGui import (QApplication, QDialog)
import ui_findandreplacedlg
MAC = True
try:
from PyQt4.QtGui import qt_mac_set_native_menubar
except ImportError:
MAC = False
class FindAndReplaceDlg(QDialog,
ui_findandreplacedlg.Ui_FindAndReplaceDlg):
def __init__(self, text, parent=None):
super(FindAndReplaceDlg, self).__init__(parent)
self.__text = str(text)
self.__index = 0
self.setupUi(self)
if not MAC:
self.findButton.setFocusPolicy(Qt.NoFocus)
self.replaceButton.setFocusPolicy(Qt.NoFocus)
self.replaceAllButton.setFocusPolicy(Qt.NoFocus)
self.closeButton.setFocusPolicy(Qt.NoFocus)
self.updateUi()
@pyqtSignature("QString")
def on_findLineEdit_textEdited(self, text):
self.__index = 0
self.updateUi()
def makeRegex(self):
findText = str(self.findLineEdit.text())
if str(self.syntaxComboBox.currentText()) == "Literal":
findText = re.escape(findText)
flags = re.MULTILINE|re.DOTALL|re.UNICODE
if not self.caseCheckBox.isChecked():
flags |= re.IGNORECASE
if self.wholeCheckBox.isChecked():
findText = r"\b{0}\b".format(findText)
return re.compile(findText, flags)
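    # Illustrative outcome (added): searching for "cat" with "Literal"
    # syntax, whole words on and case ignored compiles roughly to
    #   re.compile(r"\bcat\b", re.MULTILINE | re.DOTALL | re.UNICODE | re.IGNORECASE)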
@pyqtSignature("")
def on_findButton_clicked(self):
regex = self.makeRegex()
match = regex.search(self.__text, self.__index)
if match is not None:
self.__index = match.end()
self.emit(SIGNAL("found"), match.start())
else:
self.emit(SIGNAL("notfound"))
@pyqtSignature("")
def on_replaceButton_clicked(self):
regex = self.makeRegex()
self.__text = regex.sub(str(self.replaceLineEdit.text()),
self.__text, 1)
@pyqtSignature("")
def on_replaceAllButton_clicked(self):
regex = self.makeRegex()
self.__text = regex.sub(str(self.replaceLineEdit.text()),
self.__text)
def updateUi(self):
        enable = bool(self.findLineEdit.text())  # str under Python 3 has no isEmpty()
self.findButton.setEnabled(enable)
self.replaceButton.setEnabled(enable)
self.replaceAllButton.setEnabled(enable)
def text(self):
return self.__text
if __name__ == "__main__":
import sys
text = """US experience shows that, unlike traditional patents,
software patents do not encourage innovation and R&D, quite the
contrary. In particular they hurt small and medium-sized enterprises
and generally newcomers in the market. They will just weaken the market
and increase spending on patents and litigation, at the expense of
technological innovation and research. Especially dangerous are
attempts to abuse the patent system by preventing interoperability as a
means of avoiding competition with technological ability.
--- Extract quoted from Linus Torvalds and Alan Cox's letter
to the President of the European Parliament
http://www.effi.org/patentit/patents_torvalds_cox.html"""
def found(where):
print("Found at {0}".format(where))
def nomore():
print("No more found")
app = QApplication(sys.argv)
form = FindAndReplaceDlg(text)
form.connect(form, SIGNAL("found"), found)
form.connect(form, SIGNAL("notfound"), nomore)
form.show()
app.exec_()
print(form.text())
| paradiseOffice/Bash_and_Cplus-plus | CPP/full_examples/pyqt/chap07/findandreplacedlg.py | Python | gpl-2.0 | 4,146 |