code | repo_name | path | language | license | size
---|---|---|---|---|---
import numpy as np
from collections import namedtuple
import interp
epi0 = 1. # Epsilon0
Species = namedtuple("Species", ["q", "m", "N", "x0", "vx0", "vy0"])
__cache_one_d_poisson = {}
def one_d_poisson(n):
if n in __cache_one_d_poisson:
return __cache_one_d_poisson[n]
a = np.zeros((n,n))
np.fill_diagonal(a, -2.)
np.fill_diagonal(a[:-1,1:], 1.)
np.fill_diagonal(a[1:,:-1], 1.)
__cache_one_d_poisson[n] = a
return a
# Dict of solver functions with string keys
__solver = {}
def poisson_solve_fd(b, dx):
""" Assume V0=0
"""
nx = len(b)
A = one_d_poisson(nx-1)
p = -b*(dx**2)
x = np.zeros_like(p)
x[1:] = np.linalg.solve(A, p[1:])
return x
__solver["FD"] = poisson_solve_fd
def poisson_solve_fft(rho, dx):
nx = len(rho)
rhok = np.fft.fft(rho)
k = np.fft.fftfreq(nx)*2*np.pi/dx
kd = (k[1:]**2)*(np.sin(k[1:]*dx/2.)/(k[1:]*dx/2))**2
phik = np.zeros_like(rhok)
phik[1:] = rhok[1:]/kd
sol = np.real(np.fft.ifft(phik))
return sol
__solver["FFT"] = poisson_solve_fft
def poisson_solve(b, dx, method="FFT"):
if method in __solver:
return __solver[method](b, dx)
else:
return method(b, dx)
# Dicts of weight/interp functions with string keys
__weight = {}
__interp = {}
__weight["CIC"] = interp.weight_cic
def interp_cic(E, xp, nx, L):
""" Interpolate E to particle positions (CIC)
"""
xps = xp*nx/L
    left = np.floor(xps).astype(int)
    right = np.mod(np.ceil(xps), nx).astype(int)
E_interp = E[left]*(left+1-xps) + E[right]*(xps-left)
return E_interp
__interp["CIC"] = interp_cic
def weight_ngp(xp, q, nx, L):
""" Weighting to grid (NGP)
"""
rho = np.zeros(nx)
    xps = np.round(xp*nx/L).astype(int)
    xps[xps==nx] = 0
    for i in range(len(xps)):
rho[xps[i]] += q[i]
return rho
__weight["NGP"] = weight_ngp
def interp_ngp(E, xp, nx, L):
""" Interpolate E to particle positions (NGP)
"""
    xps = np.round(xp*nx/L).astype(int)
xps[xps==nx] = 0
return E[xps]
__interp["NGP"] = interp_ngp
def weight(xp, q, nx, L, method="CIC"):
if method in __weight:
dx = L/nx
return __weight[method](xp, q, nx, L)
else:
return method(xp, q, nx, L)
def interp(E, xp, nx, L, method="CIC"):
if method in __interp:
return __interp[method](E, xp, nx, L)
else:
return method(E, xp, nx, L)
def calc_E(phi, dx):
    """ Calc E on the grid from phi
    Centered difference (second order), periodic boundaries
    """
E = np.zeros_like(phi)
E[1:-1] = -(phi[2:]-phi[:-2])
E[0] = -(phi[1]-phi[-1])
E[-1] = -(phi[0]-phi[-2])
return E/(2*dx)
def accel(vx, vy, E, alpha, dt):
""" Accel in place
"""
vx[:] = vx + alpha*E*dt/2.
# vy is unchanged
def rotate(vx, vy, wc, dt):
""" Rotate in place
"""
c = np.cos(wc*dt)
s = np.sin(wc*dt)
vx_new = c*vx + s*vy
vy_new = -s*vx + c*vy
vx[:] = vx_new
vy[:] = vy_new
def normalize(x, L):
""" Keep x in [0,L), assuming a periodic domain
"""
# The order here is significant because of rounding
# If x<0 is very close to 0, then float(x+L)=L
while len(x[x<0])>0 or len(x[x>=L])>0:
x[x<0] = x[x<0] + L
x[x>=L] = x[x>=L] - L
def move(xp, vx, vy, dt, L, do_move=None):
""" Move in place
"""
if do_move is None:
xp[:] = xp + dt*vx
else:
xp[do_move] = xp[do_move] + dt*vx[do_move]
normalize(xp, L)
def pic(species, nx, dx, nt, dt, L, B0, solver_method="FFT",
weight_method="CIC",
interp_method="CIC"):
N = 0
for s in species: N += s.N
q, qm, wc, xp, vx, vy = [np.zeros(N) for _ in range(6)]
    do_move = np.zeros(N, dtype=bool)
count = 0 # Trailing count
for s in species:
q[count:count+s.N] = s.q
qm[count:count+s.N] = s.q/s.m
wc[count:count+s.N] = (s.q/s.m)*B0
xp[count:count+s.N] = s.x0
vx[count:count+s.N] = s.vx0
vy[count:count+s.N] = s.vy0
do_move[count:count+s.N] = s.m>0
count += s.N
# store the results at each time step
xpa = np.zeros((nt+1, N))
vxa = np.zeros((nt+1, N))
vya = np.zeros((nt+1, N))
Ea = np.zeros((nt+1, nx))
phia = np.zeros((nt+1, nx))
rhoa = np.zeros((nt+1, nx))
# Main solution loop
# Init half step back
rho = weight(xp, q, nx, L, method=weight_method)/dx
phi = poisson_solve(rho/epi0, dx, method=solver_method)
E0 = calc_E(phi, dx)
E = interp(E0, xp, nx, L, method=interp_method)
rotate(vx, vy, -wc, dt)
accel(vx, vy, E, -qm, dt)
xpa[0], vxa[0], vya[0] = xp, vx, vy
Ea[0], phia[0], rhoa[0] = E0, phi, rho
for i in range(1, nt+1):
# Update velocity
accel(vx, vy, E, qm, dt)
rotate(vx, vy, wc, dt)
accel(vx, vy, E, qm, dt)
# Update position
move(xp, vx, vy, dt, L, do_move=do_move)
rho = weight(xp, q, nx, L, method=weight_method)/dx
phi = poisson_solve(rho/epi0, dx, method=solver_method)
E0 = calc_E(phi, dx)
E = interp(E0, xp, nx, L, method=interp_method)
xpa[i], vxa[i], vya[i] = xp, vx, vy
Ea[i], phia[i], rhoa[i] = E0, phi, rho
return (xpa, vxa, vya, Ea, phia, rhoa)
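# --- Usage sketch (illustrative, not part of the original module) ---
# A minimal two-beam run in normalized units. Every parameter value below is a
# hypothetical choice; only the pic() signature and the Species fields come
# from the code above. NGP weighting/interpolation is requested so that only
# functions defined in this file are exercised, but importing the module still
# requires the sibling `interp` module referenced at the top.
if __name__ == "__main__":
    Lx = 2*np.pi
    nx, nt, dt = 32, 10, 0.1
    Np = 128
    x0 = np.linspace(0, Lx, Np, endpoint=False)
    beams = [Species(q=-1./Np, m=1./Np, N=Np, x0=x0,
                     vx0=np.ones(Np), vy0=np.zeros(Np)),
             Species(q=-1./Np, m=1./Np, N=Np, x0=x0,
                     vx0=-np.ones(Np), vy0=np.zeros(Np))]
    xpa, vxa, vya, Ea, phia, rhoa = pic(beams, nx, Lx/nx, nt, dt, Lx, B0=0.0,
                                        weight_method="NGP",
                                        interp_method="NGP")
    print(xpa.shape)  # (nt+1, 2*Np)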
| shigh/pyes1 | pyes1/pic1d2v.py | Python | gpl-2.0 | 5,516 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2017-02-09 08:44
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
dependencies = [
('statusboard', '0005_merge'),
]
operations = [
migrations.CreateModel(
name='Maintenance',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('scheduled', models.DateTimeField()),
('name', models.CharField(max_length=255)),
('description', models.TextField()),
],
options={
'abstract': False,
},
),
]
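# Applying this migration uses Django's standard management command
# (a sketch; "statusboard" is the app label from the dependency above):
#   python manage.py migrate statusboard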
| edigiacomo/django-statusboard | statusboard/migrations/0006_maintenance.py | Python | gpl-2.0 | 1,099 |
#!/usr/bin/env python
# This is the MIT License
# http://www.opensource.org/licenses/mit-license.php
#
# Copyright (c) 2007,2008 Nick Galbreath
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#
# Version 1.0 - 21-Apr-2007
# initial
# Version 2.0 - 16-Nov-2008
# made class Gmetric thread safe
# made gmetric xdr writers _and readers_
# Now this only works for gmond 2.X packets, not tested with 3.X
#
# Version 3.0 - 09-Jan-2011 Author: Vladimir Vuksan
# Made it work with the Ganglia 3.1 data format
#
# Version 3.1 - 30-Apr-2011 Author: Adam Tygart
# Added Spoofing support
from xdrlib import Packer, Unpacker
import socket, re
slope_str2int = {'zero':0,
'positive':1,
'negative':2,
'both':3,
'unspecified':4}
# could be autogenerated from previous but whatever
slope_int2str = {0: 'zero',
1: 'positive',
2: 'negative',
3: 'both',
4: 'unspecified'}
class Gmetric:
"""
Class to send gmetric/gmond 2.X packets
Thread safe
"""
type = ('', 'string', 'uint16', 'int16', 'uint32', 'int32', 'float',
'double', 'timestamp')
protocol = ('udp', 'multicast')
def __init__(self, host, port, protocol):
if protocol not in self.protocol:
raise ValueError("Protocol must be one of: " + str(self.protocol))
self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
if protocol == 'multicast':
self.socket.setsockopt(socket.IPPROTO_IP,
socket.IP_MULTICAST_TTL, 20)
self.hostport = (host, int(port))
#self.socket.connect(self.hostport)
def send(self, NAME, VAL, TYPE='', UNITS='', SLOPE='both',
TMAX=60, DMAX=0, GROUP="", SPOOF=""):
if SLOPE not in slope_str2int:
raise ValueError("Slope must be one of: " + str(self.slope.keys()))
if TYPE not in self.type:
raise ValueError("Type must be one of: " + str(self.type))
if len(NAME) == 0:
raise ValueError("Name must be non-empty")
( meta_msg, data_msg ) = gmetric_write(NAME, VAL, TYPE, UNITS, SLOPE, TMAX, DMAX, GROUP, SPOOF)
# print msg
self.socket.sendto(meta_msg, self.hostport)
self.socket.sendto(data_msg, self.hostport)
def gmetric_write(NAME, VAL, TYPE, UNITS, SLOPE, TMAX, DMAX, GROUP, SPOOF):
"""
Arguments are in all upper-case to match XML
"""
packer = Packer()
HOSTNAME="test"
if SPOOF == "":
SPOOFENABLED=0
else :
SPOOFENABLED=1
# Meta data about a metric
packer.pack_int(128)
if SPOOFENABLED == 1:
packer.pack_string(SPOOF)
else:
packer.pack_string(HOSTNAME)
packer.pack_string(NAME)
packer.pack_int(SPOOFENABLED)
packer.pack_string(TYPE)
packer.pack_string(NAME)
packer.pack_string(UNITS)
packer.pack_int(slope_str2int[SLOPE]) # map slope string to int
packer.pack_uint(int(TMAX))
packer.pack_uint(int(DMAX))
# Magic number. Indicates number of entries to follow. Put in 1 for GROUP
if GROUP == "":
packer.pack_int(0)
else:
packer.pack_int(1)
packer.pack_string("GROUP")
packer.pack_string(GROUP)
# Actual data sent in a separate packet
data = Packer()
data.pack_int(128+5)
if SPOOFENABLED == 1:
data.pack_string(SPOOF)
else:
data.pack_string(HOSTNAME)
data.pack_string(NAME)
data.pack_int(SPOOFENABLED)
data.pack_string("%s")
data.pack_string(str(VAL))
return ( packer.get_buffer() , data.get_buffer() )
def gmetric_read(msg):
unpacker = Unpacker(msg)
values = dict()
unpacker.unpack_int()
values['TYPE'] = unpacker.unpack_string()
values['NAME'] = unpacker.unpack_string()
values['VAL'] = unpacker.unpack_string()
values['UNITS'] = unpacker.unpack_string()
values['SLOPE'] = slope_int2str[unpacker.unpack_int()]
values['TMAX'] = unpacker.unpack_uint()
values['DMAX'] = unpacker.unpack_uint()
unpacker.done()
return values
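# Usage sketch (illustrative; the host, port and metric values are hypothetical):
#   g = Gmetric('127.0.0.1', 8649, 'udp')
#   g.send('cpu_user', '42.0', 'float', '%', 'both', 60, 0, 'cpu')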
def get_gmetrics(path):
data = open(path).read()
start = 0
out = []
while True:
m = re.search('udp_send_channel +\{([^}]+)\}', data[start:], re.M)
if not m:
break
start += m.end()
tokens = re.split('\s+', m.group(1).strip())
host = tokens[tokens.index('host')+2]
port = int(tokens[tokens.index('port')+2])
out.append(Gmetric(host, port, 'udp'))
return out
if __name__ == '__main__':
import optparse
parser = optparse.OptionParser()
parser.add_option("", "--protocol", dest="protocol", default="udp",
help="The gmetric internet protocol, either udp or multicast, default udp")
parser.add_option("", "--host", dest="host", default="127.0.0.1",
help="GMond aggregator hostname to send data to")
parser.add_option("", "--port", dest="port", default="8649",
help="GMond aggregator port to send data to")
parser.add_option("", "--name", dest="name", default="",
help="The name of the metric")
parser.add_option("", "--value", dest="value", default="",
help="The value of the metric")
parser.add_option("", "--units", dest="units", default="",
help="The units for the value, e.g. 'kb/sec'")
parser.add_option("", "--slope", dest="slope", default="both",
help="The sign of the derivative of the value over time, one of zero, positive, negative, both, default both")
parser.add_option("", "--type", dest="type", default="",
help="The value data type, one of string, int8, uint8, int16, uint16, int32, uint32, float, double")
parser.add_option("", "--tmax", dest="tmax", default="60",
help="The maximum time in seconds between gmetric calls, default 60")
parser.add_option("", "--dmax", dest="dmax", default="0",
help="The lifetime in seconds of this metric, default=0, meaning unlimited")
parser.add_option("", "--group", dest="group", default="",
help="Group metric belongs to. If not specified Ganglia will show it as no_group")
parser.add_option("", "--spoof", dest="spoof", default="",
help="the address to spoof (ip:host). If not specified the metric will not be spoofed")
(options,args) = parser.parse_args()
g = Gmetric(options.host, options.port, options.protocol)
g.send(options.name, options.value, options.type, options.units,
options.slope, options.tmax, options.dmax, options.group, options.spoof)
| vollmerk/sysadm-tools | ganglia/lib/gmetric.py | Python | gpl-2.0 | 7,815 |
# -*- coding: utf-8 -*-
import pcapy
import Storage_Classes
from collections import defaultdict
#from scapy.layers.all import *
from scapy.layers.dot11 import Dot11, RadioTap
################################################################
VERBOSE = 1
### auxiliary functions ###
def show_short(p, label):
if ( VERBOSE >= 1 ):
## show
#print p.num, p.timestamp, p.dot11.addr2, \
#label, "[" + str(p.dot11.type) + ", " + str(p.dot11.subtype) + "]", \
#p.get_rssi(), "dBm"
###################################################################################
### modified: # ETH- 'public': 00:0f:61:5d:5c:01 / ETH - 'eth':00:0f:61:5d:5c:00 'eth':00:0f:61:53:1f:00
if (str(p.dot11.addr2) == '00:0b:0e:84:00:46'):
            print p.timestamp, p.get_rssi(), "dBm" # the time format here has microseconds as the smallest unit, so the last digit before the decimal point is the second
################################################################
### ANALYZE FUNCTIONS ###
def analyze_beacon(p):
#print p.num, p.timestamp, p.dot11.addr2, "BEACON:", p.get_rssi(), "dBm"
show_short(p, "BEACON")
# ## sanity check ## FIXME won't work!! is this a wrong sanity check..?
# if ( not p.dot11.addr2 == p.dot11.addr3 ):
# print
# print "ERR: BEACON INVALID"
# p.show()
# raise Exception("ERR: BEACON INVALID")
# if ( p.dot11.addr2 == "00:00:00:00:00:00" ):
# print
# print "ERR: BEACON INVALID"
# p.show()
# raise Exception("ERR: BEACON INVALID")
## store
senders[p.dot11.addr2].store(p)
def analyze_regular(p):
## some packet types are not correctly dissected
## ---> e.g. Block ACK Req
## ---> TODO (what shall we do with them?)
if ( not p.dot11.addr2 ):
#####################################
        ##### THIS IS COMPLETELY UNCOMMENTED TO STOP THIS DISSECTION EXCEPTION !!! (stephan)
#####################################
#show_short(p, "<<not recognized>>.")
#p.show_stephan()
#show_short(p, "Packet.")
##############################
##was: show(p.dot11) ... just guessing
##############################
#print
# dat = raw_input()
        ######################raise Exception("incorrect dissection exception")
return
## BRANCH: "no" sender..
## ---> some packets have no proper sender information
## ---> just ignore them!!
if ( p.dot11.addr2 == "00:00:00:00:00:00" ):
show_short(p, "<<ignored>>")
return
## BRANCH: regular packet
else:
show_short(p, "Packet.")
## store
senders[p.dot11.addr2].store(p)
### general analyze function
### --> hands packets over to specialized ones
def analyze(p):
## Control frames have no useful sender information
## ---> we don't want them. just drop it.
## BRANCH: control frames
try:
if ( p.dot11.type == 1 ):
# BRANCH: ACK (sender unclear...)
if ( p.dot11.subtype == 13 ):
if ( VERBOSE >= 1 ):
macheNix=1
################################################################################################
###modified:
#print p.num, "ignoring ACK (1, 13)", p.get_rssi(), "dBm"
################################################################################################
return
if ( p.dot11.subtype == 12 ):
if ( VERBOSE >= 1 ):
macheNix=1
################################################################################################
###modified:
#print p.num, "ignoring CTS (1, 12)", p.get_rssi(), "dBm"
################################################################################################
return
if ( p.dot11.subtype == 8 ):
if ( VERBOSE >= 1 ):
macheNix=1
################################################################################################
###modified:
#print p.num, "ignoring Block ACK Req (1, 12)", p.get_rssi(), "dBm"
################################################################################################
return
## BRANCH: managemet frames
if ( p.dot11.type == 0 ):
# BRANCH: BEACON
if ( p.dot11.subtype == 8 ):
analyze_beacon(p)
return
# elif ( p.dot11.type == 2 ):
# if ( p.dot11.subtype == 4 ):
# analyze_regular(p)
# dat = raw_input()
# return
except AttributeError:
if ( VERBOSE >= 1 ):
print p.num, "ignoring malformed packet", p.get_rssi(), "dBm"
return
## default
## ---> most packets can just be treated the same..
analyze_regular(p)
# show(p.dot11)
# print
# dat = raw_input()
################################################################
### PCAP READING AND DECODING ###
## TODO do they have to be global?
pcapy_reader = False
packet_num = False
senders = False
def get_next():
global packet_num
pkt = pcapy_reader.next()
    if pkt[0] is None:
return None
p = Storage_Classes.Packet()
packet_num += 1
p.num = packet_num
p.timestamp = pkt[0].getts()
p.tap = RadioTap()
p.tap.dissect(pkt[1])
p.dot11 = p.tap.payload
#print "----->", ord(pkt[1][14])
p.rssi = ord(pkt[1][14]) - 256
return p
### public ###
def parse(file_name, num=100):
global pcapy_reader
global senders
## init
pcapy_reader = pcapy.open_offline(file_name)
packet_num = 0
senders = defaultdict(Storage_Classes.Sender)
print "Reading", file_name, "..."
i = 0
while ( i != num): ## like a for loop, but e.g. -1 means "to infinity"
if ( i % 20000 == 0 ):
print i
p = get_next()
if ( p == None ):
print "EOF", i
break
analyze(p)
i += 1
# dat = raw_input()
## FIXME how to close this?
#pcapy.close()
    ### statistics
print
print "----------------------------------------"
print "Senders:", len(senders)
## TODO count total in this module not in the Sender class
print "used packets:", Storage_Classes.Sender.total ## TODO maybe this shuld also be returned from "parse"
return senders
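# Usage sketch (the capture file name is hypothetical):
#   import Pcap_Parser
#   senders = Pcap_Parser.parse("capture.pcap", num=-1)  # num=-1 reads to EOF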
| stephansigg/IPSN_localisation_passive-DF | Data_CaseStudies/CaseStudy_1312_initial/pcap_analyzer/Pcap_Parser.py | Python | gpl-2.0 | 6,609 |
# Rekall Memory Forensics
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Plugins to inspect flows."""
import base64
import json
import os
import time
import arrow
from rekall import plugin
from rekall import utils
from rekall_agent import common
from rekall_agent import flow
from rekall_agent import result_collections
from rekall_agent import serializer
from rekall_agent.locations import files
from rekall_agent.ui import renderers
CANNED_CONDITIONS = dict(
OS_WINDOWS="any from agent_info() where key=='system' and value=='Windows'",
OS_LINUX="any from agent_info() where key=='system' and value=='Linux'",
OS_OSX="any from agent_info() where key=='system' and value=='Darwin'",
)
class AgentControllerShowFlows(common.AbstractControllerCommand):
name = "show_flows"
__args = [
dict(name="limit", type="IntParser", default=20,
help="Total results to display"),
]
table_header = [
dict(name="state", width=8),
dict(name="flow_id", width=14),
dict(name="type", width=18),
dict(name="created", width=19),
dict(name="last_active", width=19),
dict(name="collections"),
]
def collect_db(self, collection):
# Now show all the flows.
for i, row in enumerate(collection.query(order_by="created desc")):
            if i >= self.plugin_args.limit:
break
ticket = flow.FlowStatus.from_json(
row["ticket_data"], session=self.session)
last_active = row["last_active"]
if last_active:
last_active = arrow.Arrow.fromtimestamp(last_active)
collections = [x.location.get_canonical().to_path()
for x in ticket.collections]
yield dict(state=row["status"],
flow_id=row["flow_id"],
type=row["type"],
created=arrow.Arrow.fromtimestamp(row["created"]),
last_active=last_active,
collections=collections)
def _check_pending_flow(self, row):
"""Check for flow tickets.
For pending flows, it is possible that the worker just has not caught
up. We try to show it anyway by checking for the tickets.
"""
if row["state"] == "Pending":
ticket_location = self._config.server.ticket_for_server(
"FlowStatus", row["flow_id"], self.client_id)
# The client will actually add a nonce to this so we need to find
# all subobjects.
for sub_object in ticket_location.list_files():
# The subobject is a canonical path, we need to authorize it.
data = self._config.server.canonical_for_server(
sub_object.location).read_file()
if data:
ticket = flow.FlowStatus.from_json(
data, session=self.session)
row["state"] = "%s(*)" % ticket.status
row["collections"] = [sub_object.location.to_path()]
row["last_active"] = ticket.timestamp
def collect(self):
if not self.client_id:
raise plugin.PluginError("Client ID must be specified.")
with flow.FlowStatsCollection.load_from_location(
self._config.server.flow_db_for_server(self.client_id),
session=self.session) as collection:
rows = list(self.collect_db(collection))
common.THREADPOOL.map(self._check_pending_flow, rows)
for row in rows:
row["collections"] = [
renderers.UILink("gs", x) for x in row["collections"]]
row["flow_id"] = renderers.UILink("f", row["flow_id"])
yield row
class AgentControllerShowHunts(AgentControllerShowFlows):
name = "show_hunts"
__args = [
dict(name="queue", default="All",
help="The hunt queue."),
]
def collect(self):
with flow.FlowStatsCollection.load_from_location(
self._config.server.flow_db_for_server(
queue=self.plugin_args.queue),
session=self.session) as collection:
for row in self.collect_db(collection):
row["flow_id"] = renderers.UILink("h", row["flow_id"])
yield row
class SerializedObjectInspectorMixin(object):
"""A plugin Mixin which inspects a SerializedObject."""
__args = [
dict(name="verbosity", type="IntParser", default=0,
help="If non zero show all fields."),
]
table_header = [
dict(name="Field", type="TreeNode", max_depth=5, width=20),
dict(name="Value", width=60),
dict(name="Description")
]
def _explain(self, obj, depth=0, ignore_fields=None):
if isinstance(obj, serializer.SerializedObject):
for x in self._collect_serialized_object(
obj, depth=depth, ignore_fields=ignore_fields):
yield x
elif isinstance(obj, basestring):
yield dict(Value=obj)
elif isinstance(obj, list):
yield dict(Value=", ".join(obj))
else:
raise RuntimeError("Unable to render object %r" % obj)
def _collect_list(self, list_obj, field, descriptor, depth):
yield dict(
Field=field,
Value="(Array)",
Description=descriptor.get("doc", ""),
highlight="important" if descriptor.get("user") else "",
depth=depth)
for i, value in enumerate(list_obj):
for row in self._explain(value, depth=depth):
row["Field"] = "[%s] %s" % (i, row.get("Field", ""))
if descriptor.get("user"):
row["highlight"] = "important"
yield row
def _collect_dict(self, dict_obj, field, descriptor, depth):
yield dict(
Field=field,
Value="(Dict)",
Description=descriptor.get("doc", ""),
highlight="important" if descriptor.get("user") else "",
depth=depth)
for key, value in sorted(dict_obj.iteritems()):
for row in self._explain(value, depth=depth+1):
row["Field"] = ". " + key
if descriptor.get("user"):
row["highlight"] = "important"
yield row
def _collect_serialized_object(self, flow_obj, depth=0, ignore_fields=None):
for descriptor in flow_obj.get_descriptors():
# Skip hidden fields if verbosity is low.
if self.plugin_args.verbosity < 2 and descriptor.get("hidden"):
continue
field = descriptor["name"]
# Only show requested fields in non-verbose mode.
if (not self.plugin_args.verbosity and
ignore_fields and field in ignore_fields):
continue
if not flow_obj.HasMember(field):
continue
value = flow_obj.GetMember(field)
if isinstance(value, serializer.SerializedObject):
display_value = "(%s)" % value.__class__.__name__
elif isinstance(value, str):
display_value = base64.b64encode(value)
elif isinstance(value, unicode):
display_value = value
elif isinstance(value, list):
for x in self._collect_list(value, field, descriptor, depth):
yield x
continue
elif isinstance(value, dict):
for x in self._collect_dict(value, field, descriptor, depth):
yield x
continue
else:
display_value = utils.SmartUnicode(value)
if (not self.plugin_args.verbosity and len(display_value) > 45):
display_value = display_value[:45] + " ..."
yield dict(
Field=field,
Value=display_value,
Description=descriptor.get("doc", ""),
highlight="important" if descriptor.get("user") else "",
depth=depth)
if (isinstance(value, serializer.SerializedObject)):
for row in self._explain(value, depth=depth+1):
yield row
class InspectFlow(SerializedObjectInspectorMixin,
common.AbstractControllerCommand):
name = "inspect_flow"
__args = [
dict(name="flow_id", required=True, positional=True,
help="The flow to examine"),
]
table_header = [
dict(name="divider", type="Divider")
] + SerializedObjectInspectorMixin.table_header
def _get_collection(self, client_id):
return flow.FlowStatsCollection.load_from_location(
self._config.server.flow_db_for_server(client_id),
session=self.session)
def get_flow_object(self, flow_id=None):
if flow_id is None:
flow_id = self.plugin_args.flow_id
return flow.Flow.from_json(
self._config.server.flows_for_server(flow_id).read_file(),
session=self.session)
def collect(self):
flow_obj = self.get_flow_object(self.plugin_args.flow_id)
with self._get_collection(flow_obj.client_id) as collection:
yield dict(divider="Flow Object (%s)" % flow_obj.__class__.__name__)
for x in self._explain(flow_obj, ignore_fields=set([
"ticket", "actions"
])):
yield x
for row in collection.query(flow_id=self.plugin_args.flow_id):
ticket = flow.FlowStatus.from_json(row["ticket_data"],
session=self.session)
yield dict(divider="Flow Status Ticket")
for x in self._explain(ticket, ignore_fields=set([
"location", "client_id", "flow_id", "collections"
])):
yield x
if ticket.collections:
yield dict(divider="Collections")
for collection in ticket.collections:
link = renderers.UILink(
"gs", collection.location.get_canonical().to_path())
yield dict(
Field=collection.__class__.__name__,
Value=link,
Description="", nowrap=True)
if ticket.files:
yield dict(divider="Uploads")
for upload in ticket.files:
link = renderers.UILink(
"gs", upload.get_canonical().to_path())
yield dict(Value=link, nowrap=True)
if ticket.error:
yield dict(divider="Error")
yield dict(Field="ticket.error", Value=ticket.error)
if ticket.backtrace:
yield dict(divider="Backtrace")
yield dict(Field="ticket.backtrace", Value=ticket.backtrace)
class InspectHunt(InspectFlow):
name = "inspect_hunt"
__args = [
dict(name="limit", type="IntParser", default=20,
help="Limit of rows to display"),
dict(name="graph_clients", type="Bool",
help="Also plot a graph of client participation."),
]
table_header = [
dict(name="divider", type="Divider"),
dict(name="Field", width=20),
dict(name="Time", width=20),
dict(name="Value", width=20),
dict(name="Description")
]
def _get_collection(self):
return flow.HuntStatsCollection.load_from_location(
self._config.server.hunt_db_for_server(self.plugin_args.flow_id),
session=self.session)
def graph_clients(self, collection):
"""Draw a graph of client engagement."""
# This is optionally dependent on presence of matplotlib.
try:
from matplotlib import pyplot
except ImportError:
raise plugin.PluginError(
"You must have matplotlib installed to plot graphs.")
total_clients = 0
base = None
data_x = []
data_y = []
for row in collection.query(order_by="executed"):
total_clients += 1
if base is None:
base = row["executed"]
data_x.append(row["executed"] - base)
data_y.append(total_clients)
fig = pyplot.figure()
ax = fig.add_subplot(111)
ax.plot(data_x, data_y)
start_time = arrow.Arrow.fromtimestamp(base)
ax.set_title("Clients in Hunt %s" % self.plugin_args.flow_id)
ax.set_xlabel("Seconds after %s (%s)" % (
start_time.ctime(), start_time.humanize()))
ax.set_ylabel("Total Client Count")
pyplot.show()
def collect(self):
with self._get_collection() as collection:
flow_obj = self.get_flow_object(self.plugin_args.flow_id)
if self.plugin_args.graph_clients:
self.graph_clients(collection)
yield dict(divider="Flow Object (%s)" % flow_obj.__class__.__name__)
for x in self._explain(flow_obj, ignore_fields=set([
"ticket", "actions"
])):
yield x
yield dict(divider="Summary")
yield dict(Field="Total Clients",
Value=list(collection.query(
"select count(*) as c from tbl_default"
))[0]["c"])
yield dict(Field="Successful Clients",
Value=list(collection.query(
"select count(*) as c from tbl_default "
"where status = 'Done'"))[0]["c"])
yield dict(Field="Errors Clients",
Value=list(collection.query(
"select count(*) as c from tbl_default "
"where status = 'Error'"))[0]["c"])
total = 0
yield dict(divider="Results")
for row in collection.query(
status="Done", limit=self.plugin_args.limit):
ticket = flow.FlowStatus.from_json(row["ticket_data"],
session=self.session)
for result in ticket.collections:
                    if total >= self.plugin_args.limit:
break
yield dict(Field=ticket.client_id,
Time=ticket.timestamp,
Value=renderers.UILink(
"gs", result.location.to_path()),
nowrap=True)
total += 1
yield dict(divider="Uploads")
total = 0
for row in collection.query(
status="Done", limit=self.plugin_args.limit):
ticket = flow.FlowStatus.from_json(row["ticket_data"],
session=self.session)
for result in ticket.files:
                    if total >= self.plugin_args.limit:
break
yield dict(Field=ticket.client_id,
Time=ticket.timestamp,
Value=renderers.UILink(
"gs", result.to_path()),
nowrap=True)
total += 1
for row in collection.query(
status="Error", limit=self.plugin_args.limit):
ticket = flow.FlowStatus.from_json(row["ticket_data"],
session=self.session)
yield dict(Field=ticket.client_id,
Time=ticket.timestamp,
Value=ticket.error, nowrap=True)
class AgentControllerRunFlow(SerializedObjectInspectorMixin,
common.AbstractControllerCommand):
name = "launch_flow"
__args = [
dict(name="flow", type="Choices", positional=True, required=True,
choices=utils.JITIteratorCallable(
utils.get_all_subclasses, flow.Flow),
help="The flow to launch"),
dict(name="args", type="Any", positional=True, default={},
help="Arguments to the flow (use explain_flow to see valid args)."
"This may also be a JSON encoded string"),
dict(name="queue",
help="Which queue to schedule the hunt on."),
dict(name="condition",
help="An EFilter query to evaluate if the flow should be run."),
# This should only be set if no condition is specified.
dict(name="canned_condition", type="Choices", default=None,
choices=CANNED_CONDITIONS,
help="Canned conditions for the hunt."),
dict(name="live", type="Choices", default="API",
choices=["API", "Memory"],
help="Live mode to use"),
dict(name="quota", type="IntParser", default=3600,
help="Total number of CPU seconds allowed for this flow."),
]
def make_flow_object(self):
flow_cls = flow.Flow.ImplementationByClass(self.plugin_args.flow)
if not flow_cls:
raise plugin.PluginError("Unknown flow %s" % self.plugin_args.flow)
args = self.plugin_args.args
if isinstance(args, basestring):
try:
args = json.loads(args)
except Exception as e:
raise plugin.PluginError(
"args should be a JSON string of a dict: %s" % e)
if not isinstance(args, dict):
raise plugin.PluginError("args should be a dict")
flow_obj = flow_cls.from_primitive(args, session=self.session)
flow_obj.client_id = self.client_id
flow_obj.queue = self.plugin_args.queue
flow_obj.session.live = self.plugin_args.live
# If a canned condition was specified automatically add it.
if self.plugin_args.canned_condition:
flow_obj.condition = CANNED_CONDITIONS[
self.plugin_args.canned_condition]
elif self.plugin_args.condition:
flow_obj.condition = self.plugin_args.condition
# Specify flow quota.
flow_obj.quota.user_time = self.plugin_args.quota
return flow_obj
def collect(self):
# Now launch the flow.
flow_obj = self.make_flow_object()
flow_obj.start()
for x in self._explain(flow_obj):
yield x
class AgentControllerRunHunt(AgentControllerRunFlow):
"""Launch a hunt on many clients at once.
Rekall does not treat hunts as different or special entities - a hunt is
just a flow which targets multiple systems. However, for users it is
sometimes helpful to think in terms of a "hunt". This plugin makes it easier
to launch the hunt.
"""
name = "launch_hunt"
__args = [
# Flows are scheduled on the client's flow queue but hunts are generally
# scheduled on a Label Queue (e.g. the All queue schedules to all
# agents).
dict(name="queue", default="All",
help="Which queue to schedule the hunt on."),
]
class AgentControllerExplainFlows(common.AbstractControllerCommand):
"""Explain all the parameters a flow may take."""
name = "explain_flow"
__args = [
dict(name="flow", type="Choices", positional=True, required=True,
choices=utils.JITIteratorCallable(
utils.get_all_subclasses, flow.Flow),
help="The flow to explain"),
dict(name="verbosity", type="IntParser", default=0,
help="If non zero show all fields."),
dict(name="recursive", type="Bool",
help="Show recursively nested fields."),
]
table_header = [
dict(name="Field", type="TreeNode", max_depth=5),
dict(name="Type"),
dict(name="Description")
]
table_options = dict(
auto_widths=True,
)
def _explain(self, flow_cls, depth=0):
for descriptor in flow_cls.get_descriptors():
user_accessible = descriptor.get("user")
if self.plugin_args.verbosity < 1 and not user_accessible:
continue
field = descriptor["name"]
field_type = descriptor.get("type", "string")
field_description = field_type
if isinstance(field_type, type):
field_description = "(%s)" % field_type.__name__
yield dict(Field=field,
Type=field_description,
Description=descriptor.get("doc", ""),
depth=depth)
if (self.plugin_args.recursive and
isinstance(field_type, type) and
issubclass(field_type, serializer.SerializedObject)):
for row in self._explain(field_type, depth=depth+1):
#row["Field"] = "%s.%s" % (field, row["Field"])
yield row
def collect(self):
flow_cls = flow.Flow.ImplementationByClass(self.plugin_args.flow)
for x in self._explain(flow_cls):
yield x
class AgentControllerExportCollections(common.AbstractControllerCommand):
"""Exports all collections from the hunt or flow."""
name = "export"
__args = [
dict(name="flow_id", positional=True, required=True,
help="The flow or hunt ID we should export."),
dict(name="dumpdir", positional=True, required=True,
help="The output directory we use export to.")
]
table_header = [
dict(name="divider", type="Divider"),
dict(name="Message")
]
def _collect_hunts(self, flow_obj):
with flow.HuntStatsCollection.load_from_location(
self._config.server.hunt_db_for_server(flow_obj.flow_id),
session=self.session) as hunt_db:
collections_by_type = {}
uploads = []
for row in hunt_db.query():
status = flow.HuntStatus.from_json(row["ticket_data"],
session=self.session)
for collection in status.collections:
collections_by_type.setdefault(
collection.collection_type, []).append(
(collection, status.client_id))
uploads.extend(status.files)
yield dict(divider="Exporting Collections")
# Now create a new collection by type into the output directory.
for output_location in common.THREADPOOL.imap_unordered(
self._dump_collection,
collections_by_type.iteritems()):
yield dict(Message=output_location.to_path())
yield dict(divider="Exporting files")
for output_location in common.THREADPOOL.imap_unordered(
self._dump_uploads,
uploads):
yield dict(Message=output_location.to_path())
def _collect_flows(self, flow_obj):
with flow.FlowStatsCollection.load_from_location(
self._config.server.flow_db_for_server(flow_obj.client_id),
session=self.session) as flow_db:
collections_by_type = {}
uploads = []
for row in flow_db.query(flow_id=flow_obj.flow_id):
status = flow.FlowStatus.from_json(row["ticket_data"],
session=self.session)
for collection in status.collections:
collections_by_type.setdefault(
collection.collection_type, []).append(
(collection, status.client_id))
uploads.extend(status.files)
yield dict(divider="Exporting Collections")
# Now create a new collection by type into the output directory.
for output_location in common.THREADPOOL.imap_unordered(
self._dump_collection,
collections_by_type.iteritems()):
yield dict(Message=output_location.to_path())
yield dict(divider="Exporting files")
for output_location in common.THREADPOOL.imap_unordered(
self._dump_uploads,
uploads):
yield dict(Message=output_location.to_path())
def _dump_uploads(self, download_location):
output_location = files.FileLocation.from_keywords(
path=os.path.join(self.plugin_args.dumpdir,
self.flow_id, "files",
download_location.to_path()),
session=self.session)
local_filename = self._config.server.canonical_for_server(
download_location).get_local_filename()
output_location.upload_local_file(local_filename)
return output_location
def _dump_collection(self, args):
type, collections = args
output_location = files.FileLocation.from_keywords(
path=os.path.join(self.plugin_args.dumpdir,
self.flow_id, "collections", type),
session=self.session)
# We assume all the collections of the same type are the same so we can
# just take the first one as the template for the output collection.
output_collection = collections[0][0].copy()
# Add another column for client_id.
output_collection.tables[0].columns.append(
result_collections.ColumnSpec.from_keywords(
name="client_id", session=self.session))
output_collection.location = output_location
with output_collection.create_temp_file():
common.THREADPOOL.map(
self._copy_single_location,
((output_collection, x, y) for x, y in collections))
return output_location
def _copy_single_location(self, args):
output_collection, canonical_collection, client_id = args
with canonical_collection.load_from_location(
self._config.server.canonical_for_server(
canonical_collection.location),
session=self.session) as collection:
for row in collection:
output_collection.insert(client_id=client_id, **row)
def collect(self):
self.flow_id = self.plugin_args.flow_id
if self.flow_id.startswith("f:") or self.flow_id.startswith("h:"):
self.flow_id = self.flow_id[2:]
flow_obj = flow.Flow.from_json(
self._config.server.flows_for_server(self.flow_id).read_file(),
session=self.session)
if flow_obj.is_hunt():
return self._collect_hunts(flow_obj)
else:
return self._collect_flows(flow_obj)
class FlowLauncherAndWaiterMixin(object):
"""A mixin to implement launching and waiting for flows to complete."""
def launch_and_wait(self, flow_obj):
"""A Generator of messages."""
flow_db_location = self._config.server.flow_db_for_server(
self.client_id)
flow_db_stat = flow_db_location.stat()
flow_obj.start()
# Wait until the flow arrives.
while 1:
new_stat = flow_db_location.stat()
if flow_db_stat and new_stat.generation > flow_db_stat.generation:
with flow.FlowStatsCollection.load_from_location(
flow_db_location, session=self.session) as flow_db:
tickets = []
for row in flow_db.query(flow_id=flow_obj.flow_id):
if row["status"] in ["Done", "Error"]:
tickets.append(
flow.FlowStatus.from_json(row["ticket_data"],
session=self.session))
if tickets:
return tickets
time.sleep(2)
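# Usage sketch (hypothetical client and flow IDs; the plugin names come from
# the classes above and would be invoked from a Rekall controller console):
#   show_flows --client_id C.1234567890
#   explain_flow FileFinder --recursive   # "FileFinder" is a placeholder flow name
#   launch_flow FileFinder '{"globs": ["/tmp/*"]}' --client_id C.1234567890
#   inspect_flow ABCD1234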
| rlugojr/rekall | rekall-agent/rekall_agent/ui/flows.py | Python | gpl-2.0 | 29,082 |
import re
from geopy import exc
from geopy.compat import urlencode
from geopy.geocoders.base import DEFAULT_SENTINEL, Geocoder
from geopy.location import Location
from geopy.util import logger
__all__ = ("What3Words", )
class What3Words(Geocoder):
"""What3Words geocoder.
Documentation at:
https://docs.what3words.com/api/v2/
.. versionadded:: 1.5.0
.. versionchanged:: 1.15.0
API has been updated to v2.
"""
    multiple_word_re = re.compile(
        r"[^\W\d_]+\.[^\W\d_]+\.[^\W\d_]+$", re.U
    )
geocode_path = '/v2/forward'
reverse_path = '/v2/reverse'
def __init__(
self,
api_key,
format_string=None,
scheme='https',
timeout=DEFAULT_SENTINEL,
proxies=DEFAULT_SENTINEL,
user_agent=None,
ssl_context=DEFAULT_SENTINEL,
):
"""
:param str api_key: Key provided by What3Words
(https://accounts.what3words.com/register).
:param str format_string:
See :attr:`geopy.geocoders.options.default_format_string`.
:param str scheme: Must be ``https``.
.. deprecated:: 1.15.0
API v2 requires https. Don't use this parameter,
it's going to be removed in geopy 2.0.
Scheme other than ``https`` would result in a
:class:`geopy.exc.ConfigurationError` being thrown.
:param int timeout:
See :attr:`geopy.geocoders.options.default_timeout`.
:param dict proxies:
See :attr:`geopy.geocoders.options.default_proxies`.
:param str user_agent:
See :attr:`geopy.geocoders.options.default_user_agent`.
.. versionadded:: 1.12.0
:type ssl_context: :class:`ssl.SSLContext`
:param ssl_context:
See :attr:`geopy.geocoders.options.default_ssl_context`.
.. versionadded:: 1.14.0
"""
super(What3Words, self).__init__(
format_string=format_string,
# The `scheme` argument is present for the legacy reasons only.
# If a custom value has been passed, it should be validated.
# Otherwise use `https` instead of the `options.default_scheme`.
scheme=(scheme or 'https'),
timeout=timeout,
proxies=proxies,
user_agent=user_agent,
ssl_context=ssl_context,
)
if self.scheme != "https":
raise exc.ConfigurationError("What3Words now requires `https`.")
self.api_key = api_key
domain = 'api.what3words.com'
self.geocode_api = '%s://%s%s' % (self.scheme, domain, self.geocode_path)
self.reverse_api = '%s://%s%s' % (self.scheme, domain, self.reverse_path)
    def _check_query(self, query):
        """
        Check query validity with regex
        """
        return bool(self.multiple_word_re.match(query))
def geocode(self,
query,
lang='en',
exactly_one=True,
timeout=DEFAULT_SENTINEL):
"""
Return a location point for a `3 words` query. If the `3 words` address
doesn't exist, a :class:`geopy.exc.GeocoderQueryError` exception will be
thrown.
:param str query: The 3-word address you wish to geocode.
:param str lang: two character language codes as supported by
the API (https://docs.what3words.com/api/v2/#lang).
:param bool exactly_one: Return one result or a list of results, if
available. Due to the address scheme there is always exactly one
result for each `3 words` address, so this parameter is rather
useless for this geocoder.
.. versionchanged:: 1.14.0
``exactly_one=False`` now returns a list of a single location.
This option wasn't respected before.
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
exception. Set this only if you wish to override, on this call
only, the value set during the geocoder's initialization.
:rtype: :class:`geopy.location.Location` or a list of them, if
``exactly_one=False``.
"""
if not self._check_query(query):
raise exc.GeocoderQueryError(
"Search string must be 'word.word.word'"
)
params = {
'addr': self.format_string % query,
'lang': lang.lower(),
'key': self.api_key,
}
url = "?".join((self.geocode_api, urlencode(params)))
logger.debug("%s.geocode: %s", self.__class__.__name__, url)
return self._parse_json(
self._call_geocoder(url, timeout=timeout),
exactly_one=exactly_one
)
def _parse_json(self, resources, exactly_one=True):
"""
Parse type, words, latitude, and longitude and language from a
JSON response.
"""
code = resources['status'].get('code')
if code:
# https://docs.what3words.com/api/v2/#errors
exc_msg = "Error returned by What3Words: %s" % resources['status']['message']
if code == 401:
raise exc.GeocoderAuthenticationFailure(exc_msg)
raise exc.GeocoderQueryError(exc_msg)
def parse_resource(resource):
"""
Parse record.
"""
if 'geometry' in resource:
words = resource['words']
position = resource['geometry']
latitude, longitude = position['lat'], position['lng']
if latitude and longitude:
latitude = float(latitude)
longitude = float(longitude)
return Location(words, (latitude, longitude), resource)
else:
raise exc.GeocoderParseError('Error parsing result.')
location = parse_resource(resources)
if exactly_one:
return location
else:
return [location]
def reverse(self, query, lang='en', exactly_one=True,
timeout=DEFAULT_SENTINEL):
"""
Return a `3 words` address by location point. Each point on surface has
a `3 words` address, so there's always a non-empty response.
:param query: The coordinates for which you wish to obtain the 3 word
address.
:type query: :class:`geopy.point.Point`, list or tuple of ``(latitude,
longitude)``, or string as ``"%(latitude)s, %(longitude)s"``.
:param str lang: two character language codes as supported by the
API (https://docs.what3words.com/api/v2/#lang).
:param bool exactly_one: Return one result or a list of results, if
available. Due to the address scheme there is always exactly one
result for each `3 words` address, so this parameter is rather
useless for this geocoder.
.. versionchanged:: 1.14.0
``exactly_one=False`` now returns a list of a single location.
This option wasn't respected before.
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before raising a :class:`geopy.exc.GeocoderTimedOut`
exception. Set this only if you wish to override, on this call
only, the value set during the geocoder's initialization.
:rtype: :class:`geopy.location.Location` or a list of them, if
``exactly_one=False``.
"""
params = {
'coords': self._coerce_point_to_string(query),
'lang': lang.lower(),
'key': self.api_key,
}
url = "?".join((self.reverse_api, urlencode(params)))
logger.debug("%s.reverse: %s", self.__class__.__name__, url)
return self._parse_reverse_json(
self._call_geocoder(url, timeout=timeout),
exactly_one=exactly_one
)
def _parse_reverse_json(self, resources, exactly_one=True):
"""
Parses a location from a single-result reverse API call.
"""
return self._parse_json(resources, exactly_one)
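# Usage sketch (the API key is a placeholder; "prom.cape.pump" is a sample
# three-word address):
#   from geopy.geocoders import What3Words
#   geolocator = What3Words(api_key="YOUR-API-KEY")
#   location = geolocator.geocode("prom.cape.pump")
#   print(location.latitude, location.longitude)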
| phborba/dsgtoolsop | auxiliar/geopy/geocoders/what3words.py | Python | gpl-2.0 | 8,452 |
from django.apps import AppConfig
from threading import Timer
import sys
class TelegramBotConfig(AppConfig):
name = 'telegrambot'
verbose_name = "Telegram Bot"
def ready(self):
from django.conf import settings
if 'runserver' in sys.argv:
from telegrambot.wrapper import Bot
b = Bot(settings.TELEGRAM_BOT_TOKEN)
if not settings.TELEGRAM_USE_WEBHOOK:
b.post('setWebhook',{'url':''})
print("Telegram WebHook Disabled")
Timer(10,b.getUpdates).start()
if settings.TELEGRAM_USE_WEBHOOK:
from telegrambot.wrapper import Bot
b = Bot(settings.TELEGRAM_BOT_TOKEN)
b.setWebhook()
import telegrambot.signals
        import telegrambot.connectors
 | meska/telegrambot | apps.py | Python | gpl-2.0 | 853 |
# -*- coding: utf-8 -*-
"""
logol_analyse provide some analyse tools for logol xml results.
Without any option, it will provide the number of hit, how many sequences have
at least one hit, and a graph with the repartition of the hits.
Usage:
logol_analyse.py <input> <data> [options]
options:
--graph, -g=<name> The graph name, to save it directly.
    --help, -h It calls help. UNBELIEVABLE!!!!!
--nograph -n No graph creation
--origin, -o INT The 0 emplacement on sequences [default: 150]
--position -p=<name> Return a file containing position of each motif
--result -r=<name> Save a fasta file with the matched sequences.
--signature, -s=<name> Create a file with for each sequences the hits.
--hits, -t Display a hits/sequences graph.
--version, -v Maybe it's a trap ^^
--xclude, -x=<name> Create a file containing all unmatched sequences
"""
##########
# IMPORT #
##########
import matplotlib.pyplot as plt
import pylab
import glob
import os
from docopt import docopt
from lxml import etree
from Bio import SeqIO
#############
# ARGUMENTS #
#############
if __name__ == '__main__':
arguments = docopt(__doc__, version = '1.3')
########
# MAIN #
########
def __main__(arguments):
total = 0
count = 0
hit = []
    se = set() # Contains sequence headers
hits_per_seq = []
# Here we check all the .xml file
for f in glob.glob(os.getcwd()+"/"+arguments['<input>']+"*.xml"):
nb_hit = 0
total += 1
tree = etree.parse(f)
# Collect of the hit beginning and ID
for seq in tree.xpath("/sequences/match/begin"):
count += 1
nb_hit +=1
hit.append(int(seq.text)-int(arguments['--origin']))
[se.add(a.text) for a in tree.xpath("/sequences/fastaHeader")]
if nb_hit > 0:
hits_per_seq.append(nb_hit)
print("Nombre de hits: "+str(count))
print("Nombre de séquences touchées: "+str(len(se))+" sur "+str(total))
print("Nombre max de hits par séquences: "+str(max(hits_per_seq)))
if arguments['--result'] != None:
seq_match(se)
if arguments['--xclude'] != None:
seq_no_match(se)
if arguments['--nograph'] == False:
graph(hit)
if arguments['--signature'] != None:
save_signature()
if arguments['--position'] != None:
save_position()
if arguments['--hits'] != False:
display_hits(hits_per_seq)
#############
# FUNCTIONS #
#############
def seq_match(seq):
out = open(os.getcwd()+'/'+arguments['--result'], 'w')
data = open(os.getcwd()+'/'+arguments['<data>'], "rU")
for s in SeqIO.parse(data, "fasta"):
if s.id in seq:
out.write(s.format("fasta"))
out.close()
data.close()
def seq_no_match(seq):
out = open(os.getcwd()+'/'+arguments['--xclude'], 'w')
data = open(os.getcwd()+'/'+arguments['<data>'], "rU")
for s in SeqIO.parse(data, "fasta"):
if s.id not in seq:
out.write(s.format("fasta"))
out.close()
data.close()
def graph(hit):
plt.hist(hit, range(min(hit), max(hit)))
plt.xticks(range(min(hit), max(hit), 10))
plt.xlabel("Emplacement des hits sur les séquences")
plt.ylabel("Nombre de hits")
if arguments['--graph'] != None:
plt.savefig(arguments['--graph']+'.png')
pylab.close()
else:
plt.show()
def save_signature():
sign = open(os.getcwd()+'/'+arguments['--signature'], 'w')
for f in glob.glob(os.getcwd()+"/"+arguments['<input>']+"*"):
fr = [] # Will have the last char of var, which is frag nb
c = 0
tree = etree.parse(f)
if tree.xpath("/sequences/match/variable") != []:
[sign.write('>'+h.text+'\n') for h in tree.xpath("/sequences/fastaHeader")]
[fr.append((int(i.get("name")[-1]))) for i in tree.xpath("/sequences/match/variable")]
m = max(fr) # Fragments number to have the complete match
for i in tree.xpath("/sequences/match/variable/content"):
c += 1
sign.write(i.text)
if c >= m:
sign.write("\n")
c = 0
sign.close()
def save_position():
begin = [] # Will contain all the begining number
end = []
seq = [] # Will contain all the sequences found
iD = [] # Will contair the sequences ID
n = 0 # nb of line we will have to write
i = 0
pos = open(os.getcwd()+'/'+arguments['--position'], 'w')
pos.write("ID\tbegin\tsequence\tend\n")
for f in glob.glob(os.getcwd()+"/"+arguments['<input>']+"*"):
tree = etree.parse(f)
for s in tree.xpath("/sequences/match/variable/content"):
n += 1
seq.append(s.text)
[iD.append(h.text) for h in tree.xpath("/sequences/fastaHeader")]
for b in tree.xpath("/sequences/match/variable/begin"):
begin.append(str(b.text))
for e in tree.xpath("/sequences/match/variable/end"):
end.append(str(e.text))
# Now, we write those info into the file
while i < n:
pos.write(iD[i]+"\t"+begin[i]+"\t"+seq[i]+"\t"+end[i]+"\n")
i += 1
pos.close()
def display_hits(hits_per_seq):
plt.hist(hits_per_seq, range(min(hits_per_seq), max(hits_per_seq)))
plt.xticks(range(min(hits_per_seq), max(hits_per_seq), 1))
plt.xlabel("Nombre de hits par séquences")
plt.ylabel("Nombre de séquences")
plt.show()
##########
# LAUNCH #
##########
__main__(arguments)
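# Usage sketch (file names are hypothetical; options come from the docopt
# usage string above):
#   python logol_analyse.py logol_results_ sequences.fasta -o 150 -g hits_graph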
| Nedgang/logol_analyse | analyse_logol.py | Python | gpl-2.0 | 5,660 |
#!/usr/bin/python
import sys
import csv
import json
import pycountry
import codecs
from collections import defaultdict
class Region(object):
def __init__(self, name, code=None, level=0, parent=None, verbose=False):
self.name = name
self.code = code
self.level = level
self.contains = []
self.parent = parent
try:
self.country = pycountry.countries.get(numeric='%03d' % (int(code)))
except:
self.country = None
if verbose:
print "Created region %s in parent %s" % (name, parent)
def add(self, region):
self.contains.append(region)
def get_countries(self):
'''
return list of countries (pycountry objects) in this region
'''
if self.country:
return [ self.country ]
clist = []
for region in self.contains:
clist += region.get_countries()
return clist
def includes_country(self, cc):
'''
Return True if this region has country with country code alpha2=cc
'''
if self.country is not None and self.country.alpha2==cc:
return True
for region in self.contains:
if region.includes_country(cc):
return True
return False
def __unicode__(self):
return ("<[%d] " % self.level) + self.name + " in " + str(self.parent or 'None') + " >"
__str__ = __unicode__
#cdat = csv.DictReader(codecs.open('un_world_geographic_regions.csv','rU', encoding='utf8'))
cdat = csv.DictReader(open('un_world_geographic_regions.csv','rU'))
regions = {}
regions_by_level = defaultdict(list)
current_region = Region("World", '001', 5)
verbose = True
stack = []
for cd in cdat:
level = int(cd.get('Level', 0) or 0)
inlevel = level
code = cd['Numerical_code']
name = cd['name']
if not name:
# skip blank
# print "skipping %s" % cd
continue
if not name in regions:
region = Region(name, code, level, parent=None)
regions[name] = region
regions_by_level[level].append(region)
else:
region = regions[name]
level = region.level
if level==0: # it's a country: always add to current_region
current_region.add(region)
if region.parent is None:
region.parent = current_region
elif inlevel < 0: # add to current_region, and don't change current_region
print "==> Adding to current_region"
print "Stack has %s" % map(str, stack)
current_region.add(region)
elif level < current_region.level: # subregion: add, then make region the current one
current_region.add(region)
if region.parent is None:
region.parent = current_region
stack.append(current_region) # use stack for parents
current_region = region
else: # go up until at right level
if verbose:
print "==> Going up tree"
print "Stack has %s" % map(str, stack)
while current_region.level <= level:
current_region = stack.pop()
current_region.add(region)
if region.parent is None:
region.parent = current_region
stack.append(current_region) # use stack for parents
current_region = region
if verbose:
print " added: " + str(region)
#-----------------------------------------------------------------------------
# output csv's of countries in each region, with alpha2 country code
print "-"*77
print "France: "
print regions['France']
print "Americas: "
print regions['Americas']
print "Haiti: "
print regions['Haiti']
print
print map(str, regions['Americas'].contains)
print regions['Asia']
print map(str, regions['Asia'].contains)
print regions['Europe']
print map(str, regions['Europe'].contains)
print regions['Africa']
print map(str, regions['Africa'].contains)
print "latin america:"
print regions['Latin America and the Caribbean']
print map(str, regions['Latin America and the Caribbean'].contains)
# sys.exit(0)
def dump_region(cset, name, verbose=True):
fn = "Countries_in_%s.csv" % name
fp = codecs.open(fn, 'w', encoding='utf8')
fp.write('cc, code, name\n')
for country in cset:
#fp.write(('%s,%s,' % (country.alpha2, country.numeric)) + country.name + '\n')
fp.write(('%s,%s,' % (country.alpha2, country.numeric)))
fp.write(country.name + '\n')
#fp.write(country.alpha2 + '\n')
fp.close()
if verbose:
print "Wrote %s" % fn
for level in range(4,0,-1):
print "Regions in Level %d: " % level
for r in regions_by_level[level]:
print " %s" % r
dump_region(r.get_countries(), r.name, verbose=False)
#-----------------------------------------------------------------------------
# Africa
print "-"*77
print "Countries in Africa:"
# cset = [ dict(name=x.name, cc=x.alpha2) for x in regions['Africa'].get_countries() ]
# print json.dumps(cset, indent=2)
dump_region(regions['Africa'].get_countries(), 'Africa')
#-----------------------------------------------------------------------------
# Least developed countries
print "-"*77
rname = "Least developed countries"
print "Countries in %s:" % rname
#cset = [ dict(name=x.name, cc=x.alpha2) for x in regions[rname].get_countries() ]
#print json.dumps(cset, indent=2)
dump_region(regions[rname].get_countries(), rname)
#-----------------------------------------------------------------------------
# developing nations
rnames = ['Africa', 'Americas', 'Caribbean', 'Central America', 'South America', 'Asia', 'Oceania']
rset = set()
for rname in rnames:
rset = rset.union(set(regions[rname].get_countries()))
dump_region(regions[rname].get_countries(), rname)
# remove northern america, Japan, Australia, New Zealand
northam = regions['Northern America'].get_countries()
rset = rset.difference(northam)
rset = rset.difference(regions['Japan'].get_countries())
rset = rset.difference(regions['Australia'].get_countries())
rset = rset.difference(regions['New Zealand'].get_countries())
dump_region(rset, 'Developing_Nations')
#-----------------------------------------------------------------------------
# sub-saharan africa = Africa - Northern Africa + Sudan
rnames = ['Africa']
rset = set()
for rname in rnames:
rset = rset.union(set(regions[rname].get_countries()))
rset = rset.difference(regions['Northern Africa'].get_countries())
rset = rset.union(set(regions['Sudan'].get_countries()))
dump_region(rset, 'Sub-Saharan-Africa')
| mitodl/world_geographic_regions | CountriesByRegion.py | Python | gpl-2.0 | 6,565 |
# Created by Thomas Jones on 17/12/2015 - [email protected]
# pingspec.py, a plugin for minqlx to spec players who have network latency over a certain amount.
# This plugin is released to everyone, for any purpose. It comes with no warranty, no guarantee it works, it's released AS IS.
# You can modify everything, except for lines 1-4 and the !tomtec_versions code. They're there to indicate I whacked this together originally. Please make it better :D
"""
This plugin requires Minqlx Core version v0.4.1 or greater.
The following cvars are used on this plugin:
qlx_pingSpecSecondsBetweenChecks: Specifies the seconds between checking every player's ping. Default: 15
qlx_pingSpecMaxPing: Specifies the maximum ping permitted on the server before the user is put to spec. Default: 125
"""
import minqlx
class pingspec(minqlx.Plugin):
def __init__(self):
self.add_hook("frame", self.process_frame, priority=minqlx.PRI_LOWEST)
self.add_command("tomtec_versions", self.cmd_showversion)
self.set_cvar_once("qlx_pingSpecSecondsBetweenChecks", "15")
self.set_cvar_once("qlx_pingSpecMaxPing", "125")
self.plugin_version = "1.3"
# Don't touch this:
self.frame_counter = 0
def process_frame(self):
self.frame_counter += 1
        if self.frame_counter >= (int(self.get_cvar("qlx_pingSpecSecondsBetweenChecks")) * int(self.get_cvar("sv_fps"))):
            self.frame_counter = 0
            self.check_ping()
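    # Frame-to-seconds conversion: the "frame" hook fires once per server
    # frame, so seconds * sv_fps gives the number of frames between checks
    # (e.g. 15 s * 40 fps = 600 frames, assuming the common sv_fps value of 40).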
def check_ping(self):
for player in self.players():
if player.ping > int(self.get_cvar("qlx_pingSpecMaxPing")):
if self.game.state == "warmup":
player.tell("^1Your ping is over the maximum ping tolerated here ({}).".format(self.get_cvar("qlx_pingSpecMaxPing")))
player.tell("You will be put to spec when the game starts if it remains above the threshold.")
else:
if player.team != "spectator":
player.put("spectator")
self.msg("{} has been put in spec automatically for having a ping over {}.".format(player.clean_name, self.get_cvar("qlx_pingSpecMaxPing")))
player.tell("^1Your ping is over {}, the threshold.^7".format(self.get_cvar("qlx_pingSpecMaxPing")))
player.tell("You have been put in spec.")
def cmd_showversion(self, player, msg, channel):
channel.reply("^4pingspec.py^7 - version {}, created by Thomas Jones on 17/12/2015.".format(self.plugin_version))
| tjone270/QuakeLiveDS_Scripts | minqlx-plugins/archive/beta/pingspec.py | Python | gpl-2.0 | 2,601 |
# Copyright (C) 2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Chris Lumens <[email protected]>
from dogtail.predicate import GenericPredicate
from dogtail.utils import doDelay
from . import UITestCase
# This test case handles the livecd case on the summary hub where everything
# works as intended. On this spoke, we are testing the following:
#
# * Clicking the Quit button brings up a dialog asking if you're sure, though
# we're not going to test that confirming actually quits.
# * The Begin Installation button is insensitive, since no disks have yet
# been selected.
# * Only the Date & Time, Keyboard, Installation Destination, and
#   Network & Hostname spoke selectors are visible.
class LiveCDSummaryTestCase(UITestCase):
def check_quit_button(self):
self.click_button("Quit")
self.check_dialog_displayed("Quit")
self.click_button("No")
def check_begin_installation_button(self):
button = self.find("Begin Installation", "push button")
self.assertIsNotNone(button, msg="Begin Installation button not found")
self.assertTrue(button.showing, msg="Begin Installation button should be displayed")
self.assertFalse(button.sensitive, msg="Begin Installation button should not be sensitive")
def check_shown_spoke_selectors(self):
# FIXME: This forces English.
validSelectors = ["DATE & TIME", "KEYBOARD", "INSTALLATION DESTINATION", "NETWORK & HOSTNAME"]
selectors = self.ana.findChildren(GenericPredicate(roleName="spoke selector"))
self.assertEqual(len(selectors), len(validSelectors), msg="Incorrect number of spoke selectors shown")
# Validate that only the spoke selectors we expect are shown. At the same time,
# we can also validate the status of each selector. This only validates the
# initial state of everything. Once we start clicking on spokes, things are
# going to change.
# FIXME: This encodes default information.
for selector in selectors:
if selector.name == "DATE & TIME":
self.assertEqual(selector.description, "Americas/New York timezone")
elif selector.name == "KEYBOARD":
self.assertEqual(selector.description, "English (US)")
elif selector.name == "INSTALLATION DESTINATION":
# We don't know how many disks are going to be involved - if there's
# just one, anaconda selects it by default. If there's more than
# one, it selects none.
self.assertIn(selector.description, ["Automatic partitioning selected",
"No disks selected"])
elif selector.name == "NETWORK & HOSTNAME":
self.assertRegexpMatches(selector.description, "Wired (.+) connected")
else:
self.fail("Invalid spoke selector shown on livecd: %s" % selector.name)
def _run(self):
# Before doing anything, verify we are on the right screen.
doDelay(5)
self.check_window_displayed("INSTALLATION SUMMARY")
# And now we can check everything else on the screen.
self.check_quit_button()
self.check_begin_installation_button()
self.check_shown_spoke_selectors()
self.check_warning_bar()
| itoed/anaconda | tests/gui/inside/summary.py | Python | gpl-2.0 | 3,986 |
# This file is part of BurnMan - a thermoelastic and thermodynamic toolkit for
# the Earth and Planetary Sciences
# Copyright (C) 2012 - 2022 by the BurnMan team, released under the GNU
# GPL v2 or later.
"""
example_fit_solution
--------------------
This example demonstrates how to fit parameters
for solution models using a range of compositionally-variable
experimental data.
The example in this file deals with finding optimized parameters
for the forsterite-fayalite binary using a mixture of volume
and seismic velocity data.
teaches:
- least squares fitting for solution data
"""
import numpy as np
import matplotlib.pyplot as plt
from numpy import random
import burnman
from burnman.utils.misc import pretty_print_values
from burnman.optimize.eos_fitting import fit_XPTp_data
from burnman.optimize.nonlinear_fitting import plot_residuals, extreme_values
from burnman.optimize.nonlinear_fitting import corner_plot
from burnman.optimize.nonlinear_fitting import weighted_residual_plot
if __name__ == "__main__":
# Set np.array printing precision to be low
# (for more readable covariance matrices)
np.set_printoptions(precision=1)
# First, let's create a solution to optimise.
# In this case, we choose a solution model that exists in
# the BurnMan repository; the Mg-Fe olivine from
# the Stixrude and Lithgow-Bertelloni dataset
solution = burnman.minerals.SLB_2011.mg_fe_olivine()
solution.set_state(1.e5, 300.)
print('Names of endmembers in the olivine solution:')
print(solution.endmember_names)
print('')
# Fit parameters are provided via a list of lists.
# The first element of each list is a string that corresponds
# either to one of the keys in an endmember parameter dictionary,
# or to an excess property for a binary join in the solution.
# The next parameters correspond to the indices of the endmembers
# to which the parameter corresponds.
    # Here, we choose to fit the standard state volume, the isothermal
    # bulk modulus and its first derivative, and the shear modulus for
    # both endmembers. Endmember 0 is forsterite; endmember 1 is fayalite.
# We also choose to fit the excess volume on the binary join.
fit_params = [['V_0', 0],
['V_0', 1],
['K_0', 0],
['K_0', 1],
['Kprime_0', 0],
['Kprime_0', 1],
['G_0', 0],
['G_0', 1],
['V', 0, 1]]
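    # Gloss: ['V_0', 0] targets forsterite's standard-state volume,
    # ['K_0', 1] fayalite's bulk modulus, and the two-index entry
    # ['V', 0, 1] the excess volume on the forsterite-fayalite join.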
# Next, we make some synthetic data
n_data = 100
data = []
data_covariances = []
flags = []
# For this example, we add some Gaussian noise
# to the volumes of olivines on the binary between
# 0-10 GPa and 300-1300 K.
# Here 1 standard deviation is set as 0.1% of the
# volume at P and T
f_Verror = 1.e-3
# Choose a specific seed for the random number generator
# so that this example is reproducible.
random.seed(10)
for i in range(n_data):
x_fa = random.random()
P = random.random() * 1.e10
T = random.random() * 1000. + 300.
X = [1.-x_fa, x_fa]
solution.set_composition(X)
solution.set_state(P, T)
        f = 1. + random.normal()*f_Verror
V = solution.V * f
data.append([1.-x_fa, x_fa, P, T, V])
data_covariances.append(np.zeros((5, 5)))
data_covariances[-1][4, 4] = np.power(solution.V*f_Verror, 2.)
flags.append('V')
# Here, we add one awful data point in the middle of the domain
# We do this to demonstrate the semi-automatic removal of bad data
# using extreme value theory.
solution.set_composition([0.5, 0.5])
solution.set_state(5.e9, 800.)
data.append([0.5, 0.5, 5.e9, 800., solution.V + 3.e-7])
data_covariances.append(np.zeros((5, 5)))
data_covariances[-1][4, 4] = np.power(solution.V*f_Verror, 2.)
flags.append('V')
# Now create some velocity data, again adding
# some Gaussian noise.
# Here 1 standard deviation is set as 1% of the
# P wave velocity at P and T
n_data = 20
f_Vperror = 1.e-2
for i in range(n_data):
x_fa = random.random()
P = random.random() * 1.e10
T = random.random() * 1000. + 300.
X = [1.-x_fa, x_fa]
solution.set_composition(X)
solution.set_state(P, T)
        f = 1. + random.normal()*f_Vperror
Vp = solution.p_wave_velocity * f
data.append([1.-x_fa, x_fa, P, T, Vp])
data_covariances.append(np.zeros((5, 5)))
data_covariances[-1][4, 4] = np.power(solution.p_wave_velocity
* f_Vperror, 2.)
flags.append('p_wave_velocity')
data = np.array(data)
data_covariances = np.array(data_covariances)
flags = np.array(flags)
# Here are some (optional) initial step sizes for the optimizer.
delta_params = np.array([1.e-8, 1.e-8, 1.e7, 1.e7,
1.e-1, 1.e-1, 1.e-1, 1.e-1, 1.e-8])
# And some bounds. For this example the bounds are not necessary,
# but if the data is somewhat shaky it can be useful to provide some
# guidance for the least squares minimizer.
bounds = np.array([[0, np.inf],
[0, np.inf],
[0, np.inf],
[0, np.inf],
[3.5, 6.],
[3.5, 6.],
[0, np.inf],
[0, np.inf],
[-np.inf, np.inf]])
param_tolerance = 1.e-5
# Finally, some post-processing options
confidence_interval = 0.95
remove_outliers = True
good_data_confidence_interval = 0.99
properties_for_data_comparison_plots = [('V', 1.e6, 'Volume (cm^3/mol)'),
('p_wave_velocity', 1.e-3,
'$V_P$ (km/s)')]
# The following line fits the parameters to the data we defined above.
print('Starting to fit user-defined data. Please be patient.')
fitted_eos = fit_XPTp_data(solution=solution,
flags=flags,
fit_params=fit_params,
data=data,
data_covariances=data_covariances,
delta_params=delta_params,
bounds=bounds,
param_tolerance=param_tolerance,
verbose=False)
# Print the optimized parameters
print('Optimized equation of state:')
pretty_print_values(fitted_eos.popt, fitted_eos.pcov,
fitted_eos.fit_params_strings)
print('\nParameters:')
print(fitted_eos.popt)
print('\nFull covariance matrix:')
print(fitted_eos.pcov)
print('\nGoodness of fit:')
print(fitted_eos.goodness_of_fit)
print('\n')
# Create a plot of the residuals
fig, ax = plt.subplots()
plot_residuals(ax=ax,
weighted_residuals=fitted_eos.weighted_residuals,
flags=fitted_eos.flags)
plt.show()
val_extreme = extreme_values(fitted_eos.weighted_residuals,
good_data_confidence_interval)
confidence_bound, indices, probabilities = val_extreme
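    # Gloss (per the unpacked names): confidence_bound is the weighted-residual
    # threshold implied by the chosen confidence interval, indices lists the
    # data points exceeding it, and probabilities their exceedance likelihoods.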
    if indices != [] and remove_outliers:
print(f'Removing {len(indices):d} outliers'
f' (at the {good_data_confidence_interval*100.:.1f}% '
'confidence interval) and refitting. '
'Please wait just a little longer.')
mask = [i for i in range(len(fitted_eos.weighted_residuals))
if i not in indices]
data = data[mask]
flags = flags[mask]
data_covariances = data_covariances[mask]
fitted_eos = fit_XPTp_data(solution=solution,
flags=flags,
fit_params=fit_params,
data=data,
data_covariances=data_covariances,
param_tolerance=param_tolerance,
verbose=False)
# Print the optimized parameters
print('Optimized equation of state:')
pretty_print_values(fitted_eos.popt, fitted_eos.pcov,
fitted_eos.fit_params_strings)
print('\nParameters:')
print(fitted_eos.popt)
print('\nFull covariance matrix:')
print(fitted_eos.pcov)
print('\nGoodness of fit:')
print(fitted_eos.goodness_of_fit)
print('\n')
# Create a plot of the residuals
fig, ax = plt.subplots()
plot_residuals(ax=ax,
weighted_residuals=fitted_eos.weighted_residuals,
flags=fitted_eos.flags)
plt.show()
# Create a corner plot of the covariances
fig, ax_array = corner_plot(popt=fitted_eos.popt,
pcov=fitted_eos.pcov,
param_names=fitted_eos.fit_params_strings)
plt.show()
# Create plots for the weighted residuals of each type of measurement
enum_prps = enumerate(properties_for_data_comparison_plots)
for i, (material_property, scaling, name) in enum_prps:
fig, ax = plt.subplots()
weighted_residual_plot(ax=ax,
model=fitted_eos,
flag=material_property,
sd_limit=3,
cmap=plt.cm.RdYlBu,
plot_axes=[2, 3],
scale_axes=[1.e-9, 1.])
ax.set_title(f'Weighted residual plot for {name:s}')
ax.set_xlabel('Pressure (GPa)')
ax.set_ylabel('Temperature (K)')
plt.show()
| geodynamics/burnman | examples/example_fit_solution.py | Python | gpl-2.0 | 9,817 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Authors : David Castellanos <[email protected]>
#
# Copyright (c) 2012, Telefonica Móviles España S.A.U.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
#
"""
TGCM Logging is a daemon designed to gather TGCM logs
"""
__version__ = '0.1'
__author__ = 'David Castellanos <[email protected]>'
| tgcmteam/tgcmlinux | src/tgcm/contrib/tgcm-logging/src/tgcmlogging/__init__.py | Python | gpl-2.0 | 1,034 |
# -*- coding: utf-8 -*-
__author__ = 'frank'
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.dialects import mysql
from datetime import datetime
db = SQLAlchemy()
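# Note: db is created unbound here; attaching the Flask app happens elsewhere
# (typically db.init_app(app) during application setup; assumed, not shown).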
# All timestamps are stored as UTC (datetime.utcnow); convert for display to
# match each local environment, e.g. when analysing the data or rendering
# client-side (the moment plugin handles front-end display).
class Application(db.Model):
"""twitter application"""
__tablename__ = 'applications'
id = db.Column(db.Integer, primary_key=True)
consumer_token = db.Column(db.String(30))
consumer_secret = db.Column(db.String(60))
created_at = db.Column(db.DateTime, default=datetime.utcnow)
is_valid = db.Column(db.Boolean, default=True)
access_tokens = db.relationship('AccessToken', backref='application',
lazy='dynamic',
order_by='desc(AccessToken.created_at)')
class User(db.Model):
"""用户信息表"""
__tablename__ = 'users'
# 其中id用于外键链接,user_id与api交互
# 针对于mysql数据库
id = db.Column(mysql.INTEGER(30), primary_key=True)
# id_str
user_id = db.Column(db.String(30))
name = db.Column(db.String(50))
screen_name = db.Column(db.String(50))
location = db.Column(db.String(30))
statuses_count = db.Column(db.Integer)
followers_count = db.Column(db.Integer)
    # number of accounts this user follows (the "following" count)
friends_count = db.Column(db.Integer)
created_at = db.Column(db.DateTime)
    # lower bound (since_id) for the next batch of statuses to fetch
since_id = db.Column(db.String(30), default='0')
    # whether this user is a monitoring target
is_target = db.Column(db.Boolean, default=False)
    # follower's id: this to-be-synced user is followed by monitor_user_id
monitor_user_id = db.Column(mysql.INTEGER(30))
    # avatar image URL
profile_image_url = db.Column(db.String(150))
    # homepage URL
url = db.Column(db.String(150))
access_tokens = db.relationship(
'AccessToken', backref='user', lazy='dynamic', order_by='desc(AccessToken.created_at)')
statuses = db.relationship(
'Status', backref='user', lazy='dynamic', order_by='desc(Status.created_at)')
def __repr__(self):
return 'User %s' % self.screen_name
class AccessToken(db.Model):
"""access_token信息表"""
__tablename__ = 'accesstokens'
id = db.Column(db.Integer, primary_key=True)
access_token = db.Column(db.String(50))
access_token_secret = db.Column(db.String(45))
is_valid = db.Column(db.Boolean, default=True)
created_at = db.Column(db.DateTime, default=datetime.utcnow)
user_id = db.Column(mysql.INTEGER(30), db.ForeignKey('users.id'))
    application_id = db.Column(db.Integer, db.ForeignKey('applications.id'))
def __repr__(self):
return "AccessToken userid %d" % self.user_id
class Status(db.Model):
"""状态信息表"""
__tablename__ = 'statuses'
    # MySQL-specific integer type
id = db.Column(mysql.INTEGER(30), primary_key=True)
# twitter_status_id
status_id = db.Column(db.String(30))
text = db.Column(db.String(150))
created_at = db.Column(db.DateTime)
    # media image URL
media_url = db.Column(db.String(150))
    # followee's id: this tweet was posted by user_id
user_id = db.Column(mysql.INTEGER(30), db.ForeignKey('users.id'))
    # follower's id: this tweet was collected because that user follows the synced user
monitor_user_id = db.Column(mysql.INTEGER(30))
def __repr__(self):
print "Status %s" % self.status_id
| Fansion/crawltwitter | crawltwitter/models.py | Python | gpl-2.0 | 3,522 |
import sys
import os
import arcpy
import arcEC
def GNDBruninTOC_exe_G2N(parameters, messages):
# *** Start Edtiting etc. the receiving layer
# http://resources.arcgis.com/en/help/main/10.2/index.html#//00s300000008000000
# XXX Check that we are actually in an edit session ...
workspace = r"C:\Users\b004218\AppData\Roaming\ESRI\Desktop10.3\ArcCatalog\[email protected]"#\NIS.Vores_Navne" #os.path.dirname(lay_in.dataSource)
lay_in = "\NIS.Vores_Navne\NIS.NamesAtest"
arcEC.SetMsg(" WS : "+str(workspace), 0)
edit = arcpy.da.Editor(workspace)
    edit.startEditing(False)  # Edit session is started without (False) an undo/redo stack for versioned data
edit.startOperation() # Start an edit operation
# *** for each record:
arcEC.SetMsg("\nRunning through the rows ...", 0)
    lst_fields_we_want = ["GST_NID", "OBJNAM", "NOBJNM", "NIS_EDITOR_COMMENT", "NAMETYPE"]
    lst_fails = list()
    num_row_count = 0
    num_row_changed = 0
    with arcpy.da.UpdateCursor(workspace+lay_in, lst_fields_we_want, "GST_NID IS NOT NULL") as cursor:
        for row in cursor:
            num_row_count += 1
            pass  # XXX row processing not implemented yet
    # *** Stop editing etc. the receiving layer
    edit.stopOperation()  # Stop the edit operation.
    edit.stopEditing(True)  # Stop the edit session and save the changes
    arcEC.SetMsg("Processed rows : "+str(num_row_count), 0)
    arcEC.SetMsg(" Changed rows : "+str(num_row_changed), 0)
    arcEC.SetMsg(" Failed rows : "+str(len(lst_fails)), 0)
    return len(lst_fails)
# *** End of function GNDBruninTOC()
if __name__ == "__main__":
# This allows the 'executes' to be called from classic .tbx
parameters = [arcpy.GetParameterAsText(0), arcpy.GetParameterAsText(1), arcpy.GetParameterAsText(2), arcpy.GetParameterAsText(3)]
messages = []
result = GNDBruninTOC_exe_G2N(parameters, messages)
# *** End of Script ***
# Music that accompanied the coding of this script:
# Kid Creole & the Coconuts - Tropical gangster
| MartinHvidberg/GNDB | GNDB_exe_GNDB2NamesA.py | Python | gpl-2.0 | 1,978 |
#!/usr/bin/env python
import matplotlib.pyplot as plt
# dict: {title of plot : [measure value files]}
# Blank lines (including a trailing newline) in the input files are skipped.
inputFiles = {'LibMergeSort_Sortierszenarien_im_Vergleich':
['sorted', 'shuffle', 'reverse']}
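# Each input file holds one whitespace-separated measurement per line:
#   <number of elements N> <runtime in seconds> <standard error>
# (format inferred from the parsing below; e.g. "1000 0.012 0.001" is
# an illustrative line, not real data).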
# different colors of the function graphs
COLORS = ['g', 'k', 'm']
print(inputFiles.items())
for outputFileName, fileNames in inputFiles.items():
    counter = 0  # reset the color index for each figure
fig = plt.figure()
ax1 = fig.add_subplot(111)
for fileName in fileNames:
with open(fileName) as f:
            data = f.read()
        # drop blank rows so a trailing newline at file end is harmless
        data = [row for row in data.split('\n') if row.strip()]
        #print(str(fileName) + str(data))
        x = [float(row.split()[0]) for row in data]
        y = [float(row.split()[1]) for row in data]
        err = [float(row.split()[2]) for row in data]
ax1.plot(x, y, c=COLORS[counter], label=fileName)
ax1.errorbar(x, y, yerr=err, fmt='_',
ecolor=COLORS[counter], capthick=2)
counter = counter + 1
# ax1.set_title(outputFileName)
ax1.set_xlabel('Anzahl Elemente N')
ax1.set_ylabel('Laufzeit [s]')
leg = ax1.legend(loc='upper left')
#leg = ax1.legend(loc='lower right')
# ax1.set_yscale('log')
#fig.savefig(outputFileName + '.png', format='png')
fig.savefig(outputFileName + '.pdf', format='pdf')
# plt.show()
| rm--/matplotlib_examples | example1/evaluate1.py | Python | gpl-2.0 | 1,327 |
from django.contrib.auth.models import User, Permission, Group
from rest_framework import serializers
from apps.account.serializers.PermissionSerializer import PermissionSerializer
class GroupSerializer(serializers.ModelSerializer):
    permissions = PermissionSerializer(many=True, read_only=True)
class Meta:
model = Group
        fields = ('name', 'permissions')
| mmmaaaxxx77/Python-Django-AdminLTE2 | Demo/apps/account/serializers/GroupSerializer.py | Python | gpl-2.0 | 384 |
from rest_framework import permissions
class IsOwner(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
# Only allow owners of an object to view or edit it
return obj.owner == request.user
| lesavoie/nagiosservice | controlserver/servicelevelinterface/permissions.py | Python | gpl-2.0 | 238 |
#
# Copyright 2012-2013 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import os.path
import contextlib
import errno
import logging
import sys
import threading
from testrunner import VdsmTestCase as TestCaseBase
from testrunner import permutations, expandPermutations
from testValidation import checkSudo
from testValidation import stresstest
from vdsm import utils
from vdsm import constants
import time
EXT_SLEEP = "sleep"
class RetryTests(TestCaseBase):
def testStopCallback(self):
counter = [0]
limit = 4
def stopCallback():
counter[0] += 1
if counter[0] == limit:
return True
return False
def foo():
raise RuntimeError("If at first you don't succeed, try, try again."
"Then quit. There's no point in being a damn"
"fool about it.")
# W. C. Fields
self.assertRaises(RuntimeError, utils.retry, foo, tries=(limit + 10),
sleep=0, stopCallback=stopCallback)
# Make sure we had the proper amount of iterations before failing
self.assertEquals(counter[0], limit)
class PidStatTests(TestCaseBase):
def test(self):
args = ["sleep", "3"]
sproc = utils.execCmd(args, sync=False)
stats = utils.pidStat(sproc.pid)
pid = int(stats.pid)
# procName comes in the format of (procname)
name = stats.comm
self.assertEquals(pid, sproc.pid)
self.assertEquals(name, args[0])
sproc.kill()
sproc.wait()
class PgrepTests(TestCaseBase):
def test(self):
sleepProcs = []
for i in range(3):
sleepProcs.append(utils.execCmd([EXT_SLEEP, "3"], sync=False,
sudo=False))
pids = utils.pgrep(EXT_SLEEP)
for proc in sleepProcs:
self.assertTrue(proc.pid in pids, "pid %d was not located by pgrep"
% proc.pid)
for proc in sleepProcs:
proc.kill()
proc.wait()
class GetCmdArgsTests(TestCaseBase):
def test(self):
args = [EXT_SLEEP, "4"]
sproc = utils.execCmd(args, sync=False)
try:
self.assertEquals(utils.getCmdArgs(sproc.pid), tuple(args))
finally:
sproc.kill()
sproc.wait()
def testZombie(self):
args = [EXT_SLEEP, "0"]
sproc = utils.execCmd(args, sync=False)
sproc.kill()
try:
test = lambda: self.assertEquals(utils.getCmdArgs(sproc.pid),
tuple())
utils.retry(AssertionError, test, tries=10, sleep=0.1)
finally:
sproc.wait()
class CommandPathTests(TestCaseBase):
def testExisting(self):
cp = utils.CommandPath('sh', 'utter nonsense', '/bin/sh')
self.assertEquals(cp.cmd, '/bin/sh')
def testMissing(self):
NAME = 'nonsense'
try:
utils.CommandPath(NAME, 'utter nonsense').cmd
except OSError as e:
self.assertEquals(e.errno, errno.ENOENT)
self.assertIn(NAME, e.strerror)
class GeneralUtilsTests(TestCaseBase):
def testPanic(self):
self.assertRaises(AssertionError, utils.panic, "panic test")
def testAnyFnmatch(self):
self.assertTrue(utils.anyFnmatch('test1', ['test0', 'test1']))
def testReadMemInfo(self):
meminfo = utils.readMemInfo()
# most common fields as per man 5 proc
# add your own here
fields = ('MemTotal', 'MemFree', 'Buffers', 'Cached', 'SwapCached',
'Active', 'Inactive', 'SwapTotal', 'SwapFree', 'Dirty',
'Writeback', 'Mapped', 'Slab', 'VmallocTotal',
'VmallocUsed', 'VmallocChunk')
for field in fields:
self.assertIn(field, meminfo)
self.assertTrue(isinstance(meminfo[field], int))
def testParseMemInfo(self):
testPath = os.path.realpath(__file__)
dirName = os.path.dirname(testPath)
path = os.path.join(dirName, "mem_info.out")
with open(path) as f:
meminfo = utils._parseMemInfo(f.readlines())
# testing some random fields
self.assertEquals(meminfo['NFS_Unstable'], 0)
self.assertEquals(meminfo['KernelStack'], 2760)
self.assertEquals(meminfo['Inactive'], 1432748)
def testGrouper(self):
iterable = '1234567890'
grouped = [('1', '2'), ('3', '4'), ('5', '6'), ('7', '8'), ('9', '0')]
self.assertEquals(list(utils.grouper(iterable, 2)), grouped)
iterable += 'a'
grouped.append(('a', None))
self.assertEquals(list(utils.grouper(iterable, 2)), grouped)
iterable += 'bcde'
grouped = [('1', '2', '3'), ('4', '5', '6'), ('7', '8', '9'),
('0', 'a', 'b'), ('c', 'd', 'e')]
self.assertEquals(list(utils.grouper(iterable, 3)), grouped)
grouped = [('1', '2', '3', '4', '5'), ('6', '7', '8', '9', '0'),
('a', 'b', 'c', 'd', 'e')]
self.assertEquals(list(utils.grouper(iterable, 5)), grouped)
class AsyncProcessOperationTests(TestCaseBase):
def _echo(self, text):
proc = utils.execCmd(["echo", "-n", "test"], sync=False)
def parse(rc, out, err):
return out
return utils.AsyncProcessOperation(proc, parse)
def _sleep(self, t):
proc = utils.execCmd(["sleep", str(t)], sync=False)
return utils.AsyncProcessOperation(proc)
def _fail(self, t):
proc = utils.execCmd(["sleep", str(t)], sync=False)
def parse(rc, out, err):
raise Exception("TEST!!!")
return utils.AsyncProcessOperation(proc, parse)
def test(self):
aop = self._sleep(1)
self.assertEquals(aop.result(), ((0, "", ""), None))
def testAlreadyExitedSuccess(self):
aop = self._sleep(0)
time.sleep(1)
self.assertEquals(aop.result(), ((0, "", ""), None))
def testAlreadyExitedFail(self):
aop = self._sleep("hello")
time.sleep(1)
        ((rc, out, err), error) = aop.result()
        self.assertEquals(error, None)
self.assertEquals(rc, 1)
def testWait(self):
aop = self._sleep(1)
aop.wait(timeout=2)
def testParser(self):
aop = self._echo("test")
self.assertEquals(aop.result(), ("test", None))
def testStop(self):
aop = self._sleep(10)
aop.stop()
start = time.time()
aop.result()
end = time.time()
duration = end - start
self.assertTrue(duration < 2)
def testException(self):
aop = self._fail(1)
res, err = aop.result()
self.assertEquals(res, None)
self.assertNotEquals(err, None)
class CallbackChainTests(TestCaseBase):
def testCanPassIterableOfCallbacks(self):
f = lambda: False
callbacks = [f] * 10
chain = utils.CallbackChain(callbacks)
self.assertEqual(list(chain.callbacks), callbacks)
def testEmptyChainIsNoop(self):
chain = utils.CallbackChain()
self.assertFalse(chain.callbacks)
chain.start()
chain.join()
# assert exception isn't thrown in start on empty chain
def testAllCallbacksAreInvokedIfTheyReturnFalse(self):
n = 10
counter = [n]
def callback():
counter[0] -= 1
return False
chain = utils.CallbackChain([callback] * n)
chain.start()
chain.join()
self.assertEqual(counter[0], 0)
def testChainStopsAfterSuccessfulCallback(self):
n = 10
counter = [n]
def callback():
counter[0] -= 1
return counter[0] == 5
chain = utils.CallbackChain([callback] * n)
chain.start()
chain.join()
self.assertEquals(counter[0], 5)
def testArgsPassedToCallback(self):
callbackArgs = ('arg', 42, 'and another')
callbackKwargs = {'some': 42, 'kwargs': []}
def callback(*args, **kwargs):
self.assertEqual(args, callbackArgs)
self.assertEqual(kwargs, callbackKwargs)
chain = utils.CallbackChain()
chain.addCallback(callback, *callbackArgs, **callbackKwargs)
chain.start()
chain.join()
@contextlib.contextmanager
def loghandler(handler, logger=""):
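    """Temporarily attach handler to the named logger; detach on exit (test helper)."""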
log = logging.getLogger(logger)
log.addHandler(handler)
try:
yield {}
finally:
log.removeHandler(handler)
class TracebackTests(TestCaseBase):
def __init__(self, *a, **kw):
self.record = None
        super(TracebackTests, self).__init__(*a, **kw)
def testDefaults(self):
@utils.traceback()
def fail():
raise Exception
with loghandler(self):
self.assertRaises(Exception, fail)
self.assertEquals(self.record.name, "root")
self.assertTrue(self.record.exc_text is not None)
def testOn(self):
logger = "test"
@utils.traceback(on=logger)
def fail():
raise Exception
with loghandler(self, logger=logger):
self.assertRaises(Exception, fail)
self.assertEquals(self.record.name, logger)
def testMsg(self):
@utils.traceback(msg="WAT")
def fail():
raise Exception
with loghandler(self):
self.assertRaises(Exception, fail)
self.assertEquals(self.record.message, "WAT")
# Logging handler interface
level = logging.DEBUG
def acquire(self):
pass
def release(self):
pass
def handle(self, record):
assert self.record is None
self.record = record
class RollbackContextTests(TestCaseBase):
class UndoException(Exception):
"""A special exception for testing exceptions during undo functions"""
class OriginalException(Exception):
"""A special exception for testing exceptions in the with statement"""
def setUp(self):
self._called = 0
def _callDef(self):
self._called += 1
self.log.info("Incremented call count (%d)", self._called)
def _raiseDef(self, ex=Exception()):
self.log.info("Raised exception (%s)", ex.__class__.__name__)
raise ex
def test(self):
with utils.RollbackContext() as rollback:
rollback.prependDefer(self._callDef)
self.assertEquals(self._called, 1)
def testRaise(self):
"""
Test that raising an exception in a deferred action does
not block all subsequent actions from running
"""
try:
with utils.RollbackContext() as rollback:
rollback.prependDefer(self._callDef)
rollback.prependDefer(self._raiseDef)
rollback.prependDefer(self._callDef)
except Exception:
self.assertEquals(self._called, 2)
return
self.fail("Exception was not raised")
def testFirstUndoException(self):
"""
        Test that if multiple actions raise an exception, only the first one
        is raised. When performing a batch of rollback operations, the first
        exception is probably the root cause.
"""
try:
with utils.RollbackContext() as rollback:
rollback.prependDefer(self._callDef)
rollback.prependDefer(self._raiseDef)
rollback.prependDefer(self._callDef)
rollback.prependDefer(self._raiseDef, RuntimeError())
rollback.prependDefer(self._callDef)
except RuntimeError:
self.assertEquals(self._called, 3)
return
except Exception:
self.fail("Wrong exception was raised")
self.fail("Exception was not raised")
def testKeyError(self):
"""
        KeyError is raised with a tuple as its args rather than a plain
        message. Re-raising it should be aware of this fact and handle it
        carefully.
"""
try:
with utils.RollbackContext():
{}['aKey']
except KeyError:
return
except Exception:
self.fail("Wrong exception was raised")
self.fail("Exception was not raised")
def testPreferOriginalException(self):
"""
Test that if an exception is raised both from the with
statement and from the finally clause, the one from the with
statement is the one that's actually raised.
More info in: http://docs.python.org/
2.6/library/stdtypes.html#contextmanager.__exit__
"""
try:
with utils.RollbackContext() as rollback:
rollback.prependDefer(self._raiseDef, self.UndoException())
raise self.OriginalException()
except self.OriginalException:
return
except self.UndoException:
self.fail("Wrong exception was raised - from undo function. \
should have re-raised OriginalException")
except Exception:
self.fail("Wrong exception was raised")
self.fail("Exception was not raised")
@expandPermutations
class ExecCmdTest(TestCaseBase):
CMD_TYPES = ((tuple,), (list,), (iter,))
@permutations(CMD_TYPES)
def testNormal(self, cmd):
rc, out, _ = utils.execCmd(cmd(('echo', 'hello world')))
self.assertEquals(rc, 0)
self.assertEquals(out[0], 'hello world')
@permutations(CMD_TYPES)
def testIoClass(self, cmd):
rc, out, _ = utils.execCmd(cmd(('ionice',)), ioclass=2,
ioclassdata=3)
self.assertEquals(rc, 0)
self.assertEquals(out[0].strip(), 'best-effort: prio 3')
@permutations(CMD_TYPES)
def testNice(self, cmd):
rc, out, _ = utils.execCmd(cmd(('cat', '/proc/self/stat')), nice=7)
self.assertEquals(rc, 0)
self.assertEquals(int(out[0].split()[18]), 7)
@permutations(CMD_TYPES)
def testSetSid(self, cmd):
cmd_args = (constants.EXT_PYTHON, '-c',
'import os; print os.getsid(os.getpid())')
rc, out, _ = utils.execCmd(cmd(cmd_args), setsid=True)
self.assertNotEquals(int(out[0]), os.getsid(os.getpid()))
@permutations(CMD_TYPES)
def testSudo(self, cmd):
checkSudo(['echo'])
rc, out, _ = utils.execCmd(cmd(('grep', 'Uid', '/proc/self/status')),
sudo=True)
self.assertEquals(rc, 0)
self.assertEquals(int(out[0].split()[2]), 0)
class ExecCmdStressTest(TestCaseBase):
CONCURRENCY = 50
FUNC_DELAY = 0.01
FUNC_CALLS = 40
BLOCK_SIZE = 4096
BLOCK_COUNT = 256
def setUp(self):
self.data = None # Written to process stdin
self.workers = []
self.resume = threading.Event()
@stresstest
def test_read_stderr(self):
self.check(self.read_stderr)
@stresstest
def test_read_stdout_stderr(self):
self.check(self.read_stdout_stderr)
@stresstest
def test_write_stdin_read_stderr(self):
self.data = 'x' * self.BLOCK_SIZE * self.BLOCK_COUNT
self.check(self.write_stdin_read_stderr)
def check(self, func):
for i in xrange(self.CONCURRENCY):
worker = Worker(self.resume, func, self.FUNC_CALLS,
self.FUNC_DELAY)
self.workers.append(worker)
worker.start()
for worker in self.workers:
worker.wait()
self.resume.set()
for worker in self.workers:
worker.join()
for worker in self.workers:
if worker.exc_info:
t, v, tb = worker.exc_info
raise t, v, tb
def read_stderr(self):
args = ['if=/dev/zero',
'of=/dev/null',
'bs=%d' % self.BLOCK_SIZE,
'count=%d' % self.BLOCK_COUNT]
self.run_dd(args)
def read_stdout_stderr(self):
args = ['if=/dev/zero',
'bs=%d' % self.BLOCK_SIZE,
'count=%d' % self.BLOCK_COUNT]
out = self.run_dd(args)
size = self.BLOCK_SIZE * self.BLOCK_COUNT
if len(out) < size:
raise self.failureException("Partial read: %d/%d" % (
len(out), size))
def write_stdin_read_stderr(self):
args = ['of=/dev/null',
'bs=%d' % self.BLOCK_SIZE,
'count=%d' % self.BLOCK_COUNT]
self.run_dd(args)
def run_dd(self, args):
cmd = [constants.EXT_DD]
cmd.extend(args)
rc, out, err = utils.execCmd(cmd, raw=True, data=self.data)
if rc != 0:
raise self.failureException("Process failed: rc=%d err=%r" %
(rc, err))
if err == '':
raise self.failureException("No data from stderr")
return out
class Worker(object):
def __init__(self, resume, func, func_calls, func_delay):
self.exc_info = None
self._resume = resume
self._func = func
self._func_calls = func_calls
self._func_delay = func_delay
self._ready = threading.Event()
self._thread = threading.Thread(target=self._run)
self._thread.daemon = True
def start(self):
self._thread.start()
def wait(self):
self._ready.wait()
def join(self):
self._thread.join()
def _run(self):
try:
self._ready.set()
self._resume.wait()
for n in range(self._func_calls):
self._func()
time.sleep(self._func_delay)
except Exception:
self.exc_info = sys.exc_info()
class List2CmdlineTests(TestCaseBase):
def test_simple(self):
args = ['/usr/bin/dd', 'iflag=direct',
'if=/dev/a70a4106-24f2-4599-be3e-934fee6e4499/metadata',
'bs=4096', 'count=1']
line = ' '.join(args)
self.assertEquals(utils._list2cmdline(args), line)
def test_whitespace(self):
args = ['a b', ' c ', 'd\t', '\ne']
line = "'a b' ' c ' 'd\t' '\ne'"
self.assertEquals(utils._list2cmdline(args), line)
def test_unsafe(self):
args = [c for c in '><*?[]`$|;&()#$\\"']
line = ' '.join("'" + c + "'" for c in args)
self.assertEquals(utils._list2cmdline(args), line)
def test_safe(self):
# Stolen from pipes._safechars
line = ' '.join('%+,-./0123456789:=@ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdef'
'ghijklmnopqrstuvwxyz')
args = line.split()
self.assertEquals(utils._list2cmdline(args), line)
def test_single_quote(self):
args = ["don't", "try 'this'", "'at home'"]
line = r"'don'\''t' 'try '\''this'\''' ''\''at home'\'''"
self.assertEquals(utils._list2cmdline(args), line)
def test_empty_arg(self):
self.assertEquals(utils._list2cmdline(['a', '', 'b']), "a '' b")
def test_empty(self):
self.assertEquals(utils._list2cmdline([]), "")
| futurice/vdsm | tests/utilsTests.py | Python | gpl-2.0 | 19,919 |
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) 2014
# Gmail:liuzheng712
#
__author__ = 'liuzheng'
from django.conf.urls import patterns, include, url
from django.contrib import admin
import os
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'pyHBase.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', 'pyHBase.views.index'),
url(r'^admin/(.*)$', 'pyHBase.views.admin'),
url(r'^DJadmin/', include(admin.site.urls)),
url(r'^css/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': os.path.join(os.path.dirname(__file__), '../templates/css').replace('\\', '/')}),
url(r'^js/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': os.path.join(os.path.dirname(__file__), '../templates/js').replace('\\', '/')}),
url(r'^img/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': os.path.join(os.path.dirname(__file__), '../templates/img').replace('\\', '/')}),
(r'^api/', include('api.urls')),
)
| liuzheng712/pyHBaseadmin | pyHBase/pyHBase/urls.py | Python | gpl-2.0 | 1,013 |
#
# upgrade_bootloader_gui.py: gui bootloader dialog for upgrades
#
# Copyright (C) 2002, 2007 Red Hat, Inc. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author(s): Jeremy Katz <[email protected]>
#
# must replace with explicit form so update disks will work
from iw_gui import *
import gtk
from booty import checkbootloader
from storage.devices import devicePathToName
from constants import *
import gettext
_ = lambda x: gettext.ldgettext("anaconda", x)
import logging
log = logging.getLogger("anaconda")
class UpgradeBootloaderWindow (InstallWindow):
windowTitle = N_("Upgrade Boot Loader Configuration")
def getPrev(self):
pass
def getNext(self):
if self.nobl_radio.get_active():
self.dispatch.skipStep("bootloadersetup", skip = 1)
self.dispatch.skipStep("bootloader", skip = 1)
self.dispatch.skipStep("bootloaderadvanced", skip = 1)
self.dispatch.skipStep("instbootloader", skip = 1)
elif self.newbl_radio.get_active():
self.dispatch.skipStep("bootloadersetup", skip = 0)
self.dispatch.skipStep("bootloader", skip = 0)
self.dispatch.skipStep("bootloaderadvanced", skip = 0)
self.dispatch.skipStep("instbootloader", skip = 0)
self.bl.doUpgradeOnly = 0
else:
self.dispatch.skipStep("bootloadersetup", skip = 0)
self.dispatch.skipStep("bootloader", skip = 1)
self.dispatch.skipStep("bootloaderadvanced", skip = 1)
self.dispatch.skipStep("instbootloader", skip = 0)
self.bl.doUpgradeOnly = 1
if self.type == "GRUB":
self.bl.useGrubVal = 1
else:
self.bl.useGrubVal = 0
self.bl.setDevice(devicePathToName(self.bootDev))
def _newToLibata(self, rootPath):
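        """
        Heuristic (per the checks below): return True when the running kernel
        has libata loaded but some disk's SCSI driver module (listed in
        /tmp/scsidisks) is absent from the installed system's
        /etc/modprobe.conf; in that case device naming has likely changed and
        the existing boot loader configuration cannot be auto-updated.
        """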
# NOTE: any changes here need to be done in upgrade_bootloader_text too
try:
f = open("/proc/modules", "r")
buf = f.read()
if buf.find("libata") == -1:
return False
except:
log.debug("error reading /proc/modules")
pass
try:
f = open(rootPath + "/etc/modprobe.conf")
except:
log.debug("error reading /etc/modprobe.conf")
return False
modlines = f.readlines()
f.close()
try:
f = open("/tmp/scsidisks")
except:
log.debug("error reading /tmp/scsidisks")
return False
mods = []
for l in f.readlines():
(disk, mod) = l.split()
if mod.strip() not in mods:
mods.append(mod.strip())
f.close()
for l in modlines:
stripped = l.strip()
if stripped == "" or stripped[0] == "#":
continue
if stripped.find("scsi_hostadapter") != -1:
mod = stripped.split()[-1]
if mod in mods:
mods.remove(mod)
if len(mods) > 0:
return True
return False
def getScreen(self, anaconda):
self.dispatch = anaconda.dispatch
self.bl = anaconda.id.bootloader
newToLibata = self._newToLibata(anaconda.rootPath)
(self.type, self.bootDev) = \
checkbootloader.getBootloaderTypeAndBoot(anaconda.rootPath, storage=anaconda.id.storage)
self.update_radio = gtk.RadioButton(None, _("_Update boot loader configuration"))
updatestr = _("This will update your current boot loader.")
if newToLibata or (self.type is None or self.bootDev is None):
if newToLibata:
current = _("Due to system changes, your boot loader "
"configuration can not be automatically updated.")
else:
current = _("The installer is unable to detect the boot loader "
"currently in use on your system.")
self.update_label = gtk.Label("%s" % (updatestr,))
self.update_radio.set_sensitive(False)
self.update_label.set_sensitive(False)
update = 0
else:
current = _("The installer has detected the %(type)s boot loader "
"currently installed on %(bootDev)s.") \
% {'type': self.type, 'bootDev': self.bootDev}
self.update_label = gtk.Label("%s %s" % (updatestr,
_("This is the recommended option.")))
self.update_radio.set_active(False)
update = 1
self.newbl_radio = gtk.RadioButton(self.update_radio,
_("_Create new boot loader "
"configuration"))
self.newbl_label = gtk.Label(_("This option creates a "
"new boot loader configuration. If "
"you wish to switch boot loaders, you "
"should choose this."))
self.newbl_radio.set_active(False)
self.nobl_radio = gtk.RadioButton(self.update_radio,
_("_Skip boot loader updating"))
self.nobl_label = gtk.Label(_("This option makes no changes to boot "
"loader configuration. If you are "
"using a third party boot loader, you "
"should choose this."))
self.nobl_radio.set_active(False)
for label in [self.update_label, self.nobl_label, self.newbl_label]:
label.set_alignment(0.8, 0)
label.set_size_request(275, -1)
label.set_line_wrap(True)
str = _("What would you like to do?")
# if they have one, the default is to update, otherwise the
# default is to not touch anything
if update == 1:
default = self.update_radio
elif newToLibata:
default = self.newbl_radio
else:
default = self.nobl_radio
if not self.dispatch.stepInSkipList("bootloader"):
self.newbl_radio.set_active(True)
elif self.dispatch.stepInSkipList("instbootloader"):
self.nobl_radio.set_active(True)
else:
default.set_active(True)
box = gtk.VBox(False, 5)
label = gtk.Label(current)
label.set_line_wrap(True)
label.set_alignment(0.5, 0.0)
label.set_size_request(300, -1)
        label2 = gtk.Label(msg)
label2.set_line_wrap(True)
label2.set_alignment(0.5, 0.0)
label2.set_size_request(300, -1)
box.pack_start(label, False)
box.pack_start(label2, False, padding = 10)
box.pack_start(self.update_radio, False)
box.pack_start(self.update_label, False)
box.pack_start(self.nobl_radio, False)
box.pack_start(self.nobl_label, False)
box.pack_start(self.newbl_radio, False)
box.pack_start(self.newbl_label, False)
a = gtk.Alignment(0.2, 0.1)
a.add(box)
return a
| icomfort/anaconda | iw/upgrade_bootloader_gui.py | Python | gpl-2.0 | 7,815 |
#!/usr/bin/env python
#
# This script makes a checkout of cartoweb cvs, and launches the unit tests.
# If no update of cvs was done, it does not run the tests.
#
# In case of a test failure, an email is sent to a specified address.
# Configuration: change to match your environment
DISABLING_FILE = "/tmp/auto_test_disabled"
CVSROOT=':pserver:[email protected]:/var/lib/cvs/projects/cw3'
EMAIL = "[email protected]"
PHP_PATH = "php"
BASE_URL="'http://example.com/auto_test/'"
SMTP_SERVER = "example.com"
# You shouldn't need to change these
CVS_DIR='cvs'
LOG_FILE='log.txt'
import commands, sys, os, os.path, datetime, smtplib, email.Utils, random
import os.path as path
class TestsException(Exception):
pass
def prepare():
print "prepare"
if not path.isdir(CVS_DIR):
os.makedirs(CVS_DIR)
print "Enter your cvs password when prompted ..."
ret = commands.getoutput("cd %s; cvs -d %s login" % (CVS_DIR, CVSROOT))
ret = commands.getoutput("cd %s; cvs -d %s co cartoweb3" % (CVS_DIR, CVSROOT))
print ret
def is_uptodate():
out = commands.getoutput('cd %s/cartoweb3;cvs update -dP' % CVS_DIR)
for l in out.split("\n"):
#if l.startswith("U") or l.startswith("P"):
if not l.startswith("cvs update"):
print "FAILED line:", l
return False
#print "LINE", l
#print "Not updated"
return True
def run(cmd):
exitvalue = os.system(cmd)
if exitvalue != 0:
print >>log, "Failed to execute %s, exitting" % cmd
raise TestsException('Failed to execute command: %s' % cmd)
def fetch_cartoweb():
cmds = """[ -d cartoweb3 ] && rm -rf cartoweb3 || :
cp -r cvs/cartoweb3 .
(cd cartoweb3; %s cw3setup.php --install --base-url %s --debug) || true
(cd cartoweb3; %s cw3setup.php --fetch-demo) || true""" % (PHP_PATH, BASE_URL, PHP_PATH)
for cmd in cmds.split("\n"):
run(cmd)
def rd(s, sep="."):
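    """Insert sep at a random position in s (used below to vary the mail subject)."""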
i = random.randint(0, len(s))
return s[0:i] + sep + s[i:]
def send_mail(kind, output):
print >>log, "Test failure, sending mail to %s" % EMAIL
server = smtplib.SMTP(SMTP_SERVER)
FROM_ADDRESS="[email protected]"
subject = rd("auto_test_report", " ")
body = ("This is an error report from the automatic cartoweb testing. \n" + \
" The error type is: '%s' \n\n" + \
" The error message is: \n\n%s") % (kind, output)
msg = ("Date: %s\r\nFrom: %s\r\nTo: %s\r\nSubject: %s\r\n\r\n%s"
% (email.Utils.formatdate(), FROM_ADDRESS, EMAIL, subject, body))
server.sendmail(FROM_ADDRESS, EMAIL, msg)
def run_tests():
print >>log, "Running tests"
(status, output) = commands.getstatusoutput("cd cartoweb3/tests/; %s phpunit.php AllTests" % PHP_PATH)
    # truncate to 25 KiB maximum
MAX_SIZE = 25 * 1024
output = output[:MAX_SIZE]
if len(output) == MAX_SIZE:
output += " <TRUNCATED> "
print >>log, "Test output", output
print >>log, "Test status", status
# for debugging:
#status = 1
if status != 0 or "Failure" in output:
send_mail('Unit test failure', output)
def main():
print >>log, "\n" + "=" * 80
print >>log, "Script launched at ", datetime.datetime.now().__str__()
log.flush()
if "-prepare" in sys.argv:
prepare()
sys.exit()
if os.path.exists(DISABLING_FILE):
print >>log, "Disabling file (%s) is there, skipping tests" % DISABLING_FILE
sys.exit()
SKIP_UPDATE_CHECK=False
if "-skip-update-check" in sys.argv or SKIP_UPDATE_CHECK:
uptodate = False
else:
uptodate = is_uptodate()
print "Uptodate: ", uptodate
if uptodate:
print >>log, "CVS up to date, skipping tests"
sys.exit()
fetch_cartoweb()
run_tests()
print >>log, "End of tests\n" + "=" * 80
if __name__ == '__main__':
os.chdir(path.abspath(path.dirname(__file__)))
if "-debug" in sys.argv:
log = sys.stderr
else:
log = open(LOG_FILE, 'a')
try:
main()
except TestsException, e:
print "TestsException: ", e
send_mail('Auto test setup failure', e)
| camptocamp/cartoweb3 | scripts/run_auto_test.py | Python | gpl-2.0 | 4,194 |
#!/usr/bin/env python3
"""
# TOP2049 Open Source programming suite
#
# Commandline utility
#
# Copyright (c) 2014 Pavel Stemberk <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
import re
import sys
import os
def substitute(input, oldSocket, newSocket):
input = re.sub('(^\s*packages).*', lambda m:'{} = (("DIP10", ""), ),'.format(m.group(1)), input)
input = re.sub('(^\s*chipPackage).*', lambda m:'{} = "DIP10",'.format(m.group(1)), input)
input = re.sub('(^\s*chipPinVCC).*', lambda m:'{} = 9,'.format(m.group(1)), input)
input = re.sub('(^\s*chipPinsVPP).*', lambda m:'{} = 10,'.format(m.group(1)), input)
input = re.sub('(^\s*chipPinGND).*', lambda m:'{} = 8,'.format(m.group(1)), input)
input = re.sub('(^\s*runtimeID).*', lambda m:'{} = (0xDF05, 0x01),'.format(m.group(1)), input)
input = re.sub('(^\s*description).+"(.*)".*', lambda m:'{} = "{} - ICD",'.format(m.group(1), m.group(2)), input)
input = re.sub('(^\s*bitfile).*', lambda m:'{} = "microchip16sip6",'.format(m.group(1)), input)
input = re.sub("{}".format(oldSocket), "{}".format(newSocket), input)
input = re.sub("{}".format(oldSocket.upper()), "{}".format(newSocket.upper()), input)
return input
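# Illustrative effect of substitute() on one line (hypothetical values):
#   'packages = (("DIP14", ""), ),'  ->  'packages = (("DIP10", ""), ),'
# while oldSocket tokens elsewhere, e.g. 'dip14'/'DIP14', become 'sip6'/'SIP6'.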
def makeSip():
inputFileName = '__init__.py'
fin = open(inputFileName)
dMCU = {}
for line in fin:
matchObj = re.match('.*(pic[0-9]+l?f\w+)(sip[0-9a]+).*', line)
if matchObj:
continue
matchObj = re.match('.*(pic[0-9]+l?f\w+)(dip[0-9a]+).*', line)
if not matchObj:
print("{} did not match".format(line))
continue
# print('matched {} - {}'.format(matchObj.group(1), matchObj.group(2)))
dMCU.setdefault(matchObj.group(1), matchObj.group(2))
fin.close()
for item in dMCU.items():
fin = open("{}{}.py".format(item[0], item[1]))
fout = open("{}sip6.py".format(item[0]), 'w')
fout.write("#\n")
fout.write("# THIS FILE WAS AUTOGENERATED BY makeSip6.py\n")
fout.write("# Do not edit this file manually. All changes will be lost.\n")
fout.write("#\n\n")
for line in fin:
fout.write(substitute(line, "{}".format(item[1]), "sip6"))
fout.close()
fin.close()
def main(argv):
makeSip()
if __name__ == "__main__":
exit(main(sys.argv))
| mbuesch/toprammer | libtoprammer/chips/microchip16/makeSip6.py | Python | gpl-2.0 | 2,873 |
# -*- coding: utf-8 -*-
__author__ = 'shreejoy'
import unittest
from article_text_mining.rep_html_table_struct import rep_html_table_struct
class RepTableStructureTest(unittest.TestCase):
@property
def load_html_table_simple(self):
# creates data table object 16055 with some dummy data
with open('tests/test_html_data_tables/example_html_table_simple.html', mode='rb') as f:
simple_table_text = f.read()
return simple_table_text
@property
def load_html_table_complex(self):
with open('tests/test_html_data_tables/example_html_table_complex.html', mode='rb') as f:
complex_table_text = f.read()
return complex_table_text
def test_rep_html_table_struct_simple(self):
expected_table_output = [['th-1', 'th-2', 'th-3', 'th-4', 'th-5', 'th-6'],
['td-1', 'td-1', 'td-1', 'td-1', 'td-1', 'td-1'],
['td-2', 'td-3', 'td-4', 'td-5', 'td-6', 'td-7'],
['td-8', 'td-9', 'td-10', 'td-11', 'td-12', 'td-13'],
['td-14', 'td-15', 'td-16', 'td-17', 'td-18', 'td-19'],
['td-20', 'td-21', 'td-22', 'td-23', 'td-24', 'td-25'],
['td-26', 'td-27', 'td-28', 'td-29', 'td-30', 'td-31'],
['td-32', 'td-33', 'td-34', 'td-35', 'td-36', 'td-37'],
['td-38', 'td-39', 'td-40', 'td-41', 'td-42', 'td-43'],
['td-44', 'td-45', 'td-46', 'td-47', 'td-48', 'td-49'],
['td-50', 'td-51', 'td-52', 'td-53', 'td-54', 'td-55'],
['td-56', 'td-57', 'td-58', 'td-59', 'td-60', 'td-61'],
['td-62', 'td-63', 'td-64', 'td-65', 'td-66', 'td-67'],
['td-68', 'td-69', 'td-70', 'td-71', 'td-72', 'td-73'],
['td-74', 'td-75', 'td-76', 'td-77', 'td-78', 'td-79'],
['td-80', 'td-81', 'td-82', 'td-83', 'td-84', 'td-85'],
['td-86', 'td-87', 'td-88', 'td-89', 'td-90', 'td-91'],
['td-92', 'td-93', 'td-94', 'td-95', 'td-96', 'td-97'],
['td-98', 'td-99', 'td-100', 'td-101', 'td-102', 'td-103'],
['td-104', 'td-105', 'td-106', 'td-107', 'td-108', 'td-109']]
html_table_text = self.load_html_table_simple
a, b, html_id_table = rep_html_table_struct(html_table_text)
self.assertEqual(html_id_table, expected_table_output)
def test_rep_html_table_struct_complex(self):
expected_table_output = [['td-1', 0, 0, 0, 0, 0, 0, 0, 0, 0],
['td-2', 'td-2', 'td-2', 'td-2', 'td-2', 'td-2', 'td-2', 'td-2', 'td-2', 'td-2'],
['td-3', 'td-4', 'td-4', 'td-5', 'td-5', 'td-6', 'td-6', 0, 0, 0],
['td-3', 'td-7', 'td-8', 'td-9', 'td-10', 'td-11', 'td-12', 0, 0, 0],
['td-13', 'td-13', 'td-13', 'td-13', 'td-13', 'td-13', 'td-13', 'td-13', 'td-13', 'td-13'],
['td-14', 'td-15', 'td-16', 'td-17', 'td-18', 'td-19', 'td-20', 0, 0, 0],
['td-21', 'td-22', 'td-23', 'td-24', 'td-25', 'td-26', 'td-27', 0, 0, 0],
['td-28', 'td-29', 'td-30', 'td-31', 'td-32', 'td-33', 'td-34', 0, 0, 0],
['td-35', 'td-36', 'td-37', 'td-38', 'td-39', 'td-40', 'td-41', 0, 0, 0],
['td-42', 'td-43', 'td-44', 'td-45', 'td-46', 'td-47', 'td-48', 0, 0, 0],
['td-49', 'td-50', 'td-51', 'td-52', 'td-53', 'td-54', 'td-55', 0, 0, 0],
['td-56', 'td-57', 'td-58', 'td-59', 'td-60', 'td-61', 'td-62', 0, 0, 0],
['td-63', 'td-64', 'td-65', 'td-66', 'td-67', 'td-68', 'td-69', 0, 0, 0],
['td-70', 'td-71', 'td-72', 'td-73', 'td-74', 'td-75', 'td-76', 0, 0, 0],
['td-77', 'td-78', 'td-79', 'td-80', 'td-81', 'td-82', 'td-83', 0, 0, 0]]
html_table_text = self.load_html_table_complex
a, b, html_id_table = rep_html_table_struct(html_table_text)
self.assertEqual(html_id_table, expected_table_output)
if __name__ == '__main__':
    unittest.main()
| neuroelectro/neuroelectro_org | tests/test_rep_html_table_struct.py | Python | gpl-2.0 | 4,596 |
"""
AUTHOR: Dr. Andrew David Burbanks, 2005.
This software is Copyright (C) 2004-2008 Bristol University
and is released under the GNU General Public License version 2.
MODULE: SystemBath
PURPOSE:
Used to generate System Bath Hamiltonian, given number of bath modes.
NOTES:
The system defined by the Hamiltonian is (2n+2)-dimensional, and is
ordered (s, p_s, x, p_x, y, p_y, ...).
There is no need for Taylor expansion, as we have the Hamiltonian in
explicit polynomial form.
"""
from math import *
from random import *
from Polynomial import *
from LieAlgebra import LieAlgebra
class SystemBath:
"""
The original (_not_ mass-weighted) system bath.
The system-bath model represents a 'system' part: a symmetric
quartic double-well potential, coupled to a number of 'bath
modes': harmonic oscillators. The coupling is achieved via a
bilinear coupling between the configuration space coordinate of
the system and the conjugate momenta of each of the bath modes.
The resulting Hamiltonian is a polynomial of degree 4 in the phase
space coordinates.
With this version, the client must specify all of the following:-
@param n_bath_modes: (non-negative int).
@param system_mass: (positive real).
@param imag_harmonic_frequency_at_barrier: (real; imag part of pure imag).
@param reciprocal_barrier_height_above_well_bottom: (positive real).
@param bath_masses: (seq of n_bath_modes positive reals).
@param bath_frequencies: (seq of n_bath_modes reals).
@param bath_coupling_constants: (seq of n_bath_modes reals).
"""
def __init__(self,
n_bath_modes,
system_mass,
imag_harmonic_frequency_at_barrier,
reciprocal_barrier_height_above_well_bottom,
bath_masses,
bath_frequencies,
bath_coupling_constants):
assert n_bath_modes>=0
assert system_mass >= 0.0
assert abs(imag_harmonic_frequency_at_barrier) > 0.0
assert reciprocal_barrier_height_above_well_bottom >= 0.0
assert len(bath_masses) == n_bath_modes
assert len(bath_frequencies) == n_bath_modes
assert len(bath_coupling_constants) == n_bath_modes
for f, g in zip(bath_frequencies[:-1], bath_frequencies[1:]):
assert f < g
self._m_s = system_mass #system mass
self._omega_b = imag_harmonic_frequency_at_barrier
self._v_0_sh = reciprocal_barrier_height_above_well_bottom
self._n = n_bath_modes
self._c = bath_coupling_constants #to the system s coordinate.
self._w = bath_frequencies
self._m = bath_masses
self._lie = LieAlgebra(n_bath_modes+1)
#$a = (-1/2)m_s\omega_b^2.$
self._a = -0.5*self._m_s*(self._omega_b**2)
#$b = \frac{m_s^2\omega_b^4}{16V_0}.$
self._b = ((self._m_s**2) * (self._omega_b**4))/(16.0*self._v_0_sh)
def lie_algebra(self):
"""
Return the Lie algebra on which the polynomials will be
constructed. For N bath modes, this has (N+1)-dof.
"""
return self._lie
def hamiltonian_real(self):
"""
Calculate the real Hamiltonian for the system-bath model.
"""
#Establish some convenient notation:
n = self._n
a = self._a
b = self._b
m_s = self._m_s
c = self._c
w = self._w
m = self._m
q_s = self._lie.q(0)
p_s = self._lie.p(0)
#Compute some constants:
coeff_q_s = a
for i in xrange(0, len(c)):
coeff_q_s += (c[i]**2.0)/(2.0 * m[i] * (w[i]**2.0))
coeff_p_s = 1.0/(2.0 * m_s)
coeff_q_bath = []
coeff_p_bath = []
for i in xrange(0, len(c)):
coeff_q_bath.append(0.5 * m[i] * (w[i]**2.0))
coeff_p_bath.append(1.0/(2.0 * m[i]))
#Sanity checks:
assert n >= 0, 'Need zero or more bath modes.'
assert len(c) == n, 'Need a coupling constant for each bath mode.'
assert len(coeff_q_bath) == n, 'Need constant for each bath config.'
assert len(coeff_p_bath) == n, 'Need constant for each bath momentum.'
#System part:
h_system = coeff_p_s * (p_s**2)
h_system += coeff_q_s * (q_s**2)
h_system += b * (q_s**4)
#Bath part:
h_bath = self._lie.zero()
for i in xrange(len(c)):
bath_dof = i+1
h_bath += coeff_q_bath[i] * (self._lie.q(bath_dof)**2)
h_bath += coeff_p_bath[i] * (self._lie.p(bath_dof)**2)
#Coupling part:
h_coupling = self._lie.zero()
for i, c_i in enumerate(c):
bath_dof = i+1
h_coupling += -c_i * (self._lie.q(bath_dof) * q_s)
#Complete Hamiltonian:
h = h_system + h_bath + h_coupling
#Sanity checks:
assert h.degree() == 4
assert h.n_vars() == 2*n+2
assert len(h) == (3) + (2*n) + (n) #system+bath+coupling
return h
class MassWeightedSystemBath:
"""
The system-bath model represents a 'system' part (a symmetric
quartic double-well potential) coupled to a number of 'bath
modes' (harmonic oscillators). The coupling is achieved via a
bilinear coupling between the configuration space coordinate of
the system and the configuration coordinate of each of the bath modes.
The resulting Hamiltonian is a polynomial of degree 4 in the phase
space coordinates.
"""
def __init__(self,
n_bath_modes,
imag_harmonic_frequency_at_barrier,
reciprocal_barrier_height_above_well_bottom,
damping_strength,
bath_cutoff_frequency,
bath_masses,
bath_frequencies,
bath_compound_coupling_constants):
"""
Construct a mass-weighted system bath given the values of the
parameters and the compound coupling constants.
@param n_bath_modes: (non-negative int).
@param imag_harmonic_frequency_at_barrier: (real; im part of pure im).
@param reciprocal_barrier_height_above_well_bottom: (positive real).
@param damping_strength: (real).
@param bath_cutoff_frequency: (real, <<bath_frequencies[-1]).
@param bath_masses: (seq of n_bath_modes positive reals).
@param bath_frequencies: (increasing seq of n_bath_modes reals).
@param bath_compound_coupling_constants: (seq of n_bath_modes reals).
"""
#check inputs
assert n_bath_modes>=0
assert abs(imag_harmonic_frequency_at_barrier) > 0.0
assert reciprocal_barrier_height_above_well_bottom >= 0.0
assert len(bath_masses) == n_bath_modes
assert len(bath_frequencies) == n_bath_modes
assert len(bath_compound_coupling_constants) == n_bath_modes
#ensure that the bath frequencies are increasing
for f, g in zip(bath_frequencies[:-1], bath_frequencies[1:]):
assert f < g
#store member variables
self._n = n_bath_modes
self._omega_b = imag_harmonic_frequency_at_barrier
self._v_0_sh = reciprocal_barrier_height_above_well_bottom
self._eta = damping_strength
self._omega_c = bath_cutoff_frequency
self._c_star = bath_compound_coupling_constants #to system coord
self._w = bath_frequencies
self._lie = LieAlgebra(n_bath_modes+1)
def compute_compound_constants(n_bath_modes,
damping_strength,
bath_cutoff_frequency,
bath_frequencies):
"""
Compute the compound coupling constants.
@param n_bath_modes: (non-negative int).
@param damping_strength: (real).
@param bath_cutoff_frequency: (real, <<bath_frequencies[-1]).
@param bath_frequencies: (seq of n_bath_modes reals increasing).
@return: bath_compound_coupling_constants (seq of n_bath_modes reals).
"""
#check inputs
assert n_bath_modes>=0
assert len(bath_frequencies) == n_bath_modes
for f, g in zip(bath_frequencies[:-1], bath_frequencies[1:]):
assert f < g
assert bath_frequencies[-1] > bath_cutoff_frequency
#accumulate compound frequencies
c_star = []
omega_c = bath_cutoff_frequency
eta = damping_strength
for jm1, omega_j in enumerate(bath_frequencies):
c = (-2.0/(pi*(jm1+1.0)))*eta*omega_c
d = ((omega_j+omega_c)*exp(-omega_j/omega_c) - omega_c)
c_star.append(c*d)
return c_star
compute_compound_constants = staticmethod(compute_compound_constants)
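#Usage sketch (values are illustrative only; the cutoff must lie strictly
#below the largest bath frequency to satisfy the assertions above):
#  c_star = MassWeightedSystemBath.compute_compound_constants(
#      2, damping_strength=1.0, bath_cutoff_frequency=50.0,
#      bath_frequencies=[100.0, 200.0])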
def bath_spectral_density_function(self, omega):
"""
The bath is defined in terms of a continuous spectral density
function, which has the Ohmic form with an exponential cutoff.
For infinite bath cutoff frequency, $\omega_c$, the bath is
strictly Ohmic, i.e., the friction kernel becomes a delta
function in the time domain, and the classical dynamics of the
system coordinate are described by the ordinary Langevin
equation. In that case, $\eta$ (the damping strength) is the
classically measurable friction coefficient.
However, for finite values of the bath cutoff frequency, the
friction kernel is nonlocal, which introduces memory effects
into the Generalized Langevin Equation (GLE).
"""
return self._eta * omega * exp(-omega/self._omega_c)
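#Worked example (illustrative values): with eta=1.0 and omega_c=100.0,
#J(50.0) = 1.0*50.0*exp(-0.5) ~= 30.33.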
def hamiltonian_real(self):
"""
Calculate the real Hamiltonian for the system-bath model.
"""
#establish some convenient notation:
n = self._n
w = self._w
c_star = self._c_star
#sanity checks:
assert n >= 0, 'Need zero or more bath modes.'
assert len(c_star) == n, 'Need a coupling constant for each bath mode.'
#system coefficients:
a = -0.5*(self._omega_b**2)
b = (self._omega_b**4)/(16.0*self._v_0_sh)
coeff_q_s = a
for i in xrange(0, len(c_star)):
coeff_q_s += c_star[i]/(2.0 * (w[i]))
coeff_p_s = 1.0/2.0
#system part:
q_s = self._lie.q(0)
p_s = self._lie.p(0)
h_system = coeff_p_s * (p_s**2)
h_system += coeff_q_s * (q_s**2)
h_system += b * (q_s**4)
#bath coefficients:
coeff_q_bath = []
coeff_p_bath = []
for i in xrange(0, len(c_star)):
coeff_q_bath.append(0.5 * (w[i]**2.0))
coeff_p_bath.append(1.0/2.0)
#sanity checks:
assert len(coeff_q_bath) == n, 'Need constant for each bath config.'
assert len(coeff_p_bath) == n, 'Need constant for each bath momentum.'
#bath part:
h_bath = self._lie.zero()
for i in xrange(len(c_star)):
bath_dof = i+1
h_bath += coeff_q_bath[i] * (self._lie.q(bath_dof)**2)
h_bath += coeff_p_bath[i] * (self._lie.p(bath_dof)**2)
#coupling part:
h_coupling = self._lie.zero()
for i, c_i in enumerate(c_star):
bath_dof = i+1
h_coupling += -sqrt(c_i*w[i]) * (self._lie.q(bath_dof)*q_s)
#complete Hamiltonian:
h = h_system + h_bath + h_coupling
#sanity checks:
assert h.degree() == 4
assert h.n_vars() == 2*n+2
assert len(h) == (3) + (2*n) + (n) #system+bath+coupling
return h
def new_random_system_bath(n_bath_modes,
system_mass,
imag_harmonic_frequency_at_barrier,
reciprocal_barrier_height_above_well_bottom,
random_seed):
#check inputs
assert n_bath_modes >= 0
assert system_mass >= 0.0
#initialize random number generator
seed(random_seed)
#generate parameters
bath_masses = []
bath_omegas = []
bath_coupling_constants = []
for i in xrange(0, n_bath_modes):
bath_coupling_constants.append(uniform(0.001, 0.5))
bath_masses.append(uniform(0.5, 3.6))
bath_omegas.append(gauss(0.0, 2.0))
#sort frequencies into increasing order
bath_omegas.sort()
#instantiate the system bath
sb = SystemBath(n_bath_modes,
system_mass,
imag_harmonic_frequency_at_barrier,
reciprocal_barrier_height_above_well_bottom,
bath_masses,
bath_omegas,
bath_coupling_constants)
return sb
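#Minimal usage sketch (parameter values are illustrative, not physical):
#  sb = new_random_system_bath(n_bath_modes=4,
#                              system_mass=1.0,
#                              imag_harmonic_frequency_at_barrier=1.0,
#                              reciprocal_barrier_height_above_well_bottom=0.25,
#                              random_seed=54321)
#  h = sb.hamiltonian_real() #degree-4 polynomial in 2*4+2 = 10 variables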
| Peter-Collins/NormalForm | src/py/SystemBath.py | Python | gpl-2.0 | 12,748 |
import csv
import logging
from fa.miner import yahoo
from fa.piping import csv_string_to_records
from fa.util import partition
from fa.database.query import get_outdated_symbols, update_fundamentals
import initialize
from settings import *
""" Download data from internet to database """
initialize.init()
logger = logging.getLogger(__name__)
logger.info("Will update historical prices of all symbols not up to date on {0}.".format(end_date))
all_symbols = [s.symbol for s in get_outdated_symbols("price", end_date)]
# do the download in chunks of size 8 to prevent overloading servers
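# e.g. partition(["A", "B", "C", "D", "E"], 2) -> [["A", "B"], ["C", "D"], ["E"]]
# (assuming fa.util.partition yields successive fixed-size chunks, the last possibly shorter)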
for symbols in partition(all_symbols, 8):
data = yahoo.get_historical_data(symbols, start_date, end_date)
for symbol, csv_string in data.items():
if csv_string:
try:
records = csv_string_to_records(symbol, csv_string, strict=True)
update_fundamentals("price", symbol, records, end_date, delete_old=True)
except csv.Error as e:
logger.exception(e)
logger.error("csv of {0} is malformed.".format(symbol))
else:
logger.warning("Could not find updated historical prices of {0}. Skip.".format(symbol))
logger.info("Finished updating historical prices.")
| kakarukeys/algo-fa | examples/update_price_data.py | Python | gpl-2.0 | 1,268 |
from infoshopkeeper_config import configuration
cfg = configuration()
dbtype = cfg.get("dbtype")
dbname = cfg.get("dbname")
dbhost = cfg.get("dbhost")
dbuser = cfg.get("dbuser")
dbpass = cfg.get("dbpass")
from sqlobject import *
if dbtype in ('mysql', 'postgres'):
# use == for string comparison; 'is' only works by CPython interning accident
if dbtype == 'mysql':
import MySQLdb as dbmodule
elif dbtype == 'postgres':
import psycopg as dbmodule
#deprecate
def connect():
return dbmodule.connect (host=dbhost,db=dbname,user=dbuser,passwd=dbpass)
def conn():
return '%s://%s:%s@%s/%s?charset=utf8&sqlobject_encoding=utf8' % (dbtype,dbuser,dbpass,dbhost,dbname)
elif dbtype == 'sqlite':
import os, time, re
from pysqlite2 import dbapi2 as sqlite
db_file_ext = '.' + dbtype
if not dbname.endswith(db_file_ext):
dbname+=db_file_ext
dbpath = os.path.join(dbhost, dbname)
def now():
return time.strftime('%Y-%m-%d %H:%M:%S')
def regexp(regex, val):
print regex, val, bool(re.search(regex, val, re.I))
return bool(re.search(regex, val, re.I))
class SQLiteCustomConnection(sqlite.Connection):
def __init__(self, *args, **kwargs):
print '@@@ SQLiteCustomConnection: registering functions'
sqlite.Connection.__init__(self, *args, **kwargs)
SQLiteCustomConnection.registerFunctions(self)
def registerFunctions(self):
self.create_function("NOW", 0, now)
self.create_function("REGEXP", 2, regexp)
self.create_function("regexp", 2, regexp)
#~ self.execute("SELECT * FROM title WHERE title.booktitle REGEXP 'mar'")
registerFunctions=staticmethod(registerFunctions)
#deprecate
_conn=None
def connect():
import sqlobject
#~ return sqlite.connect (database=dbpath)
global _conn
if not _conn:
#~ _conn = sqlite.connect (database=dbpath)
from objects.title import Title
# can't use factory in URI because sqliteconnection doesn't share globals with us
Title._connection._connOptions['factory'] = SQLiteCustomConnection
# get the connection instance that sqlobject is going to use, so we only have one
_conn = Title._connection.getConnection()
# since a connection is made before we can set the factory, we have to register
# the functions here also
SQLiteCustomConnection.registerFunctions(_conn)
return _conn
def conn():
return '%s://%s?debug=t' % (dbtype,dbpath)
#~ return '%s://%s?debug=t&factory=SQLiteCustomConnection' % (dbtype,dbpath)
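# Usage sketch: the URI from conn() is typically handed to SQLObject, e.g.
#~ sqlobject.sqlhub.processConnection = sqlobject.connectionForURI(conn())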
| johm/infoshopkeeper | components/db.py | Python | gpl-2.0 | 2,693 |
#!/usr/bin/env python
background_image_filename = 'sushiplate.jpg'
import pygame
from pygame.locals import *
from sys import exit
SCREEN_SIZE = (640, 480)
pygame.init()
screen = pygame.display.set_mode(SCREEN_SIZE, RESIZABLE, 32) # RESIZABLE is needed for VIDEORESIZE events to fire
background = pygame.image.load(background_image_filename).convert()
while True:
event = pygame.event.wait()
if event.type == QUIT:
exit()
if event.type == VIDEORESIZE:
SCREEN_SIZE = event.size
screen = pygame.display.set_mode(SCREEN_SIZE, RESIZABLE, 32)
pygame.display.set_caption('Window resized to ' + str(event.size))
# screen.fill((0, 0, 0))
screen.blit(background, (0, 0))
# screen_width, screen_height = SCREEN_SIZE
# for y in range(0, screen_height, background.get_height()):
# for x in range(0, screen_width, background.get_width()):
# screen.blit(background, (x, y))
pygame.display.update() | opensvn/python | pygame/resize.py | Python | gpl-2.0 | 919 |
#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 [email protected] |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# tails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
import utils
import config, time, os, re, pprint
import hashlib
import weblib, traceback, forms, valuespec, inventory, visuals, metrics
import sites
import bi
import inspect
import livestatus
from log import logger
from gui_exceptions import MKGeneralException, MKUserError, MKInternalError
import cmk.paths
# Datastructures and functions needed before plugins can be loaded
loaded_with_language = False
display_options = None
# Load all view plugins
def load_plugins(force):
global loaded_with_language
if loaded_with_language == current_language and not force:
# always reload the hosttag painters, because new hosttags might have been
# added during runtime
load_host_tag_painters()
clear_alarm_sound_states()
return
global multisite_datasources ; multisite_datasources = {}
global multisite_layouts ; multisite_layouts = {}
global multisite_painters ; multisite_painters = {}
global multisite_sorters ; multisite_sorters = {}
global multisite_builtin_views ; multisite_builtin_views = {}
global multisite_painter_options ; multisite_painter_options = {}
global multisite_commands ; multisite_commands = []
global multisite_command_groups ; multisite_command_groups = {}
global view_hooks ; view_hooks = {}
global inventory_displayhints ; inventory_displayhints = {}
config.declare_permission_section("action", _("Commands on host and services"), do_sort = True)
utils.load_web_plugins("views", globals())
load_host_tag_painters()
clear_alarm_sound_states()
# This must be set after plugin loading to make broken plugins raise
# exceptions all the time and not only the first time (when the plugins
# are loaded).
loaded_with_language = current_language
# Declare permissions for builtin views
config.declare_permission_section("view", _("Multisite Views"), do_sort = True)
for name, view in multisite_builtin_views.items():
config.declare_permission("view.%s" % name,
format_view_title(view),
"%s - %s" % (name, _u(view["description"])),
config.builtin_role_ids)
# Make sure that custom views also have permissions
config.declare_dynamic_permissions(lambda: visuals.declare_custom_permissions('views'))
declare_inventory_columns()
# Load all views - users or builtins
def load_views():
global multisite_views, available_views
# Skip views which do not belong to known datasources
multisite_views = visuals.load('views', multisite_builtin_views,
skip_func = lambda v: v['datasource'] not in multisite_datasources)
available_views = visuals.available('views', multisite_views)
transform_old_views()
def permitted_views():
try:
return available_views
except:
# In some cases, for example when handling AJAX calls, the views might
# not have been loaded yet
load_views()
return available_views
def all_views():
return multisite_views
# Convert views that are saved in the pre 1.2.6-style
# FIXME: Can be removed one day. Mark as incompatible change or similar.
def transform_old_views():
for view in multisite_views.values():
ds_name = view['datasource']
datasource = multisite_datasources[ds_name]
if "context" not in view: # legacy views did not have this explicitly
view.setdefault("user_sortable", True)
if 'context_type' in view:
# This code transforms views from user_views.mk which have been migrated with
# daily snapshots from 2014-08 till beginning 2014-10.
visuals.transform_old_visual(view)
elif 'single_infos' not in view:
# This tries to map the datasource and additional settings of the
# views to get the correct view context
#
# This code transforms views from views.mk (legacy format) to the current format
try:
hide_filters = view.get('hide_filters')
if 'service' in hide_filters and 'host' in hide_filters:
view['single_infos'] = ['service', 'host']
elif 'service' in hide_filters and 'host' not in hide_filters:
view['single_infos'] = ['service']
elif 'host' in hide_filters:
view['single_infos'] = ['host']
elif 'hostgroup' in hide_filters:
view['single_infos'] = ['hostgroup']
elif 'servicegroup' in hide_filters:
view['single_infos'] = ['servicegroup']
elif 'aggr_service' in hide_filters:
view['single_infos'] = ['service']
elif 'aggr_name' in hide_filters:
view['single_infos'] = ['aggr']
elif 'aggr_group' in hide_filters:
view['single_infos'] = ['aggr_group']
elif 'log_contact_name' in hide_filters:
view['single_infos'] = ['contact']
elif 'event_host' in hide_filters:
view['single_infos'] = ['host']
elif hide_filters == ['event_id', 'history_line']:
view['single_infos'] = ['history']
elif 'event_id' in hide_filters:
view['single_infos'] = ['event']
elif 'aggr_hosts' in hide_filters:
view['single_infos'] = ['host']
else:
# For all other context types assume the view is showing multiple objects
# and the datasource can simply be gathered from the datasource
view['single_infos'] = []
except: # Exceptions can happen for views saved with certain GIT versions
if config.debug:
raise
# Convert from show_filters, hide_filters, hard_filters and hard_filtervars
# to context construct
if 'context' not in view:
view['show_filters'] = view['hide_filters'] + view['hard_filters'] + view['show_filters']
single_keys = visuals.get_single_info_keys(view)
# First get vars for the classic filters
context = {}
filtervars = dict(view['hard_filtervars'])
all_vars = {}
for filter_name in view['show_filters']:
if filter_name in single_keys:
continue # skip conflictings vars / filters
context.setdefault(filter_name, {})
try:
f = visuals.get_filter(filter_name)
except:
# The exact match filters have been removed. They were used only as
# link filters anyway - at least by the builtin views.
continue
for var in f.htmlvars:
# Check whether or not the filter is supported by the datasource,
# then either skip or use the filter vars
if var in filtervars and f.info in datasource['infos']:
value = filtervars[var]
all_vars[var] = value
context[filter_name][var] = value
# Some filter variable names have changed since the visuals rewrite. This must be handled
# here, since we need to transform views which were created with the old filter var names.
# Changes which have been made so far:
changed_filter_vars = {
'serviceregex': { # Name of the filter
# old var name: new var name
'service': 'service_regex',
},
'hostregex': {
'host': 'host_regex',
},
'hostgroupnameregex': {
'hostgroup_name': 'hostgroup_regex',
},
'servicegroupnameregex': {
'servicegroup_name': 'servicegroup_regex',
},
'opthostgroup': {
'opthostgroup': 'opthost_group',
'neg_opthostgroup': 'neg_opthost_group',
},
'optservicegroup': {
'optservicegroup': 'optservice_group',
'neg_optservicegroup': 'neg_optservice_group',
},
'hostgroup': {
'hostgroup': 'host_group',
'neg_hostgroup': 'neg_host_group',
},
'servicegroup': {
'servicegroup': 'service_group',
'neg_servicegroup': 'neg_service_group',
},
'host_contactgroup': {
'host_contactgroup': 'host_contact_group',
'neg_host_contactgroup': 'neg_host_contact_group',
},
'service_contactgroup': {
'service_contactgroup': 'service_contact_group',
'neg_service_contactgroup': 'neg_service_contact_group',
},
}
if filter_name in changed_filter_vars and f.info in datasource['infos']:
for old_var, new_var in changed_filter_vars[filter_name].items():
if old_var in filtervars:
value = filtervars[old_var]
all_vars[new_var] = value
context[filter_name][new_var] = value
# Now, when there are single object infos specified, add these keys to the
# context
for single_key in single_keys:
if single_key in all_vars:
context[single_key] = all_vars[single_key]
view['context'] = context
# Cleanup unused attributes
for k in [ 'hide_filters', 'hard_filters', 'show_filters', 'hard_filtervars' ]:
try:
del view[k]
except KeyError:
pass
def save_views(us):
visuals.save('views', multisite_views)
# For each view a function can be registered that has to return either True
# or False to show a view as context link
view_is_enabled = {}
def is_enabled_for(linking_view, view, context_vars):
if view["name"] not in view_is_enabled:
return True # Not registered are always visible!
return view_is_enabled[view["name"]](linking_view, view, context_vars)
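# Example registration (a sketch; "my_view" and the permission used are
# hypothetical, real checks are registered by the view plugins):
# view_is_enabled["my_view"] = \
#     lambda linking_view, view, context_vars: config.user.may("general.see_all")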
#.
# .--PainterOptions------------------------------------------------------.
# | ____ _ _ ___ _ _ |
# | | _ \ __ _(_)_ __ | |_ ___ _ __ / _ \ _ __ | |_(_) ___ _ __ ___ |
# | | |_) / _` | | '_ \| __/ _ \ '__| | | | '_ \| __| |/ _ \| '_ \/ __| |
# | | __/ (_| | | | | | || __/ | | |_| | |_) | |_| | (_) | | | \__ \ |
# | |_| \__,_|_|_| |_|\__\___|_| \___/| .__/ \__|_|\___/|_| |_|___/ |
# | |_| |
# +----------------------------------------------------------------------+
# | Painter options are settings that can be changed per user per view. |
# | These options are controlled through the painter options form which  |
# | is accessible through the small monitor icon on the top left of the |
# | views. |
# '----------------------------------------------------------------------'
# TODO: Better name it PainterOptions or DisplayOptions? There are options which only affect
# painters, but some which affect generic behaviour of the views, so DisplayOptions might
# be better.
class PainterOptions(object):
def __init__(self, view_name=None):
self._view_name = view_name
# The names of the painter options used by the current view
self._used_option_names = None
# The effective options for this view
self._options = {}
def load(self):
self._load_from_config()
# Load the options to be used for this view
def _load_used_options(self, view):
if self._used_option_names != None:
return # only load once per request
options = set([])
for cell in get_group_cells(view) + get_cells(view):
options.update(cell.painter_options())
# Also layouts can register painter options
layout_name = view.get("layout")
if layout_name != None:
options.update(multisite_layouts[layout_name].get("options", []))
# TODO: Improve sorting. Add a sort index?
self._used_option_names = sorted(options)
def _load_from_config(self):
if self._is_anonymous_view():
return # never has options
if not self.painter_options_permitted():
return
# Options are stored per view. Get all options for all views
vo = config.user.load_file("viewoptions", {})
self._options = vo.get(self._view_name, {})
def save_to_config(self):
vo = config.user.load_file("viewoptions", {}, lock=True)
vo[self._view_name] = self._options
config.user.save_file("viewoptions", vo)
def update_from_url(self, view):
self._load_used_options(view)
if not self.painter_option_form_enabled():
return
if html.has_var("_reset_painter_options"):
self._clear_painter_options()
return
elif html.has_var("_update_painter_options"):
self._set_from_submitted_form()
def _set_from_submitted_form(self):
# TODO: Remove all keys that are in multisite_painter_options
# but not in self._used_option_names
modified = False
for option_name in self._used_option_names:
# Get new value for the option from the value spec
vs = self.get_valuespec_of(option_name)
value = vs.from_html_vars("po_%s" % option_name)
if not self._is_set(option_name) or self.get(option_name) != value:
modified = True
self.set(option_name, value)
if modified:
self.save_to_config()
def _clear_painter_options(self):
# TODO: This never removes options that no longer exist
modified = False
for name in multisite_painter_options.keys():
try:
del self._options[name]
modified = True
except KeyError:
pass
if modified:
self.save_to_config()
# Also remove the options from current html vars. Otherwise the
# painter option form will display the just removed options as
# defaults of the painter option form.
for varname in html.all_varnames_with_prefix("po_"):
html.del_var(varname)
def get_valuespec_of(self, name):
opt = multisite_painter_options[name]
if type(lambda: None) == type(opt["valuespec"]):
return opt["valuespec"]()
else:
return opt["valuespec"]
def _is_set(self, name):
return name in self._options
# Sets a painter option value (only for this request). Is not persisted!
def set(self, name, value):
self._options[name] = value
# Returns either the set value, the provided default value or if none
# provided, it returns the default value of the valuespec.
def get(self, name, dflt=None):
if dflt == None:
try:
dflt = self.get_valuespec_of(name).default_value()
except KeyError:
# Some view options (that are not declared as display options)
# like "refresh" don't have a valuespec. So they need to default
# to None.
# TODO: Find all occurences and simply declare them as "invisible"
# painter options.
pass
return self._options.get(name, dflt)
# Not falling back to a default value, simply returning None in case
# the option is not set.
def get_without_default(self, name):
return self._options.get(name)
def get_all(self):
return self._options
def _is_anonymous_view(self):
return self._view_name == None
def painter_options_permitted(self):
return config.user.may("general.painter_options")
def painter_option_form_enabled(self):
return self._used_option_names and self.painter_options_permitted()
def show_form(self, view):
self._load_used_options(view)
if not display_options.enabled(display_options.D) or not self.painter_option_form_enabled():
return
html.open_div(id_="painteroptions", class_=["view_form"], style="display: none;")
html.begin_form("painteroptions")
forms.header(_("Display Options"))
for name in self._used_option_names:
vs = self.get_valuespec_of(name)
forms.section(vs.title())
# TODO: Possible improvement for vars which default is specified
# by the view: Don't just default to the valuespecs default. Better
# use the view default value here to get the user the current view
# settings reflected.
vs.render_input("po_%s" % name, self.get(name))
forms.end()
html.button("_update_painter_options", _("Submit"), "submit")
html.button("_reset_painter_options", _("Reset"), "submit")
html.hidden_fields()
html.end_form()
html.close_div()
def prepare_painter_options(view_name=None):
global painter_options
painter_options = PainterOptions(view_name)
painter_options.load()
#.
# .--Cells---------------------------------------------------------------.
# | ____ _ _ |
# | / ___|___| | |___ |
# | | | / _ \ | / __| |
# | | |__| __/ | \__ \ |
# | \____\___|_|_|___/ |
# | |
# +----------------------------------------------------------------------+
# | View cell handling classes. Each cell instantiates a multisite |
# | painter to render a table cell. |
# '----------------------------------------------------------------------'
# A cell is an instance of a painter in a view (-> a cell or a grouping cell)
class Cell(object):
# Wanted to have the "parse painter spec logic" in one place (The Cell() class)
# but this should be cleaned up more. TODO: Move this to another place
@staticmethod
def painter_exists(painter_spec):
if type(painter_spec[0]) == tuple:
painter_name = painter_spec[0][0]
else:
painter_name = painter_spec[0]
return painter_name in multisite_painters
# Wanted to have the "parse painter spec logic" in one place (The Cell() class)
# but this should be cleaned up more. TODO: Move this to another place
@staticmethod
def is_join_cell(painter_spec):
return len(painter_spec) >= 4
def __init__(self, view, painter_spec=None):
self._view = view
self._painter_name = None
self._painter_params = None
self._link_view_name = None
self._tooltip_painter_name = None
if painter_spec:
self._from_view(painter_spec)
# In views the painters are saved as tuples of the following formats:
#
# Painter name, Link view name
# ('service_discovery_service', None),
#
# Painter name, Link view name, Hover painter name
# ('host_plugin_output', None, None),
#
# Join column: Painter name, Link view name, hover painter name, Join service description
# ('service_description', None, None, u'CPU load')
#
# Join column: Painter name, Link view name, hover painter name, Join service description, custom title
# ('service_description', None, None, u'CPU load', u'Load')
#
# Parameterized painters:
# Same as above but instead of the "Painter name" a two element tuple with the painter name as
# first element and a dictionary of parameters as second element is set.
def _from_view(self, painter_spec):
if type(painter_spec[0]) == tuple:
self._painter_name, self._painter_params = painter_spec[0]
else:
self._painter_name = painter_spec[0]
if painter_spec[1] != None:
self._link_view_name = painter_spec[1]
# Clean this call to Cell.painter_exists() up!
if len(painter_spec) >= 3 and Cell.painter_exists((painter_spec[2], None)):
self._tooltip_painter_name = painter_spec[2]
# Get a list of columns we need to fetch in order to render this cell
def needed_columns(self):
columns = set(get_painter_columns(self.painter()))
if self._link_view_name:
# Make sure that the information about the available views is present. If
# called via the reporting, then this might not be the case
# TODO: Move this to some better place.
views = permitted_views()
if self._has_link():
link_view = self._link_view()
if link_view:
# TODO: Clean this up here
for filt in [ visuals.get_filter(fn) for fn in visuals.get_single_info_keys(link_view) ]:
columns.update(filt.link_columns)
if self.has_tooltip():
columns.update(get_painter_columns(self.tooltip_painter()))
return columns
def is_joined(self):
return False
def join_service(self):
return None
def _has_link(self):
return self._link_view_name != None
def _link_view(self):
try:
return get_view_by_name(self._link_view_name)
except KeyError:
return None
def painter(self):
return multisite_painters[self._painter_name]
def painter_name(self):
return self._painter_name
def export_title(self):
return self._painter_name
def painter_options(self):
return self.painter().get("options", [])
# The parameters configured in the view for this painter. In case the
# painter has params, it defaults to the valuespec default value and
# in case the painter has no params, it returns None.
def painter_parameters(self):
vs_painter_params = get_painter_params_valuespec(self.painter())
if not vs_painter_params:
return
if vs_painter_params and self._painter_params == None:
return vs_painter_params.default_value()
else:
return self._painter_params
def title(self, use_short=True):
painter = self.painter()
if use_short:
return painter.get("short", painter["title"])
else:
return painter["title"]
# Can either be:
# True : Is printable in PDF
# False : Is not printable at all
# "<string>" : ID of a painter_printer (Reporting module)
def printable(self):
return self.painter().get("printable", True)
def has_tooltip(self):
return self._tooltip_painter_name != None
def tooltip_painter_name(self):
return self._tooltip_painter_name
def tooltip_painter(self):
return multisite_painters[self._tooltip_painter_name]
def paint_as_header(self, is_last_column_header=False):
# Optional: Sort link in title cell
# Use explicit defined sorter or implicit the sorter with the painter name
# Important for links:
# - Add the display options (Keeping the same display options as current)
# - Link to _self (Always link to the current frame)
classes = []
onclick = ''
title = ''
if display_options.enabled(display_options.L) \
and self._view.get('user_sortable', False) \
and get_sorter_name_of_painter(self.painter_name()) is not None:
params = [
('sort', self._sort_url()),
]
if display_options.title_options:
params.append(('display_options', display_options.title_options))
classes += [ "sort", get_primary_sorter_order(self._view, self.painter_name()) ]
onclick = "location.href=\'%s\'" % html.makeuri(params, 'sort')
title = _('Sort by %s') % self.title()
if is_last_column_header:
classes.append("last_col")
html.open_th(class_=classes, onclick=onclick, title=title)
html.write(self.title())
html.close_th()
#html.guitest_record_output("view", ("header", title))
def _sort_url(self):
"""
The following sorters need to be handled in this order:
1. group by sorter (needed in grouped views)
2. user defined sorters (url sorter)
3. configured view sorters
"""
sorter = []
group_sort, user_sort, view_sort = get_separated_sorters(self._view)
sorter = group_sort + user_sort + view_sort
# Now apply the sorter of the current column:
# - Negate/Disable when at first position
# - Move to the first position when already in sorters
# - Add in the front of the user sorters when not set
sorter_name = get_sorter_name_of_painter(self.painter_name())
if self.is_joined():
# TODO: Clean this up and then remove Cell.join_service()
this_asc_sorter = (sorter_name, False, self.join_service())
this_desc_sorter = (sorter_name, True, self.join_service())
else:
this_asc_sorter = (sorter_name, False)
this_desc_sorter = (sorter_name, True)
if user_sort and this_asc_sorter == user_sort[0]:
# Second click: Change from asc to desc order
sorter[sorter.index(this_asc_sorter)] = this_desc_sorter
elif user_sort and this_desc_sorter == user_sort[0]:
# Third click: Remove this sorter
sorter.remove(this_desc_sorter)
else:
# First click: add this sorter as primary user sorter
# Maybe the sorter is already in the user sorters or view sorters, remove it
for s in [ user_sort, view_sort ]:
if this_asc_sorter in s:
s.remove(this_asc_sorter)
if this_desc_sorter in s:
s.remove(this_desc_sorter)
# Now add the sorter as primary user sorter
sorter = group_sort + [this_asc_sorter] + user_sort + view_sort
p = []
for s in sorter:
if len(s) == 2:
p.append((s[1] and '-' or '') + s[0])
else:
p.append((s[1] and '-' or '') + s[0] + '~' + s[2])
return ','.join(p)
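# The returned spec is a comma-separated list such as "svcstate,-svc_description":
# a leading "-" marks descending order, "~<join service>" marks a join column.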
def render(self, row):
row = join_row(row, self)
try:
tdclass, content = self.render_content(row)
except:
logger.exception("Failed to render painter '%s' (Row: %r)" %
(self._painter_name, row))
raise
if tdclass == None:
tdclass = ""
if tdclass == "" and content == "":
return "", ""
# Add the optional link to another view
if content and self._has_link():
content = link_to_view(content, row, self._link_view_name)
# Add the optional mouseover tooltip
if content and self.has_tooltip():
tooltip_cell = Cell(self._view, (self.tooltip_painter_name(), None))
tooltip_tdclass, tooltip_content = tooltip_cell.render_content(row)
tooltip_text = html.strip_tags(tooltip_content)
content = '<span title="%s">%s</span>' % (tooltip_text, content)
return tdclass, content
# Same as self.render() for HTML output: Gets a painter and a data
# row and creates the text for being painted.
def render_for_pdf(self, row, time_range):
# TODO: Move this somewhere else!
def find_htdocs_image_path(filename):
dirs = [
cmk.paths.local_web_dir + "/htdocs/",
cmk.paths.web_dir + "/htdocs/",
]
for d in dirs:
if os.path.exists(d + filename):
return d + filename
try:
row = join_row(row, self)
css_classes, txt = self.render_content(row)
if txt is None:
return css_classes, ""
txt = txt.strip()
# Handle <img...>. Our PDF writer cannot draw arbitrary
# images, but all that we need for showing simple icons.
# Current limitation: *one* image
if txt.lower().startswith("<img"):
img_filename = re.sub('.*src=["\']([^\'"]*)["\'].*', "\\1", str(txt))
img_path = find_htdocs_image_path(img_filename)
if img_path:
txt = ("icon", img_path)
else:
txt = img_filename
if isinstance(txt, HTML):
txt = "%s" % txt
elif not isinstance(txt, tuple):
txt = html.escaper.unescape_attributes(txt)
txt = html.strip_tags(txt)
return css_classes, txt
except Exception:
raise MKGeneralException('Failed to paint "%s": %s' %
(self.painter_name(), traceback.format_exc()))
def render_content(self, row):
if not row:
return "", "" # nothing to paint
painter = self.painter()
paint_func = painter["paint"]
# Painters can request to get the cell object handed over.
# Detect that and give the painter this argument.
arg_names = inspect.getargspec(paint_func)[0]
painter_args = []
for arg_name in arg_names:
if arg_name == "row":
painter_args.append(row)
elif arg_name == "cell":
painter_args.append(self)
# Add optional painter arguments from painter specification
if "args" in painter:
painter_args += painter["args"]
return painter["paint"](*painter_args)
def paint(self, row, tdattrs="", is_last_cell=False):
tdclass, content = self.render(row)
has_content = content != ""
if is_last_cell:
if tdclass == None:
tdclass = "last_col"
else:
tdclass += " last_col"
if tdclass:
html.write("<td %s class=\"%s\">" % (tdattrs, tdclass))
html.write(content)
html.close_td()
else:
html.write("<td %s>" % (tdattrs))
html.write(content)
html.close_td()
#html.guitest_record_output("view", ("cell", content))
return has_content
class JoinCell(Cell):
def __init__(self, view, painter_spec):
self._join_service_descr = None
self._custom_title = None
super(JoinCell, self).__init__(view, painter_spec)
def _from_view(self, painter_spec):
super(JoinCell, self)._from_view(painter_spec)
if len(painter_spec) >= 4:
self._join_service_descr = painter_spec[3]
if len(painter_spec) == 5:
self._custom_title = painter_spec[4]
def is_joined(self):
return True
def join_service(self):
return self._join_service_descr
def livestatus_filter(self, join_column_name):
return "Filter: %s = %s" % \
(livestatus.lqencode(join_column_name), livestatus.lqencode(self._join_service_descr))
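# e.g. for a join on u'CPU load', livestatus_filter("service_description")
# yields the header "Filter: service_description = CPU load".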
def title(self, use_short=True):
if self._custom_title:
return self._custom_title
else:
return self._join_service_descr
def export_title(self):
return "%s.%s" % (self._painter_name, self.join_service())
class EmptyCell(Cell):
def __init__(self, view):
super(EmptyCell, self).__init__(view)
def render(self, row):
return "", ""
def paint(self, row):
return False
#.
# .--Table of views------------------------------------------------------.
# | _____ _ _ __ _ |
# | |_ _|_ _| |__ | | ___ ___ / _| __ _(_) _____ _____ |
# | | |/ _` | '_ \| |/ _ \ / _ \| |_ \ \ / / |/ _ \ \ /\ / / __| |
# | | | (_| | |_) | | __/ | (_) | _| \ V /| | __/\ V V /\__ \ |
# | |_|\__,_|_.__/|_|\___| \___/|_| \_/ |_|\___| \_/\_/ |___/ |
# | |
# +----------------------------------------------------------------------+
# | Show list of all views with buttons for editing |
# '----------------------------------------------------------------------'
def page_edit_views():
load_views()
cols = [ (_('Datasource'), lambda v: multisite_datasources[v["datasource"]]['title']) ]
visuals.page_list('views', _("Edit Views"), multisite_views, cols)
#.
# .--Create View---------------------------------------------------------.
# | ____ _ __ ___ |
# | / ___|_ __ ___ __ _| |_ ___ \ \ / (_) _____ __ |
# | | | | '__/ _ \/ _` | __/ _ \ \ \ / /| |/ _ \ \ /\ / / |
# | | |___| | | __/ (_| | || __/ \ V / | | __/\ V V / |
# | \____|_| \___|\__,_|\__\___| \_/ |_|\___| \_/\_/ |
# | |
# +----------------------------------------------------------------------+
# | Select the view type of the new view |
# '----------------------------------------------------------------------'
# First step: Select the data source
# Create datasource selection valuespec, also for other modules
# FIXME: Sort the datasources by (assumed) common usage
def DatasourceSelection():
# FIXME: Sort the datasources by (assumed) common usage
datasources = []
for ds_name, ds in multisite_datasources.items():
datasources.append((ds_name, ds['title']))
return DropdownChoice(
title = _('Datasource'),
help = _('The datasources define which type of objects should be displayed with this view.'),
choices = datasources,
sorted = True,
columns = 1,
default_value = 'services',
)
def page_create_view(next_url = None):
vs_ds = DatasourceSelection()
ds = 'services' # Default selection
html.header(_('Create View'), stylesheets=["pages"])
html.begin_context_buttons()
back_url = html.var("back", "")
html.context_button(_("Back"), back_url or "edit_views.py", "back")
html.end_context_buttons()
if html.var('save') and html.check_transaction():
try:
ds = vs_ds.from_html_vars('ds')
vs_ds.validate_value(ds, 'ds')
if not next_url:
next_url = html.makeuri([('datasource', ds)], filename = "create_view_infos.py")
else:
next_url = next_url + '&datasource=%s' % ds
html.http_redirect(next_url)
return
except MKUserError, e:
html.div(e, class_=["error"])
html.add_user_error(e.varname, e)
html.begin_form('create_view')
html.hidden_field('mode', 'create')
forms.header(_('Select Datasource'))
forms.section(vs_ds.title())
vs_ds.render_input('ds', ds)
html.help(vs_ds.help())
forms.end()
html.button('save', _('Continue'), 'submit')
html.hidden_fields()
html.end_form()
html.footer()
def page_create_view_infos():
ds_name = html.var('datasource')
if ds_name not in multisite_datasources:
raise MKGeneralException(_('The given datasource is not supported'))
visuals.page_create_visual('views', multisite_datasources[ds_name]['infos'],
next_url = 'edit_view.py?mode=create&datasource=%s&single_infos=%%s' % ds_name)
#.
# .--Edit View-----------------------------------------------------------.
# | _____ _ _ _ __ ___ |
# | | ____|__| (_) |_ \ \ / (_) _____ __ |
# | | _| / _` | | __| \ \ / /| |/ _ \ \ /\ / / |
# | | |__| (_| | | |_ \ V / | | __/\ V V / |
# | |_____\__,_|_|\__| \_/ |_|\___| \_/\_/ |
# | |
# +----------------------------------------------------------------------+
# | |
# '----------------------------------------------------------------------'
# Return list of available datasources (used to render filters)
def get_view_infos(view):
ds_name = view.get('datasource', html.var('datasource'))
return multisite_datasources[ds_name]['infos']
def page_edit_view():
load_views()
visuals.page_edit_visual('views', multisite_views,
custom_field_handler = render_view_config,
load_handler = transform_view_to_valuespec_value,
create_handler = create_view_from_valuespec,
info_handler = get_view_infos,
)
def view_choices(only_with_hidden = False):
choices = [("", "")]
for name, view in available_views.items():
if not only_with_hidden or view['single_infos']:
title = format_view_title(view)
choices.append(("%s" % name, title))
return choices
def format_view_title(view):
if view.get('mobile', False):
return _('Mobile: ') + _u(view["title"])
else:
return _u(view["title"])
def view_editor_options():
return [
('mobile', _('Show this view in the Mobile GUI')),
('mustsearch', _('Show data only on search')),
('force_checkboxes', _('Always show the checkboxes')),
('user_sortable', _('Make view sortable by user')),
('play_sounds', _('Play alarm sounds')),
]
def view_editor_specs(ds_name, general_properties=True):
load_views() # make sure that available_views is present
specs = []
if general_properties:
specs.append(
('view', Dictionary(
title = _('View Properties'),
render = 'form',
optional_keys = None,
elements = [
('datasource', FixedValue(ds_name,
title = _('Datasource'),
totext = multisite_datasources[ds_name]['title'],
help = _('The datasource of a view cannot be changed.'),
)),
('options', ListChoice(
title = _('Options'),
choices = view_editor_options(),
default_value = ['user_sortable'],
)),
('browser_reload', Integer(
title = _('Automatic page reload'),
unit = _('seconds'),
minvalue = 0,
help = _('Leave this empty or at 0 for no automatic reload.'),
)),
('layout', DropdownChoice(
title = _('Basic Layout'),
choices = [ (k, v["title"]) for k,v in multisite_layouts.items() if not v.get("hide")],
default_value = 'table',
sorted = True,
)),
('num_columns', Integer(
title = _('Number of Columns'),
default_value = 1,
minvalue = 1,
maxvalue = 50,
)),
('column_headers', DropdownChoice(
title = _('Column Headers'),
choices = [
("off", _("off")),
("pergroup", _("once per group")),
("repeat", _("repeat every 20'th row")),
],
default_value = 'pergroup',
)),
],
))
)
def column_spec(ident, title, ds_name):
painters = painters_of_datasource(ds_name)
allow_empty = True
empty_text = None
if ident == 'columns':
allow_empty = False
empty_text = _("Please add at least one column to your view.")
vs_column = Tuple(
title = _('Column'),
elements = [
CascadingDropdown(
title = _('Column'),
choices = painter_choices_with_params(painters),
no_preselect = True,
),
DropdownChoice(
title = _('Link'),
choices = view_choices,
sorted = True,
),
DropdownChoice(
title = _('Tooltip'),
choices = [(None, "")] + painter_choices(painters),
),
],
)
join_painters = join_painters_of_datasource(ds_name)
if ident == 'columns' and join_painters:
join_painters = join_painters_of_datasource(ds_name)
vs_column = Alternative(
elements = [
vs_column,
Tuple(
title = _('Joined column'),
help = _("A joined column can display information about specific services for "
"host objects in a view showing host objects. You need to specify the "
"service description of the service you like to show the data for."),
elements = [
CascadingDropdown(
title = _('Column'),
choices = painter_choices_with_params(join_painters),
no_preselect = True,
),
TextUnicode(
title = _('of Service'),
allow_empty = False,
),
DropdownChoice(
title = _('Link'),
choices = view_choices,
sorted = True,
),
DropdownChoice(
title = _('Tooltip'),
choices = [(None, "")] + painter_choices(join_painters),
),
TextUnicode(
title = _('Title'),
),
],
),
],
style = 'dropdown',
match = lambda x: 1 * (x is not None and len(x) == 5),
)
return (ident, Dictionary(
title = title,
render = 'form',
optional_keys = None,
elements = [
(ident, ListOf(vs_column,
title = title,
add_label = _('Add column'),
allow_empty = allow_empty,
empty_text = empty_text,
)),
],
))
specs.append(column_spec('columns', _('Columns'), ds_name))
specs.append(
('sorting', Dictionary(
title = _('Sorting'),
render = 'form',
optional_keys = None,
elements = [
('sorters', ListOf(
Tuple(
elements = [
DropdownChoice(
title = _('Column'),
choices = [ (name, get_painter_title_for_choices(p)) for name, p
in sorters_of_datasource(ds_name).items() ],
sorted = True,
no_preselect = True,
),
DropdownChoice(
title = _('Order'),
choices = [(False, _("Ascending")),
(True, _("Descending"))],
),
],
orientation = 'horizontal',
),
title = _('Sorting'),
add_label = _('Add sorter'),
)),
],
)),
)
specs.append(column_spec('grouping', _('Grouping'), ds_name))
return specs
def render_view_config(view, general_properties=True):
ds_name = view.get("datasource", html.var("datasource"))
if not ds_name:
raise MKInternalError(_("No datasource defined."))
if ds_name not in multisite_datasources:
raise MKInternalError(_('The given datasource is not supported.'))
view['datasource'] = ds_name
for ident, vs in view_editor_specs(ds_name, general_properties):
vs.render_input(ident, view.get(ident))
# Is used to change the view structure to be compatible to
# the valuespec This needs to perform the inverted steps of the
# transform_valuespec_value_to_view() function. FIXME: One day we should
# rewrite this to make no transform needed anymore
def transform_view_to_valuespec_value(view):
view["view"] = {} # Several global variables are put into a sub-dict
# Only copy our known keys. Reporting element, etc. might have their own keys as well
for key in [ "datasource", "browser_reload", "layout", "num_columns", "column_headers" ]:
if key in view:
view["view"][key] = view[key]
view["view"]['options'] = []
for key, title in view_editor_options():
if view.get(key):
view['view']['options'].append(key)
view['visibility'] = {}
for key in [ 'hidden', 'hidebutton', 'public' ]:
if view.get(key):
view['visibility'][key] = view[key]
view['grouping'] = { "grouping" : view.get('group_painters', []) }
view['sorting'] = { "sorters" : view.get('sorters', {}) }
columns = []
view['columns'] = { "columns" : columns }
for entry in view.get('painters', []):
if len(entry) == 5:
pname, viewname, tooltip, join_index, col_title = entry
columns.append((pname, join_index, viewname, tooltip or None, col_title))
elif len(entry) == 4:
pname, viewname, tooltip, join_index = entry
columns.append((pname, join_index, viewname, tooltip or None, ''))
elif len(entry) == 3:
pname, viewname, tooltip = entry
columns.append((pname, viewname, tooltip or None))
else:
pname, viewname = entry
columns.append((pname, viewname, None))
def transform_valuespec_value_to_view(view):
for ident, attrs in view.items():
# Transform some valuespec specific options to legacy view
# format. We do not want to change the view data structure
# at the moment.
if ident == 'view':
if "options" in attrs:
# First set all options to false
for option in dict(view_editor_options()).keys():
view[option] = False
# Then set the selected single options
for option in attrs['options']:
view[option] = True
# And cleanup
del attrs['options']
view.update(attrs)
del view["view"]
elif ident == 'sorting':
view.update(attrs)
del view["sorting"]
elif ident == 'grouping':
view['group_painters'] = attrs['grouping']
del view["grouping"]
elif ident == 'columns':
painters = []
for column in attrs['columns']:
if len(column) == 5:
pname, join_index, viewname, tooltip, col_title = column
else:
pname, viewname, tooltip = column
join_index, col_title = None, None
viewname = viewname if viewname else None
if join_index and col_title:
painters.append((pname, viewname, tooltip, join_index, col_title))
elif join_index:
painters.append((pname, viewname, tooltip, join_index))
else:
painters.append((pname, viewname, tooltip))
view['painters'] = painters
del view["columns"]
# Extract properties of view from HTML variables and construct
# view object, to be used for saving or displaying
#
# old_view is the old view dict which might be loaded from storage.
# view is the new dict object to be updated.
def create_view_from_valuespec(old_view, view):
ds_name = old_view.get('datasource', html.var('datasource'))
view['datasource'] = ds_name
vs_value = {}
for ident, vs in view_editor_specs(ds_name):
attrs = vs.from_html_vars(ident)
vs.validate_value(attrs, ident)
vs_value[ident] = attrs
transform_valuespec_value_to_view(vs_value)
view.update(vs_value)
return view
#.
# .--Display View--------------------------------------------------------.
# | ____ _ _ __ ___ |
# | | _ \(_)___ _ __ | | __ _ _ _ \ \ / (_) _____ __ |
# | | | | | / __| '_ \| |/ _` | | | | \ \ / /| |/ _ \ \ /\ / / |
# | | |_| | \__ \ |_) | | (_| | |_| | \ V / | | __/\ V V / |
# | |____/|_|___/ .__/|_|\__,_|\__, | \_/ |_|\___| \_/\_/ |
# | |_| |___/ |
# +----------------------------------------------------------------------+
# | |
# '----------------------------------------------------------------------'
def show_filter(f):
if not f.visible():
html.open_div(style="display:none;")
f.display()
html.close_div()
else:
visuals.show_filter(f)
def show_filter_form(is_open, filters):
# The table must have a different name than the form
html.open_div(id_="filters", class_=["view_form"], style="display: none;" if not is_open else None)
html.begin_form("filter")
html.open_table(class_=["filterform"], cellpadding="0", cellspacing="0", border="0")
html.open_tr()
html.open_td()
# sort filters according to title
s = [(f.sort_index, f.title, f) for f in filters if f.available()]
s.sort()
# First show filters with double height (for a better floating
# layout)
for sort_index, title, f in s:
if f.double_height():
show_filter(f)
# Now single height filters
for sort_index, title, f in s:
if not f.double_height():
show_filter(f)
html.close_td()
html.close_tr()
html.open_tr()
html.open_td()
html.button("search", _("Search"), "submit")
html.close_td()
html.close_tr()
html.close_table()
html.hidden_fields()
html.end_form()
html.close_div()
def page_view():
bi.reset_cache_status() # needed for status icon
load_views()
view_name = html.var("view_name")
if view_name == None:
raise MKGeneralException(_("Missing the variable view_name in the URL."))
view = available_views.get(view_name)
if not view:
raise MKGeneralException(_("No view defined with the name '%s'.") % html.attrencode(view_name))
# Gather the page context which is needed for the "add to visual" popup menu
# to add e.g. views to dashboards or reports
datasource = multisite_datasources[view['datasource']]
context = visuals.get_context_from_uri_vars(datasource['infos'])
context.update(visuals.get_singlecontext_html_vars(view))
html.set_page_context(context)
prepare_painter_options(view_name)
painter_options.update_from_url(view)
show_view(view, True, True, True)
def get_painter_columns(painter):
if type(lambda: None) == type(painter["columns"]):
return painter["columns"]()
else:
return painter["columns"]
# Display view with real data. This is *the* function everything
# is about.
def show_view(view, show_heading = False, show_buttons = True,
show_footer = True, render_function = None, only_count=False,
all_filters_active=False, limit=None):
weblib.prepare_display_options(globals())
# Load from hard painter options > view > hard coded default
num_columns = painter_options.get("num_columns", view.get("num_columns", 1))
browser_reload = painter_options.get("refresh", view.get("browser_reload", None))
force_checkboxes = view.get("force_checkboxes", False)
show_checkboxes = force_checkboxes or html.var('show_checkboxes', '0') == '1'
# Get the datasource (i.e. the logical table)
try:
datasource = multisite_datasources[view["datasource"]]
except KeyError:
if view["datasource"].startswith("mkeventd_"):
raise MKUserError(None,
_("The Event Console view '%s' can not be rendered. The Event Console is possibly "
"disabled.") % view["name"])
else:
raise MKUserError(None,
_("The view '%s' using the datasource '%s' can not be rendered "
"because the datasource does not exist.") % (view["name"], view["datasource"]))
tablename = datasource["table"]
# Filters to use in the view
# In case of single object views, the needed filters are fixed, but not always present
# in context. In this case, take them from the context type definition.
use_filters = visuals.filters_of_visual(view, datasource['infos'],
all_filters_active, datasource.get('link_filters', {}))
# Not all filters are really shown later in show_filter_form(), because filters which
# have a hardcoded value are not changeable by the user
show_filters = visuals.visible_filters_of_visual(view, use_filters)
# FIXME TODO HACK to make grouping single contexts possible on host/service infos
# Is hopefully cleaned up soon.
if view['datasource'] in ['hosts', 'services']:
if html.has_var('hostgroup') and not html.has_var("opthost_group"):
html.set_var("opthost_group", html.var("hostgroup"))
if html.has_var('servicegroup') and not html.has_var("optservice_group"):
html.set_var("optservice_group", html.var("servicegroup"))
# TODO: Another hack :( Just like the above one: When opening the view "ec_events_of_host",
# which is of single context "host" using a host name of an unrelated event, the list of
# events is always empty since the single context filter "host" is sending a "host_name = ..."
# filter to livestatus which does not match an "unrelated event". Instead the filter event_host
# needs to be used.
# But this may only be done for the unrelated events view. The "ec_events_of_monhost" view still
# needs the filter. :-/
# Another idea: We could change these views to non single context views, but then we would not
# be able to show the buttons to other host related views, which is also bad. So better stick
# with the current mode.
if view["datasource"] in [ "mkeventd_events", "mkeventd_history" ] \
and "host" in view["single_infos"] and view["name"] != "ec_events_of_monhost":
# Remove the original host name filter
use_filters = [ f for f in use_filters if f.name != "host" ]
# Set the value for the event host filter
if not html.has_var("event_host"):
html.set_var("event_host", html.var("host"))
# Now populate the HTML vars with context vars from the view definition. Hard
# coded default values are treated differently:
#
# a) single context vars of the view are enforced
# b) multi context vars can be overwritten by existing HTML vars
visuals.add_context_to_uri_vars(view, datasource["infos"], only_count)
# Check that all needed information for configured single contexts are available
visuals.verify_single_contexts('views', view, datasource.get('link_filters', {}))
# Prepare Filter headers for Livestatus
# TODO: When this is used by the reporting then *all* filters are
# active. That way the inventory data will always be loaded. When
# we convert this to the visuals principle we need to optimize
# this.
filterheaders = ""
all_active_filters = [ f for f in use_filters if f.available() ]
for filt in all_active_filters:
header = filt.filter(tablename)
filterheaders += header
# Apply the site hint / filter
if html.var("site"):
only_sites = [html.var("site")]
else:
only_sites = None
# Prepare limit:
# We had a problem with stats queries on the logtable where
# the limit was not applied on the resulting rows but on the
# lines of the log processed. This resulted in wrong stats.
# For these datasources we ignore the query limits.
if limit == None: # Otherwise: specified as argument
if not datasource.get('ignore_limit', False):
limit = get_limit()
# Fork to availability view. We just need the filter headers, since we do not query the normal
# hosts and service table, but "statehist". This is *not* true for BI availability, though (see later)
if html.var("mode") == "availability" and (
"aggr" not in datasource["infos"] or html.var("timeline_aggr")):
context = visuals.get_context_from_uri_vars(datasource['infos'])
context.update(visuals.get_singlecontext_html_vars(view))
return render_availability_page(view, datasource, context, filterheaders, only_sites, limit)
query = filterheaders + view.get("add_headers", "")
# Sorting - use view sorters and URL supplied sorters
if not only_count:
user_sorters = parse_url_sorters(html.var("sort"))
if user_sorters:
sorter_list = user_sorters
else:
sorter_list = view["sorters"]
sorters = [ (multisite_sorters[s[0]],) + s[1:] for s in sorter_list
if s[0] in multisite_sorters ]
else:
sorters = []
# Prepare cells of the view
# Group cells: Are displayed as titles of grouped rows
# Regular cells: Are displaying information about the rows of the type the view is about
# Join cells: Are displaying information of a joined source (e.g.service data on host views)
group_cells = get_group_cells(view)
cells = get_cells(view)
regular_cells = get_regular_cells(cells)
join_cells = get_join_cells(cells)
# Now compute the list of all columns we need to query via Livestatus.
# Those are: (1) columns used by the sorters in use, (2) columns used by
# column- and group-painters in use and - note - (3) columns used to
# satisfy external references (filters) of views we link to. The last bit
# is the trickiest. Also compute the list of view options used by the
# painters.
columns = get_needed_regular_columns(group_cells + cells, sorters, datasource)
join_columns = get_needed_join_columns(join_cells, sorters, datasource)
# Fetch data. Some views show data only after pressing [Search]
if (only_count or (not view.get("mustsearch")) or html.var("filled_in") in ["filter", 'actions', 'confirm', 'painteroptions']):
# names for additional columns (through Stats: headers)
add_columns = datasource.get("add_columns", [])
# tablename may be a function instead of a livestatus tablename
# In that case that function is used to compute the result.
# It may also be a tuple. In this case the first element is a function and the second element
# is a list of arguments to hand over to the function together with all other arguments that
# are passed to query_data().
if type(tablename) == type(lambda x:None):
rows = tablename(columns, query, only_sites, limit, all_active_filters)
elif type(tablename) == tuple:
func, args = tablename
rows = func(datasource, columns, add_columns, query, only_sites, limit, *args)
else:
rows = query_data(datasource, columns, add_columns, query, only_sites, limit)
# Now add join information, if there are join columns
if join_cells:
do_table_join(datasource, rows, filterheaders, join_cells, join_columns, only_sites)
# If any painter, sorter or filter needs the information about the host's
# inventory, then we load it and attach it as column "host_inventory"
if is_inventory_data_needed(group_cells, cells, sorters, all_active_filters):
for row in rows:
if "host_name" in row:
row["host_inventory"] = inventory.load_tree(row["host_name"])
sort_data(rows, sorters)
else:
rows = []
# Apply non-Livestatus filters
for filter in all_active_filters:
rows = filter.filter_table(rows)
if html.var("mode") == "availability":
render_bi_availability(view_title(view), rows)
return
# TODO: Use livestatus Stats: instead of fetching rows!
if only_count:
for fname, filter_vars in view["context"].items():
for varname, value in filter_vars.items():
html.del_var(varname)
return len(rows)
# The layout of the view: it can be overridden by specifying
# an output format (like json or python). Note: the layout is not
# always needed. In case of an embedded view in the reporting this
# field is simply missing, because the rendering is done by the
# report itself.
# TODO: CSV export should be handled by the layouts. It cannot
# be done generic in most cases
if html.output_format == "html":
if "layout" in view:
layout = multisite_layouts[view["layout"]]
else:
layout = None
else:
if "layout" in view and "csv_export" in multisite_layouts[view["layout"]]:
multisite_layouts[view["layout"]]["csv_export"](rows, view, group_cells, cells)
return
else:
# Generic layout of export
layout = multisite_layouts.get(html.output_format)
if not layout:
layout = multisite_layouts["json"]
# Set browser reload
if browser_reload and display_options.enabled(display_options.R) and not only_count:
html.set_browser_reload(browser_reload)
# Until now no single byte of HTML code has been output.
# Now let's render the view. The render_function will be
# replaced by the mobile interface for an own version.
if not render_function:
render_function = render_view
render_function(view, rows, datasource, group_cells, cells,
show_heading, show_buttons,
show_checkboxes, layout, num_columns, show_filters, show_footer,
browser_reload)
def get_group_cells(view):
return [ Cell(view, e) for e in view["group_painters"]
if Cell.painter_exists(e) ]
def get_cells(view):
cells = []
for e in view["painters"]:
if not Cell.painter_exists(e):
continue
if Cell.is_join_cell(e):
cells.append(JoinCell(view, e))
else:
cells.append(Cell(view, e))
return cells
def get_join_cells(cell_list):
return filter(lambda x: type(x) == JoinCell, cell_list)
def get_regular_cells(cell_list):
return filter(lambda x: type(x) == Cell, cell_list)
def get_needed_regular_columns(cells, sorters, datasource):
# BI availability needs aggr_tree
# TODO: wtf? a full reset of the list? Move this far away to a special place!
if html.var("mode") == "availability" and "aggr" in datasource["infos"]:
return [ "aggr_tree", "aggr_name", "aggr_group" ]
columns = columns_of_cells(cells)
# Columns needed for sorters
# TODO: Move sorter parsing and logic to something like Cells()
for s in sorters:
if len(s) == 2:
columns.update(s[0]["columns"])
# Add key columns, needed for executing commands
columns.update(datasource["keys"])
# Add idkey columns, needed for identifying the row
columns.update(datasource["idkeys"])
# Remove (implicit) site column
try:
columns.remove("site")
except KeyError:
pass
return list(columns)
def get_needed_join_columns(join_cells, sorters, datasource):
join_columns = columns_of_cells(join_cells)
# Columns needed for sorters
# TODO: Move sorter parsing and logic to something like Cells()
for s in sorters:
if len(s) != 2:
join_columns.update(s[0]["columns"])
return list(join_columns)
def is_inventory_data_needed(group_cells, cells, sorters, all_active_filters):
for cell in cells:
if cell.has_tooltip():
if cell.tooltip_painter_name().startswith("inv_"):
return True
for s in sorters:
if s[0].get("load_inv"):
return True
for cell in group_cells + cells:
if cell.painter().get("load_inv"):
return True
for filt in all_active_filters:
if filt.need_inventory():
return True
return False
def columns_of_cells(cells):
columns = set([])
for cell in cells:
columns.update(cell.needed_columns())
return columns
# Output HTML code of a view. If you add or remove parameters here,
# then please also do this in htdocs/mobile.py!
def render_view(view, rows, datasource, group_painters, painters,
show_heading, show_buttons,
show_checkboxes, layout, num_columns, show_filters, show_footer,
browser_reload):
if html.transaction_valid() and html.do_actions():
html.set_browser_reload(0)
# Show heading (change between "preview" mode and full page mode)
if show_heading:
# Show/Hide the header with page title, MK logo, etc.
if display_options.enabled(display_options.H):
# FIXME: view/layout/module related stylesheets/javascripts e.g. in case of BI?
html.body_start(view_title(view), stylesheets=["pages","views","status","bi"])
if display_options.enabled(display_options.T):
html.top_heading(view_title(view))
has_done_actions = False
row_count = len(rows)
# This is a general flag which makes the command form render when the current
# view might be able to handle commands. When no commands are possible due to missing
# permissions or datasources without commands, the form is not rendered
command_form = should_show_command_form(datasource)
if command_form:
weblib.init_selection()
# Is the layout able to display checkboxes?
can_display_checkboxes = layout.get('checkboxes', False)
if show_buttons:
show_combined_graphs_button = \
("host" in datasource["infos"] or "service" in datasource["infos"]) and \
(type(datasource["table"]) == str) and \
("host" in datasource["table"] or "service" in datasource["table"])
show_context_links(view, datasource, show_filters,
# Take into account: permissions, display_options
row_count > 0 and command_form,
# Take into account: layout capabilities
can_display_checkboxes and not view.get("force_checkboxes"), show_checkboxes,
# Show link to availability
datasource["table"] in [ "hosts", "services" ] or "aggr" in datasource["infos"],
# Show link to combined graphs
show_combined_graphs_button,)
# User errors in filters
html.show_user_errors()
# Filter form
filter_isopen = view.get("mustsearch") and not html.var("filled_in")
if display_options.enabled(display_options.F) and len(show_filters) > 0:
show_filter_form(filter_isopen, show_filters)
# Actions
if command_form:
# If we are currently within an action (confirming or executing), then
# we display only the selected rows (if checkbox mode is active)
if show_checkboxes and html.do_actions():
rows = filter_selected_rows(view, rows, weblib.get_rowselection('view-' + view['name']))
# There are one shot actions which only want to affect one row, filter the rows
# by this id during actions
if html.has_var("_row_id") and html.do_actions():
rows = filter_by_row_id(view, rows)
if html.do_actions() and html.transaction_valid(): # submit button pressed, no reload
try:
# Create URI with all actions variables removed
backurl = html.makeuri([], delvars=['filled_in', 'actions'])
has_done_actions = do_actions(view, datasource["infos"][0], rows, backurl)
except MKUserError, e:
html.show_error(e)
html.add_user_error(e.varname, e)
if display_options.enabled(display_options.C):
show_command_form(True, datasource)
elif display_options.enabled(display_options.C): # (*not* display open, if checkboxes are currently shown)
show_command_form(False, datasource)
# Also execute commands in cases without command form (needed for Python-
# web service e.g. for NagStaMon)
elif row_count > 0 and config.user.may("general.act") \
and html.do_actions() and html.transaction_valid():
# There are one shot actions which only want to affect one row, filter the rows
# by this id during actions
if html.has_var("_row_id") and html.do_actions():
rows = filter_by_row_id(view, rows)
try:
do_actions(view, datasource["infos"][0], rows, '')
except:
pass # currently no feedback on the webservice
painter_options.show_form(view)
# The refreshing content container
if display_options.enabled(display_options.R):
html.open_div(id_="data_container")
if not has_done_actions:
# Limit exceeded? Show warning
if display_options.enabled(display_options.W):
check_limit(rows, get_limit())
layout["render"](rows, view, group_painters, painters, num_columns,
show_checkboxes and not html.do_actions())
headinfo = "%d %s" % (row_count, _("row") if row_count == 1 else _("rows"))
if show_checkboxes:
selected = filter_selected_rows(view, rows, weblib.get_rowselection('view-' + view['name']))
headinfo = "%d/%s" % (len(selected), headinfo)
if html.output_format == "html":
html.javascript("update_headinfo('%s');" % headinfo)
# The number of rows might have changed to enable/disable actions and checkboxes
if show_buttons:
update_context_links(
# don't take display_options into account here ('c' is set during reload)
row_count > 0 and should_show_command_form(datasource, ignore_display_option=True),
# and not html.do_actions(),
can_display_checkboxes
)
# Play alarm sounds, if critical events have been displayed
if display_options.enabled(display_options.S) and view.get("play_sounds"):
play_alarm_sounds()
else:
# Always hide action related context links in this situation
update_context_links(False, False)
# In multi site setups error messages of single sites do not block the
# output and raise no exception. We simply print error messages here.
# In case of the web service we show errors only on single site installations.
if config.show_livestatus_errors \
and display_options.enabled(display_options.W) \
and html.output_format == "html":
for sitename, info in sites.live().dead_sites().items():
html.show_error("<b>%s - %s</b><br>%s" %
(info["site"]["alias"], _('Livestatus error'), info["exception"]))
# FIXME: It would be cleaner to also include the status icons here
if display_options.enabled(display_options.R):
html.close_div()
if show_footer:
pid = os.getpid()
if sites.live().successfully_persisted():
html.add_status_icon("persist", _("Reused persistent livestatus connection from earlier request (PID %d)") % pid)
if bi.reused_compilation():
html.add_status_icon("aggrcomp", _("Reused cached compiled BI aggregations (PID %d)") % pid)
html.bottom_focuscode()
if display_options.enabled(display_options.Z):
html.bottom_footer()
if display_options.enabled(display_options.H):
html.body_end()
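# Check whether the number of queried rows exceeds the configured limit.
# If so, show a warning, truncate the rows in place and return False,
# otherwise return True.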
def check_limit(rows, limit):
count = len(rows)
if limit != None and count >= limit + 1:
text = _("Your query produced more than %d results. ") % limit
if html.var("limit", "soft") == "soft" and config.user.may("general.ignore_soft_limit"):
text += html.render_a(_('Repeat query and allow more results.'),
target="_self",
href=html.makeuri([("limit", "hard")]))
elif html.var("limit") == "hard" and config.user.may("general.ignore_hard_limit"):
text += html.render_a(_('Repeat query without limit.'),
target="_self",
href=html.makeuri([("limit", "none")]))
text += " " + _("<b>Note:</b> the shown results are incomplete and do not reflect the sort order.")
html.show_warning(text)
del rows[limit:]
return False
return True
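# Join rows from a second livestatus table into the master rows: query the
# slave table once using the master filters plus one filter per join cell,
# then attach the matching slave rows to each master row in the artificial
# column "JOIN".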
def do_table_join(master_ds, master_rows, master_filters, join_cells, join_columns, only_sites):
join_table, join_master_column = master_ds["join"]
slave_ds = multisite_datasources[join_table]
join_slave_column = slave_ds["joinkey"]
# Create additional filters
join_filters = []
for cell in join_cells:
join_filters.append(cell.livestatus_filter(join_slave_column))
join_filters.append("Or: %d" % len(join_filters))
query = "%s%s\n" % (master_filters, "\n".join(join_filters))
rows = query_data(slave_ds, [join_master_column, join_slave_column] + join_columns, [], query, only_sites, None)
per_master_entry = {}
current_key = None
current_entry = None
for row in rows:
master_key = (row["site"], row[join_master_column])
if master_key != current_key:
current_key = master_key
current_entry = {}
per_master_entry[current_key] = current_entry
current_entry[row[join_slave_column]] = row
# Add this information into master table in artificial column "JOIN"
for row in master_rows:
key = (row["site"], row[join_master_column])
joininfo = per_master_entry.get(key, {})
row["JOIN"] = joininfo
g_alarm_sound_states = set([])
def clear_alarm_sound_states():
g_alarm_sound_states.clear()
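# Remember the state names of the displayed rows so that play_alarm_sounds()
# can later pick a matching sound.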
def save_state_for_playing_alarm_sounds(row):
if not config.enable_sounds or not config.sounds:
return
# TODO: Move this to a generic place. What about -1?
host_state_map = { 0: "up", 1: "down", 2: "unreachable"}
service_state_map = { 0: "up", 1: "warning", 2: "critical", 3: "unknown"}
for state_map, state in [
(host_state_map, row.get("host_hard_state", row.get("host_state"))),
(service_state_map, row.get("service_last_hard_state", row.get("service_state"))) ]:
if state is None:
continue
try:
state_name = state_map[int(state)]
except KeyError:
continue
g_alarm_sound_states.add(state_name)
def play_alarm_sounds():
if not config.enable_sounds or not config.sounds:
return
url = config.sound_url
if not url.endswith("/"):
url += "/"
for state_name, wav in config.sounds:
if not state_name or state_name in g_alarm_sound_states:
html.play_sound(url + wav)
break # only one sound at one time
# How many data rows may the user query?
def get_limit():
limitvar = html.var("limit", "soft")
if limitvar == "hard" and config.user.may("general.ignore_soft_limit"):
return config.hard_query_limit
elif limitvar == "none" and config.user.may("general.ignore_hard_limit"):
return None
else:
return config.soft_query_limit
def view_title(view):
return visuals.visual_title('view', view)
def view_optiondial(view, option, choices, help):
# Darn: The option "refresh" has the name "browser_reload" in the
# view definition
if option == "refresh":
name = "browser_reload"
else:
name = option
# Take either the first option of the choices, the view value or the
# configured painter option.
value = painter_options.get(option, dflt=view.get(name, choices[0][0]))
title = dict(choices).get(value, value)
html.begin_context_buttons() # just to be sure
# Remove unicode strings
choices = [ [c[0], str(c[1])] for c in choices ]
html.open_div(id_="optiondial_%s" % option,
class_=["optiondial", option, "val_%s" % value],
title=help,
onclick="view_dial_option(this, \'%s\', \'%s\', %r)"
% (view["name"], option, choices))
html.div(title)
html.close_div()
html.final_javascript("init_optiondial('optiondial_%s');" % option)
def view_optiondial_off(option):
html.div('', class_=["optiondial", "off", option])
# FIXME: Consolidate with html.toggle_button() rendering functions
def toggler(id, icon, help, onclick, value, hidden = False):
html.begin_context_buttons() # just to be sure
hide = ' style="display:none"' if hidden else ''
html.write('<div id="%s_on" title="%s" class="togglebutton %s %s" %s>'
'<a href="javascript:void(0)" onclick="%s"><img src="images/icon_%s.png"></a></div>' % (
id, help, icon, value and "down" or "up", hide, onclick, icon))
# Will be called when the user presses the upper button, in order
# to persist the new setting - and to make it active before the
# browser reload of the DIV containing the actual status data is done.
def ajax_set_viewoption():
view_name = html.var("view_name")
option = html.var("option")
value = html.var("value")
value = { 'true' : True, 'false' : False }.get(value, value)
if type(value) == str and value[0].isdigit():
try:
value = int(value)
except:
pass
po = PainterOptions(view_name)
po.load()
po.set(option, value)
po.save_to_config()
def show_context_links(thisview, datasource, show_filters,
enable_commands, enable_checkboxes, show_checkboxes,
show_availability, show_combined_graphs):
# html.begin_context_buttons() called automatically by html.context_button()
# That way if no button is painted we avoid the empty container
if display_options.enabled(display_options.B):
execute_hooks('buttons-begin')
filter_isopen = html.var("filled_in") != "filter" and thisview.get("mustsearch")
if display_options.enabled(display_options.F):
if html.var("filled_in") == "filter":
icon = "filters_set"
help = _("The current data is being filtered")
else:
icon = "filters"
help = _("Set a filter for refining the shown data")
html.toggle_button("filters", filter_isopen, icon, help, disabled=not show_filters)
if display_options.enabled(display_options.D):
html.toggle_button("painteroptions", False, "painteroptions", _("Modify display options"),
disabled=not painter_options.painter_option_form_enabled())
if display_options.enabled(display_options.C):
html.toggle_button("commands", False, "commands", _("Execute commands on hosts, services and other objects"),
hidden = not enable_commands)
html.toggle_button("commands", False, "commands", "", hidden=enable_commands, disabled=True)
selection_enabled = (enable_commands and enable_checkboxes) or thisview.get("force_checkboxes")
if not thisview.get("force_checkboxes"):
toggler("checkbox", "checkbox", _("Enable/Disable checkboxes for selecting rows for commands"),
"location.href='%s';" % html.makeuri([('show_checkboxes', show_checkboxes and '0' or '1')]),
show_checkboxes, hidden = True) # not selection_enabled)
html.toggle_button("checkbox", False, "checkbox", "", hidden=not thisview.get("force_checkboxes"), disabled=True)
html.javascript('g_selection_enabled = %s;' % ('true' if selection_enabled else 'false'))
if display_options.enabled(display_options.O):
if config.user.may("general.view_option_columns"):
choices = [ [x, "%s" % x] for x in config.view_option_columns ]
view_optiondial(thisview, "num_columns", choices, _("Change the number of display columns"))
else:
view_optiondial_off("num_columns")
if display_options.enabled(display_options.R) and config.user.may("general.view_option_refresh"):
choices = [ [x, {0:_("off")}.get(x, str(x) + "s") ] for x in config.view_option_refreshes ]
view_optiondial(thisview, "refresh", choices, _("Change the refresh rate"))
else:
view_optiondial_off("refresh")
if display_options.enabled(display_options.B):
# WATO: If we have a host context, then show button to WATO, if permissions allow this
if html.has_var("host") \
and config.wato_enabled \
and config.user.may("wato.use") \
and (config.user.may("wato.hosts") or config.user.may("wato.seeall")):
host = html.var("host")
if host:
url = wato.link_to_host_by_name(host)
else:
url = wato.link_to_folder_by_path(html.var("wato_folder", ""))
html.context_button(_("WATO"), url, "wato", id="wato",
bestof = config.context_buttons_to_show)
# Button for creating an instant report (if reporting is available)
if config.reporting_available() and config.user.may("general.reporting"):
html.context_button(_("Export as PDF"), html.makeuri([], filename="report_instant.py"),
"report", class_="context_pdf_export")
# Buttons to other views, dashboards, etc.
links = visuals.collect_context_links(thisview)
for linktitle, uri, icon, buttonid in links:
html.context_button(linktitle, url=uri, icon=icon, id=buttonid, bestof=config.context_buttons_to_show)
# Customize/Edit view button
if display_options.enabled(display_options.E) and config.user.may("general.edit_views"):
url_vars = [
("back", html.requested_url()),
("load_name", thisview["name"]),
]
if thisview["owner"] != config.user.id:
url_vars.append(("load_user", thisview["owner"]))
url = html.makeuri_contextless(url_vars, filename="edit_view.py")
html.context_button(_("Edit View"), url, "edit", id="edit", bestof=config.context_buttons_to_show)
if display_options.enabled(display_options.E):
if show_availability:
html.context_button(_("Availability"), html.makeuri([("mode", "availability")]), "availability")
if show_combined_graphs and config.combined_graphs_available():
html.context_button(_("Combined graphs"),
html.makeuri([
("single_infos", ",".join(thisview["single_infos"])),
("datasource", thisview["datasource"]),
("view_title", view_title(thisview)),
],
filename="combined_graphs.py"), "pnp")
if display_options.enabled(display_options.B):
execute_hooks('buttons-end')
html.end_context_buttons()
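# Update the command/checkbox toggle buttons via JavaScript after the
# number of rows (and thus the available actions) may have changed.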
def update_context_links(enable_command_toggle, enable_checkbox_toggle):
html.javascript("update_togglebutton('commands', %d);" % (enable_command_toggle and 1 or 0))
html.javascript("update_togglebutton('checkbox', %d);" % (enable_command_toggle and enable_checkbox_toggle and 1 or 0, ))
def ajax_count_button():
id = html.var("id")
counts = config.user.load_file("buttoncounts", {})
for i in counts:
counts[i] *= 0.95
counts.setdefault(id, 0)
counts[id] += 1
config.user.save_file("buttoncounts", counts)
# Retrieve data via livestatus, convert into list of dicts,
# prepare row-function needed for painters
# datasource: the datasource object as defined in plugins/views/datasources.py
# columns: the list of livestatus columns to query
# add_columns: list of columns the datasource is known to add itself
# (couldn't we get rid of this parameter by looking that up ourselves?)
# add_headers: additional livestatus headers to add
# only_sites: list of sites the query is limited to
# limit: maximum number of data rows to query
def query_data(datasource, columns, add_columns, add_headers,
only_sites = None, limit = None, tablename=None):
if only_sites is None:
only_sites = []
if tablename == None:
tablename = datasource["table"]
add_headers += datasource.get("add_headers", "")
merge_column = datasource.get("merge_by")
if merge_column:
columns = [merge_column] + columns
# Most layouts need current state of object in order to
# choose background color - even if no painter for state
# is selected. Make sure those columns are fetched. This
# must not be done for the table 'log' as it cannot correctly
# distinguish between service_state and host_state
if "log" not in datasource["infos"]:
state_columns = []
if "service" in datasource["infos"]:
state_columns += [ "service_has_been_checked", "service_state" ]
if "host" in datasource["infos"]:
state_columns += [ "host_has_been_checked", "host_state" ]
for c in state_columns:
if c not in columns:
columns.append(c)
auth_domain = datasource.get("auth_domain", "read")
# Remove columns which are implicitly added by the datasource
columns = [ c for c in columns if c not in add_columns ]
query = "GET %s\n" % tablename
rows = do_query_data(query, columns, add_columns, merge_column,
add_headers, only_sites, limit, auth_domain)
# Datasource may have optional post processing function to filter out rows
post_process_func = datasource.get("post_process")
if post_process_func:
return post_process_func(rows)
else:
return rows
def do_query_data(query, columns, add_columns, merge_column,
add_headers, only_sites, limit, auth_domain):
query += "Columns: %s\n" % " ".join(columns)
query += add_headers
sites.live().set_prepend_site(True)
if limit != None:
sites.live().set_limit(limit + 1) # + 1: We need to know, if limit is exceeded
else:
sites.live().set_limit(None)
if config.debug_livestatus_queries \
and html.output_format == "html" and display_options.enabled(display_options.W):
html.open_div(class_=["livestatus", "message"])
html.tt(query.replace('\n', '<br>\n'))
html.close_div()
if only_sites:
sites.live().set_only_sites(only_sites)
sites.live().set_auth_domain(auth_domain)
data = sites.live().query(query)
sites.live().set_auth_domain("read")
sites.live().set_only_sites(None)
sites.live().set_prepend_site(False)
sites.live().set_limit() # removes limit
if merge_column:
data = merge_data(data, columns)
# Convert list-rows into dictionaries. This costs a bit of
# performance, but makes life much easier later.
columns = ["site"] + columns + add_columns
rows = [ dict(zip(columns, row)) for row in data ]
return rows
# Merge all data rows with different sites but the same value
# in merge_column. We require that all column names are prefixed
# with the tablename. The column with the merge key is required
# to be the *second* column (right after the site column)
def merge_data(data, columns):
merged = {}
mergefuncs = [lambda a,b: ""] # site column is not merged
def worst_service_state(a, b):
if a == 2 or b == 2:
return 2
else:
return max(a, b)
def worst_host_state(a, b):
if a == 1 or b == 1:
return 1
else:
return max(a, b)
for c in columns:
tablename, col = c.split("_", 1)
if col.startswith("num_") or col.startswith("members"):
mergefunc = lambda a,b: a+b
elif col.startswith("worst_service"):
return worst_service_state
elif col.startswith("worst_host"):
return worst_host_state
else:
mergefunc = lambda a,b: a
mergefuncs.append(mergefunc)
for row in data:
mergekey = row[1]
if mergekey in merged:
oldrow = merged[mergekey]
merged[mergekey] = [ f(a,b) for f,a,b in zip(mergefuncs, oldrow, row) ]
else:
merged[mergekey] = row
# return all rows sorted according to merge key
mergekeys = merged.keys()
mergekeys.sort()
return [ merged[k] for k in mergekeys ]
# Sort data according to list of sorters. The tablename
# is needed in order to handle different column names
# for same objects (e.g. host_name in table services and
# simply name in table hosts)
def sort_data(data, sorters):
if len(sorters) == 0:
return
# Handle case where join columns are not present for all rows
def save_compare(compfunc, row1, row2, args):
if row1 == None and row2 == None:
return 0
elif row1 == None:
return -1
elif row2 == None:
return 1
else:
if args:
return compfunc(row1, row2, *args)
else:
return compfunc(row1, row2)
sort_cmps = []
for s in sorters:
cmpfunc = s[0]["cmp"]
negate = -1 if s[1] else 1
if len(s) > 2:
joinkey = s[2] # e.g. service description
else:
joinkey = None
sort_cmps.append((cmpfunc, negate, joinkey, s[0].get('args')))
def multisort(e1, e2):
for func, neg, joinkey, args in sort_cmps:
if joinkey: # Sorter for join column, use JOIN info
c = neg * save_compare(func, e1["JOIN"].get(joinkey), e2["JOIN"].get(joinkey), args)
else:
if args:
c = neg * func(e1, e2, *args)
else:
c = neg * func(e1, e2)
if c != 0: return c
return 0 # equal
data.sort(multisort)
def sorters_of_datasource(ds_name):
return allowed_for_datasource(multisite_sorters, ds_name)
def painters_of_datasource(ds_name):
return allowed_for_datasource(multisite_painters, ds_name)
def join_painters_of_datasource(ds_name):
ds = multisite_datasources[ds_name]
if "join" not in ds:
return {} # no joining with this datasource
# Get the painters allowed for the join "source" and "target"
painters = painters_of_datasource(ds_name)
join_painters_unfiltered = allowed_for_datasource(multisite_painters, ds['join'][0])
# Filter out painters associated with the "join source" datasource
join_painters = {}
for key, val in join_painters_unfiltered.items():
if key not in painters:
join_painters[key] = val
return join_painters
# Filters a list of sorters or painters and decides which of
# those are available for a certain data source
def allowed_for_datasource(collection, datasourcename):
datasource = multisite_datasources[datasourcename]
infos_available = set(datasource["infos"])
add_columns = datasource.get("add_columns", [])
allowed = {}
for name, item in collection.items():
infos_needed = infos_needed_by_painter(item, add_columns)
if len(infos_needed.difference(infos_available)) == 0:
allowed[name] = item
return allowed
def infos_needed_by_painter(painter, add_columns=None):
if add_columns is None:
add_columns = []
columns = get_painter_columns(painter)
return set([ c.split("_", 1)[0] for c in columns if c != "site" and c not in add_columns])
# Returns either the valuespec of the painter parameters or None
def get_painter_params_valuespec(painter):
if "params" not in painter:
return
if type(lambda: None) == type(painter["params"]):
return painter["params"]()
else:
return painter["params"]
def painter_choices(painters, add_params=False):
choices = []
for name, painter in painters.items():
title = get_painter_title_for_choices(painter)
# Add the optional valuespec for painter parameters
if add_params and "params" in painter:
vs_params = get_painter_params_valuespec(painter)
choices.append((name, title, vs_params))
else:
choices.append((name, title))
return sorted(choices, key=lambda x: x[1])
def get_painter_title_for_choices(painter):
info_title = "/".join([ visuals.infos[info_name]["title_plural"] for info_name
in sorted(infos_needed_by_painter(painter)) ])
# TODO: Cleanup the special case for sites. How? Add an info for it?
if painter["columns"] == ["site"]:
info_title = _("Site")
return "%s: %s" % (info_title, painter["title"])
def painter_choices_with_params(painters):
return painter_choices(painters, add_params=True)
#.
# .--Commands------------------------------------------------------------.
# | ____ _ |
# | / ___|___ _ __ ___ _ __ ___ __ _ _ __ __| |___ |
# | | | / _ \| '_ ` _ \| '_ ` _ \ / _` | '_ \ / _` / __| |
# | | |__| (_) | | | | | | | | | | | (_| | | | | (_| \__ \ |
# | \____\___/|_| |_| |_|_| |_| |_|\__,_|_| |_|\__,_|___/ |
# | |
# +----------------------------------------------------------------------+
# | Functions dealing with external commands sent to the monitoring |
# | core. The commands themselves are defined as a plugin. Shipped |
# | command definitions are in plugins/views/commands.py. |
# | We apologize for the fact that we sometimes speak of "commands" and |
# | other times of "actions". Both are the same here... |
# '----------------------------------------------------------------------'
# Checks whether or not this view handles commands for the current user.
# When it does not handle commands, the command tab, command form, row
# selection and processing of commands are disabled.
def should_show_command_form(datasource, ignore_display_option=False):
if not ignore_display_option and display_options.disabled(display_options.C):
return False
if not config.user.may("general.act"):
return False
# What commands are available depends on the Livestatus table we
# deal with. If a data source provides information about more
# than one table, (like services datasource also provide host
# information) then the first info is the primary table. So 'what'
# will be one of "host", "service", "command" or "downtime".
what = datasource["infos"][0]
for command in multisite_commands:
if what in command["tables"] and config.user.may(command["permission"]):
return True
return False
def show_command_form(is_open, datasource):
# What commands are available depends on the Livestatus table we
# deal with. If a data source provides information about more
# than one table, (like services datasource also provide host
# information) then the first info is the primary table. So 'what'
# will be one of "host", "service", "command" or "downtime".
what = datasource["infos"][0]
html.open_div(id_="commands",
class_=["view_form"],
style="display:none;" if not is_open else None)
html.begin_form("actions")
html.hidden_field("_do_actions", "yes")
html.hidden_field("actions", "yes")
html.hidden_fields() # set all current variables, exception action vars
# Show command forms, grouped by (optional) command group
by_group = {}
for command in multisite_commands:
if what in command["tables"] and config.user.may(command["permission"]):
# Some special commands can be shown on special views using this option.
# It is currently only used in custom views, not shipped with check_mk.
if command.get('only_view') and html.var('view_name') != command['only_view']:
continue
group = command.get("group", "various")
by_group.setdefault(group, []).append(command)
for group_ident, group_commands in sorted(by_group.items(),
key=lambda x: multisite_command_groups[x[0]]["sort_index"]):
forms.header(multisite_command_groups[group_ident]["title"], narrow=True)
for command in group_commands:
forms.section(command["title"])
command["render"]()
forms.end()
html.end_form()
html.close_div()
# Examine the current HTML variables in order to determine which
# command the user has selected. Then fetch the ids from a data row
# (host name, service description, downtime/command id) and
# construct one or several core command lines and a descriptive
# title.
def core_command(what, row, row_nr, total_rows):
host = row.get("host_name")
descr = row.get("service_description")
if what == "host":
spec = host
cmdtag = "HOST"
elif what == "service":
spec = "%s;%s" % (host, descr)
cmdtag = "SVC"
else:
spec = row.get(what + "_id")
if descr:
cmdtag = "SVC"
else:
cmdtag = "HOST"
commands = None
title = None
# Call all command actions. The first one that detects
# itself to be executed (by examining the HTML variables)
# will return a command to execute and a title for the
# confirmation dialog.
for cmd in multisite_commands:
if config.user.may(cmd["permission"]):
# Does the command need information about the total number of rows
# and the number of the current row? Then specify that
if cmd.get("row_stats"):
result = cmd["action"](cmdtag, spec, row, row_nr, total_rows)
else:
result = cmd["action"](cmdtag, spec, row)
if result:
executor = cmd.get("executor", command_executor_livestatus)
commands, title = result
break
# Use the title attribute to determine if a command exists, since the list
# of commands might be empty (e.g. in case of "remove all downtimes" where
# no downtime exists in a selection of rows).
if not title:
raise MKUserError(None, _("Sorry. This command is not implemented."))
# Some commands return lists of commands, others
# just return one basic command. Convert those
if type(commands) != list:
commands = [commands]
return commands, title, executor
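# Default command executor: send the command to the monitoring core via
# livestatus, prefixed with the current timestamp.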
def command_executor_livestatus(command, site):
sites.live().command("[%d] %s" % (int(time.time()), command), site)
# make gettext localize some magic texts
_("services")
_("hosts")
_("commands")
_("downtimes")
_("aggregations")
# Returns:
# True -> Actions have been done
# False -> No actions done because no rows were selected
# [...] new rows -> Rows that actions shall be/have been performed on
def do_actions(view, what, action_rows, backurl):
if not config.user.may("general.act"):
html.show_error(_("You are not allowed to perform actions. "
"If you think this is an error, please ask "
"your administrator grant you the permission to do so."))
return False # no actions done
if not action_rows:
message = _("No rows selected to perform actions for.")
if html.output_format == "html": # sorry for this hack
message += '<br><a href="%s">%s</a>' % (backurl, _('Back to view'))
html.show_error(message)
return False # no actions done
command = None
title, executor = core_command(what, action_rows[0], 0, len(action_rows))[1:3] # just get the title and executor
if not html.confirm(_("Do you really want to %(title)s the following %(count)d %(what)s?") %
{ "title" : title, "count" : len(action_rows), "what" : visuals.infos[what]["title_plural"], }, method = 'GET'):
return False
count = 0
already_executed = set([])
for nr, row in enumerate(action_rows):
core_commands, title, executor = core_command(what, row, nr, len(action_rows))
for command_entry in core_commands:
site = row.get("site") # site is missing for BI rows (aggregations can spawn several sites)
if (site, command_entry) not in already_executed:
# Some command functions return the information about the site per-command (e.g. for BI)
if type(command_entry) == tuple:
site, command = command_entry
else:
command = command_entry
if type(command) == unicode:
command = command.encode("utf-8")
executor(command, site)
already_executed.add((site, command_entry))
count += 1
message = None
if command:
message = _("Successfully sent %d commands.") % count
if config.debug:
message += _("The last one was: <pre>%s</pre>") % command
elif count == 0:
message = _("No matching data row. No command sent.")
if message:
if html.output_format == "html": # sorry for this hack
message += '<br><a href="%s">%s</a>' % (backurl, _('Back to view'))
if html.var("show_checkboxes") == "1":
html.del_var("selection")
weblib.selection_id()
backurl += "&selection=" + html.var("selection")
message += '<br><a href="%s">%s</a>' % (backurl, _('Back to view with checkboxes reset'))
if html.var("_show_result") == "0":
html.immediate_browser_redirect(0.5, backurl)
html.message(message)
return True
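# One shot actions refer to a single row via the _row_id HTML variable.
# Keep only the row with that id.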
def filter_by_row_id(view, rows):
wanted_row_id = html.var("_row_id")
for row in rows:
if row_id(view, row) == wanted_row_id:
return [row]
return []
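# Reduce the rows to those that are part of the current checkbox selection.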
def filter_selected_rows(view, rows, selected_ids):
action_rows = []
for row in rows:
if row_id(view, row) in selected_ids:
action_rows.append(row)
return action_rows
def get_context_link(user, viewname):
if viewname in available_views:
return "view.py?view_name=%s" % viewname
else:
return None
def ajax_export():
load_views()
for name, view in available_views.items():
view["owner"] = ''
view["public"] = True
html.write(pprint.pformat(available_views))
def get_view_by_name(view_name):
load_views()
return available_views[view_name]
#.
# .--Plugin Helpers------------------------------------------------------.
# | ____ _ _ _ _ _ |
# | | _ \| |_ _ __ _(_)_ __ | | | | ___| |_ __ ___ _ __ ___ |
# | | |_) | | | | |/ _` | | '_ \ | |_| |/ _ \ | '_ \ / _ \ '__/ __| |
# | | __/| | |_| | (_| | | | | | | _ | __/ | |_) | __/ | \__ \ |
# | |_| |_|\__,_|\__, |_|_| |_| |_| |_|\___|_| .__/ \___|_| |___/ |
# | |___/ |_| |
# +----------------------------------------------------------------------+
# | |
# '----------------------------------------------------------------------'
def register_command_group(ident, title, sort_index):
multisite_command_groups[ident] = {
"title" : title,
"sort_index" : sort_index,
}
def register_hook(hook, func):
if not hook in view_hooks:
view_hooks[hook] = []
if func not in view_hooks[hook]:
view_hooks[hook].append(func)
def execute_hooks(hook):
for hook_func in view_hooks.get(hook, []):
try:
hook_func()
except:
if config.debug:
raise MKGeneralException(_('Problem while executing hook function %s in hook %s: %s')
% (hook_func.__name__, hook, traceback.format_exc()))
else:
pass
def join_row(row, cell):
if type(cell) == JoinCell:
return row.get("JOIN", {}).get(cell.join_service())
else:
return row
def url_to_view(row, view_name):
if display_options.disabled(display_options.I):
return None
view = permitted_views().get(view_name)
if view:
# Get the context type of the view to link to, then get the parameters of this
# context type and try to construct the context from the data of the row
url_vars = []
datasource = multisite_datasources[view['datasource']]
for info_key in datasource['infos']:
if info_key in view['single_infos']:
# Determine which filters (by their names) need to be set
# in order to select the correct context for the
# target view.
for filter_name in visuals.info_params(info_key):
filter_object = visuals.get_filter(filter_name)
# Get the list of URI vars to be set for that filter
new_vars = filter_object.variable_settings(row)
url_vars += new_vars
# See get_link_filter_names() comment for details
for src_key, dst_key in visuals.get_link_filter_names(view, datasource['infos'],
datasource.get('link_filters', {})):
try:
url_vars += visuals.get_filter(src_key).variable_settings(row)
except KeyError:
pass
try:
url_vars += visuals.get_filter(dst_key).variable_settings(row)
except KeyError:
pass
# Some special handling for the site filter which is meant as optional hint
# Always add the site filter var when some useful information is available
add_site_hint = True
for filter_key in datasource.get('multiple_site_filters', []):
if filter_key in dict(url_vars):
add_site_hint = False
# Hack for servicedesc view which is meant to show all services with the given
# description: Don't add the site filter for this view.
if view_name == "servicedesc":
add_site_hint = False
if add_site_hint and row.get('site'):
url_vars.append(('site', row['site']))
do = html.var("display_options")
if do:
url_vars.append(("display_options", do))
filename = "mobile_view.py" if html.mobile else "view.py"
return filename + "?" + html.urlencode_vars([("view_name", view_name)] + url_vars)
def link_to_view(content, row, view_name):
if display_options.disabled(display_options.I):
return content
url = url_to_view(row, view_name)
if url:
return "<a href=\"%s\">%s</a>" % (url, content)
else:
return content
def docu_link(topic, text):
return '<a href="%s" target="_blank">%s</a>' % (config.doculink_urlformat % topic, text)
# Calculates a unique id for each data row which identifies the current
# row across different page loads.
def row_id(view, row):
key = u''
for col in multisite_datasources[view['datasource']]['idkeys']:
key += u'~%s' % row[col]
return hashlib.sha256(key.encode('utf-8')).hexdigest()
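# Mark the painted text with the "stale" CSS class if the row's data is stale.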
def paint_stalified(row, text):
if is_stale(row):
return "stale", text
else:
return "", text
def substract_sorters(base, remove):
for s in remove:
if s in base:
base.remove(s)
elif (s[0], not s[1]) in base:
base.remove((s[0], not s[1]))
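# Parse the "sort" URL parameter, a comma separated list of sorter names.
# A leading "-" means descending order, an optional "~<join_index>" suffix
# refers to a join column (e.g. a service description). Each entry becomes
# a tuple (name, descending) or (name, descending, join_index).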
def parse_url_sorters(sort):
sorters = []
if not sort:
return sorters
for s in sort.split(','):
if not '~' in s:
sorters.append((s.replace('-', ''), s.startswith('-')))
else:
sorter, join_index = s.split('~', 1)
sorters.append((sorter.replace('-', ''), sorter.startswith('-'), join_index))
return sorters
def get_sorter_name_of_painter(painter_name):
painter = multisite_painters[painter_name]
if 'sorter' in painter:
return painter['sorter']
elif painter_name in multisite_sorters:
return painter_name
def get_primary_sorter_order(view, painter_name):
sorter_name = get_sorter_name_of_painter(painter_name)
this_asc_sorter = (sorter_name, False)
this_desc_sorter = (sorter_name, True)
group_sort, user_sort, view_sort = get_separated_sorters(view)
if user_sort and this_asc_sorter == user_sort[0]:
return 'asc'
elif user_sort and this_desc_sorter == user_sort[0]:
return 'desc'
else:
return ''
def get_separated_sorters(view):
group_sort = [ (get_sorter_name_of_painter(p[0]), False)
for p in view['group_painters']
if p[0] in multisite_painters
and get_sorter_name_of_painter(p[0]) is not None ]
view_sort = [ s for s in view['sorters'] if not s[0] in group_sort ]
# Get current url individual sorters. Parse the "sort" url parameter,
# then remove the group sorters. The left sorters must be the user
# individual sorters for this view.
# Then remove the user sorters from the view sorters
user_sort = parse_url_sorters(html.var('sort'))
substract_sorters(user_sort, group_sort)
substract_sorters(view_sort, user_sort)
return group_sort, user_sort, view_sort
# The Group-value of a row is used for deciding whether
# two rows are in the same group or not
def group_value(row, group_cells):
group = []
for cell in group_cells:
painter = cell.painter()
groupvalfunc = painter.get("groupby")
if groupvalfunc:
if "args" in painter:
group.append(groupvalfunc(row, *painter["args"]))
else:
group.append(groupvalfunc(row))
else:
for c in get_painter_columns(painter):
if c in row:
group.append(row[c])
return create_dict_key(group)
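# Recursively convert a value into something hashable so that it can be
# used as a dictionary key: lists/tuples become tuples, dicts become
# sorted tuples of their items.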
def create_dict_key(value):
if type(value) in (list, tuple):
return tuple(map(create_dict_key, value))
elif type(value) == dict:
return tuple([ (k, create_dict_key(v)) for (k, v) in sorted(value.items()) ])
else:
return value
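# Extract the host tags from a row. Depending on the queried columns the
# custom variables are available either as a dict or as two parallel
# lists of names and values.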
def get_host_tags(row):
if type(row.get("host_custom_variables")) == dict:
return row["host_custom_variables"].get("TAGS", "")
if type(row.get("host_custom_variable_names")) != list:
return ""
for name, val in zip(row["host_custom_variable_names"],
row["host_custom_variable_values"]):
if name == "TAGS":
return val
return ""
# Get the definition of a tag group
g_taggroups_by_id = {}
def get_tag_group(tgid):
# Build a cache
if not g_taggroups_by_id:
for entry in config.host_tag_groups():
g_taggroups_by_id[entry[0]] = (entry[1], entry[2])
return g_taggroups_by_id.get(tgid, (_("N/A"), []))
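# Look up a single custom variable from the parallel name/value lists of a row.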
def get_custom_var(row, key):
for name, val in zip(row["custom_variable_names"],
row["custom_variable_values"]):
if name == key:
return val
return ""
def is_stale(row):
return row.get('service_staleness', row.get('host_staleness', 0)) >= config.staleness_threshold
def cmp_insensitive_string(v1, v2):
c = cmp(v1.lower(), v2.lower())
# force a strict order in case of equal spelling but different
# case!
if c == 0:
return cmp(v1, v2)
else:
return c
# Sorting
def cmp_ip_address(column, r1, r2):
def split_ip(ip):
try:
return tuple(int(part) for part in ip.split('.'))
except:
return ip
v1, v2 = split_ip(r1.get(column, '')), split_ip(r2.get(column, ''))
return cmp(v1, v2)
def cmp_simple_string(column, r1, r2):
v1, v2 = r1.get(column, ''), r2.get(column, '')
return cmp_insensitive_string(v1, v2)
def cmp_num_split(column, r1, r2):
return utils.cmp_num_split(r1[column].lower(), r2[column].lower())
def cmp_string_list(column, r1, r2):
v1 = ''.join(r1.get(column, []))
v2 = ''.join(r2.get(column, []))
return cmp_insensitive_string(v1, v2)
def cmp_simple_number(column, r1, r2):
return cmp(r1.get(column), r2.get(column))
def cmp_custom_variable(r1, r2, key, cmp_func):
return cmp(get_custom_var(r1, key), get_custom_var(r2, key))
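# Sort weight that ranks the Check_MK base services before all other services.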
def cmp_service_name_equiv(r):
if r == "Check_MK":
return -6
elif r == "Check_MK Agent":
return -5
elif r == "Check_MK Discovery":
return -4
elif r == "Check_MK inventory":
return -3 # FIXME: Remove old name one day
elif r == "Check_MK HW/SW Inventory":
return -2
else:
return 0
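# Plugin helper: register a sorter that compares a single column with the
# given compare function.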
def declare_simple_sorter(name, title, column, func):
multisite_sorters[name] = {
"title" : title,
"columns" : [ column ],
"cmp" : lambda r1, r2: func(column, r1, r2)
}
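# Plugin helper: derive a sorter from an existing painter of the same name,
# comparing one of the painter's columns (optionally in reverse order).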
def declare_1to1_sorter(painter_name, func, col_num = 0, reverse = False):
multisite_sorters[painter_name] = {
"title" : multisite_painters[painter_name]['title'],
"columns" : multisite_painters[painter_name]['columns'],
}
if not reverse:
multisite_sorters[painter_name]["cmp"] = \
lambda r1, r2: func(multisite_painters[painter_name]['columns'][col_num], r1, r2)
else:
multisite_sorters[painter_name]["cmp"] = \
lambda r1, r2: func(multisite_painters[painter_name]['columns'][col_num], r2, r1)
return painter_name
# Ajax call for fetching parts of the tree
def ajax_inv_render_tree():
hostname = html.var("host")
invpath = html.var("path")
tree_id = html.var("treeid", "")
if html.var("show_internal_tree_paths"):
show_internal_tree_paths = True
else:
show_internal_tree_paths = False
if tree_id:
struct_tree = inventory.load_delta_tree(hostname, int(tree_id[1:]))
tree_renderer = DeltaNodeRenderer(hostname, tree_id, invpath)
else:
struct_tree = inventory.load_tree(hostname)
tree_renderer = AttributeRenderer(hostname, "", invpath,
show_internal_tree_paths=show_internal_tree_paths)
if struct_tree is None:
html.show_error(_("No such inventory tree."))
return # without returning here, filtering the missing tree below would crash
struct_tree = struct_tree.get_filtered_tree(inventory.get_permitted_inventory_paths())
parsed_path, attributes_key = inventory.parse_tree_path(invpath)
if parsed_path:
children = struct_tree.get_sub_children(parsed_path)
else:
children = [struct_tree.get_root_container()]
if children is None:
html.show_error(_("Invalid path in inventory tree: '%s' >> %s") % (invpath, repr(parsed_path)))
else:
for child in inventory.sort_children(children):
child.show(tree_renderer, path=invpath)
def output_csv_headers(view):
filename = '%s-%s.csv' % (view['name'], time.strftime('%Y-%m-%d_%H-%M-%S', time.localtime(time.time())))
if type(filename) == unicode:
filename = filename.encode("utf-8")
html.req.headers_out['Content-Disposition'] = 'Attachment; filename="%s"' % filename
def paint_host_list(site, hosts):
entries = []
for host in hosts:
args = [
("view_name", "hoststatus"),
("site", site),
("host", host),
]
if html.var("display_options"):
args.append(("display_options", html.var("display_options")))
url = html.makeuri_contextless(args, filename="view.py")
entries.append(html.render_a(host, href=url))
return "", HTML(", ").join(entries)
# There is common code with modules/events.py:format_plugin_output(). Please check
# whether or not that function needs to be changed too
# TODO(lm): Find a common place to unify this functionality.
def format_plugin_output(output, row = None):
ok_marker = '<b class="stmark state0">OK</b>'
warn_marker = '<b class="stmark state1">WARN</b>'
crit_marker = '<b class="stmark state2">CRIT</b>'
unknown_marker = '<b class="stmark state3">UNKN</b>'
shall_escape = config.escape_plugin_output
# In case we have a host or service row use the optional custom attribute
# ESCAPE_PLUGIN_OUTPUT (set by host / service ruleset) to override the global
# setting.
if row:
custom_vars = row.get("service_custom_variables", row.get("host_custom_variables", {}))
if "ESCAPE_PLUGIN_OUTPUT" in custom_vars:
shall_escape = custom_vars["ESCAPE_PLUGIN_OUTPUT"] == "1"
if shall_escape:
output = html.attrencode(output)
output = output.replace("(!)", warn_marker) \
.replace("(!!)", crit_marker) \
.replace("(?)", unknown_marker) \
.replace("(.)", ok_marker)
if row and "[running on" in output:
a = output.index("[running on")
e = output.index("]", a)
hosts = output[a+12:e].replace(" ","").split(",")
css, h = paint_host_list(row["site"], hosts)
output = output[:a] + "running on " + h + output[e+1:]
if shall_escape:
# (?:&lt;A HREF=&quot;), (?:&quot; target=&quot;_blank&quot;&gt;)? and endswith(" &lt;/A&gt;") is a special
# handling for the HTML code produced by check_http when the "clickable URL" option is active.
output = re.sub("(?:&lt;A HREF=&quot;)?(http[s]?://[^\"'>\t\s\n,]+)(?:&quot; target=&quot;_blank&quot;&gt;)?",
lambda p: '<a href="%s"><img class=pluginurl align=absmiddle title="%s" src="images/pluginurl.png"></a>' %
(p.group(1).replace('&quot;', ''), p.group(1).replace('&quot;', '')), output)
if output.endswith(" &lt;/A&gt;"):
output = output[:-11]
return output
#.
# .--Icon Selector-------------------------------------------------------.
# | ___ ____ _ _ |
# | |_ _|___ ___ _ __ / ___| ___| | ___ ___| |_ ___ _ __ |
# | | |/ __/ _ \| '_ \ \___ \ / _ \ |/ _ \/ __| __/ _ \| '__| |
# | | | (_| (_) | | | | ___) | __/ | __/ (__| || (_) | | |
# | |___\___\___/|_| |_| |____/ \___|_|\___|\___|\__\___/|_| |
# | |
# +----------------------------------------------------------------------+
# | AJAX API call for rendering the icon selector |
# '----------------------------------------------------------------------'
def ajax_popup_icon_selector():
varprefix = html.var('varprefix')
value = html.var('value')
allow_empty = html.var('allow_empty') == '1'
vs = IconSelector(allow_empty=allow_empty)
vs.render_popup_input(varprefix, value)
#.
# .--Action Menu---------------------------------------------------------.
# | _ _ _ __ __ |
# | / \ ___| |_(_) ___ _ __ | \/ | ___ _ __ _ _ |
# | / _ \ / __| __| |/ _ \| '_ \ | |\/| |/ _ \ '_ \| | | | |
# | / ___ \ (__| |_| | (_) | | | | | | | | __/ | | | |_| | |
# | /_/ \_\___|\__|_|\___/|_| |_| |_| |_|\___|_| |_|\__,_| |
# | |
# +----------------------------------------------------------------------+
# | Realizes the popup action menu for hosts/services in views |
# '----------------------------------------------------------------------'
def query_action_data(what, host, site, svcdesc):
# Now fetch the needed data from livestatus
columns = list(iconpainter_columns(what, toplevel=False))
try:
columns.remove('site')
except ValueError: # list.remove() raises ValueError (not KeyError) if the element is missing
pass
if site:
sites.live().set_only_sites([site])
sites.live().set_prepend_site(True)
query = 'GET %ss\n' \
'Columns: %s\n' \
'Filter: host_name = %s\n' \
% (what, ' '.join(columns), host)
if what == 'service':
query += 'Filter: service_description = %s\n' % svcdesc
row = sites.live().query_row(query)
sites.live().set_prepend_site(False)
sites.live().set_only_sites(None)
return dict(zip(['site'] + columns, row))
def ajax_popup_action_menu():
site = html.var('site')
host = html.var('host')
svcdesc = html.var('service')
what = 'service' if svcdesc else 'host'
weblib.prepare_display_options(globals())
row = query_action_data(what, host, site, svcdesc)
icons = get_icons(what, row, toplevel=False)
html.open_ul()
for icon in icons:
if len(icon) != 4:
html.open_li()
html.write(icon[1])
html.close_li()
else:
html.open_li()
icon_name, title, url_spec = icon[1:]
if url_spec:
url, target_frame = sanitize_action_url(url_spec)
url = replace_action_url_macros(url, what, row)
onclick = None
if url.startswith('onclick:'):
onclick = url[8:]
url = 'javascript:void(0);'
target = None
if target_frame and target_frame != "_self":
target = target_frame
html.open_a(href=url, target=target, onclick=onclick)
html.icon('', icon_name)
if title:
html.write(title)
else:
html.write_text(_("No title"))
if url_spec:
html.close_a()
html.close_li()
html.close_ul()
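# An action url may be given as a plain URL or as a (url, target_frame)
# tuple. Normalize to the tuple form.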
def sanitize_action_url(url_spec):
if type(url_spec) == tuple:
return url_spec
else:
return (url_spec, None)
#.
# .--Reschedule----------------------------------------------------------.
# | ____ _ _ _ |
# | | _ \ ___ ___ ___| |__ ___ __| |_ _| | ___ |
# | | |_) / _ \/ __|/ __| '_ \ / _ \/ _` | | | | |/ _ \ |
# | | _ < __/\__ \ (__| | | | __/ (_| | |_| | | __/ |
# | |_| \_\___||___/\___|_| |_|\___|\__,_|\__,_|_|\___| |
# | |
# +----------------------------------------------------------------------+
# | Ajax webservice for rescheduling host and service checks |
# '----------------------------------------------------------------------'
def ajax_reschedule():
try:
do_reschedule()
except Exception, e:
html.write("['ERROR', '%s']\n" % e)
def do_reschedule():
if not config.user.may("action.reschedule"):
raise MKGeneralException("You are not allowed to reschedule checks.")
site = html.var("site")
host = html.var("host", "")
if not host:
raise MKGeneralException("Action reschedule: missing host name")
service = html.get_unicode_input("service", "")
wait_svc = html.get_unicode_input("wait_svc", "")
if service:
cmd = "SVC"
what = "service"
spec = "%s;%s" % (host, service.encode("utf-8"))
if wait_svc:
wait_spec = u'%s;%s' % (host, wait_svc)
add_filter = "Filter: service_description = %s\n" % livestatus.lqencode(wait_svc)
else:
wait_spec = spec
add_filter = "Filter: service_description = %s\n" % livestatus.lqencode(service)
else:
cmd = "HOST"
what = "host"
spec = host
wait_spec = spec
add_filter = ""
try:
now = int(time.time())
sites.live().command("[%d] SCHEDULE_FORCED_%s_CHECK;%s;%d" % (now, cmd, livestatus.lqencode(spec), now), site)
sites.live().set_only_sites([site])
query = u"GET %ss\n" \
"WaitObject: %s\n" \
"WaitCondition: last_check >= %d\n" \
"WaitTimeout: %d\n" \
"WaitTrigger: check\n" \
"Columns: last_check state plugin_output\n" \
"Filter: host_name = %s\n%s" \
% (what, livestatus.lqencode(wait_spec), now, config.reschedule_timeout * 1000, livestatus.lqencode(host), add_filter)
row = sites.live().query_row(query)
sites.live().set_only_sites()
last_check = row[0]
if last_check < now:
html.write("['TIMEOUT', 'Check not executed within %d seconds']\n" % (config.reschedule_timeout))
else:
if service == "Check_MK":
                # Passive services triggered by Check_MK are often updated
                # a few ms later. We wait a little here to improve the chance
                # that those passive services have also been updated by the
                # time we return.
                time.sleep(0.7)
html.write("['OK', %d, %d, %r]\n" % (row[0], row[1], row[2].encode("utf-8")))
except Exception, e:
sites.live().set_only_sites()
raise MKGeneralException(_("Cannot reschedule check: %s") % e)
| huiyiqun/check_mk | web/htdocs/views.py | Python | gpl-2.0 | 129,902 |
#!/usr/bin/env python
###############################################################################
# $Id: rasterize.py 32165 2015-12-13 19:01:22Z goatbar $
#
# Project: GDAL/OGR Test Suite
# Purpose: Test RasterizeLayer() and related calls.
# Author: Frank Warmerdam <[email protected]>
#
###############################################################################
# Copyright (c) 2008, Frank Warmerdam <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
sys.path.append( '../pymod' )
import gdaltest
import ogrtest
from osgeo import gdal, ogr, osr
###############################################################################
# Simple polygon rasterization.
def rasterize_1():
# Setup working spatial reference
sr_wkt = 'LOCAL_CS["arbitrary"]'
sr = osr.SpatialReference( sr_wkt )
# Create a memory raster to rasterize into.
target_ds = gdal.GetDriverByName('MEM').Create( '', 100, 100, 3,
gdal.GDT_Byte )
target_ds.SetGeoTransform( (1000,1,0,1100,0,-1) )
target_ds.SetProjection( sr_wkt )
# Create a memory layer to rasterize from.
rast_ogr_ds = \
ogr.GetDriverByName('Memory').CreateDataSource( 'wrk' )
rast_mem_lyr = rast_ogr_ds.CreateLayer( 'poly', srs=sr )
# Add a polygon.
wkt_geom = 'POLYGON((1020 1030,1020 1045,1050 1045,1050 1030,1020 1030))'
feat = ogr.Feature( rast_mem_lyr.GetLayerDefn() )
feat.SetGeometryDirectly( ogr.Geometry(wkt = wkt_geom) )
rast_mem_lyr.CreateFeature( feat )
# Add a linestring.
wkt_geom = 'LINESTRING(1000 1000, 1100 1050)'
feat = ogr.Feature( rast_mem_lyr.GetLayerDefn() )
feat.SetGeometryDirectly( ogr.Geometry(wkt = wkt_geom) )
rast_mem_lyr.CreateFeature( feat )
# Run the algorithm.
err = gdal.RasterizeLayer( target_ds, [3,2,1], rast_mem_lyr,
burn_values = [200,220,240] )
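    # Note: burn_values pair with the band list in order, so band 3 is burned
    # with 200, band 2 with 220 and band 1 with 240 here.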
if err != 0:
print(err)
gdaltest.post_reason( 'got non-zero result code from RasterizeLayer' )
return 'fail'
# Check results.
expected = 6452
checksum = target_ds.GetRasterBand(2).Checksum()
if checksum != expected:
print(checksum)
gdaltest.post_reason( 'Did not get expected image checksum' )
gdal.GetDriverByName('GTiff').CreateCopy('tmp/rasterize_1.tif',target_ds)
return 'fail'
return 'success'
###############################################################################
# Test rasterization with ALL_TOUCHED.
def rasterize_2():
# Setup working spatial reference
sr_wkt = 'LOCAL_CS["arbitrary"]'
# Create a memory raster to rasterize into.
target_ds = gdal.GetDriverByName('MEM').Create( '', 12, 12, 3,
gdal.GDT_Byte )
target_ds.SetGeoTransform( (0,1,0,12,0,-1) )
target_ds.SetProjection( sr_wkt )
# Create a memory layer to rasterize from.
cutline_ds = ogr.Open( 'data/cutline.csv' )
# Run the algorithm.
gdal.PushErrorHandler( 'CPLQuietErrorHandler' )
err = gdal.RasterizeLayer( target_ds, [3,2,1], cutline_ds.GetLayer(0),
burn_values = [200,220,240],
options = ["ALL_TOUCHED=TRUE"] )
gdal.PopErrorHandler()
if err != 0:
print(err)
gdaltest.post_reason( 'got non-zero result code from RasterizeLayer' )
return 'fail'
# Check results.
expected = 121
checksum = target_ds.GetRasterBand(2).Checksum()
if checksum != expected:
print(checksum)
gdaltest.post_reason( 'Did not get expected image checksum' )
gdal.GetDriverByName('GTiff').CreateCopy('tmp/rasterize_2.tif',target_ds)
return 'fail'
return 'success'
###############################################################################
# Rasterization with BURN_VALUE_FROM.
def rasterize_3():
# Setup working spatial reference
sr_wkt = 'LOCAL_CS["arbitrary"]'
sr = osr.SpatialReference( sr_wkt )
# Create a memory raster to rasterize into.
target_ds = gdal.GetDriverByName('MEM').Create( '', 100, 100, 3,
gdal.GDT_Byte )
target_ds.SetGeoTransform( (1000,1,0,1100,0,-1) )
target_ds.SetProjection( sr_wkt )
# Create a memory layer to rasterize from.
rast_ogr_ds = \
ogr.GetDriverByName('Memory').CreateDataSource( 'wrk' )
rast_mem_lyr = rast_ogr_ds.CreateLayer( 'poly', srs=sr )
# Add polygons and linestrings.
wkt_geom = ['POLYGON((1020 1030 40,1020 1045 30,1050 1045 20,1050 1030 35,1020 1030 40))',
'POLYGON((1010 1046 85,1015 1055 35,1055 1060 26,1054 1048 35,1010 1046 85))',
'POLYGON((1020 1076 190,1025 1085 35,1065 1090 26,1064 1078 35,1020 1076 190),(1023 1079 5,1061 1081 35,1062 1087 26,1028 1082 35,1023 1079 85))',
'LINESTRING(1005 1000 10, 1100 1050 120)',
'LINESTRING(1000 1000 150, 1095 1050 -5, 1080 1080 200)']
for g in wkt_geom:
feat = ogr.Feature( rast_mem_lyr.GetLayerDefn() )
feat.SetGeometryDirectly( ogr.Geometry(wkt = g) )
rast_mem_lyr.CreateFeature( feat )
# Run the algorithm.
err = gdal.RasterizeLayer( target_ds, [3,2,1], rast_mem_lyr,
burn_values = [10,10,55], options = ["BURN_VALUE_FROM=Z"] )
if err != 0:
print(err)
gdaltest.post_reason( 'got non-zero result code from RasterizeLayer' )
return 'fail'
# Check results.
expected = 15006
checksum = target_ds.GetRasterBand(2).Checksum()
if checksum != expected:
print(checksum)
gdaltest.post_reason( 'Did not get expected image checksum' )
gdal.GetDriverByName('GTiff').CreateCopy('tmp/rasterize_3.tif',target_ds)
return 'fail'
return 'success'
###############################################################################
# Rasterization with ATTRIBUTE.
def rasterize_4():
# Setup working spatial reference
sr_wkt = 'LOCAL_CS["arbitrary"]'
sr = osr.SpatialReference( sr_wkt )
# Create a memory raster to rasterize into.
target_ds = gdal.GetDriverByName('MEM').Create( '', 100, 100, 3,
gdal.GDT_Byte )
target_ds.SetGeoTransform( (1000,1,0,1100,0,-1) )
target_ds.SetProjection( sr_wkt )
# Create a memory layer to rasterize from.
rast_ogr_ds = ogr.GetDriverByName('Memory').CreateDataSource( 'wrk' )
rast_mem_lyr = rast_ogr_ds.CreateLayer( 'poly', srs=sr )
# Setup Schema
ogrtest.quick_create_layer_def( rast_mem_lyr,
[ ('CELSIUS', ogr.OFTReal) ] )
# Add polygons and linestrings and a field named CELSIUS.
wkt_geom = ['POLYGON((1020 1030 40,1020 1045 30,1050 1045 20,1050 1030 35,1020 1030 40))',
'POLYGON((1010 1046 85,1015 1055 35,1055 1060 26,1054 1048 35,1010 1046 85))',
'POLYGON((1020 1076 190,1025 1085 35,1065 1090 26,1064 1078 35,1020 1076 190),(1023 1079 5,1061 1081 35,1062 1087 26,1028 1082 35,1023 1079 85))',
'LINESTRING(1005 1000 10, 1100 1050 120)',
'LINESTRING(1000 1000 150, 1095 1050 -5, 1080 1080 200)']
celsius_field_values = [50,255,60,100,180]
i = 0
for g in wkt_geom:
feat = ogr.Feature( rast_mem_lyr.GetLayerDefn() )
feat.SetGeometryDirectly( ogr.Geometry(wkt = g) )
feat.SetField( 'CELSIUS', celsius_field_values[i] )
rast_mem_lyr.CreateFeature( feat )
i = i + 1
# Run the algorithm.
err = gdal.RasterizeLayer( target_ds, [1,2,3], rast_mem_lyr,
options = ["ATTRIBUTE=CELSIUS"] )
if err != 0:
print(err)
gdaltest.post_reason( 'got non-zero result code from RasterizeLayer' )
return 'fail'
# Check results.
expected = 16265
checksum = target_ds.GetRasterBand(2).Checksum()
if checksum != expected:
print(checksum)
gdaltest.post_reason( 'Did not get expected image checksum' )
gdal.GetDriverByName('GTiff').CreateCopy('tmp/rasterize_4.tif',target_ds)
return 'fail'
return 'success'
###############################################################################
# Rasterization with MERGE_ALG=ADD.
def rasterize_5():
# Setup working spatial reference
sr_wkt = 'LOCAL_CS["arbitrary"]'
sr = osr.SpatialReference( sr_wkt )
# Create a memory raster to rasterize into.
target_ds = gdal.GetDriverByName('MEM').Create( '', 100, 100, 3,
gdal.GDT_Byte )
target_ds.SetGeoTransform( (1000,1,0,1100,0,-1) )
target_ds.SetProjection( sr_wkt )
# Create a memory layer to rasterize from.
rast_ogr_ds = \
ogr.GetDriverByName('Memory').CreateDataSource( 'wrk' )
rast_mem_lyr = rast_ogr_ds.CreateLayer( 'poly', srs=sr )
# Add polygons.
wkt_geom = 'POLYGON((1020 1030,1020 1045,1050 1045,1050 1030,1020 1030))'
feat = ogr.Feature( rast_mem_lyr.GetLayerDefn() )
feat.SetGeometryDirectly( ogr.Geometry(wkt = wkt_geom) )
rast_mem_lyr.CreateFeature( feat )
wkt_geom = 'POLYGON((1045 1050,1055 1050,1055 1020,1045 1020,1045 1050))'
feat = ogr.Feature( rast_mem_lyr.GetLayerDefn() )
feat.SetGeometryDirectly( ogr.Geometry(wkt = wkt_geom) )
rast_mem_lyr.CreateFeature( feat )
# Add linestrings.
wkt_geom = 'LINESTRING(1000 1000, 1100 1050)'
feat = ogr.Feature( rast_mem_lyr.GetLayerDefn() )
feat.SetGeometryDirectly( ogr.Geometry(wkt = wkt_geom) )
rast_mem_lyr.CreateFeature( feat )
wkt_geom = 'LINESTRING(1005 1000, 1000 1050)'
feat = ogr.Feature( rast_mem_lyr.GetLayerDefn() )
feat.SetGeometryDirectly( ogr.Geometry(wkt = wkt_geom) )
rast_mem_lyr.CreateFeature( feat )
# Run the algorithm.
err = gdal.RasterizeLayer( target_ds, [1, 2, 3], rast_mem_lyr,
burn_values = [100,110,120],
options = ["MERGE_ALG=ADD"])
if err != 0:
print(err)
gdaltest.post_reason( 'got non-zero result code from RasterizeLayer' )
return 'fail'
# Check results.
expected = 13022
checksum = target_ds.GetRasterBand(2).Checksum()
if checksum != expected:
print(checksum)
gdaltest.post_reason( 'Did not get expected image checksum' )
gdal.GetDriverByName('GTiff').CreateCopy('tmp/rasterize_5.tif',target_ds)
return 'fail'
return 'success'
gdaltest_list = [
rasterize_1,
rasterize_2,
rasterize_3,
rasterize_4,
rasterize_5,
]
if __name__ == '__main__':
gdaltest.setup_run( 'rasterize' )
gdaltest.run_tests( gdaltest_list )
gdaltest.summarize()
| nextgis-extra/tests | lib_gdal/alg/rasterize.py | Python | gpl-2.0 | 11,966 |
# These are some tools I am creating to help me build the map for the game. They are a work in progress.
# I am also learning python, so I apologize in advance if I do dumb things.
# Right now this consists of just two classes: locations and coordinate systems.
# As I have it implemented here, a coordinate system is simply a 3-D grid of
# locations. I'm sure there is more I could do, for instance linking a
# coordinate system to a location, so that you could, for example, enter a
# house (a location on a larger map) and get a coordinate system for more
# detailed movement inside the house. Right now this is very possible to do,
# but it has to be implemented outside of the classes (see the sketch at the
# end of this file).
import array
import os
import sys
class location:
farDesc = "" # For Descriptions of locations from far away
nearDesc = "" # For descriptions of locations from close-up
name = ""
    items = [] # NOTE: lists defined here at class level are shared by the class and
               # every instance that has not re-bound them, so appends made through one
               # such reference show up everywhere. __init__ below re-binds these per
               # instance (see the short demonstration after this class).
characters = []
has_nEntrance = True
has_eEntrance = True
has_sEntrance = True
has_wEntrance = True
has_uEntrance = True
has_dEntrance = True
isCoordSys = False
def __init__(self):
self.farDesc = ""
self.nearDesc = ""
self.name = ""
self.items = []
self.characters = []
self.isCoordSys = False # save time so that we don't have to check for a matching coordSys for every
# single location
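# A short demonstration of the shared class-attribute behaviour noted above
# (hypothetical snippet, not used by the game):
#
#   class Bag:
#       stuff = []            # one list object, owned by the class
#   a = Bag(); b = Bag()
#   a.stuff.append("sword")   # no per-instance re-binding happened...
#   print b.stuff             # -> ['sword']: both names reach the class list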
class coordSystem:
name = ""
xMax = 0
yMax = 0
zMax = 0
_locationArray = [] # I have it as a "private variable" (as far as they exist in Python) to avoid confusion:
# as the implementation stands, you have to access the array in the order of [z][y][x], but
# most people are used to the opposite ordering. Rather than screw around with naming and
# perhaps add confusion, I want people to only ever access the array by get and set methods.
def getLocation(self, x, y, z):
return self._locationArray[z][y][x]
def __init__(self, name, xMax, yMax, zMax):
self.name = name
self.xMax = xMax
self.yMax = yMax
self.zMax = zMax
        self._locationArray = [[[location() for x in range(xMax)] for y in range(yMax)] for z in range(zMax)]
        # Nested list comprehension: builds a zMax x yMax x xMax grid of fresh
        # location objects, indexed as [z][y][x].
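# A minimal sketch of linking a coordinate system to a location, which (as the
# header comment notes) currently has to be done outside the classes. The
# names make_inner_system/village/house are hypothetical, not part of the game:
def make_inner_system(outer_location, name, xMax, yMax, zMax):
    # Flag the outer location so movement code knows there is a more
    # detailed grid to descend into, and return that new grid.
    outer_location.isCoordSys = True
    return coordSystem(name, xMax, yMax, zMax)
# Usage:
#   village = coordSystem("village", 10, 10, 1)
#   house = village.getLocation(3, 4, 0)
#   inside = make_inner_system(house, "house interior", 5, 5, 2)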
| nickdab/pygame | maptools.py | Python | gpl-2.0 | 2,622 |
'''
This file is part of the lenstractor project.
Copyright 2012 David W. Hogg (NYU) and Phil Marshall (Oxford).
Description
-----------
Wrapper class for tractor operation, to enable high level options (ie sampling or optimization).
To-do
-----
- debug, test
'''
import numpy as np
import os,subprocess,sys
import tractor
import lenstractor
import emcee
emcee_defaults = {}
import pylab as plt
# ============================================================================
class LensTractor():
'''
PURPOSE
Optimize or sample a Lens or Nebula model using the Tractor.
COMMENTS
INPUTS
data A list of tractor.images
model A model (including a list of tractor.sources)
by A mode of operation ['optimizing','sampling']
using The required settings
survey The name of the survey being worked on
plot Make plots if desired
OUTPUTS
BUGS
HISTORY
2014-04-17 Started Marshall & Agnello (UCSB)
'''
# ----------------------------------------------------------------------------
def __init__(self,dataset,model,outstem,survey,counter=0,vb=0,noplots=True):
self.name = 'LensTractor'
self.survey = survey
self.settings = {}
# Optimization settings:
# self.settings['Nrounds'] = 5
# self.settings['Nsteps_optimizing_catalog'] = 100
# self.settings['Nsteps_optimizing_PSFs'] = 2
# Sampling settings:
self.settings['Nwalkers_per_dim'] = 8
self.settings['Nsnapshots'] = 3
self.settings['Nsteps_per_snapshot'] = 5000
self.settings['Restart'] = True
self.model = model
self.vb = vb
self.noplots = noplots
self.plot_all = True
self.outstem = outstem.split('.')[0]
self.bestpars = None
self.maxlnp = None
self.minchisq = None
self.psteps = None
self.counter = counter
# self.chug = tractor.Tractor(dataset)
self.chug = lenstractor.Trattore(dataset)
for src in self.model.srcs:
self.chug.addSource(src)
# Freeze the PSFs, wcs and photocal, leaving the sky and sources:
self.chug.thawParam('catalog')
for image in self.chug.getImages():
image.thawParams('sky')
image.freezeParams('photocal')
image.freezeParams('wcs')
image.freezeParams('psf')
# Plot initial state:
if not self.noplots:
self.plot_state('progress-%02d_initial_'%self.counter+self.model.name)
return None
# ----------------------------------------------------------------------------
# Drive the LensTractor. We have both steepest ascent and MCMC capability.
# Try a mixture!
def drive(self,by='cunning_and_guile'):
self.method = by
if self.method == 'sampling':
self.sample()
elif self.method == 'optimizing':
if self.model.flavor == 'Nebula':
# First optimize to get the Nebula model about right, at fixed PSF:
self.settings['Nrounds'] = 2
self.settings['Nsteps_optimizing_catalog'] = 100000
self.settings['Nsteps_optimizing_PSFs'] = 0
self.optimize()
# Now optimize PSF at fixed model:
self.settings['Nrounds'] = 1
self.settings['Nsteps_optimizing_catalog'] = 0
self.settings['Nsteps_optimizing_PSFs'] = 2
self.optimize()
# Refine Nebula model at best PSF:
self.settings['Nrounds'] = 2
self.settings['Nsteps_optimizing_catalog'] = 10000
self.settings['Nsteps_optimizing_PSFs'] = 0
self.optimize()
elif self.model.flavor == 'Lens':
# PSF is already optimized, during Nebula run.
# Just do the lens part:
self.settings['Nrounds'] = 2
self.settings['Nsteps_optimizing_catalog'] = 10000
self.settings['Nsteps_optimizing_PSFs'] = 0
self.optimize()
else: # Apply cunning and guile! Both optimization and sampling.
# First optimize to get the fluxes about right:
self.settings['Nrounds'] = 1
self.settings['Nsteps_optimizing_catalog'] = 100000
self.settings['Nsteps_optimizing_PSFs'] = 0
self.optimize()
# Now optimize PSF at fixed model:
self.settings['Nrounds'] = 1
self.settings['Nsteps_optimizing_catalog'] = 0
self.settings['Nsteps_optimizing_PSFs'] = 2
self.optimize()
self.settings['Nrounds'] = 1
self.settings['Nsteps_optimizing_catalog'] = 10000
self.settings['Nsteps_optimizing_PSFs'] = 0
self.optimize()
# Now draw a few samples to shuffle the positions:
self.settings['Nsnapshots'] = 1
self.settings['Nwalkers_per_dim'] = 4
self.settings['Nsteps_per_snapshot'] = 2500
self.settings['Restart'] = True
self.sample()
# Now optimize to refine model and PSF:
self.settings['Nrounds'] = 1
self.settings['Nsteps_optimizing_catalog'] = 50000
self.settings['Nsteps_optimizing_PSFs'] = 0
self.optimize()
self.settings['Nrounds'] = 1
self.settings['Nsteps_optimizing_catalog'] = 0
self.settings['Nsteps_optimizing_PSFs'] = 2
self.optimize()
self.settings['Nrounds'] = 1
self.settings['Nsteps_optimizing_catalog'] = 10000
self.settings['Nsteps_optimizing_PSFs'] = 0
self.optimize()
self.getBIC()
return None
# ----------------------------------------------------------------------------
# Fit the model to the image data by maximizing the posterior PDF
# ("optimizing") with respect to the parameters.
def optimize(self):
Nrounds = self.settings['Nrounds']
Nsteps_optimizing_catalog = self.settings['Nsteps_optimizing_catalog']
Nsteps_optimizing_PSFs = self.settings['Nsteps_optimizing_PSFs']
if self.vb:
print " "
print "Optimizing model:"
print " - no. of iterations per round to be spent on catalog: ",Nsteps_optimizing_catalog
print " - no. of iterations per round to be spent on PSFs: ",Nsteps_optimizing_PSFs
print " - no. of rounds: ",Nrounds
for round in range(Nrounds):
self.counter += 1
if self.vb: print "Fitting "+self.model.name+": seconds out, round",round
if self.vb:
print "Fitting "+self.model.name+": Catalog parameters to be optimized are:",self.chug.getParamNames()
print "Fitting "+self.model.name+": Initial values are:",self.chug.getParams()
print "Fitting "+self.model.name+": Step sizes:",self.chug.getStepSizes()
# Optimize sources:
for i in range(Nsteps_optimizing_catalog):
dlnp,X,a = self.chug.optimize(damp=3,shared_params=False)
# print "Fitting "+self.model.name+": at step",k,"parameter values are:",self.chug.getParams()
if self.vb:
print "Progress: counter,dlnp = ",self.counter,dlnp
print ""
print "Catalog parameters:",self.chug.getParamNames()
print "Catalog values:",self.chug.getParams()
if dlnp == 0:
print "Converged? Exiting..."
# Although this only leaves *this* loop...
break
if not self.noplots:
self.plot_state('progress-%02d_optimizing_'%self.counter+self.model.name)
if Nsteps_optimizing_PSFs > 0:
# Freeze the sources and calibration, and thaw the psfs:
if self.vb: print "Freezing catalog..."
self.chug.freezeParams('catalog')
for image in self.chug.getImages():
if self.vb: print "Thawing PSF..."
image.thawParams('psf')
if self.vb: print "Freezing photocal, WCS, sky (just to make sure...)"
image.freezeParams('photocal', 'wcs', 'sky')
if self.vb:
print "Fitting PSF: After thawing, zeroth PSF = ",self.chug.getImage(0).psf
print "Fitting PSF: PSF parameters to be optimized are:",self.chug.getParamNames()
print "Fitting PSF: Initial values are:",self.chug.getParams()
print "Fitting PSF: Step sizes:",self.chug.getStepSizes()
# Optimize everything that is not frozen:
for i in range(Nsteps_optimizing_PSFs):
dlnp,X,a = self.chug.optimize(shared_params=False)
if self.vb: print "Fitting PSF: at counter =",self.counter,"parameter values are:",self.chug.getParams()
self.counter += 1
if self.vb: print "Fitting PSF: After optimizing, zeroth PSF = ",self.chug.getImage(0).psf
if not self.noplots: self.plot_state(
'progress-%02d_optimizing_PSF_for_'%self.counter+self.model.name)
# Freeze the psf again, and thaw the sources:
if self.vb: print "Re-thawing catalog..."
self.chug.thawParams('catalog')
for image in self.chug.getImages():
if self.vb: print "Re-freezing PSF..."
image.freezeParams('psf')
if self.vb: print "Re-freezing photocal, WCS, sky (just to make sure...)"
image.freezeParams('photocal', 'wcs', 'sky')
# Save the best parameters!
self.maxlnp = self.chug.getLogProb()
self.bestpars = self.chug.getParams()
if self.vb: print "Optimizer: Best parameters: ",self.maxlnp,self.bestpars
self.minchisq = -2.0*self.chug.getLogLikelihood()
if self.vb: print "Optimizer: chisq at highest lnprob point: ",self.minchisq
return None
# ----------------------------------------------------------------------------
# Fit the model to the image data by drawing samples from the posterior PDF
    # that have high probability density. Note that this is not really sampling,
    # it's *sampling to optimize*...
def sample(self):
if self.vb:
print "Sampling model parameters with Emcee:"
# Magic numbers:
Nwalkers_per_dim = self.settings['Nwalkers_per_dim']
Nsnapshots = self.settings['Nsnapshots']
Nsteps_per_snapshot = self.settings['Nsteps_per_snapshot']
Restart = self.settings['Restart']
# Get the thawed parameters:
p0 = np.array(self.chug.getParams())
if self.vb: print 'Tractor parameters:'
for i,parname in enumerate(self.chug.getParamNames()):
print ' ', parname, '=', p0[i]
Ndim = len(p0)
if self.vb: print 'Number of parameter space dimensions: ',Ndim
# Make an emcee sampler that uses our tractor to compute its logprob:
Nw = Nwalkers_per_dim*Ndim # 8*ndim
sampler = emcee.EnsembleSampler(Nw, Ndim, self.chug, threads=4)
        # Start the walkers off near the initialisation point. The ball needs
        # to be ~1 pixel wide in position, and must not restrict the fluxes
        # too much - but reuse any psteps we already have!
if self.psteps is None:
if self.model.name=='Lens':
# The following gets us 0.05" in dec:
self.psteps = np.zeros_like(p0) + 0.00001
# This could be optimized, to allow more initial freedom in eg flux.
else:
# Good first guess should be some fraction of the optimization step sizes:
self.psteps = 0.01*np.array(self.chug.getStepSizes())
if self.vb: print "Initial size (in each dimension) of sample ball = ",self.psteps
#pp = emcee.EnsembleSampler.sampleBall(p0, self.psteps, Nw)
pp = emcee.utils.sample_ball(p0, self.psteps, Nw)
rstate = None
lnp = None
# Take a few steps - memory leaks fast! (~10Mb per sec)
for snapshot in range(1,Nsnapshots+1):
self.counter += 1
if self.vb: print 'Emcee: MCMC snapshot:', snapshot
t0 = tractor.Time()
pp,lnp,rstate = sampler.run_mcmc(pp, Nsteps_per_snapshot, lnprob0=lnp, rstate0=rstate)
if self.vb: print 'Emcee: Mean acceptance fraction after', sampler.iterations, 'iterations =',np.mean(sampler.acceptance_fraction)
t_mcmc = (tractor.Time() - t0)
if self.vb: print 'Emcee: Runtime:', t_mcmc
# Find the current best sample, and sample ball:
self.maxlnp = np.max(lnp)
best = np.where(lnp == self.maxlnp)
self.bestpars = np.ravel(pp[best,:])
if self.vb: print "Emcee: Best parameters: ",self.maxlnp,self.bestpars
self.minchisq = -2.0*self.chug.getLogLikelihood()
if self.vb: print "Emcee: chisq at highest lnprob point: ",self.minchisq
if not self.noplots:
self.chug.setParams(self.bestpars)
self.plot_state('progress-%02d_sampling_'%self.counter+self.model.name)
if Restart:
# Make a new sample ball centred on the current best point,
# and with width given by the standard deviations in each
# dimension:
self.chug.setParams(self.bestpars)
p0 = np.array(self.chug.getParams())
self.psteps = np.std(pp,axis=0)
# pp = emcee.EnsembleSampler.sampleBall(p0, self.psteps, Nw)
pp = emcee.utils.sample_ball(p0, self.psteps, Nw)
rstate = None
lnp = None
if self.vb: print 'Emcee: total run time', t_mcmc, 'sec'
return None
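    # For reference, the bare emcee pattern used above (the v2.x-era API that
    # this module imports) is, schematically:
    #
    #   sampler = emcee.EnsembleSampler(Nwalkers, Ndim, lnprob_fn)
    #   pp = emcee.utils.sample_ball(p0, psteps, Nwalkers)
    #   pp, lnp, rstate = sampler.run_mcmc(pp, Nsteps)
    #
    # Here the Tractor object itself plays the role of lnprob_fn: it is
    # callable and returns the log-posterior for a parameter vector.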
# ----------------------------------------------------------------------------
    def getBIC(self):
        # BIC = chisq_min + K*ln(N), for K free parameters and N data points.
        self.K = len(self.bestpars)
        self.N = self.chug.getNdata()
        self.BIC = self.minchisq + self.K*np.log(1.0*self.N)
return self.BIC
# ----------------------------------------------------------------------------
def write_catalog(self):
# Get parameter names and values:
parnames = self.chug.getParamNames()
values = np.array(np.outer(1,self.bestpars))
# Get image names:
imgnames = []
for image in self.chug.getImages():
imgnames.append(image.name)
# Set catalog file name:
outfile = self.outstem+'_'+self.model.name+'.cat'
# Open up a new file, over-writing any old one:
try: os.remove(outfile)
except OSError: pass
output = open(outfile,'w')
# Write header:
hdr = []
hdr.append('# LensTractor output parameter catalog')
# hdr.append('# ')
# hdr.append('# Date: %s' % datestring)
hdr.append('# ')
hdr.append('# Model: %s' % self.model.name)
hdr.append('# Notes:')
hdr.append('# * First source is always the galaxy, point sources follow')
for ii,imgname in enumerate(imgnames):
hdr.append('# * images.image%d = %s' % (ii,imgname))
hdr.append('# ')
# Last line contains the parameter names:
nameline = "# "
for name in parnames:
nameline += name+" "
hdr.append(nameline)
# Write to file:
for line in hdr:
output.write("%s\n" % line)
# Close file:
output.close()
np.savetxt('junk', values)
cat = subprocess.call("cat junk >> " + outfile, shell=True)
rm = subprocess.call("rm junk", shell=True)
if cat != 0 or rm != 0:
print "Error: write subprocesses failed in some way :-/"
sys.exit()
return outfile
# ----------------------------------------------------------------------------
def set_cookie(self,outstem,result):
outfile = self.outstem+'_result.cookie'
# Open up a new file, over-writing any old one:
try: os.remove(outfile)
except OSError: pass
output = open(outfile,'w')
# Write result to file:
output.write("%s\n" % result)
# Close file:
output.close()
return outfile
# ----------------------------------------------------------------------------
# Plot progress.
def plot_state(self,suffix):
'''
Make all the plots we need to assess the state of the LensTractor.
Mainly, a multi-panel figure of image, synthetic image and chi, for
each image being modelled.
self.chug is a Tractor object, containing a list of images.
'''
if self.plot_all:
# Make one giant plot with all progress panels on it, and
# save it to PNG:
# Plotting setup:
px,py = 4,len(self.chug.images)
figprops = dict(figsize=(5*px,5*py), dpi=128)
adjustprops = dict(\
left=0.05,\
bottom=0.05,\
right=0.95,\
top=0.95,\
wspace=0.1,\
hspace=0.1)
# Start the plot:
fig = plt.figure(**figprops)
fig.subplots_adjust(**adjustprops)
plt.clf()
plt.gray()
counter = 0
# Name the plot file:
pngfile = self.outstem+'_'+suffix+'.png'
else:
# Make one plot per image, and save each to PNG.
# Plotting setup:
px,py = 2,2
figprops = dict(figsize=(5*px,5*py), dpi=128)
adjustprops = dict(\
left=0.05,\
bottom=0.05,\
right=0.95,\
top=0.95,\
wspace=0.1,\
hspace=0.1)
# Loop over images, making plots:
for i,image in enumerate(self.chug.images):
if image.name is None:
imname = suffix+str(i)
else:
imname = image.name
chi = self.chug.getChiImage(i)
if self.survey == 'PS1':
ima, chia, psfa = lenstractor.PS1_imshow_settings(image,chi)
elif self.survey == 'KIDS':
ima, chia, psfa = lenstractor.KIDS_imshow_settings(image,chi)
else:
# Do the same as for PS1
scale = np.sqrt(np.median(1.0/image.invvar[image.invvar > 0.0]))
ima = dict(interpolation='nearest', origin='lower',
vmin=-100.*scale, vmax=3.*scale)
chia = dict(interpolation='nearest', origin='lower',
vmin=-5., vmax=5.)
psfa = dict(interpolation='nearest', origin='lower')
if not self.plot_all:
# Start a new plot:
fig = plt.figure(**figprops)
fig.subplots_adjust(**adjustprops)
plt.clf()
plt.gray()
counter = 0
# 1) Data Image
counter += 1
plt.subplot(py,px,counter)
plt.imshow(-image.data, **ima)
self.tidyup_plot()
plt.title('Observed image')
# Overlay image filter in lower left corner
plt.text(1,1,image.photocal.bandname+'-band')
# Figure out how to get this in bottom right hand corner instead
# 2) Predicted image
counter += 1
plt.subplot(py,px,counter)
model = self.chug.getModelImages()[i]
plt.imshow(-model, **ima)
self.tidyup_plot()
# Overlay cartoon of model:
self.model.plot(image.wcs,image.photocal.bandname)
plt.title('Predicted image')
# Overlay name of model in lower left corner
plt.text(1,1,self.model.name)
# Figure out how to get this in top left hand corner instead
# 3) Normalised residual
counter += 1
plt.subplot(py,px,counter)
plt.imshow(-chi, **chia)
self.tidyup_plot()
if self.survey == 'KIDS':
# It is not clear why the residual image is not in units of
# sigma. Perhaps this causes problems in the modelling.
# This code is not refactored into kids.py since it should
# not be necessary in the first place.
plt.title('Residuals (flexible scale)')
else:
                plt.title(r'Residuals ($\pm 5\sigma$)')
# Overlay quantified goodness of fit, in sigma from acceptable...
# TBI!
# 4) PSF image
counter += 1
plt.subplot(py,px,counter)
psfimage = image.psf.getPointSourcePatch(*model.shape).patch
plt.imshow(-psfimage, **psfa)
self.tidyup_plot()
plt.title('PSF')
if not self.plot_all:
# Save this image's plot:
pngfile = imname+'_'+suffix+'.png'
plt.savefig(pngfile)
if self.vb:
print "Progress snapshot saved to "+pngfile
if self.plot_all:
# Save the giant plot:
plt.savefig(pngfile)
if self.vb:
print "Progress snapshot saved to "+pngfile
return
# ----------------------------------------------------------------------------
# Turn off the axis ticks and labels:
def tidyup_plot(self):
ax = plt.gca()
ax.xaxis.set_ticks([])
ax.yaxis.set_ticks([])
return
# ============================================================================
if __name__ == '__main__':
pass
| davidwhogg/LensTractor | lenstractor/driver.py | Python | gpl-2.0 | 21,961 |
# -*- coding: utf-8 -*-
import json
import datetime
from django.http import HttpResponse
from django.template import RequestContext
from django.shortcuts import render_to_response
from misc.decorators import staff_required, common_ajax_response, verify_permission
from common import cache, debug, page
from message.interface import GlobalNoticeBase
#--------------------------------- Cache management
@verify_permission('')
def caches(request, template_name='admin/caches.html'):
indexes = [{'name': cache.CACHE_INDEX[k][0], 'value': k} for k in cache.CACHE_INDEX.keys()]
descs = [{'name': cache.CACHE_KEYS_DESC[k], 'value': k} for k in cache.CACHE_KEYS_DESC.keys()]
return render_to_response(template_name, locals(), context_instance=RequestContext(request))
@verify_permission('modify_cache')
@common_ajax_response
def modify_cache(request):
index = request.REQUEST.get('index')
key = request.REQUEST.get('key_name')
value = request.REQUEST.get('key_value', '')
expire = request.REQUEST.get('key_expire', 3600)
try:
c = cache.Cache(cache.CACHE_INDEX[index][1])
c.set(key, value, expire)
        return 0, u'Modified successfully!'
except Exception, e:
debug.get_debug_detail(e)
        return 1, u'System error!'
@verify_permission('remove_cache')
@common_ajax_response
def remove_cache(request):
index = request.REQUEST.get('index')
key = request.REQUEST.get('key_name')
try:
c = cache.Cache(cache.CACHE_INDEX[index][1])
c.delete(key)
        return 0, u'Deleted successfully!'
except Exception, e:
debug.get_debug_detail(e)
        return 1, u'System error!'
@verify_permission('get_cache')
@common_ajax_response
def get_cache(request):
index = request.REQUEST.get('index')
key = request.REQUEST.get('key_name')
try:
c = cache.Cache(cache.CACHE_INDEX[index][1])
return 0, [c.get(key) or '', c.ttl(key) or 0]
except Exception, e:
debug.get_debug_detail(e)
        return 1, u'System error!'
#--------------------------------- Global site notices
@verify_permission('')
def notice(request, template_name='admin/notice.html'):
return render_to_response(template_name, locals(), context_instance=RequestContext(request))
@verify_permission('query_notice')
def search_notice(request):
data = []
gnb = GlobalNoticeBase()
page_index = int(request.REQUEST.get('page_index'))
page_objs = page.Cpt(gnb.get_all_global_notice(), count=10, page=page_index).info
num = 10 * (page_index - 1)
for obj in page_objs[0]:
num += 1
data.append({
'num': num,
'notice_id': obj.id,
'content': obj.content,
'start_time': str(obj.start_time),
'end_time': str(obj.end_time),
'level': obj.level,
            'state': (obj.end_time - datetime.datetime.now()).total_seconds() > 0
})
return HttpResponse(
json.dumps({'data': data, 'page_count': page_objs[4], 'total_count': page_objs[5]}),
mimetype='application/json'
)
@verify_permission('add_notice')
@common_ajax_response
def add_notice(request):
content = request.REQUEST.get('content')
start_time = request.REQUEST.get('start_time')
end_time = request.REQUEST.get('end_time')
level = request.REQUEST.get('level')
return GlobalNoticeBase().create_global_notice(
content, start_time, end_time, request.user.id, level)
@verify_permission('query_notice')
def get_notice_by_id(request):
notice_id = request.REQUEST.get('notice_id')
data = ''
obj = GlobalNoticeBase().get_notice_by_id(notice_id)
if obj:
data = {
'num': 1,
'notice_id': obj.id,
'content': obj.content,
'start_time': str(obj.start_time)[:10],
'end_time': str(obj.end_time)[:10],
'level': obj.level,
            'state': (obj.end_time - datetime.datetime.now()).total_seconds() > 0
}
return HttpResponse(json.dumps(data), mimetype='application/json')
@verify_permission('modify_notice')
@common_ajax_response
def modify_notice(request):
notice_id = request.REQUEST.get('notice_id')
content = request.REQUEST.get('content')
start_time = request.REQUEST.get('start_time')
end_time = request.REQUEST.get('end_time')
level = request.REQUEST.get('level')
return GlobalNoticeBase().modify_global_notice(
notice_id, content=content, start_time=start_time, end_time=end_time, level=level)
@verify_permission('remove_notice')
@common_ajax_response
def remove_notice(request):
notice_id = request.REQUEST.get('notice_id')
return GlobalNoticeBase().remove_global_notice(notice_id)
| lantianlz/zx | www/admin/views_tools.py | Python | gpl-2.0 | 4,764 |
import unittest
from mock import Mock
import os
from katello.tests.core.action_test_utils import CLIOptionTestCase, CLIActionTestCase
from katello.tests.core.repo import repo_data
import katello.client.core.repo
from katello.client.core.repo import Status
from katello.client.api.utils import ApiDataError
class RequiredCLIOptionsTests(CLIOptionTestCase):
#repo is defined by either (org, product, repo_name, env name) or repo_id
action = Status()
disallowed_options = [
('--name=repo1', '--product=product1'),
('--org=ACME', '--name=repo1'),
('--org=ACME', '--product=product1'),
(),
]
allowed_options = [
('--org=ACME', '--name=repo1', '--product=product1'),
('--id=repo_id1', ),
]
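    # Each tuple above is one combination of CLI flags; the CLIOptionTestCase
    # base class presumably asserts that every disallowed combination is
    # rejected and every allowed one is accepted.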
class RepoStatusTest(CLIActionTestCase):
ORG_NAME = "org_1"
PROD_NAME = "product_1"
REPO = repo_data.REPOS[0]
ENV_NAME = "env_1"
OPTIONS_WITH_ID = {
'id': REPO['id'],
}
OPTIONS_WITH_NAME = {
'name': REPO['name'],
'product': PROD_NAME,
'org': ORG_NAME,
'environment': ENV_NAME,
}
repo = None
def setUp(self):
self.set_action(Status())
self.set_module(katello.client.core.repo)
self.mock_printer()
self.mock_options(self.OPTIONS_WITH_NAME)
self.mock(self.action.api, 'repo', self.REPO)
self.mock(self.action.api, 'last_sync_status', repo_data.SYNC_RESULT_WITHOUT_ERROR)
self.repo = self.mock(self.module, 'get_repo', self.REPO).return_value
def tearDown(self):
self.restore_mocks()
def test_finds_repo_by_id(self):
self.mock_options(self.OPTIONS_WITH_ID)
self.run_action()
self.action.api.repo.assert_called_once_with(self.REPO['id'])
def test_finds_repo_by_name(self):
self.mock_options(self.OPTIONS_WITH_NAME)
self.run_action()
self.module.get_repo.assert_called_once_with(self.ORG_NAME,
self.REPO['name'],
self.PROD_NAME, None,
None, self.ENV_NAME,
False, None, None, None)
def test_returns_with_error_when_no_repo_found(self):
self.mock_options(self.OPTIONS_WITH_NAME)
self.mock(self.module, 'get_repo').side_effect = ApiDataError()
self.run_action(os.EX_DATAERR)
def test_it_calls_last_sync_status_api(self):
self.run_action()
self.action.api.last_sync_status.assert_called_once_with(self.REPO['id'])
def test_it_does_not_set_progress_for_not_running_sync(self):
self.run_action()
self.assertRaises(KeyError, lambda: self.repo['progress'] )
def test_it_sets_progress_for_running_sync(self):
self.mock(self.action.api, 'last_sync_status', repo_data.SYNC_RUNNING_RESULT)
self.run_action()
self.assertTrue(isinstance(self.repo['progress'], str))
| Katello/katello-cli | test/katello/tests/core/repo/repo_status_test.py | Python | gpl-2.0 | 3,038 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import gtk # @UnusedImport
import gtk.glade
import gobject
from lib.pycsb19 import recibo
from lib.pycsb19 import ordenante
import sqlite3 as sqlite
import time
class Remesas:
def __init__(self):
self.Llamada=""
self.NifOrdenante=""
self.CodRemesa=""
self.fecha=""
glRemesas=gtk.glade.XML("./gld/remesas.glade")
self.ventana=glRemesas.get_widget("Remesas")
self.ventana.connect("destroy", self.Salir)
self.tvRecibos=glRemesas.get_widget("tvRecibos")
self.tvRecibos.connect("button_press_event",self.SacaCampos)
self.tNombre=glRemesas.get_widget("tNombre")
self.tOrdenante=glRemesas.get_widget("tOrdenante")
self.tImporte=glRemesas.get_widget("tImporte")
self.fecha=glRemesas.get_widget("tFecha")
self.btnOrdenante=glRemesas.get_widget("btnSeleccionar")
self.btnOrdenante.connect("clicked", self.SelOrdenante)
self.btnSalir=glRemesas.get_widget("btnSalir")
self.btnSalir.connect("clicked", self.Salir)
self.btnAnadir=glRemesas.get_widget("btnAnadir")
self.btnAnadir.connect("clicked", self.Anadir)
self.btnEliminar=glRemesas.get_widget("btnEliminar")
self.btnEliminar.connect("clicked", self.Eliminar)
self.btnModificar=glRemesas.get_widget("btnModificar")
self.btnModificar.connect("clicked", self.Modificar)
self.btnImprimir=glRemesas.get_widget("btnImprimir")
self.btnImprimir.connect("clicked", self.Imprimir)
self.btnGenerar=glRemesas.get_widget("btnGenerar")
self.btnGenerar.connect("clicked", self.Generar)
self.btnAyuda=glRemesas.get_widget("btnAyuda")
# self.btnAyuda.connect("clicked", self.Ayuda)
data=gtk.ListStore(gobject.TYPE_STRING,gobject.TYPE_STRING,gobject.TYPE_STRING)
data.clear()
self.tvRecibos.set_model(data)
column = gtk.TreeViewColumn("Cod ", gtk.CellRendererText(), text=0)
self.tvRecibos.append_column(column)
column = gtk.TreeViewColumn("Cliente ", gtk.CellRendererText(), text=1)
self.tvRecibos.append_column(column)
render=gtk.CellRendererText()
render.set_property('xalign', 1.0)
column = gtk.TreeViewColumn("Importe", render, text=2)
self.tvRecibos.append_column(column)
def AbreDb(self):
self.conexion=sqlite.connect(db="./dbCsb19/db", mode=077) # @UndefinedVariable
def CierraDb(self):
self.conexion.close()
def SacaCampos(self,widget,event):
        if event.type==5: # 5 == GDK_2BUTTON_PRESS, i.e. a double click
self.Modificar(widget)
def MiraRemesa(self,Nombre):
con=sqlite.connect(db="./dbCsb19/db", mode=077) # @UndefinedVariable
cursor = con.cursor()
        # Check whether this remittance already exists
sql="Select count(codigo) from remesas where titulo='"+self.tNombre.get_text()+"'"
cursor.execute(sql)
        if int(cursor.fetchall()[0][0])<>0:
            # The remittance exists, so load its header data
sql="Select titulo, ordenante, importe, codigo from remesas where titulo='"+Nombre+"'"
cursor.execute(sql)
for x in cursor.fetchall():
item=[]
for n in x:
item.append(n)
                # item[2] holds the total amount
self.tImporte.set_text(str(item[2]))
self.NifOrdenante=item[1]
self.CodRemesa=str(item[3])
                # Look up the ordering party's name
sql="Select nombre from ordenantes where nif='"+item[1].split(":")[0]+"' and sufijo='"+item[1].split(":")[1]+"'"
cursor.execute(sql)
self.tOrdenante.set_text(cursor.fetchall()[0][0])
            # Now load the detail lines.
            # If there are no detail rows at all, skip the join query below, otherwise it errors out.
sql="SELECT count(codigo) FROM det_remesas"
cursor.execute(sql)
if int(cursor.fetchall()[0][0])<>0:
sql="SELECT det_remesas.indice, clientes.nombre, det_remesas.importe FROM det_remesas,clientes WHERE clientes.codigo=det_remesas.cliente AND det_remesas.codigo='"+self.CodRemesa+"'"
cursor.execute(sql)
for x in cursor.fetchall():
item=[]
numero=0
for n in x:
numero=numero+1
if numero==3:
                            # All this just pads the amount to two decimal places for the treeview
cadena=str(n)
if cadena.find(".")==-1:
cadena=cadena+".00"
elif (len(cadena)-1)-cadena.find(".")<2:
cadena=cadena+"0"
item.append(cadena)
else:
item.append(n)
self.VisualizaDatos(item)
con.close()
def Eliminar(self, widget):
if self.tvRecibos.get_selection().get_selected()[1]<>None:
store=self.tvRecibos.get_model()
self.AbreDb()
c = self.conexion.cursor()
            # Delete the receipt row
sql="delete from det_remesas where codigo="+self.CodRemesa+" and indice="+store[self.tvRecibos.get_cursor()[0][0]][0]
c.execute(sql)
self.conexion.commit()
            # Recompute the remittance total
sql="Select sum(importe) from det_remesas where codigo='"+self.CodRemesa+"'"
c.execute(sql)
            res=c.fetchall()[0][0]
            # sum() yields NULL when the last receipt was just deleted
            Importe=float(res) if res is not None else 0.0
            # Put the new total into the amount box
            self.tImporte.set_text(str(Importe))
            # Update the total in the remesas table
sql="UPDATE remesas SET importe="+str(Importe)+" WHERE codigo="+self.CodRemesa
c.execute(sql)
self.conexion.commit()
self.CierraDb()
store.remove(self.tvRecibos.get_selection().get_selected()[1])
self.tvRecibos.set_model(store)
else:
            d=gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_QUESTION, gtk.BUTTONS_OK,"You must select a receipt in order to delete it")
d.connect('response', lambda dialog, response: dialog.destroy())
d.show()
def MiraOrdenante(self):
if self.Otro.CodOrdenante<>"x":
if self.Otro.CodOrdenante<>"":
self.tOrdenante.set_text(self.Otro.NomOrdenante)
self.NifOrdenante=self.Otro.CodOrdenante
self.AbreDb()
cursor = self.conexion.cursor()
                # Check whether this is the first remittance in the database
sql="Select count(codigo) from remesas"
cursor.execute(sql)
if int(cursor.fetchall()[0][0])==0:
                    # It is the first remittance: its code is 1 and we just insert it
codigo="1"
self.CodRemesa=codigo
sql="insert into remesas(codigo, titulo, ordenante, generada, importe) values("+codigo+",'" +self.tNombre.get_text()+"','"+self.NifOrdenante+"','NO',0)"
cursor.execute(sql)
self.conexion.commit()
else:
                    # Check whether the remittance already exists (this may be just a change of ordering party)
sql="Select count(codigo) from remesas where titulo='"+self.tNombre.get_text()+"'"
cursor.execute(sql)
if int(cursor.fetchall()[0][0])==0:
#"no hay ninguno dado de alta"
# "Ahora miramos a ver cual es el ultimo codigo dado de alta y le sumamos 1"
sql="Select max(codigo) from remesas"
cursor.execute(sql)
codigo=str(int(cursor.fetchall()[0][0])+1)
self.CodRemesa=codigo
                        # Now insert it
sql="insert into remesas(codigo, titulo, ordenante, generada, importe) values("+codigo+",'" +self.tNombre.get_text()+"','"+self.NifOrdenante+"','NO',0)"
cursor.execute(sql)
self.conexion.commit()
else:
# "ya esta dado de alta, Hay que hace un Update Tabla"
sql="Select codigo from remesas where titulo='"+self.tNombre.get_text()+"'"
cursor.execute(sql)
codigo=str(cursor.fetchall()[0][0])
self.CodRemesa=codigo
sql="UPDATE remesas SET titulo='" +self.tNombre.get_text()+"', ordenante='"+self.NifOrdenante+"' WHERE codigo="+codigo
cursor.execute(sql)
self.conexion.commit()
self.CierraDb()
return 0
else:
return 1
def VisualizaDatos(self,Datos):
store=gtk.ListStore(gobject.TYPE_STRING,gobject.TYPE_STRING,gobject.TYPE_STRING) # @UnusedVariable
store=self.tvRecibos.get_model()
itr=store.append()
store.set(itr,0,Datos[0],1,Datos[1],2,Datos[2])
self.tvRecibos.set_model(store)
def MiraRecibos(self):
        # The receipts handed back by the receipt window are saved here
if self.VenRecibos.Llamada<>"remesas":
if self.VenRecibos.Llamada<>"Salir":
                # Fetch the remittance code in the unlikely case we do not have it yet
if self.CodRemesa=="":
self.AbreDb()
cursor = self.conexion.cursor()
sql="Select codigo from remesas where titulo='"+self.tNombre.get_text()+"'"
cursor.execute(sql)
self.CodRemesa=str(cursor.fetchall()[0][0])
self.CierraDb()
                # First find out whether this is a modification of an existing receipt
if self.VenRecibos.Modificacion<>"":
                    # It is a modification
self.AbreDb()
cursor = self.conexion.cursor()
indice=self.VenRecibos.Modificacion
                    # Update the receipt's data
#sql="Update det_remesas SET cliente=?, importe=?, conceptos=? WHERE codigo='"+self.CodRemesa+"' AND indice='"+indice+"'"
#cursor.execute(sql, (self.VenRecibos.CodCliente,float(self.VenRecibos.Importe),self.VenRecibos.Conceptos))
sql="Update det_remesas SET cliente='"+self.VenRecibos.CodCliente+"', importe="+self.VenRecibos.Importe+", conceptos='"+self.VenRecibos.Conceptos+"' WHERE codigo="+self.CodRemesa+" AND indice="+indice
cursor.execute(sql)
self.conexion.commit()
cursor = self.conexion.cursor()
                    # Recompute the remittance total
sql="Select sum(importe) from det_remesas where codigo="+self.CodRemesa
cursor.execute(sql)
Importe=float(cursor.fetchall()[0][0])
                    # Put the new total into the amount box
                    self.tImporte.set_text(str(Importe))
                    cursor = self.conexion.cursor()
                    # Update the total in the remesas table
sql="UPDATE remesas SET importe="+str(Importe)+" WHERE codigo="+self.CodRemesa
cursor.execute(sql)
#sql="UPDATE remesas SET importe=? WHERE codigo=?"
#cursor.execute(sql,(Importe,self.CodRemesa))
self.conexion.commit()
                    # Reload the data into the treeview
store=gtk.ListStore(gobject.TYPE_STRING,gobject.TYPE_STRING,gobject.TYPE_STRING)
self.tvRecibos.set_model(store)
self.MiraRemesa(self.tNombre.get_text())
self.CierraDb()
else:
                    # It is a new receipt
self.AbreDb()
cursor = self.conexion.cursor()
                    # Check whether this is the first receipt of the remittance (i.e. the total is 0)
sql="SELECT sum(importe) FROM remesas WHERE codigo="+self.CodRemesa
cursor.execute(sql)
if float(cursor.fetchall()[0][0])==0:
                        # It is the first one, so it gets index 1
indice=1
else:
                        # Not the first one.
                        # Find the index that corresponds to this receipt.
sql="SELECT max(indice) FROM det_remesas WHERE codigo='"+self.CodRemesa+"'"
cursor.execute(sql)
indice=str(int(cursor.fetchall()[0][0])+1)
                    # Insert the receipt's data
#sql="insert into det_remesas (codigo,indice, cliente, importe, conceptos) values (?,?,?,?,?)"
#cursor.execute(sql, (str(self.CodRemesa),indice , self.VenRecibos.CodCliente, str(self.VenRecibos.Importe), self.VenRecibos.Conceptos))
sql="insert into det_remesas (codigo, indice, cliente, importe, conceptos) values ("+str(self.CodRemesa)+","+str(indice)+",'"+self.VenRecibos.CodCliente+"',"+str(self.VenRecibos.Importe)+",'"+self.VenRecibos.Conceptos+"')"
cursor.execute(sql)
self.conexion.commit()
sql="SELECT sum(importe) FROM det_remesas WHERE codigo='"+self.CodRemesa+"'"
cursor.execute(sql)
Importe = float(cursor.fetchall()[0][0])
self.tImporte.set_text(str(Importe))
#Actualiza el importe en la base de datos de remesa
sql="UPDATE remesas SET importe="+str(Importe)+" WHERE codigo='"+self.CodRemesa+"'"
cursor.execute(sql)
self.conexion.commit()
                    # Load the receipt data into the TreeView
store=gtk.ListStore(gobject.TYPE_STRING,gobject.TYPE_STRING,gobject.TYPE_STRING)
self.tvRecibos.set_model(store)
self.MiraRemesa(self.tNombre.get_text())
self.CierraDb()
else:
pass
# print "salio por aqui"
return 0
else:
#print "no Es distinto"
return 1
def SelOrdenante(self,widget):
        # Open the ordering-party selection window
self.Otro=ordenante.Ordenante()
self.Otro.Llamada="remesas"
#self.Otro.ventana.set_modal(True)
self.Otro.CodOrdenante="x"
self.timeout= gtk.timeout_add(250, self.MiraOrdenante)
def Anadir(self,widget):
if self.tNombre.get_text()=="":
self.Dialogo("No se puede anadir un recibo si no se le ha dado un nombre a la remesa",2)
elif self.tOrdenante.get_text()=="":
self.Dialogo("No se puede anadir un recibo si no se ha selecionado el ordenante",2)
else:
self.VenRecibos=recibo.Recibo()
self.VenRecibos.Llamada="remesas"
self.VenRecibos.Modificacion=""
self.VenRecibos.Remesa=self.tNombre.get_text()
#self.VenRecibos.ventana.set_modal(True)
self.timeout= gtk.timeout_add(250, self.MiraRecibos)
def Modificar(self,widget):
store=gtk.ListStore(gobject.TYPE_STRING,gobject.TYPE_STRING) # @UnusedVariable
store=self.tvRecibos.get_model()
if self.tvRecibos.get_cursor()[0]<>None:
self.VenRecibos=recibo.Recibo()
self.VenRecibos.Llamada="remesas"
            # Store the receipt's index within the remittance
self.VenRecibos.Modificacion=store[self.tvRecibos.get_cursor()[0][0]][0]
self.AbreDb()
cClientes = self.conexion.cursor()
sql="SELECT codigo, nombre, banco, oficina, dc, cuenta FROM clientes WHERE nombre='"+store[self.tvRecibos.get_cursor()[0][0]][1]+"'"
cClientes.execute(sql)
pp=[]
pp=cClientes.fetchone()
CodCliente=pp[0]
self.VenRecibos.tCodCliente.set_text(CodCliente)
self.VenRecibos.tNomCliente.set_text(pp[1])
self.VenRecibos.tBanco.set_text(pp[2])
self.VenRecibos.tOficina.set_text(pp[3])
self.VenRecibos.tDc.set_text(pp[4])
self.VenRecibos.tCuenta.set_text(pp[5])
self.VenRecibos.tImporte.set_text(store[self.tvRecibos.get_cursor()[0][0]][2])
cDetalle = self.conexion.cursor()
sql="SELECT codigo, cliente, importe, conceptos FROM det_remesas WHERE codigo="+self.CodRemesa+" AND indice="+store[self.tvRecibos.get_cursor()[0][0]][0]
cDetalle.execute(sql)
n=cDetalle.fetchone()[3].split("�")
self.VenRecibos.tConcepto1.set_text(n[0])
self.VenRecibos.tConcepto2.set_text(n[1])
self.VenRecibos.tConcepto3.set_text(n[2])
self.VenRecibos.tConcepto4.set_text(n[3])
self.VenRecibos.tConcepto5.set_text(n[4])
self.VenRecibos.tConcepto6.set_text(n[5])
self.VenRecibos.tConcepto7.set_text(n[6])
self.VenRecibos.tConcepto8.set_text(n[7])
self.VenRecibos.tConcepto9.set_text(n[8])
self.VenRecibos.tConcepto10.set_text(n[9])
self.VenRecibos.tConcepto11.set_text(n[10])
self.VenRecibos.tConcepto12.set_text(n[11])
self.VenRecibos.tConcepto13.set_text(n[12])
self.VenRecibos.tConcepto14.set_text(n[13])
self.VenRecibos.tConcepto15.set_text(n[14])
self.VenRecibos.tConcepto16.set_text(n[15])
self.VenRecibos.Remesa=self.tNombre.get_text()
#self.VenRecibos.ventana.set_modal(True)
self.timeout= gtk.timeout_add(250, self.MiraRecibos)
self.CierraDb()
else:
            d=gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_QUESTION, gtk.BUTTONS_OK,"You must select a receipt in order to open it")
d.connect('response', lambda dialog, response: dialog.destroy())
d.show()
def Imprimir(self,widget):
pass
def Espacios(self,Numero):
d=""
for n in range(0,Numero): # @UnusedVariable
d=d+" "
return d
def Ceros(self,Numero):
d=""
for n in range(0,Numero): # @UnusedVariable
d=d+"0"
return d
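    # Note: Espacios(n) and Ceros(n) are equivalent to the built-in idioms
    # ' '*n and '0'*n (or str.ljust / str.zfill for padding); kept as-is to
    # match the original style.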
def Generar(self,widget):
        self.Fiche = gtk.FileSelection("Select File")
self.Fiche.connect("destroy", self.CerrarAbrirFichero)
self.Fiche.ok_button.connect("clicked", self.FicheroSeleccionado)
self.Fiche.cancel_button.connect("clicked", self.CerrarAbrirFichero)
self.Fiche.set_filename("")
self.Fiche.set_modal(True)
self.Fiche.show()
def CerrarAbrirFichero(self,widget):
self.Fiche.destroy()
def FicheroSeleccionado(self, widget):
        # The user accepted a file; only save if the selection names a file
        # (i.e. the path does not end in a directory separator):
        if self.Fiche.get_filename()[len(self.Fiche.get_filename())-1:len(self.Fiche.get_filename())]<>'\\':
self.GrabaCSB(self.Fiche.get_filename())
else:
            d=gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_QUESTION, gtk.BUTTONS_OK,"You must enter a file name in order to save it")
d.connect('response', lambda dialog, response: dialog.destroy())
d.show()
self.Fiche.destroy()
def GrabaCSB(self,Fichero):
        # The CSB19-format file is created here
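        # Record codes written below (fixed-width CSB19 / AEB "norma 19" records):
        #   5180  presenter header
        #   5380  ordering-party (ordenante) header
        #   568x  one direct-debit receipt (5680 main record; 5681-5685 carry
        #         extra concept lines; the 5686 client-data record is not
        #         implemented yet)
        #   5880  ordering-party totals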
self.AbreDb()
f=open(Fichero,"w")
#Cabecera de presentador
cur=self.conexion.cursor()
sql="SELECT ordenante FROM remesas WHERE codigo="+self.CodRemesa
cur.execute(sql)
ordenante=cur.fetchall()[0][0]
rem=self.conexion.cursor()
sql="SELECT nif, sufijo, nombre, banco, oficina, dc, cuenta FROM ordenantes WHERE nif='"+ordenante.split(":")[0]+"' and sufijo='"+ordenante.split(":")[1]+"'"
rem.execute(sql)
Linea=rem.fetchall()[0]
nif=Linea[0]
sufijo=Linea[1]
nombre=Linea[2]
banco=Linea[3]
oficina=Linea[4]
dc=Linea[5]
cuenta=Linea[6]
        # Build the confection date as ddmmyy
dia=str(time.localtime()[2])
if len(dia)<2:
dia="0"+dia
mes=str(time.localtime()[1])
if len(mes)<2:
mes="0"+mes
ano=str(time.localtime()[0])[2:4]
FechaConfeccion=dia+mes+ano
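        # e.g. 13 December 2015 gives FechaConfeccion == "131215" (ddmmyy)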
#FechaCargo=FechaConfeccion
FechaCargo=self.fecha.get_text()
        # FechaCargo comes from the date entry above rather than today's date
if len(nombre)<40:
nombre=nombre+self.Espacios(40-len(nombre))
Cadena="5180"+nif+sufijo+FechaConfeccion+self.Espacios(6)+nombre+self.Espacios(20)+banco+oficina+self.Espacios(12)+self.Espacios(40)+"***PyCsb19****"+"\r\n"
f.write(Cadena)
        # Ordering-party (ordenante) header record
Cadena="5380"+nif+sufijo+FechaConfeccion+FechaCargo+nombre+banco+oficina+dc+cuenta+self.Espacios(8)+"01"+self.Espacios(10)+self.Espacios(40)+"***PyCsb19****"+"\r\n"
f.write(Cadena)
        # Individual receipt records
rec=self.conexion.cursor()
sql="SELECT indice, cliente, importe, conceptos FROM det_remesas WHERE codigo="+self.CodRemesa
rec.execute(sql)
nNumDomiciliaciones=0
nSuma=0.0
        nNumRegistrosOrdenante=2 # starts at 2: the two header records above are already written
for remesa in rec.fetchall():
            # The index doubles as the return (devolucion) code
Indice=str(remesa[0])
nNumDomiciliaciones=nNumDomiciliaciones+1
if len(Indice)<6:
Indice=Indice+self.Espacios(6-len(Indice))
            elif len(Indice)>6:
                Indice=Indice[0:6] # truncate to exactly six characters
Cliente=remesa[1]
nSuma=nSuma+remesa[2]
Importe=str(remesa[2])
if Importe.find(".")==-1:
Importe=Importe+self.Ceros(2)
else:
if len(Importe.split(".")[1])<2:
Importe=Importe.split(".")[0]+Importe.split(".")[1]+self.Ceros(2-len(Importe.split(".")[1]))
elif len(Importe.split(".")[1])>2:
Importe=Importe.split(".")[0]+Importe.split(".")[1][0:1]
else:
Importe=Importe.split(".")[0]+Importe.split(".")[1]
if len(Importe)<10:
Importe=self.Ceros(10-len(Importe))+Importe
Conceptos=[]
for n in remesa[3].split("�"):
if len(n)==0:
dato=""
elif len(n)<40:
dato=n+self.Espacios(40-len(n))
elif len(n)>40:
dato=n[0:40]
else:
dato=n
Conceptos.append(dato)
            # Fetch the client's data
cli=self.conexion.cursor()
sql="SELECT codigo, nif, nombre, direccion, ciudad, cp, banco, oficina, dc, cuenta FROM clientes WHERE codigo='"+Cliente+"'"
cli.execute(sql)
c=cli.fetchall()[0]
if len(c[0])<12:
CodCliente=c[0]+self.Espacios(12-len(c[0]))
else:
CodCliente=c[0]
            # The NIF doubles as the internal reference code
NifCliente=c[1]
if len(NifCliente)<10:
NifCliente=NifCliente+self.Espacios(10-len(NifCliente))
if len(c[2])<40:
NombreCliente=c[2]+self.Espacios(40-len(c[2]))
else:
NombreCliente=c[2]
DireCliente=c[3] # @UnusedVariable
CiudadCliente=c[4] # @UnusedVariable
CpCliente=c[5] # @UnusedVariable
BancoCliente=c[6]
OficinaCliente=c[7]
DcCliente=c[8]
CuentaCliente=c[9]
if len(Conceptos[0])<40:
Conceptos[0]=Conceptos[0]+self.Espacios(40-len(Conceptos[0]))
if len(Conceptos[0])>40:
Conceptos[0]=Conceptos[0][0:40]
Cadena="5680"+nif+sufijo+CodCliente+NombreCliente+BancoCliente+OficinaCliente+DcCliente+CuentaCliente+Importe+Indice+NifCliente+Conceptos[0]+self.Espacios(8)+"\r\n"
f.write(Cadena)
nNumRegistrosOrdenante=nNumRegistrosOrdenante+1
            # Now handle the remaining concept lines.
if len(Conceptos[1])<>0 or len(Conceptos[2])<>0 or len(Conceptos[3])<>0:
if len(Conceptos[1])<>40:
Conceptos[1]=Conceptos[1]+self.Espacios(40-len(Conceptos[1]))
if len(Conceptos[2])<>40:
Conceptos[2]=Conceptos[2]+self.Espacios(40-len(Conceptos[2]))
if len(Conceptos[3])<>40:
Conceptos[3]=Conceptos[3]+self.Espacios(40-len(Conceptos[3]))
Cadena="5681"+nif+sufijo+CodCliente+Conceptos[1]+Conceptos[2]+Conceptos[3]+self.Espacios(14)+"\r\n"
f.write(Cadena)
nNumRegistrosOrdenante=nNumRegistrosOrdenante+1
if len(Conceptos[4])<>0 or len(Conceptos[5])<>0 or len(Conceptos[6])<>0:
if len(Conceptos[4])<>40:
Conceptos[4]=Conceptos[4]+self.Espacios(40-len(Conceptos[4]))
if len(Conceptos[5])<>40:
Conceptos[5]=Conceptos[5]+self.Espacios(40-len(Conceptos[5]))
if len(Conceptos[6])<>40:
Conceptos[6]=Conceptos[6]+self.Espacios(40-len(Conceptos[6]))
Cadena="5682"+nif+sufijo+CodCliente+Conceptos[4]+Conceptos[5]+Conceptos[6]+self.Espacios(14)+"\r\n"
f.write(Cadena)
nNumRegistrosOrdenante=nNumRegistrosOrdenante+1
if len(Conceptos[7])<>0 or len(Conceptos[8])<>0 or len(Conceptos[9])<>0:
if len(Conceptos[7])<>40:
Conceptos[7]=Conceptos[7]+self.Espacios(40-len(Conceptos[7]))
if len(Conceptos[8])<>40:
Conceptos[8]=Conceptos[8]+self.Espacios(40-len(Conceptos[8]))
if len(Conceptos[9])<>40:
Conceptos[9]=Conceptos[9]+self.Espacios(40-len(Conceptos[9]))
Cadena="5683"+nif+sufijo+CodCliente+Conceptos[7]+Conceptos[8]+Conceptos[9]+self.Espacios(14)+"\r\n"
f.write(Cadena)
nNumRegistrosOrdenante=nNumRegistrosOrdenante+1
if len(Conceptos[10])<>0 or len(Conceptos[11])<>0 or len(Conceptos[12])<>0:
if len(Conceptos[10])<>40:
Conceptos[10]=Conceptos[10]+self.Espacios(40-len(Conceptos[10]))
if len(Conceptos[11])<>40:
Conceptos[11]=Conceptos[11]+self.Espacios(40-len(Conceptos[11]))
if len(Conceptos[12])<>40:
Conceptos[12]=Conceptos[12]+self.Espacios(40-len(Conceptos[12]))
Cadena="5684"+nif+sufijo+CodCliente+Conceptos[10]+Conceptos[11]+Conceptos[12]+self.Espacios(14)+"\r\n"
f.write(Cadena)
nNumRegistrosOrdenante=nNumRegistrosOrdenante+1
if len(Conceptos[13])<>0 or len(Conceptos[14])<>0 or len(Conceptos[15])<>0:
if len(Conceptos[13])<>40:
Conceptos[13]=Conceptos[13]+self.Espacios(40-len(Conceptos[13]))
if len(Conceptos[14])<>40:
Conceptos[14]=Conceptos[14]+self.Espacios(40-len(Conceptos[14]))
if len(Conceptos[15])<>40:
Conceptos[15]=Conceptos[15]+self.Espacios(40-len(Conceptos[15]))
Cadena="5685"+nif+sufijo+CodCliente+Conceptos[13]+Conceptos[14]+Conceptos[15]+self.Espacios(14)+"\r\n"
f.write(Cadena)
nNumRegistrosOrdenante=nNumRegistrosOrdenante+1
#The customer-data record (5686) is not implemented for now
#Cadena="5686"+nif+sufijo+CodCliente
#Ordering party totals record
Suma=str(nSuma)
if Suma.find(".")==-1:
Suma=Suma+self.Ceros(2)
else:
if len(Suma.split(".")[1])<2:
Suma=Suma.split(".")[0]+Suma.split(".")[1]+self.Ceros(2-len(Suma.split(".")[1]))
elif len(Suma.split(".")[1])>2:
Suma=Suma.split(".")[0]+Suma.split(".")[1][0:2]
else:
Suma=Suma.split(".")[0]+Suma.split(".")[1]
if len(Suma)<10:
Suma=self.Ceros(10-len(Suma))+Suma
NumDomiciliaciones=str(nNumDomiciliaciones)
if len(NumDomiciliaciones)<10:
NumDomiciliaciones=self.Ceros(10-len(NumDomiciliaciones))+NumDomiciliaciones
NumRegistrosOrdenante=str(nNumRegistrosOrdenante)
if len(NumRegistrosOrdenante)<10:
NumRegistrosOrdenante=self.Ceros(10-len(NumRegistrosOrdenante))+NumRegistrosOrdenante
Cadena="5880"+nif+sufijo+self.Espacios(12)+self.Espacios(40)+self.Espacios(20)+Suma+self.Espacios(6)+NumDomiciliaciones+NumRegistrosOrdenante+self.Espacios(20)+"*****PyCsb19******"+"\r\n"
f.write(Cadena)
nNumRegistrosOrdenante=nNumRegistrosOrdenante+1
NumRegistrosOrdenante=str(nNumRegistrosOrdenante+1)
if len(NumRegistrosOrdenante)<10:
NumRegistrosOrdenante=self.Ceros(10-len(NumRegistrosOrdenante))+NumRegistrosOrdenante
#Grand total record
Cadena="5980"+nif+sufijo+self.Espacios(12)+self.Espacios(40)+"0001"+self.Espacios(16)+Suma+self.Espacios(6)+NumDomiciliaciones+NumRegistrosOrdenante+self.Espacios(20)+"*****PyCsb19******"+"\r\n"
f.write(Cadena)
f.close()
self.CierraDb()
self.Dialogo("El fichero se ha generado correctamente",1)
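def ImporteACentimos(self, valor):
    """Hypothetical helper, shown only as a sketch: it condenses the
    ad-hoc Importe/Suma string handling above into a single step, turning
    an amount into the 10-digit, zero-padded string of cents that the
    CSB19 records expect. Note that it rounds instead of truncating
    extra decimals, which is assumed to be the intended behaviour."""
    return str(int(round(float(valor)*100))).rjust(10, "0")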
def Dialogo(self, msg, Tipo):
if Tipo==1:
dialog = gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_QUESTION, gtk.BUTTONS_OK,msg)
elif Tipo==2:
dialog = gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_QUESTION, gtk.BUTTONS_CLOSE,msg)
elif Tipo==3:
dialog = gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_QUESTION, gtk.BUTTONS_YES_NO,msg)
elif Tipo==4:
dialog = gtk.MessageDialog(None, gtk.DIALOG_MODAL, gtk.MESSAGE_QUESTION, gtk.BUTTONS_OK_CANCEL,msg)
dialog.connect('response', lambda dialog, response: dialog.destroy())
dialog.show()
return dialog
def Salir(self,*args):
#True
if self.Llamada!="":
self.ventana.hide()
self.Cliente=""
self.Importe=""
self.Llamada=""
return True
else:
gtk.main_quit()
def Main(self):
self.Llamada=""
gtk.main()
if __name__ == "__main__":
gtk.rc_parse("gtkrc.txt")
ven = Remesas()
ven.Main()
| pacoqueen/ginn | ginn/lib/pycsb19/remesas.py | Python | gpl-2.0 | 32,357 |
from bs4 import BeautifulSoup
import urllib.request
html = urllib.request.urlopen('http://www.nlotto.co.kr/common.do?method=main')
soup = BeautifulSoup(html, 'html.parser')  # specify a parser explicitly so bs4 does not guess and warn
hoi = soup.find("span", id="lottoDrwNo")
numbers=[]
for n in range(1,7):
strV ="drwtNo" + str(n)
first = soup.find('img', id=strV)['alt']
numbers.append(first)
bonus = soup.find('img', id="bnusNo")['alt']
print('Lotto numbers')
print(hoi.string + " results")
print(" ".join(numbers))
print('Bonus_number: '+bonus)
| YongJang/PythonTelegram | examples/referenced/a.py | Python | gpl-2.0 | 497 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Random mutator based on C. Miller 'algorithm' for Nightmare Fuzzing Project.
Created on Sun May 12 10:57:06 2013
@author: joxean
"""
import sys
import math
import random
#-----------------------------------------------------------------------
class CCMillerMutator(object):
def __init__(self, buf, skip=5):
self.buf = buf
self.skip = skip
def mutate(self):
buf = self.buf
fuzz_factor = len(buf)/500.
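# fuzz_factor scales with the input so that len(buf)/fuzz_factor stays near 500,
# i.e. at most ~500 mutation sites are chosen per pass regardless of file size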
if fuzz_factor < 1:
fuzz_factor = 1
numwrites = random.randrange(math.ceil((float(len(buf)) / fuzz_factor)))+1
#print "Total of %d" % numwrites
diff = []
for j in range(numwrites):
rbyte = random.randrange(256)
rn = random.randrange(len(buf))
rtotal = random.randint(0, 16)
c = "%c" % rbyte
buf = buf[:rn] + c*rtotal + buf[rn+rtotal:]
diff.append("%d, %d" % (rn, rtotal))
return buf, diff
#-----------------------------------------------------------------------
def main(template, output):
mut = CCMillerMutator(open(template, "rb").read())
buf, diff = mut.mutate()
f = open(output, "wb")
f.write(buf)
f.close()
diff.sort()
f = open(output + ".diff", "wb")
f.write("# Original file created by 'CMiller Mutator Rep' was %s\n" % template)
f.write("\n".join(diff))
f.close()
#-----------------------------------------------------------------------
def usage():
print "Usage:", sys.argv[0], "<template> <output filename>"
if __name__ == "__main__":
if len(sys.argv) != 3:
usage()
else:
main(sys.argv[1], sys.argv[2])
| joxeankoret/nightmare | mutators/cmiller_mutator_rep.py | Python | gpl-2.0 | 1,588 |
def float_example():
a = -10
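# float(a) delegates to a.__float__(), so both calls below print the same value (-10.0)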
print a.__float__()
print float(a)
if __name__ == '__main__':
    float_example()
| ramesharpu/python | basic-coding/built-in-functions/float.py | Python | gpl-2.0 | 124 |
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2001 David R. Hampton
# Copyright (C) 2001-2006 Donald N. Allingham
# Copyright (C) 2007 Brian G. Matherly
# Copyright (C) 2010 Jakim Friant
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from gramps.gen.utils.grampslocale import GrampsLocale
from gramps.gen.display.name import NameDisplay
from gramps.gen.config import config
#-------------------------------------------------------------------------
#
# Report
#
#-------------------------------------------------------------------------
class Report:
"""
The Report base class. This is a base class for generating
customized reports. It cannot be used as is, but it can be easily
sub-classed to create a functional report generator.
"""
def __init__(self, database, options_class, user):
self.database = database
self.options_class = options_class
self._user = user
self.doc = options_class.get_document()
creator = database.get_researcher().get_name()
self.doc.set_creator(creator)
output = options_class.get_output()
if output:
self.standalone = True
self.doc.open(options_class.get_output())
else:
self.standalone = False
def begin_report(self):
pass
def set_locale(self, language):
"""
Set the translator to one selected with
stdoptions.add_localization_option().
"""
if language == GrampsLocale.DEFAULT_TRANSLATION_STR:
language = None
locale = GrampsLocale(lang=language)
self._ = locale.translation.sgettext
self._get_date = locale.get_date
self._get_type = locale.get_type
self._ldd = locale.date_displayer
self._name_display = NameDisplay(locale) # a legacy/historical name
self._name_display.set_name_format(self.database.name_formats)
fmt_default = config.get('preferences.name-format')
self._name_display.set_default_format(fmt_default)
return locale
def write_report(self):
pass
def end_report(self):
if self.standalone:
self.doc.close()
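# A minimal subclass sketch (hypothetical, for illustration only; the
# 'RPT-Body' paragraph style is an assumed name -- real reports register
# their styles through their options class):
#
#     class HelloReport(Report):
#         def write_report(self):
#             self.doc.start_paragraph('RPT-Body')
#             self.doc.write_text('Hello from a custom report')
#             self.doc.end_paragraph()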
| beernarrd/gramps | gramps/gen/plug/report/_reportbase.py | Python | gpl-2.0 | 3,048 |
from distutils.core import setup
setup(
name='tinyfsm',
version='0.1',
packages=[''],
url='https://github.com/tonyfunc/tinyfsm',
license='GNU Library',
author='tony',
author_email='[email protected]',
description='A tiny implementation of Finite State Machine in Python.'
)
| tonyfunc/tinyfsm | setup.py | Python | gpl-2.0 | 305 |
# -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
#
# MDAnalysis --- https://www.mdanalysis.org
# Copyright (c) 2006-2017 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
# doi: 10.25080/majora-629e541a-00e
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
"""Water dynamics analysis --- :mod:`MDAnalysis.analysis.waterdynamics`
=======================================================================
:Author: Alejandro Bernardin
:Year: 2014-2015
:Copyright: GNU Public License v3
.. versionadded:: 0.11.0
This module provides functions to analyze water dynamics trajectories and water
interactions with other molecules. The functions in this module are: water
orientational relaxation (WOR) [Yeh1999]_, hydrogen bond lifetimes (HBL)
[Rapaport1983]_, angular distribution (AD) [Grigera1995]_, mean square
displacement (MSD) [Brodka1994]_ and survival probability (SP) [Liu2004]_.
For more information about this type of analysis please refer to
[Araya-Secchi2014]_ (water in a protein cavity) and [Milischuk2011]_ (water in
a nanopore).
.. rubric:: References
.. [Rapaport1983] D.C. Rapaport (1983): Hydrogen bonds in water, Molecular
Physics: An International Journal at the Interface Between
Chemistry and Physics, 50:5, 1151-1162.
.. [Yeh1999] Yu-ling Yeh and Chung-Yuan Mou (1999). Orientational Relaxation
Dynamics of Liquid Water Studied by Molecular Dynamics Simulation,
J. Phys. Chem. B 1999, 103, 3699-3705.
.. [Grigera1995] Raul Grigera, Susana G. Kalko and Jorge Fischbarg
(1995). Wall-Water Interface. A Molecular Dynamics Study,
Langmuir 1996,12,154-158
.. [Liu2004] Pu Liu, Edward Harder, and B. J. Berne (2004).On the Calculation
of Diffusion Coefficients in Confined Fluids and Interfaces with
an Application to the Liquid-Vapor Interface of Water,
J. Phys. Chem. B 2004, 108, 6595-6602.
.. [Brodka1994] Aleksander Brodka (1994). Diffusion in restricted volume,
Molecular Physics, 1994, Vol. 82, No. 5, 1075-1078.
.. [Araya-Secchi2014] Araya-Secchi, R., Tomas Perez-Acle, Seung-gu Kang, Tien
Huynh, Alejandro Bernardin, Yerko Escalona, Jose-Antonio
Garate, Agustin D. Martinez, Isaac E. Garcia, Juan
C. Saez, Ruhong Zhou (2014). Characterization of a novel
water pocket inside the human Cx26 hemichannel
structure. Biophysical journal, 107(3), 599-612.
.. [Milischuk2011] Anatoli A. Milischuk and Branka M. Ladanyi. Structure and
dynamics of water confined in silica
nanopores. J. Chem. Phys. 135, 174709 (2011); doi:
10.1063/1.3657408
Example use of the analysis classes
-----------------------------------
HydrogenBondLifetimes
~~~~~~~~~~~~~~~~~~~~~
To analyse hydrogen bond lifetime, use
:meth:`MDAnalysis.analysis.hydrogenbonds.hbond_analysis.HydrogenBondAnalysis.lifetime`.
See Also
--------
:mod:`MDAnalysis.analysis.hydrogenbonds.hbond_analysis`
WaterOrientationalRelaxation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Analyzing water orientational relaxation (WOR)
:class:`WaterOrientationalRelaxation`. In this case we are analyzing "how fast"
water molecules are rotating/changing direction. If WOR is very stable we can
assume that water molecules are rotating/changing direction very slowly; on the
other hand, if WOR decays very fast, we can assume that water molecules are
rotating/changing direction very quickly::
import MDAnalysis
from MDAnalysis.analysis.waterdynamics import WaterOrientationalRelaxation as WOR
u = MDAnalysis.Universe(pdb, trajectory)
select = "byres name OH2 and sphzone 6.0 protein and resid 42"
WOR_analysis = WOR(u, select, 0, 1000, 20)
WOR_analysis.run()
time = 0
#now we print the data ready to plot. The first two columns are WOR_OH vs t plot,
#the second two columns are WOR_HH vs t graph and the third two columns are WOR_dip vs t graph
for WOR_OH, WOR_HH, WOR_dip in WOR_analysis.timeseries:
print("{time} {WOR_OH} {time} {WOR_HH} {time} {WOR_dip}".format(time=time, WOR_OH=WOR_OH, WOR_HH=WOR_HH,WOR_dip=WOR_dip))
time += 1
#now, if we want, we can plot our data
plt.figure(1,figsize=(18, 6))
#WOR OH
plt.subplot(131)
plt.xlabel('time')
plt.ylabel('WOR')
plt.title('WOR OH')
plt.plot(range(0,time),[column[0] for column in WOR_analysis.timeseries])
#WOR HH
plt.subplot(132)
plt.xlabel('time')
plt.ylabel('WOR')
plt.title('WOR HH')
plt.plot(range(0,time),[column[1] for column in WOR_analysis.timeseries])
#WOR dip
plt.subplot(133)
plt.xlabel('time')
plt.ylabel('WOR')
plt.title('WOR dip')
plt.plot(range(0,time),[column[2] for column in WOR_analysis.timeseries])
plt.show()
where t0 = 0, tf = 1000 and dtmax = 20. In this way we create 20 time
windows (20 values on the x axis); the first window is created with a 1000
timestep average (1000/1), the second window is created with a 500 timestep
average (1000/2), the third window is created with a 333 timestep average (1000/3)
and so on.
AngularDistribution
~~~~~~~~~~~~~~~~~~~
Analyzing angular distribution (AD) :class:`AngularDistribution` for OH vector,
HH vector and dipole vector. It returns a line histogram with vector
orientation preference. A straight line in the output plot means no
preferential orientation in water molecules. In this case we are analyzing if
water molecules have some orientational preference, in this way we can see if
water molecules are under an electric field or if they are interacting with
something (residue, protein, etc)::
import MDAnalysis
from MDAnalysis.analysis.waterdynamics import AngularDistribution as AD
u = MDAnalysis.Universe(pdb, trajectory)
selection = "byres name OH2 and sphzone 6.0 (protein and (resid 42 or resid 26) )"
bins = 30
AD_analysis = AD(u, selection, bins)
AD_analysis.run()
#now we print data ready to graph. The first two columns are P(cos(theta)) vs cos(theta) for the OH vector,
#the second two columns are P(cos(theta)) vs cos(theta) for the HH vector and the third two columns
#are P(cos(theta)) vs cos(theta) for dipole vector
for bin in range(bins):
print("{AD_analysisOH} {AD_analysisHH} {AD_analysisDip}".format(AD_analysis.graph0=AD_analysis.graph[0][bin], AD_analysis.graph1=AD_analysis.graph[1][bin],AD_analysis.graph2=AD_analysis.graph[2][bin]))
#and if we want to graph our results
plt.figure(1,figsize=(18, 6))
#AD OH
plt.subplot(131)
plt.xlabel('cos theta')
plt.ylabel('P(cos theta)')
plt.title('PDF cos theta for OH')
plt.plot([float(column.split()[0]) for column in AD_analysis.graph[0][:-1]],[float(column.split()[1]) for column in AD_analysis.graph[0][:-1]])
#AD HH
plt.subplot(132)
plt.xlabel('cos theta')
plt.ylabel('P(cos theta)')
plt.title('PDF cos theta for HH')
plt.plot([float(column.split()[0]) for column in AD_analysis.graph[1][:-1]],[float(column.split()[1]) for column in AD_analysis.graph[1][:-1]])
#AD dip
plt.subplot(133)
plt.xlabel('cos theta')
plt.ylabel('P(cos theta)')
plt.title('PDF cos theta for dipole')
plt.plot([float(column.split()[0]) for column in AD_analysis.graph[2][:-1]],[float(column.split()[1]) for column in AD_analysis.graph[2][:-1]])
plt.show()
where `P(cos(theta))` is the angular distribution or angular probabilities.
MeanSquareDisplacement
~~~~~~~~~~~~~~~~~~~~~~
Analyzing mean square displacement (MSD) :class:`MeanSquareDisplacement` for
water molecules. In this case we are analyzing the average distance that water
molecules travel inside the protein in the XYZ direction (cylindric zone of radius
11[nm], Zmax 4.0[nm] and Zmin -8.0[nm]). A steep rise means fast movement of
water molecules, a shallow rise means slow movement of particles::
import MDAnalysis
from MDAnalysis.analysis.waterdynamics import MeanSquareDisplacement as MSD
u = MDAnalysis.Universe(pdb, trajectory)
select = "byres name OH2 and cyzone 11.0 4.0 -8.0 protein"
MSD_analysis = MSD(u, select, 0, 1000, 20)
MSD_analysis.run()
#now we print data ready to graph. The graph
#represents MSD vs t
time = 0
for msd in MSD_analysis.timeseries:
print("{time} {msd}".format(time=time, msd=msd))
time += 1
#Plot
plt.xlabel('time')
plt.ylabel('MSD')
plt.title('MSD')
plt.plot(range(0,time),MSD_analysis.timeseries)
plt.show()
.. _SP-examples:
SurvivalProbability
~~~~~~~~~~~~~~~~~~~
Analyzing survival probability (SP) :class:`SurvivalProbability` of molecules.
In this case we are analyzing how long water molecules remain in a
sphere of radius 12.3 centered in the geometrical center of resid 42 and 26.
A slow decay of SP means a long permanence time of water molecules in
the zone, on the other hand, a fast decay means a short permanence time::
import MDAnalysis
from MDAnalysis.analysis.waterdynamics import SurvivalProbability as SP
import matplotlib.pyplot as plt
universe = MDAnalysis.Universe(pdb, trajectory)
select = "byres name OH2 and sphzone 12.3 (resid 42 or resid 26) "
sp = SP(universe, select, verbose=True)
sp.run(start=0, stop=101, tau_max=20)
tau_timeseries = sp.tau_timeseries
sp_timeseries = sp.sp_timeseries
# print in console
for tau, sp in zip(tau_timeseries, sp_timeseries):
print("{time} {sp}".format(time=tau, sp=sp))
# plot
plt.xlabel('Time')
plt.ylabel('SP')
plt.title('Survival Probability')
plt.plot(tau_timeseries, sp_timeseries)
plt.show()
One should note that the `stop` keyword as used in the above example has an
`exclusive` behaviour, i.e. here the final frame used will be 100 not 101.
This behaviour is aligned with :class:`AnalysisBase` but currently differs from
other :mod:`MDAnalysis.analysis.waterdynamics` classes, which all exhibit
`inclusive` behaviour for their final frame selections.
Another example applies to the situation where you work with many different "residues".
Here we calculate the SP of a potassium ion around each lipid in a membrane and
average the results. In this example, if the SP analysis were run without treating each lipid
separately, potassium ions may hop from one lipid to another and still be counted as remaining
in the specified region. That is, the survival probability of the potassium ion around the
entire membrane will be calculated.
Note, for this example, it is advisable to use `Universe(in_memory=True)` to ensure that the
simulation is not being reloaded into memory for each lipid::
import MDAnalysis as mda
from MDAnalysis.analysis.waterdynamics import SurvivalProbability as SP
import numpy as np
u = mda.Universe("md.gro", "md100ns.xtc", in_memory=True)
lipids = u.select_atoms('resname LIPIDS')
joined_sp_timeseries = [[] for _ in range(20)]
for lipid in lipids.residues:
print("Lipid ID: %d" % lipid.resid)
select = "resname POTASSIUM and around 3.5 (resid %d and name O13 O14) " % lipid.resid
sp = SP(u, select, verbose=True)
sp.run(tau_max=20)
# Raw SP points for each tau:
for sps, new_sps in zip(joined_sp_timeseries, sp.sp_timeseries_data):
sps.extend(new_sps)
# average all SP datapoints
sp_data = [np.mean(sp) for sp in joined_sp_timeseries]
for tau, sp in zip(range(1, len(sp_data) + 1), sp_data):
print("{time} {sp}".format(time=tau, sp=sp))
.. _Output:
Output
------
WaterOrientationalRelaxation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Water orientational relaxation (WOR) data is returned per window timestep,
which is stored in :attr:`WaterOrientationalRelaxation.timeseries`::
results = [
[ # time t0
<WOR_OH>, <WOR_HH>, <WOR_dip>
],
[ # time t1
<WOR_OH>, <WOR_HH>, <WOR_dip>
],
...
]
AngularDistribution
~~~~~~~~~~~~~~~~~~~
Angular distribution (AD) data is returned per vector, which is stored in
:attr:`AngularDistribution.graph`. In fact, AngularDistribution returns a
histogram::
results = [
[ # OH vector values
# the values are order in this way: <x_axis y_axis>
<cos_theta0 ang_distr0>, <cos_theta1 ang_distr1>, ...
],
[ # HH vector values
<cos_theta0 ang_distr0>, <cos_theta1 ang_distr1>, ...
],
[ # dip vector values
<cos_theta0 ang_distr0>, <cos_theta1 ang_distr1>, ...
],
]
MeanSquareDisplacement
~~~~~~~~~~~~~~~~~~~~~~
Mean Square Displacement (MSD) data is returned in a list, in which each element
represents an MSD value in its respective window timestep. Data is stored in
:attr:`MeanSquareDisplacement.timeseries`::
results = [
#MSD values orders by window timestep
<MSD_t0>, <MSD_t1>, ...
]
SurvivalProbability
~~~~~~~~~~~~~~~~~~~
Survival Probability (SP) computes two lists: a list of taus (:attr:`SurvivalProbability.tau_timeseries`) and a list of
the corresponding survival probabilities (:attr:`SurvivalProbability.sp_timeseries`)::
    results = [ tau1, tau2, ..., tau_n ], [ sp_tau1, sp_tau2, ..., sp_tau_n]
Additionally, a list :attr:`SurvivalProbability.sp_timeseries_data` is provided, which contains
a list of all SPs calculated for each tau. This can be used to compute the distribution or time dependence of SP, etc.
Classes
--------
.. autoclass:: WaterOrientationalRelaxation
:members:
:inherited-members:
.. autoclass:: AngularDistribution
:members:
:inherited-members:
.. autoclass:: MeanSquareDisplacement
:members:
:inherited-members:
.. autoclass:: SurvivalProbability
:members:
:inherited-members:
"""
from MDAnalysis.lib.correlations import autocorrelation, correct_intermittency
import MDAnalysis.analysis.hbonds
from itertools import zip_longest
import logging
import warnings
import numpy as np
logger = logging.getLogger('MDAnalysis.analysis.waterdynamics')
from MDAnalysis.lib.log import ProgressBar
class WaterOrientationalRelaxation(object):
r"""Water orientation relaxation analysis
Function to evaluate the Water Orientational Relaxation proposed by Yu-ling
Yeh and Chung-Yuan Mou [Yeh1999]_. WaterOrientationalRelaxation indicates
"how fast" water molecules are rotating or changing direction. This is a
time correlation function given by:
.. math::
C_{\hat u}(\tau)=\langle \mathit{P}_2[\mathbf{\hat{u}}(t_0)\cdot\mathbf{\hat{u}}(t_0+\tau)]\rangle
where :math:`P_2=(3x^2-1)/2` is the second-order Legendre polynomial and :math:`\hat{u}` is
a unit vector along HH, OH or dipole vector.
Parameters
----------
universe : Universe
Universe object
select : str
Selection string for water ['byres name OH2'].
t0 : int
frame where analysis begins
tf : int
frame where analysis ends
dtmax : int
Maximum dt size, `dtmax` < `tf` or it will crash.
.. versionadded:: 0.11.0
.. versionchanged:: 1.0.0
Changed `selection` keyword to `select`
"""
def __init__(self, universe, select, t0, tf, dtmax, nproc=1):
self.universe = universe
self.selection = select
self.t0 = t0
self.tf = tf
self.dtmax = dtmax
self.nproc = nproc
self.timeseries = None
def _repeatedIndex(self, selection, dt, totalFrames):
"""
Compares each frame t with the frame at t+dt.
The result is a list of lists with all the repeated indices per frame
(or time).
Ex: dt=1, so compare frames (1,2),(2,3),(3,4)...
Ex: dt=2, so compare frames (1,3),(3,5),(5,7)...
Ex: dt=3, so compare frames (1,4),(4,7),(7,10)...
"""
rep = []
for i in range(int(round((totalFrames - 1) / float(dt)))):
if (dt * i + dt < totalFrames):
rep.append(self._sameMolecTandDT(
selection, dt * i, (dt * i) + dt))
return rep
def _getOneDeltaPoint(self, universe, repInd, i, t0, dt):
"""
Gives one point to calculate the mean and gets one point of the plot
C_vect vs t.
Ex: t0=1 and tau=1, so calculate the t0-tau=1-2 interval.
Ex: t0=5 and tau=3, so calculate the t0-tau=5-8 interval.
i comes from _getMeanOnePoint (named j there) (int)
"""
valOH = 0
valHH = 0
valdip = 0
n = 0
for j in range(len(repInd[i]) // 3):
begj = 3 * j
universe.trajectory[t0]
Ot0 = repInd[i][begj]
H1t0 = repInd[i][begj + 1]
H2t0 = repInd[i][begj + 2]
OHVector0 = H1t0.position - Ot0.position
HHVector0 = H1t0.position - H2t0.position
dipVector0 = ((H1t0.position + H2t0.position) * 0.5) - Ot0.position
universe.trajectory[t0 + dt]
Otp = repInd[i][begj]
H1tp = repInd[i][begj + 1]
H2tp = repInd[i][begj + 2]
OHVectorp = H1tp.position - Otp.position
HHVectorp = H1tp.position - H2tp.position
dipVectorp = ((H1tp.position + H2tp.position) * 0.5) - Otp.position
normOHVector0 = np.linalg.norm(OHVector0)
normOHVectorp = np.linalg.norm(OHVectorp)
normHHVector0 = np.linalg.norm(HHVector0)
normHHVectorp = np.linalg.norm(HHVectorp)
normdipVector0 = np.linalg.norm(dipVector0)
normdipVectorp = np.linalg.norm(dipVectorp)
unitOHVector0 = [OHVector0[0] / normOHVector0,
OHVector0[1] / normOHVector0,
OHVector0[2] / normOHVector0]
unitOHVectorp = [OHVectorp[0] / normOHVectorp,
OHVectorp[1] / normOHVectorp,
OHVectorp[2] / normOHVectorp]
unitHHVector0 = [HHVector0[0] / normHHVector0,
HHVector0[1] / normHHVector0,
HHVector0[2] / normHHVector0]
unitHHVectorp = [HHVectorp[0] / normHHVectorp,
HHVectorp[1] / normHHVectorp,
HHVectorp[2] / normHHVectorp]
unitdipVector0 = [dipVector0[0] / normdipVector0,
dipVector0[1] / normdipVector0,
dipVector0[2] / normdipVector0]
unitdipVectorp = [dipVectorp[0] / normdipVectorp,
dipVectorp[1] / normdipVectorp,
dipVectorp[2] / normdipVectorp]
valOH += self.lg2(np.dot(unitOHVector0, unitOHVectorp))
valHH += self.lg2(np.dot(unitHHVector0, unitHHVectorp))
valdip += self.lg2(np.dot(unitdipVector0, unitdipVectorp))
n += 1
return (valOH/n, valHH/n, valdip/n) if n > 0 else (0, 0, 0)
def _getMeanOnePoint(self, universe, selection1, selection_str, dt,
totalFrames):
"""
This function gets one point of the plot C_vec vs t. It uses the
_getOneDeltaPoint() function to calculate the average.
"""
repInd = self._repeatedIndex(selection1, dt, totalFrames)
sumsdt = 0
n = 0.0
sumDeltaOH = 0.0
sumDeltaHH = 0.0
sumDeltadip = 0.0
for j in range(totalFrames // dt - 1):
a = self._getOneDeltaPoint(universe, repInd, j, sumsdt, dt)
sumDeltaOH += a[0]
sumDeltaHH += a[1]
sumDeltadip += a[2]
sumsdt += dt
n += 1
# if no water molecules remain in selection, there is nothing to get
# the mean, so n = 0.
return (sumDeltaOH / n, sumDeltaHH / n, sumDeltadip / n) if n > 0 else (0, 0, 0)
def _sameMolecTandDT(self, selection, t0d, tf):
"""
Compare the molecules in the t0d selection and the t0d+dt selection and
select only the particles that are repeated in both frames. This is to
consider only the molecules that remain in the selection after the dt
time has elapsed.
The result is a list with the indices of the atoms.
"""
a = set(selection[t0d])
b = set(selection[tf])
sort = sorted(list(a.intersection(b)))
return sort
def _selection_serial(self, universe, selection_str):
selection = []
for ts in ProgressBar(universe.trajectory, verbose=True,
total=universe.trajectory.n_frames):
selection.append(universe.select_atoms(selection_str))
return selection
@staticmethod
def lg2(x):
"""Second Legendre polynomial"""
return (3*x*x - 1)/2
def run(self, **kwargs):
"""Analyze trajectory and produce timeseries"""
# All the selection to an array, this way is faster than selecting
# later.
if self.nproc == 1:
selection_out = self._selection_serial(
self.universe, self.selection)
else:
# selection_out = self._selection_parallel(self.universe,
# self.selection, self.nproc)
# parallel selection to be implemented
selection_out = self._selection_serial(
self.universe, self.selection)
self.timeseries = []
for dt in list(range(1, self.dtmax + 1)):
output = self._getMeanOnePoint(
self.universe, selection_out, self.selection, dt, self.tf)
self.timeseries.append(output)
class AngularDistribution(object):
r"""Angular distribution function analysis
The angular distribution function (AD) is defined as the distribution
probability of the cosine of the :math:`\theta` angle formed by the OH
vector, HH vector or dipolar vector of water molecules and a vector
:math:`\hat n` parallel to chosen axis (z is the default value). The cosine
is define as :math:`\cos \theta = \hat u \cdot \hat n`, where :math:`\hat
u` is OH, HH or dipole vector. It creates a histogram and returns a list
of lists, see Output_. The AD is also know as Angular Probability (AP).
Parameters
----------
universe : Universe
Universe object
select : str
Selection string to evaluate its angular distribution ['byres name OH2']
bins : int (optional)
Number of bins to create the histogram by means of :func:`numpy.histogram`
axis : {'x', 'y', 'z'} (optional)
Axis to create angle with the vector (HH, OH or dipole) and calculate
cosine theta ['z'].
.. versionadded:: 0.11.0
.. versionchanged:: 1.0.0
Changed `selection` keyword to `select`
"""
def __init__(self, universe, select, bins=40, nproc=1, axis="z"):
self.universe = universe
self.selection_str = select
self.bins = bins
self.nproc = nproc
self.axis = axis
self.graph = None
def _getCosTheta(self, universe, selection, axis):
valOH = []
valHH = []
valdip = []
i = 0
while i <= (len(selection) - 1):
universe.trajectory[i]
line = selection[i].positions
Ot0 = line[::3]
H1t0 = line[1::3]
H2t0 = line[2::3]
OHVector0 = H1t0 - Ot0
HHVector0 = H1t0 - H2t0
dipVector0 = (H1t0 + H2t0) * 0.5 - Ot0
unitOHVector0 = OHVector0 / \
np.linalg.norm(OHVector0, axis=1)[:, None]
unitHHVector0 = HHVector0 / \
np.linalg.norm(HHVector0, axis=1)[:, None]
unitdipVector0 = dipVector0 / \
np.linalg.norm(dipVector0, axis=1)[:, None]
j = 0
while j < len(line) / 3:
if axis == "z":
valOH.append(unitOHVector0[j][2])
valHH.append(unitHHVector0[j][2])
valdip.append(unitdipVector0[j][2])
elif axis == "x":
valOH.append(unitOHVector0[j][0])
valHH.append(unitHHVector0[j][0])
valdip.append(unitdipVector0[j][0])
elif axis == "y":
valOH.append(unitOHVector0[j][1])
valHH.append(unitHHVector0[j][1])
valdip.append(unitdipVector0[j][1])
j += 1
i += 1
return (valOH, valHH, valdip)
def _getHistogram(self, universe, selection, bins, axis):
"""
This function gets a normalized histogram of the cos(theta) values. It
returns a list of lists.
"""
a = self._getCosTheta(universe, selection, axis)
cosThetaOH = a[0]
cosThetaHH = a[1]
cosThetadip = a[2]
lencosThetaOH = len(cosThetaOH)
lencosThetaHH = len(cosThetaHH)
lencosThetadip = len(cosThetadip)
histInterval = bins
histcosThetaOH = np.histogram(cosThetaOH, histInterval, density=True)
histcosThetaHH = np.histogram(cosThetaHH, histInterval, density=True)
histcosThetadip = np.histogram(cosThetadip, histInterval, density=True)
return (histcosThetaOH, histcosThetaHH, histcosThetadip)
def _hist2column(self, aList):
"""
This function transforms the histogram format
into a column format.
"""
a = []
for x in zip_longest(*aList, fillvalue="."):
a.append(" ".join(str(i) for i in x))
return a
def run(self, **kwargs):
"""Function to evaluate the angular distribution of cos(theta)"""
if self.nproc == 1:
selection = self._selection_serial(
self.universe, self.selection_str)
else:
# not implemented yet
# selection = self._selection_parallel(self.universe,
# self.selection_str,self.nproc)
selection = self._selection_serial(
self.universe, self.selection_str)
self.graph = []
output = self._getHistogram(
self.universe, selection, self.bins, self.axis)
# this is to format the output
# maybe this output could be improved
listOH = [list(output[0][1]), list(output[0][0])]
listHH = [list(output[1][1]), list(output[1][0])]
listdip = [list(output[2][1]), list(output[2][0])]
self.graph.append(self._hist2column(listOH))
self.graph.append(self._hist2column(listHH))
self.graph.append(self._hist2column(listdip))
def _selection_serial(self, universe, selection_str):
selection = []
for ts in ProgressBar(universe.trajectory, verbose=True,
total=universe.trajectory.n_frames):
selection.append(universe.select_atoms(selection_str))
return selection
class MeanSquareDisplacement(object):
r"""Mean square displacement analysis
Function to evaluate the Mean Square Displacement (MSD_). The MSD gives the
average distance that particles travel. The MSD is given by:
.. math::
\langle\Delta r(t)^2\rangle = 2nDt
where :math:`r(t)` is the position of particle in time :math:`t`,
:math:`\Delta r(t)` is the displacement after time lag :math:`t`,
:math:`n` is the dimensionality, in this case :math:`n=3`,
:math:`D` is the diffusion coefficient and :math:`t` is the time.
.. _MSD: http://en.wikipedia.org/wiki/Mean_squared_displacement
Parameters
----------
universe : Universe
Universe object
select : str
Selection string for water ['byres name OH2'].
t0 : int
frame where analysis begins
tf : int
frame where analysis ends
dtmax : int
Maximum dt size, `dtmax` < `tf` or it will crash.
.. versionadded:: 0.11.0
.. versionchanged:: 1.0.0
Changed `selection` keyword to `select`
"""
def __init__(self, universe, select, t0, tf, dtmax, nproc=1):
self.universe = universe
self.selection = select
self.t0 = t0
self.tf = tf
self.dtmax = dtmax
self.nproc = nproc
self.timeseries = None
def _repeatedIndex(self, selection, dt, totalFrames):
"""
Compares each frame t with the frame at t+dt.
The result is a list of lists with all the repeated indices per frame
(or time).
- Ex: dt=1, so compare frames (1,2),(2,3),(3,4)...
- Ex: dt=2, so compare frames (1,3),(3,5),(5,7)...
- Ex: dt=3, so compare frames (1,4),(4,7),(7,10)...
"""
rep = []
for i in range(int(round((totalFrames - 1) / float(dt)))):
if (dt * i + dt < totalFrames):
rep.append(self._sameMolecTandDT(
selection, dt * i, (dt * i) + dt))
return rep
def _getOneDeltaPoint(self, universe, repInd, i, t0, dt):
"""
Gives one point to calculate the mean and gets one point of the plot
C_vect vs t.
- Ex: t0=1 and dt=1, so calculate the t0-dt=1-2 interval.
- Ex: t0=5 and dt=3, so calculate the t0-dt=5-8 interval.
i comes from _getMeanOnePoint (named j there) (int)
"""
valO = 0
n = 0
for j in range(len(repInd[i]) // 3):
begj = 3 * j
universe.trajectory[t0]
# Adding zero forces a copy, so Ot0 is not the same array as Otp
Ot0 = repInd[i][begj].position + 0
universe.trajectory[t0 + dt]
# Adding zero forces a copy, so Otp is not the same array as Ot0
Otp = repInd[i][begj].position + 0
# position oxygen
OVector = Ot0 - Otp
# here it is the difference with
# waterdynamics.WaterOrientationalRelaxation
valO += np.dot(OVector, OVector)
n += 1
# if no water molecules remain in selection, there is nothing to get
# the mean, so n = 0.
return valO/n if n > 0 else 0
def _getMeanOnePoint(self, universe, selection1, selection_str, dt,
totalFrames):
"""
This function gets one point of the plot C_vec vs t. It uses the
_getOneDeltaPoint() function to calculate the average.
"""
repInd = self._repeatedIndex(selection1, dt, totalFrames)
sumsdt = 0
n = 0.0
sumDeltaO = 0.0
valOList = []
for j in range(totalFrames // dt - 1):
a = self._getOneDeltaPoint(universe, repInd, j, sumsdt, dt)
sumDeltaO += a
valOList.append(a)
sumsdt += dt
n += 1
# if no water molecules remain in selection, there is nothing to get
# the mean, so n = 0.
return sumDeltaO/n if n > 0 else 0
def _sameMolecTandDT(self, selection, t0d, tf):
"""
Compare the molecules in the t0d selection and the t0d+dt selection and
select only the particles that are repeated in both frames. This is to
consider only the molecules that remain in the selection after the dt
time has elapsed. The result is a list with the indices of the atoms.
"""
a = set(selection[t0d])
b = set(selection[tf])
sort = sorted(list(a.intersection(b)))
return sort
def _selection_serial(self, universe, selection_str):
selection = []
for ts in ProgressBar(universe.trajectory, verbose=True,
total=universe.trajectory.n_frames):
selection.append(universe.select_atoms(selection_str))
return selection
def run(self, **kwargs):
"""Analyze trajectory and produce timeseries"""
# All the selection to an array, this way is faster than selecting
# later.
if self.nproc == 1:
selection_out = self._selection_serial(
self.universe, self.selection)
else:
# parallel not yet implemented
# selection = selection_parallel(universe, selection_str, nproc)
selection_out = self._selection_serial(
self.universe, self.selection)
self.timeseries = []
for dt in list(range(1, self.dtmax + 1)):
output = self._getMeanOnePoint(
self.universe, selection_out, self.selection, dt, self.tf)
self.timeseries.append(output)
class SurvivalProbability(object):
r"""
Survival Probability (SP) gives the probability for a group of particles to remain in a certain region.
The SP is given by:
.. math::
P(\tau) = \frac1T \sum_{t=1}^T \frac{N(t,t+\tau)}{N(t)}
where :math:`T` is the maximum time of the simulation, :math:`\tau` is the
lag time, :math:`N(t)` the number of particles at time :math:`t`, and
:math:`N(t, t+\tau)` the number of particles present at every frame from :math:`t` to :math:`t+\tau`.
Parameters
----------
universe : Universe
Universe object
select : str
Selection string; any selection is allowed. With this selection you
define the region/zone where to analyze, e.g.: "resname SOL and around 5 (resid 10)". See `SP-examples`_.
verbose : Boolean, optional
When True, prints progress and comments to the console.
Notes
-----
Currently :class:`SurvivalProbability` is the only one in
:mod:`MDAnalysis.analysis.waterdynamics` to support an `exclusive`
behaviour (i.e. similar to the current behaviour of :class:`AnalysisBase`)
for the `stop` keyword passed to :meth:`SurvivalProbability.run`, unlike
the other :mod:`MDAnalysis.analysis.waterdynamics` final frame definitions,
which are `inclusive`.
.. versionadded:: 0.11.0
.. versionchanged:: 1.0.0
Using the MDAnalysis.lib.correlations.py to carry out the intermittency
and autocorrelation calculations.
Changed `selection` keyword to `select`.
Removed support for the deprecated `t0`, `tf`, and `dtmax` keywords.
These should instead be passed to :meth:`SurvivalProbability.run` as
the `start`, `stop`, and `tau_max` keywords respectively.
The `stop` keyword as passed to :meth:`SurvivalProbability.run` has now
changed behaviour and will act in an `exclusive` manner (instead of its
previous `inclusive` behaviour).
"""
def __init__(self, universe, select, verbose=False):
self.universe = universe
self.selection = select
self.verbose = verbose
def run(self, tau_max=20, start=None, stop=None, step=None, residues=False,
intermittency=0, verbose=False):
"""
Computes and returns the Survival Probability (SP) timeseries
Parameters
----------
start : int, optional
Zero-based index of the first frame to be analysed, Default: None
(first frame).
stop : int, optional
Zero-based index of the last frame to be analysed (exclusive),
Default: None (last frame).
step : int, optional
Jump every `step`-th frame. This is compatible with, but independent of,
the taus used, and it is worth considering a `step` equal
to `tau_max` to remove the overlap. Note that `step` and `tau_max`
work consistently with intermittency. Default: None
(use every frame).
tau_max : int, optional
Survival probability is calculated for the range
1 <= `tau` <= `tau_max`.
residues : Boolean, optional
If true, the analysis will be carried out on the residues
(.resids) rather than on atom (.ids). A single atom is sufficient
to classify the residue as within the distance.
intermittency : int, optional
The maximum number of consecutive frames for which an atom can
leave but be counted as present if it returns at the next frame.
An intermittency of `0` is equivalent to a continuous survival
probability, which does not allow for the leaving and returning of
atoms. For example, for `intermittency=2`, any given atom may leave
a region of interest for up to two consecutive frames yet be
treated as being present at all frames. The default is continuous
(0).
verbose : Boolean, optional
Print the progress to the console.
Returns
-------
tau_timeseries : list
tau from 1 to `tau_max`. Saved in the field tau_timeseries.
sp_timeseries : list
survival probability for each value of `tau`. Saved in the field
sp_timeseries.
sp_timeseries_data: list
raw datapoints from which the average is taken (sp_timeseries).
Time dependency and distribution can be extracted.
.. versionchanged:: 1.0.0
To match other analysis methods, the `stop` keyword is now exclusive
rather than inclusive.
"""
start, stop, step = self.universe.trajectory.check_slice_indices(
start,
stop,
step
)
if tau_max > (stop - start):
raise ValueError("Too few frames selected for given tau_max.")
# preload the frames (atom IDs) to a list of sets
self._selected_ids = []
# fixme - to parallise: the section should be rewritten so that this loop only creates a list of indices,
# on which the parallel _single_frame can be applied.
# skip frames that will not be used in order to improve performance
# because AtomGroup.select_atoms is the most expensive part of this calculation
# Example: step 5 and tau 2: LLLSS LLLSS, ... where L = Load, and S = Skip
# Intermittency means that we have to load the extra frames to know if the atom is actually missing.
# Say step=5 and tau=1, intermittency=0: LLSSS LLSSS
# Say step=5 and tau=1, intermittency=1: LLLSL LLLSL
frame_loaded_counter = 0
# only for the first window (frames before t are not used)
frames_per_window = tau_max + 1 + intermittency
# This number will apply after the first windows was loaded
frames_per_window_subsequent = (tau_max + 1) + (2 * intermittency)
num_frames_to_skip = max(step - frames_per_window_subsequent, 0)
frame_no = start
while frame_no < stop:  # stop is exclusive, hence the strict <
if num_frames_to_skip != 0 and frame_loaded_counter == frames_per_window:
logger.info("Skipping the next %d frames:", num_frames_to_skip)
frame_no += num_frames_to_skip
frame_loaded_counter = 0
# Correct the number of frames to be loaded after the first window (which starts at t=0, and
# intermittency does not apply to the frames before)
frames_per_window = frames_per_window_subsequent
continue
# update the frame number
self.universe.trajectory[frame_no]
logger.info("Loading frame: %d", self.universe.trajectory.frame)
atoms = self.universe.select_atoms(self.selection)
# SP of residues or of atoms
ids = atoms.residues.resids if residues else atoms.ids
self._selected_ids.append(set(ids))
frame_no += 1
frame_loaded_counter += 1
# adjust for the frames that were not loaded (step>tau_max + 1),
# and for extra frames that were loaded (intermittency)
window_jump = step - num_frames_to_skip
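# window_jump: offset, counted in loaded frames, between consecutive window starts in self._selected_ids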
self._intermittent_selected_ids = correct_intermittency(self._selected_ids, intermittency=intermittency)
tau_timeseries, sp_timeseries, sp_timeseries_data = autocorrelation(self._intermittent_selected_ids,
tau_max, window_jump)
# warn the user if NaNs are found
if all(np.isnan(sp_timeseries[1:])):
logger.warning('NaN Error: Most likely data was not found. Check your atom selections. ')
# user can investigate the distribution and sample size
self.sp_timeseries_data = sp_timeseries_data
self.tau_timeseries = tau_timeseries
self.sp_timeseries = sp_timeseries
return self
| MDAnalysis/mdanalysis | package/MDAnalysis/analysis/waterdynamics.py | Python | gpl-2.0 | 41,156 |
#!/usr/bin/env python
# encoding: utf-8
import sys
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-m", "--m_number", dest = "m", help = "pleaer enter the m...", type = int)
parser.add_argument("-n", "--n_number", dest = "n", help = "pleaer enter the n...", type = int)
args = parser.parse_args()
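# Hypothetical invocation:
#   $ python m-n3.py -m 2 -n 10
#   2 ^ 10 = 1024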
print "%d ^ %d = %d" % (args.m, args.n, args.m ** args.n) | gatieme/AderXCoding | language/python/argparse/m-n3.py | Python | gpl-2.0 | 435 |
# F3AT - Flumotion Asynchronous Autonomous Agent Toolkit
# Copyright (C) 2010,2011 Flumotion Services, S.A.
# All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# See "LICENSE.GPL" in the source distribution for more information.
# Headers in this file shall remain intact.
# -*- coding: utf-8 -*-
# -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
import uuid
import time
from twisted.internet import defer
from zope.interface import implements
from feat.agents.base import descriptor, requester, replier, replay
from feat.agencies import message, retrying
from feat.interface.agency import ExecMode
from feat.database.interface import NotFoundError
from feat.interface.requests import RequestState
from feat.interface.protocols import ProtocolFailed, IInterest, InterestType
from feat.test import common, dummies # don't remove dummies, it defines an adapter
class DummyRequester(requester.BaseRequester):
protocol_id = 'dummy-request'
timeout = 2
@replay.entry_point
def initiate(self, state, argument):
state._got_response = False
msg = message.RequestMessage()
msg.payload = argument
state.medium.request(msg)
@replay.entry_point
def got_reply(self, state, message):
state._got_response = True
return message.payload
@replay.immutable
def _get_medium(self, state):
self.log(state)
return state.medium
@replay.immutable
def got_response(self, state):
return state._got_response
class DummyReplier(replier.BaseReplier):
protocol_id = 'dummy-request'
@replay.entry_point
def requested(self, state, request):
state.agent.got_payload = request.payload
state.medium.reply(message.ResponseMessage(payload=request.payload))
class DummyInterest(object):
implements(IInterest)
def __init__(self):
self.protocol_type = "Contract"
self.protocol_id = "some-contract"
self.interest_type = InterestType.public
self.initiator = message.Announcement
class TestDependencies(common.TestCase, common.AgencyTestHelper):
@defer.inlineCallbacks
def setUp(self):
yield common.TestCase.setUp(self)
yield common.AgencyTestHelper.setUp(self)
def testGettingModes(self):
self.assertEqual(ExecMode.test, self.agency.get_mode('unknown'))
self.agency.set_mode('something', ExecMode.production)
self.assertEqual(ExecMode.production,
self.agency.get_mode('something'))
self.agency._set_default_mode(ExecMode.production)
self.assertEqual(ExecMode.production,
self.agency.get_mode('unknown'))
class TestAgencyAgent(common.TestCase, common.AgencyTestHelper):
timeout = 3
protocol_type = 'Request'
protocol_id = 'dummy-request'
@defer.inlineCallbacks
def setUp(self):
yield common.TestCase.setUp(self)
yield common.AgencyTestHelper.setUp(self)
desc = yield self.doc_factory(descriptor.Descriptor)
self.agent = yield self.agency.start_agent(desc)
self.assertEqual(1, self.agent.get_descriptor().instance_id)
self.endpoint, self.queue = self.setup_endpoint()
def testJoinShard(self):
messaging = self.agent._messaging
self.assertTrue(len(messaging.get_bindings('lobby')) > 1)
self.agent.leave_shard('lobby')
self.assertEqual(0, len(messaging.get_bindings('lobby')))
@defer.inlineCallbacks
def testSwitchingShardRebinding(self):
messaging = self.agent._messaging
initial = len(messaging.get_bindings('lobby'))
interest = DummyInterest()
self.agent.register_interest(interest)
self.assertEqual(initial + 1, len(messaging.get_bindings('lobby')))
yield self.agent.leave_shard('lobby')
self.assertEqual(0, len(messaging.get_bindings('lobby')))
yield self.agent.join_shard('new shard')
self.assertEqual(initial + 1,
len(messaging.get_bindings('new shard')))
self.assertEqual(0, len(messaging.get_bindings('lobby')))
@defer.inlineCallbacks
def testUpdateDocument(self):
desc = self.agent.get_descriptor()
self.assertIsInstance(desc, descriptor.Descriptor)
def update_fun(desc):
desc.shard = 'changed'
yield self.agent.update_descriptor(update_fun)
self.assertEqual('changed', self.agent._descriptor.shard)
def testRegisterTwice(self):
self.assertTrue(self.agent.register_interest(DummyReplier))
self.failIf(self.agent.register_interest(DummyReplier))
def testRegisteringAndRevokeReplier(self):
self.agent.register_interest(DummyReplier)
self.assertTrue('Request' in self.agent._interests)
self.assertTrue('dummy-request' in self.agent._interests['Request'])
self.agent.revoke_interest(DummyReplier)
self.assertFalse('dummy-request' in self.agent._interests['Request'])
#calling it once again, nothing bad should happen
req = self.agent.revoke_interest(DummyReplier)
self.assertFalse(req)
def testGettingRequestWithoutInterest(self):
'''The current implementation just ignores such events. Update this test
in case we decide to do something else.'''
key = (self.agent.get_descriptor()).doc_id
msg = message.RequestMessage()
return self.recv_msg(msg, self.endpoint, key)
@defer.inlineCallbacks
def testTerminatingTheAgent(self):
# make him have running retrying request (covers all the hard cases)
d = self.cb_after(None, self.agent, 'initiate_protocol')
factory = retrying.RetryingProtocolFactory(DummyRequester)
self.agent.initiate_protocol(factory, self.endpoint, None)
yield d
yield self.agent._terminate()
self.assertCalled(self.agent.agent, 'shutdown')
doc_id = self.agent._descriptor.doc_id
d = self.agency._database.get_connection().get_document(doc_id)
self.assertFailure(d, NotFoundError)
yield d
self.assertEqual(0, len(self.agency._agents))
@common.attr(timescale=0.05)
class TestRequests(common.TestCase, common.AgencyTestHelper):
timeout = 3
protocol_type = 'Request'
protocol_id = 'dummy-request'
@defer.inlineCallbacks
def setUp(self):
yield common.TestCase.setUp(self)
yield common.AgencyTestHelper.setUp(self)
desc = yield self.doc_factory(descriptor.Descriptor)
self.agent = yield self.agency.start_agent(desc)
self.endpoint, self.queue = self.setup_endpoint()
def testRequester(self):
d = self.queue.get()
payload = 5
self.requester =\
self.agent.initiate_protocol(DummyRequester,
self.endpoint, payload)
self.medium = self.requester._get_medium()
self.finished = self.requester.notify_finish()
self.assertIsInstance(self.finished, defer.Deferred)
def assertsOnMessage(message):
desc = self.agent.get_descriptor()
self.assertEqual(desc.shard, \
message.reply_to.route)
self.assertEqual(desc.doc_id, \
message.reply_to.key)
self.assertEqual('Request', message.protocol_type)
self.assertEqual('dummy-request', message.protocol_id)
self.assertEqual(payload, message.payload)
self.assertTrue(message.expiration_time is not None)
guid = message.sender_id
self.assertEqual(guid, str(guid))
self.assertEqual(RequestState.requested, self.medium.state)
return guid, message
d.addCallback(assertsOnMessage)
def assertsOnAgency((guid, msg, )):
self.log('%r', self.agent._protocols.keys())
self.assertTrue(guid in self.agent._protocols.keys())
protocol = self.agent._protocols[guid]
self.assertEqual('AgencyRequester', protocol.__class__.__name__)
return guid, msg
d.addCallback(assertsOnAgency)
def mimicReceivingResponse((guid, msg, )):
response = message.ResponseMessage()
self.reply(response, self.endpoint, msg)
return guid
d.addCallback(mimicReceivingResponse)
d.addCallback(lambda _: self.finished)
def assertGotResponseAndTerminated(guid):
self.assertFalse(guid in self.agent._protocols.keys())
self.assertTrue(self.requester.got_response)
d.addCallback(assertGotResponseAndTerminated)
return d
@common.attr(timeout=10)
@defer.inlineCallbacks
def testRequestTimeout(self):
payload = 5
self.requester =\
yield self.agent.initiate_protocol(DummyRequester,
self.endpoint, payload)
self.medium = self.requester._get_medium()
self.finished = self.requester.notify_finish()
self.assertFailure(self.finished, ProtocolFailed)
yield self.finished
guid = self.medium.guid
self.assertFalse(guid in self.agent._protocols.keys())
self.assertFalse(self.requester.got_response())
self.assertEqual(RequestState.closed, self.medium.state)
msg = yield self.queue.get()
self.assertIsInstance(msg, message.RequestMessage)
def testReplierReplies(self):
self.agent.register_interest(DummyReplier)
key = (self.agent.get_descriptor()).doc_id
req = self._build_req_msg(self.endpoint)
d = self.recv_msg(req, self.endpoint, key)
d.addCallback(lambda _: self.queue.get())
def assert_on_msg(msg):
self.assertEqual('dummy-request', msg.protocol_id)
d.addCallback(assert_on_msg)
return d
def testNotProcessingExpiredRequests(self):
self.agent.register_interest(DummyReplier)
self.agent.agent.got_payload = False
key = (self.agent.get_descriptor()).doc_id
# define false sender, he will get the response later
req = self._build_req_msg(self.endpoint)
expiration_time = time.time() - 1
d = self.recv_msg(req, self.endpoint, key, expiration_time)
def asserts_after_procesing(return_value):
self.log(return_value)
self.assertFalse(return_value)
self.assertEqual(False, self.agent.agent.got_payload)
d.addCallback(asserts_after_procesing)
return d
@defer.inlineCallbacks
def testTwoAgentsTalking(self):
receiver = self.agent
desc = yield self.doc_factory(descriptor.Descriptor)
sender = yield self.agency.start_agent(desc)
receiver.register_interest(DummyReplier)
requester = sender.initiate_protocol(DummyRequester, receiver, 1)
r = yield requester.notify_finish()
self.assertEqual(1, r)
self.assertTrue(requester.got_response)
self.assertEqual(1, receiver.agent.got_payload)
def _build_req_msg(self, recp):
r = message.RequestMessage()
r.guid = str(uuid.uuid1())
r.traversal_id = str(uuid.uuid1())
r.payload = 10
return r
| f3at/feat | src/feat/test/test_agencies_emu_agency.py | Python | gpl-2.0 | 11,954 |
# exercise from http://www.ling.gu.se/~lager/python_exercises.html
# solution from http://rosettacode.org/wiki/99_Bottles_of_Beer#Python
# "99 bottle of beer" is a traditional song in the US and Canada.
# it is popular to sing on long trips, as it has a very repetitive
# format which is easy to memorize, and can take a long time to sing.
# the song's simple lyrics are as follows:
# 99 bottles of beer on the wall, 99 bottles of beer.
# take one down, pass it around, 98 bottles of beer on the wall.
# the same verse is repeated, each time with one fewer bottle.
# the song is completed when the singers reach zero.
# Task now is to write a Python program
# capable of generating all the verses of the song.
print '------------------------------'
print '99 Bottles of Beer on the Wall'
print '------------------------------'
bottles = 99
song = '''%d bottles of beer on the wall, %d bottles of beer.
Take one down, pass it around, %d bottles of beer on the wall.
'''
for bottles in range(99,0,-1):
print song %(bottles,bottles,bottles-1)
| SurAnand/pyuthon | 99beer.py | Python | gpl-2.0 | 1,042 |
#####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyostila #
# 2008 Alarian #
# 2008 myfingershurt #
# 2008 Capo #
# 2008 Glorandwarf #
# 2008 QQStarS #
# 2008 Blazingamer #
# 2008 evilynux <[email protected]> #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
from Song import Note, Tempo
from Mesh import Mesh
from Neck import Neck
import random
from copy import deepcopy
from Shader import shaders
from OpenGL.GL import *
import math
#myfingershurt: needed for multi-OS file fetching
import os
import Log
import Song #need the base song defines as well
from Instrument import *
class Guitar(Instrument):
def __init__(self, engine, playerObj, editorMode = False, player = 0, bass = False):
Instrument.__init__(self, engine, playerObj, player)
self.isDrum = False
self.isBassGuitar = bass
self.isVocal = False
self.debugMode = False
self.gameMode2p = self.engine.world.multiMode
self.matchingNotes = []
self.starSpinFrameIndex = 0
self.starSpinFrames = 16
self.logClassInits = self.engine.config.get("game", "log_class_inits")
if self.logClassInits == 1:
Log.debug("Guitar class init...")
#death_au: fixed neck size
#if self.engine.theme.twoDnote == False or self.engine.theme.twoDkeys == False:
#self.boardWidth = 3.6
#self.boardLength = 9.0
self.lastPlayedNotes = [] #MFH - for reverting when game discovers it implied incorrectly
self.missedNotes = []
self.missedNoteNums = []
self.editorMode = editorMode
#########For Animations
self.Animspeed = 30#Lower value = Faster animations
#For Animated Starnotes
self.indexCount = 0
#Alarian, For animated hitglow
self.HCountAni = False
#myfingershurt:
self.hopoStyle = self.engine.config.get("game", "hopo_system")
self.gh2sloppy = self.engine.config.get("game", "gh2_sloppy")
if self.gh2sloppy == 1:
self.hopoStyle = 4
self.sfxVolume = self.engine.config.get("audio", "SFX_volume")
#blazingamer
self.killfx = self.engine.config.get("performance", "killfx")
self.killCount = 0
self.bigMax = 1
#Get theme
themename = self.engine.data.themeLabel
#now theme determination logic is only in data.py:
self.theme = self.engine.data.theme
self.oFlash = None
#myfingershurt:
self.bassGrooveNeckMode = self.engine.config.get("game", "bass_groove_neck")
self.starspin = self.engine.config.get("performance", "starspin")
if self.twoDnote == True:
#Spinning starnotes or not?
#myfingershurt: allowing any non-Rock Band theme to have spinning starnotes if the SpinNotes.png is available in that theme's folder
if self.starspin == True and self.theme < 2:
#myfingershurt: check for SpinNotes, if not there then no animation
if self.gameMode2p == 6:
if engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"spinnotesbattle.png")):
self.starSpinFrames = 8
else:
self.starspin = False
if not engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notesbattle.png")):
engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
else:
if not engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"spinnotes.png")):
self.starspin = False
engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
else:
if self.gameMode2p == 6:
if not engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notesbattle.png")):
engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
else:
engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
#mfh - adding fallback for beta option
else:
#MFH - can't use IOError for fallback logic for a Mesh() call...
if self.engine.fileExists(os.path.join("themes", themename, "note.dae")):
engine.resource.load(self, "noteMesh", lambda: Mesh(engine.resource.fileName("themes", themename, "note.dae")))
else:
engine.resource.load(self, "noteMesh", lambda: Mesh(engine.resource.fileName("note.dae")))
for i in range(5):
if engine.loadImgDrawing(self, "notetex"+chr(97+i), os.path.join("themes", themename, "notetex_"+chr(97+i)+".png")):
self.notetex = True
else:
self.notetex = False
break
if self.engine.fileExists(os.path.join("themes", themename, "star.dae")):
engine.resource.load(self, "starMesh", lambda: Mesh(engine.resource.fileName("themes", themename, "star.dae")))
else:
self.starMesh = None
for i in range(5):
if engine.loadImgDrawing(self, "startex"+chr(97+i), os.path.join("themes", themename, "startex_"+chr(97+i)+".png")):
self.startex = True
else:
self.startex = False
break
for i in range(5):
if engine.loadImgDrawing(self, "staratex"+chr(97+i), os.path.join("themes", themename, "staratex_"+chr(97+i)+".png")):
self.staratex = True
else:
self.staratex = False
break
if self.gameMode2p == 6:
if not engine.loadImgDrawing(self, "battleFrets", os.path.join("themes", themename,"battle_frets.png")):
self.battleFrets = None
if self.twoDkeys == True:
engine.loadImgDrawing(self, "fretButtons", os.path.join("themes",themename,"fretbuttons.png"))
else:
defaultKey = False
#MFH - can't use IOError for fallback logic for a Mesh() call...
if self.engine.fileExists(os.path.join("themes", themename, "key.dae")):
engine.resource.load(self, "keyMesh", lambda: Mesh(engine.resource.fileName("themes", themename, "key.dae")))
else:
engine.resource.load(self, "keyMesh", lambda: Mesh(engine.resource.fileName("key.dae")))
defaultKey = True
if defaultKey:
self.keytex = False
else:
for i in range(5):
if engine.loadImgDrawing(self, "keytex"+chr(97+i), os.path.join("themes", themename, "keytex_"+chr(97+i)+".png")):
self.keytex = True
else:
self.keytex = False
break
#inkk: loading theme-dependant tail images
#myfingershurt: must ensure the new tails don't affect the Rock Band mod...
self.simpleTails = False
for i in range(0,7):
if not engine.loadImgDrawing(self, "tail"+str(i), os.path.join("themes",themename,"tails","tail"+str(i)+".png"), textureSize = (128, 128)):
self.simpleTails = True
break
if not engine.loadImgDrawing(self, "taile"+str(i), os.path.join("themes",themename,"tails","taile"+str(i)+".png"), textureSize = (128, 128)):
self.simpleTails = True
break
if not engine.loadImgDrawing(self, "btail"+str(i), os.path.join("themes",themename,"tails","btail"+str(i)+".png"), textureSize = (128, 128)):
self.simpleTails = True
break
if not engine.loadImgDrawing(self, "btaile"+str(i), os.path.join("themes",themename,"tails","btaile"+str(i)+".png"), textureSize = (128, 128)):
self.simpleTails = True
break
if self.simpleTails:
Log.debug("Simple tails used; complex tail loading error...")
if not engine.loadImgDrawing(self, "tail1", os.path.join("themes",themename,"tail1.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "tail1", "tail1.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "tail2", os.path.join("themes",themename,"tail2.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "tail2", "tail2.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "bigTail1", os.path.join("themes",themename,"bigtail1.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "bigTail1", "bigtail1.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "bigTail2", os.path.join("themes",themename,"bigtail2.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "bigTail2", "bigtail2.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "kill1", os.path.join("themes", themename, "kill1.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "kill1", "kill1.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "kill2", os.path.join("themes", themename, "kill2.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "kill2", "kill2.png", textureSize = (128, 128))
#MFH - freestyle tails (for drum fills & BREs)
if not engine.loadImgDrawing(self, "freestyle1", os.path.join("themes", themename, "freestyletail1.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "freestyle1", "freestyletail1.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "freestyle2", os.path.join("themes", themename, "freestyletail2.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "freestyle2", "freestyletail2.png", textureSize = (128, 128))
self.twoChordMax = False
self.rockLevel = 0.0
self.neck = Neck(self.engine, self, playerObj)
def selectPreviousString(self):
self.selectedString = (self.selectedString - 1) % self.strings
def selectString(self, string):
self.selectedString = string % self.strings
def selectNextString(self):
self.selectedString = (self.selectedString + 1) % self.strings
def noteBeingHeld(self):
noteHeld = False
for i in range(0,5):
if self.hit[i] == True:
noteHeld = True
return noteHeld
def isKillswitchPossible(self):
possible = False
for i in range(0,5):
if self.hit[i] == True:
possible = True
return possible
def renderTail(self, length, sustain, kill, color, flat = False, tailOnly = False, isTappable = False, big = False, fret = 0, spNote = False, freestyleTail = 0, pos = 0):
#volshebnyi - if freestyleTail == 0, act normally.
# if freestyleTail == 1, render a freestyle tail
# if freestyleTail == 2, render a highlighted freestyle tail
if not self.simpleTails:#Tail Colors
tailcol = (1,1,1, color[3])
else:
if big == False and tailOnly == True:
tailcol = (.6, .6, .6, color[3])
else:
tailcol = (color)
#volshebnyi - tail color when sp is active
if self.starPowerActive and self.theme != 2 and not color == (0,0,0,1):#8bit
c = self.fretColors[5]
tailcol = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], color[3])
if flat:
tailscale = (1, .1, 1)
else:
tailscale = None
if sustain:
if length is not None:
size = (.08, length)
if size[1] > self.boardLength:
s = self.boardLength
else:
s = length
# if freestyleTail == 1, render freestyle tail
if freestyleTail == 0: #normal tail rendering
#myfingershurt: so any theme containing appropriate files can use new tails
if not self.simpleTails:
if big == True and tailOnly == True:
if kill and self.killfx == 0:
zsize = .25
tex1 = self.kill1
tex2 = self.kill2
#volshebnyi - killswitch tail width and color change
kEffect = ( math.sin( pos / 50 ) + 1 ) /2
size = (0.02+kEffect*0.15, s - zsize)
c = [self.killColor[0],self.killColor[1],self.killColor[2]]
if c != [0,0,0]:
for i in range(0,3):
c[i]=c[i]*kEffect+color[i]*(1-kEffect)
tailcol = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1)
else:
zsize = .25
size = (.17, s - zsize)
if self.starPowerActive and not color == (0,0,0,1):
tex1 = self.btail6
tex2 = self.btaile6
else:
if fret == 0:
tex1 = self.btail1
tex2 = self.btaile1
elif fret == 1:
tex1 = self.btail2
tex2 = self.btaile2
elif fret == 2:
tex1 = self.btail3
tex2 = self.btaile3
elif fret == 3:
tex1 = self.btail4
tex2 = self.btaile4
elif fret == 4:
tex1 = self.btail5
tex2 = self.btaile5
else:
zsize = .15
size = (.1, s - zsize)
if tailOnly:#Note let go
tex1 = self.tail0
tex2 = self.taile0
else:
if self.starPowerActive and not color == (0,0,0,1):
tex1 = self.tail6
tex2 = self.taile6
else:
if fret == 0:
tex1 = self.tail1
tex2 = self.taile1
elif fret == 1:
tex1 = self.tail2
tex2 = self.taile2
elif fret == 2:
tex1 = self.tail3
tex2 = self.taile3
elif fret == 3:
tex1 = self.tail4
tex2 = self.taile4
elif fret == 4:
tex1 = self.tail5
tex2 = self.taile5
else:
if big == True and tailOnly == True:
if kill:
zsize = .25
tex1 = self.kill1
tex2 = self.kill2
#volshebnyi - killswitch tail width and color change
kEffect = ( math.sin( pos / 50 ) + 1 ) /2
size = (0.02+kEffect*0.15, s - zsize)
c = [self.killColor[0],self.killColor[1],self.killColor[2]]
if c != [0,0,0]:
for i in range(0,3):
c[i]=c[i]*kEffect+color[i]*(1-kEffect)
tailcol = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1)
else:
zsize = .25
size = (.11, s - zsize)
tex1 = self.bigTail1
tex2 = self.bigTail2
else:
zsize = .15
size = (.08, s - zsize)
tex1 = self.tail1
tex2 = self.tail2
else: #freestyleTail > 0
# render an inactive freestyle tail (self.freestyle1 & self.freestyle2)
zsize = .25
if self.freestyleActive:
size = (.30, s - zsize) #was .15
else:
size = (.15, s - zsize)
tex1 = self.freestyle1
tex2 = self.freestyle2
if freestyleTail == 1:
#glColor4f(*color)
c1, c2, c3, c4 = color
tailGlow = 1 - (pos - self.freestyleLastFretHitTime[fret] ) / self.freestylePeriod
if tailGlow < 0:
tailGlow = 0
color = (c1 + c1*2.0*tailGlow, c2 + c2*2.0*tailGlow, c3 + c3*2.0*tailGlow, c4*0.6 + c4*0.4*tailGlow) #MFH - this fades inactive tails' color darker
tailcol = (color)
if self.theme == 2 and freestyleTail == 0 and big and tailOnly and shaders.enable("tail"):
color = (color[0]*1.5,color[1]*1.5,color[2]*1.5,1.0)
shaders.setVar("color",color)
if kill and self.killfx == 0:
h = shaders.getVar("height")
shaders.modVar("height",0.5,0.06/h-0.1)
shaders.setVar("offset",(5.0-size[1],0.0))
size=(size[0]*15,size[1])
self.engine.draw3Dtex(tex1, vertex = (-size[0], 0, size[0], size[1]), texcoord = (0.0, 0.0, 1.0, 1.0),
scale = tailscale, color = tailcol)
self.engine.draw3Dtex(tex2, vertex = (-size[0], size[1], size[0], size[1] + (zsize)),
scale = tailscale, texcoord = (0.0, 0.05, 1.0, 0.95), color = tailcol)
shaders.disable()
#MFH - this block of code renders the tail "beginning" - before the note, for freestyle "lanes" only
#volshebnyi
if freestyleTail > 0 and pos < self.freestyleStart + self.freestyleLength:
self.engine.draw3Dtex(tex2, vertex = (-size[0], 0-(zsize), size[0], 0 + (.05)),
scale = tailscale, texcoord = (0.0, 0.95, 1.0, 0.05), color = tailcol)
if tailOnly:
return
def renderNote(self, length, sustain, kill, color, flat = False, tailOnly = False, isTappable = False, big = False, fret = 0, spNote = False):
if flat:
glScalef(1, .1, 1)
if tailOnly:
return
if self.twoDnote == True:
#myfingershurt: this should be retrieved once at init, not repeatedly in-game whenever tails are rendered.
if self.notedisappear == True:#Notes keep on going when missed
notecol = (1,1,1)#capo
else:
if flat:#Notes disappear when missed
notecol = (.1,.1,.1)
else:
notecol = (1,1,1)
tailOnly = True
if self.theme < 2:
if self.starspin:
size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2)
texSize = (fret/5.0,fret/5.0+0.2)
if spNote == True:
if isTappable:
texY = (0.150+self.starSpinFrameIndex*0.05, 0.175+self.starSpinFrameIndex*0.05)
else:
texY = (0.125+self.starSpinFrameIndex*0.05, 0.150+self.starSpinFrameIndex*0.05)
else:
if isTappable:
texY = (0.025,0.05)
else:
texY = (0,0.025)
if self.starPowerActive:
texY = (0.10,0.125) #QQstarS
if isTappable:
texSize = (0.2,0.4)
else:
texSize = (0,0.2)
else:
size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2)
texSize = (fret/5.0,fret/5.0+0.2)
if spNote == True:
if isTappable:
texY = (0.6, 0.8)
else:
texY = (0.4,0.6)
else:
if isTappable:
texY = (0.2,0.4)
else:
texY = (0,0.2)
if self.starPowerActive:
texY = (0.8,1)
if isTappable:
texSize = (0.2,0.4)
else:
texSize = (0,0.2)
elif self.theme == 2:
size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2)
texSize = (fret/5.0,fret/5.0+0.2)
if spNote == True:
if isTappable:
texY = (3*0.166667, 4*0.166667)
else:
texY = (2*0.166667, 3*0.166667)
else:
if isTappable:
texY = (1*0.166667, 2*0.166667)
else:
texY = (0, 1*0.166667)
#myfingershurt: adding spNote==False conditional so that star notes can appear in overdrive
if self.starPowerActive and spNote == False:
if isTappable:
texY = (5*0.166667, 1)
else:
texY = (4*0.166667, 5*0.166667)
self.engine.draw3Dtex(self.noteButtons, vertex = (-size[0],size[1],size[0],-size[1]), texcoord = (texSize[0],texY[0],texSize[1],texY[1]),
scale = (1,1,0), rot = (30,1,0,0), multiples = True, color = color, vertscale = .27)
else:
shaders.setVar("Material",color,"notes")
#mesh = outer ring (black)
#mesh_001 = main note (key color)
#mesh_002 = top (spot or hopo if no mesh_003)
#mesh_003 = hopo bump (hopo color)
if spNote == True and self.starMesh is not None:
meshObj = self.starMesh
else:
meshObj = self.noteMesh
glPushMatrix()
glEnable(GL_DEPTH_TEST)
glDepthMask(1)
glShadeModel(GL_SMOOTH)
if self.noterotate:
glRotatef(90, 0, 1, 0)
glRotatef(-90, 1, 0, 0)
if spNote == True and self.threeDspin == True:
glRotate(90 + self.time/3, 0, 1, 0)
#death_au: fixed 3D note colours
#volshebnyi - note color when sp is active
glColor4f(*color)
if self.starPowerActive and self.theme != 2 and not color == (0,0,0,1):
c = self.fretColors[5]
glColor4f(.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1)
if fret == 0: # green note
glRotate(self.engine.theme.noterot[0], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[0], 0)
elif fret == 1: # red note
glRotate(self.engine.theme.noterot[1], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[1], 0)
elif fret == 2: # yellow
glRotate(self.engine.theme.noterot[2], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[2], 0)
elif fret == 3:# blue note
glRotate(self.engine.theme.noterot[3], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[3], 0)
elif fret == 4:# orange note
glRotate(self.engine.theme.noterot[4], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[4], 0)
if self.staratex == True and self.starPowerActive and spNote == False:
glColor3f(1,1,1)
glEnable(GL_TEXTURE_2D)
getattr(self,"staratex"+chr(97+fret)).texture.bind()
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
glScalef(self.boardScaleX, self.boardScaleY, 1)
if isTappable:
mesh = "Mesh_001"
else:
mesh = "Mesh"
meshObj.render(mesh)
if shaders.enable("notes"):
shaders.setVar("isTextured",True)
meshObj.render(mesh)
shaders.disable()
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
elif self.notetex == True and spNote == False:
glColor3f(1,1,1)
glEnable(GL_TEXTURE_2D)
getattr(self,"notetex"+chr(97+fret)).texture.bind()
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
glScalef(self.boardScaleX, self.boardScaleY, 1)
if isTappable:
mesh = "Mesh_001"
else:
mesh = "Mesh"
meshObj.render(mesh)
if shaders.enable("notes"):
shaders.setVar("isTextured",True)
meshObj.render(mesh)
shaders.disable()
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
elif self.startex == True and spNote == True:
glColor3f(1,1,1)
glEnable(GL_TEXTURE_2D)
getattr(self,"startex"+chr(97+fret)).texture.bind()
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
glScalef(self.boardScaleX, self.boardScaleY, 1)
if isTappable:
mesh = "Mesh_001"
else:
mesh = "Mesh"
meshObj.render(mesh)
if shaders.enable("notes"):
shaders.setVar("isTextured",True)
meshObj.render(mesh)
shaders.disable()
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
else:
if shaders.enable("notes"):
shaders.setVar("isTextured",False)
meshObj.render("Mesh_001")
shaders.disable()
glColor3f(self.spotColor[0], self.spotColor[1], self.spotColor[2])
if isTappable:
if self.hopoColor[0] == -2:
glColor4f(*color)
else:
glColor3f(self.hopoColor[0], self.hopoColor[1], self.hopoColor[2])
if(meshObj.find("Mesh_003")) == True:
meshObj.render("Mesh_003")
glColor3f(self.spotColor[0], self.spotColor[1], self.spotColor[2])
meshObj.render("Mesh_002")
glColor3f(self.meshColor[0], self.meshColor[1], self.meshColor[2])
meshObj.render("Mesh")
glDepthMask(0)
glPopMatrix()
def renderFreestyleLanes(self, visibility, song, pos):
if not song:
return
if not song.readyToGo:
return
#boardWindowMin = pos - self.currentPeriod * 2
boardWindowMax = pos + self.currentPeriod * self.beatsPerBoard
track = song.midiEventTrack[self.player]
#MFH - render 5 freestyle tails when Song.freestyleMarkingNote comes up
if self.freestyleEnabled:
freestyleActive = False
#for time, event in track.getEvents(boardWindowMin, boardWindowMax):
for time, event in track.getEvents(pos - self.freestyleOffset , boardWindowMax + self.freestyleOffset):
if isinstance(event, Song.MarkerNote):
if event.number == Song.freestyleMarkingNote:
length = (event.length - 50) / self.currentPeriod / self.beatsPerUnit
w = self.boardWidth / self.strings
self.freestyleLength = event.length #volshebnyi
self.freestyleStart = time # volshebnyi
z = ((time - pos) / self.currentPeriod) / self.beatsPerUnit
z2 = ((time + event.length - pos) / self.currentPeriod) / self.beatsPerUnit
if z > self.boardLength * .8:
f = (self.boardLength - z) / (self.boardLength * .2)
elif z < 0:
f = min(1, max(0, 1 + z2))
else:
f = 1.0
#MFH - must extend the tail past the first fretboard section dynamically so we don't have to render the entire length at once
#volshebnyi - allow tail to move under frets
if time - self.freestyleOffset < pos:
freestyleActive = True
if z < -1.5:
length += z +1.5
z = -1.5
#MFH - render 5 freestyle tails
for theFret in range(0,5):
x = (self.strings / 2 - theFret) * w
c = self.fretColors[theFret]
color = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1 * visibility * f)
glPushMatrix()
glTranslatef(x, (1.0 - visibility) ** (theFret + 1), z)
freestyleTailMode = 1
self.renderTail(length, sustain = True, kill = False, color = color, flat = False, tailOnly = True, isTappable = False, big = True, fret = theFret, spNote = False, freestyleTail = freestyleTailMode, pos = pos)
glPopMatrix()
self.freestyleActive = freestyleActive
def renderNotes(self, visibility, song, pos, killswitch):
if not song:
return
if not song.readyToGo:
return
# Update dynamic period
self.currentPeriod = self.neckSpeed
#self.targetPeriod = self.neckSpeed
self.killPoints = False
w = self.boardWidth / self.strings
track = song.track[self.player]
num = 0
enable = True
starEventsInView = False
renderedNotes = reversed(self.getRequiredNotesForRender(song,pos))
for time, event in renderedNotes:
#for time, event in reversed(track.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)): #MFH - reverse order of note rendering
if isinstance(event, Tempo):
self.tempoBpm = event.bpm
if self.lastBpmChange > 0 and self.disableVBPM == True:
continue
if (pos - time > self.currentPeriod or self.lastBpmChange < 0) and time > self.lastBpmChange:
self.baseBeat += (time - self.lastBpmChange) / self.currentPeriod
self.targetBpm = event.bpm
self.lastBpmChange = time
self.neck.lastBpmChange = time
self.neck.baseBeat = self.baseBeat
# self.setBPM(self.targetBpm) # glorandwarf: was setDynamicBPM(self.targetBpm)
continue
if not isinstance(event, Note):
continue
if (event.noteBpm == 0.0):
event.noteBpm = self.tempoBpm
if self.coOpFailed:
if self.coOpRestart:
if time - self.coOpRescueTime < (self.currentPeriod * self.beatsPerBoard * 2):
continue
elif self.coOpRescueTime + (self.currentPeriod * self.beatsPerBoard * 2) < pos:
self.coOpFailed = False
self.coOpRestart = False
Log.debug("Turning off coOpFailed. Rescue successful.")
else:
continue #can't break. Tempo.
c = self.fretColors[event.number]
x = (self.strings / 2 - event.number) * w
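# z/z2 are the note head/tail distances from the fret line in board
# units; f attenuates visibility in the far 20% of the board and again
# once the note has crossed the fret line (z < 0).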
z = ((time - pos) / self.currentPeriod) / self.beatsPerUnit
z2 = ((time + event.length - pos) / self.currentPeriod) / self.beatsPerUnit
if z > self.boardLength * .8:
f = (self.boardLength - z) / (self.boardLength * .2)
elif z < 0:
f = min(1, max(0, 1 + z2))
else:
f = 1.0
#volshebnyi - hide notes in BRE zone if BRE enabled
if self.freestyleEnabled and self.freestyleStart > 0:
if time >= self.freestyleStart-self.freestyleOffset and time < self.freestyleStart + self.freestyleLength+self.freestyleOffset:
z = -2.0
if self.twoDnote == True and not self.useFretColors:
color = (1,1,1, 1 * visibility * f)
else:
color = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1 * visibility * f)
if event.length > 120:
length = (event.length - 50) / self.currentPeriod / self.beatsPerUnit
else:
length = 0
flat = False
tailOnly = False
spNote = False
#myfingershurt: user setting for starpower refill / replenish notes
if self.starPowerActive:
if self.spRefillMode == 0: #mode 0 = no starpower / overdrive refill notes
self.spEnabled = False
elif self.spRefillMode == 1 and self.theme != 2: #mode 1 = overdrive refill notes in RB themes only
self.spEnabled = False
elif self.spRefillMode == 2 and song.midiStyle != 1: #mode 2 = refill based on MIDI type
self.spEnabled = False
if event.star:
#self.isStarPhrase = True
starEventsInView = True
if event.finalStar:
self.finalStarSeen = True
starEventsInView = True
if event.star and self.spEnabled:
spNote = True
if event.finalStar and self.spEnabled:
spNote = True
if event.played or event.hopod:
if event.flameCount < 1 and not self.starPowerGained:
Log.debug("star power added")
if self.gameMode2p == 6:
if self.battleSuddenDeath:
self.battleObjects = [1] + self.battleObjects[:2]
else:
self.battleObjects = [self.battleObjectsEnabled[random.randint(0,len(self.battleObjectsEnabled)-1)]] + self.battleObjects[:2]
self.battleGetTime = pos
self.battleObjectGained = True
Log.debug("Battle Object Gained, Objects %s" % str(self.battleObjects))
else:
if self.starPower < 100:
self.starPower += 25
if self.starPower > 100:
self.starPower = 100
self.neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
self.starPowerGained = True
if event.tappable < 2:
isTappable = False
else:
isTappable = True
# Clip the played notes to the origin
#myfingershurt: this should be loaded once at init, not every render...
if self.notedisappear == True:#Notes keep on going when missed
###Capo###
if event.played or event.hopod:
tailOnly = True
length += z
z = 0
if length <= 0:
continue
if z < 0 and not (event.played or event.hopod):
color = (.6, .6, .6, .5 * visibility * f)
flat = True
###endCapo###
else:#Notes disappear when missed
if z < 0:
if event.played or event.hopod:
tailOnly = True
length += z
z = 0
if length <= 0:
continue
else:
color = (.6, .6, .6, .5 * visibility * f)
flat = True
big = False
self.bigMax = 0
for i in range(0,5):
if self.hit[i]:
big = True
self.bigMax += 1
#MFH - filter out this tail whitening when starpower notes have been disabled by a screwup
if self.spEnabled and killswitch:
if event.star or event.finalStar:
if big == True and tailOnly == True:
self.killPoints = True
color = (1,1,1,1)
if z + length < -1.0:
continue
if event.length <= 120:
length = None
sustain = False
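# Sustain when the note lasts longer than 1.4 sixteenth-note lengths
# (60000.0 / noteBpm is the beat length in milliseconds).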
if event.length > (1.4 * (60000.0 / event.noteBpm) / 4):
sustain = True
glPushMatrix()
glTranslatef(x, (1.0 - visibility) ** (event.number + 1), z)
if shaders.turnon:
shaders.setVar("note_position",(x, (1.0 - visibility) ** (event.number + 1), z),"notes")
if self.battleStatus[8]:
renderNote = random.randint(0,2)
else:
renderNote = 0
if renderNote == 0:
if big == True and num < self.bigMax:
num += 1
self.renderNote(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, big = True, fret = event.number, spNote = spNote)
else:
self.renderNote(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, fret = event.number, spNote = spNote)
glPopMatrix()
if (not starEventsInView and self.finalStarSeen):
self.spEnabled = True
self.finalStarSeen = False
self.isStarPhrase = False
def renderTails(self, visibility, song, pos, killswitch):
if not song:
return
if not song.readyToGo:
return
# Update dynamic period
self.currentPeriod = self.neckSpeed
#self.targetPeriod = self.neckSpeed
self.killPoints = False
w = self.boardWidth / self.strings
track = song.track[self.player]
num = 0
enable = True
renderedNotes = self.getRequiredNotesForRender(song,pos)
for time, event in renderedNotes:
#for time, event in track.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard):
if isinstance(event, Tempo):
self.tempoBpm = event.bpm
continue
if not isinstance(event, Note):
continue
if (event.noteBpm == 0.0):
event.noteBpm = self.tempoBpm
if self.coOpFailed:
if self.coOpRestart:
if time - self.coOpRescueTime < (self.currentPeriod * self.beatsPerBoard * 2):
continue
elif self.coOpRescueTime + (self.currentPeriod * self.beatsPerBoard * 2) < pos:
self.coOpFailed = False
self.coOpRestart = False
Log.debug("Turning off coOpFailed. Rescue successful.")
else:
continue
c = self.fretColors[event.number]
x = (self.strings / 2 - event.number) * w
z = ((time - pos) / self.currentPeriod) / self.beatsPerUnit
z2 = ((time + event.length - pos) / self.currentPeriod) / self.beatsPerUnit
if z > self.boardLength * .8:
f = (self.boardLength - z) / (self.boardLength * .2)
elif z < 0:
f = min(1, max(0, 1 + z2))
else:
f = 1.0
color = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1 * visibility * f)
if event.length > 120:
length = (event.length - 50) / self.currentPeriod / self.beatsPerUnit
else:
length = 0
flat = False
tailOnly = False
spNote = False
#myfingershurt: user setting for starpower refill / replenish notes
if event.star and self.spEnabled:
spNote = True
if event.finalStar and self.spEnabled:
spNote = True
if event.played or event.hopod:
if event.flameCount < 1 and not self.starPowerGained:
if self.gameMode2p == 6:
if self.battleSuddenDeath:
self.battleObjects = [1] + self.battleObjects[:2]
else:
self.battleObjects = [self.battleObjectsEnabled[random.randint(0,len(self.battleObjectsEnabled)-1)]] + self.battleObjects[:2]
self.battleGetTime = pos
self.battleObjectGained = True
Log.debug("Battle Object Gained, Objects %s" % str(self.battleObjects))
else:
if self.starPower < 100:
self.starPower += 25
if self.starPower > 100:
self.starPower = 100
self.neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
self.starPowerGained = True
self.neck.ocount = 0
if event.tappable < 2:
isTappable = False
else:
isTappable = True
# Clip the played notes to the origin
#myfingershurt: this should be loaded once at init, not every render...
if self.notedisappear == True:#Notes keep on going when missed
###Capo###
if event.played or event.hopod:
tailOnly = True
length += z
z = 0
if length <= 0:
continue
if z < 0 and not (event.played or event.hopod):
color = (.6, .6, .6, .5 * visibility * f)
flat = True
###endCapo###
else:#Notes disappear when missed
if z < 0:
if event.played or event.hopod:
tailOnly = True
length += z
z = 0
if length <= 0:
continue
else:
color = (.6, .6, .6, .5 * visibility * f)
flat = True
big = False
self.bigMax = 0
for i in range(0,5):
if self.hit[i]:
big = True
self.bigMax += 1
if self.spEnabled and killswitch:
if event.star or event.finalStar:
if big == True and tailOnly == True:
self.killPoints = True
color = (1,1,1,1)
if z + length < -1.0:
continue
if event.length <= 120:
length = None
sustain = False
if event.length > (1.4 * (60000.0 / event.noteBpm) / 4):
sustain = True
glPushMatrix()
glTranslatef(x, (1.0 - visibility) ** (event.number + 1), z)
if self.battleStatus[8]:
renderNote = random.randint(0,2)
else:
renderNote = 0
if renderNote == 0:
if big == True and num < self.bigMax:
num += 1
self.renderTail(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, big = True, fret = event.number, spNote = spNote, pos = pos)
else:
self.renderTail(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, fret = event.number, spNote = spNote, pos = pos)
glPopMatrix()
if killswitch and self.killfx == 1:
glBlendFunc(GL_SRC_ALPHA, GL_ONE)
for time, event in self.playedNotes:
step = self.currentPeriod / 16
t = time + event.length
x = (self.strings / 2 - event.number) * w
c = self.fretColors[event.number]
s = t
proj = 1.0 / self.currentPeriod / self.beatsPerUnit
zStep = step * proj
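# waveForm(t) gives the sideways displacement of the killswitch wobble
# along the tail: two phase-shifted sinusoids plus a damped sin(u)/u
# term that dies out away from the hit point.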
def waveForm(t):
u = ((t - time) * -.1 + pos - time) / 64.0 + .0001
return (math.sin(event.number + self.time * -.01 + t * .03) + math.cos(event.number + self.time * .01 + t * .02)) * .1 + .1 + math.sin(u) / (5 * u)
glBegin(GL_TRIANGLE_STRIP)
f1 = 0
while t > time:
if ((t-pos)*proj) < self.boardLength:
z = (t - pos) * proj
else:
z = self.boardLength
if z < 0:
break
f2 = min((s - t) / (6 * step), 1.0)
a1 = waveForm(t) * f1
a2 = waveForm(t - step) * f2
if self.starPowerActive and self.theme != 2:#8bit
glColor4f(self.spColor[0],self.spColor[1],self.spColor[2],1) #(.3,.7,.9,1)
else:
glColor4f(c[0], c[1], c[2], .5)
glVertex3f(x - a1, 0, z)
glVertex3f(x - a2, 0, z - zStep)
glColor4f(1, 1, 1, .75)
glVertex3f(x, 0, z)
glVertex3f(x, 0, z - zStep)
if self.starPowerActive and self.theme != 2:#8bit
glColor4f(self.spColor[0],self.spColor[1],self.spColor[2],1) #(.3,.7,.9,1)
else:
glColor4f(c[0], c[1], c[2], .5)
glVertex3f(x + a1, 0, z)
glVertex3f(x + a2, 0, z - zStep)
glVertex3f(x + a2, 0, z - zStep)
glVertex3f(x - a2, 0, z - zStep)
t -= step
f1 = f2
glEnd()
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
def renderFrets(self, visibility, song, controls):
w = self.boardWidth / self.strings
size = (.22, .22)
v = 1.0 - visibility
glEnable(GL_DEPTH_TEST)
#Hitglow color option - myfingershurt sez this should be a Guitar class global, not retrieved every fret render in-game...
for n in range(self.strings):
f = self.fretWeight[n]
c = self.fretColors[n]
if f and (controls.getState(self.actions[0]) or controls.getState(self.actions[1])):
f += 0.25
glColor4f(.1 + .8 * c[0] + f, .1 + .8 * c[1] + f, .1 + .8 * c[2] + f, visibility)
if self.fretPress:
y = v + f / 6
else:
y = v / 6
x = (self.strings / 2 - n) * w
if self.twoDkeys == True:
if self.battleStatus[4]:
fretWhamOffset = self.battleWhammyNow * .15
fretColor = (1,1,1,.5)
else:
fretWhamOffset = 0
fretColor = (1,1,1,1)
size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2.4)
if self.battleStatus[3] and self.battleFrets != None and self.battleBreakString == n:
texSize = (n/5.0+.042,n/5.0+0.158)
size = (.30, .40)
fretPos = 8 - round((self.battleBreakNow/self.battleBreakLimit) * 8)
texY = (fretPos/8.0,(fretPos + 1.0)/8)
self.engine.draw3Dtex(self.battleFrets, vertex = (size[0],size[1],-size[0],-size[1]), texcoord = (texSize[0], texY[0], texSize[1], texY[1]),
coord = (x,v + .08 + fretWhamOffset,0), multiples = True,color = fretColor, depth = True)
else:
texSize = (n/5.0,n/5.0+0.2)
texY = (0.0,1.0/3.0)
if controls.getState(self.keys[n]) or controls.getState(self.keys[n+5]):
texY = (1.0/3.0,2.0/3.0)
if self.hit[n] or (self.battleStatus[3] and self.battleBreakString == n):
texY = (2.0/3.0,1.0)
self.engine.draw3Dtex(self.fretButtons, vertex = (size[0],size[1],-size[0],-size[1]), texcoord = (texSize[0], texY[0], texSize[1], texY[1]),
coord = (x,v + fretWhamOffset,0), multiples = True,color = fretColor, depth = True)
else:
if self.keyMesh:
glPushMatrix()
glDepthMask(1)
glEnable(GL_LIGHTING)
glEnable(GL_LIGHT0)
glShadeModel(GL_SMOOTH)
glRotatef(90, 0, 1, 0)
glLightfv(GL_LIGHT0, GL_POSITION, (5.0, 10.0, -10.0, 0.0))
glLightfv(GL_LIGHT0, GL_AMBIENT, (.2, .2, .2, 0.0))
glLightfv(GL_LIGHT0, GL_DIFFUSE, (1.0, 1.0, 1.0, 0.0))
glRotatef(-90, 1, 0, 0)
glRotatef(-90, 0, 0, 1)
if n == 0: #green fret button
glRotate(self.engine.theme.keyrot[0], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[0])
elif n == 1: #red fret button
glRotate(self.engine.theme.keyrot[1], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[1])
elif n == 2: #yellow fret button
glRotate(self.engine.theme.keyrot[2], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[2])
elif n == 3: #blue fret button
glRotate(self.engine.theme.keyrot[3], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[3])
elif n == 4: #orange fret button
glRotate(self.engine.theme.keyrot[4], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[4])
#Mesh - Main fret
#Key_001 - Top of fret (key_color)
#Key_002 - Bottom of fret (key2_color)
#Glow_001 - Only rendered when a note is hit along with the glow.svg
#if self.complexkey == True:
# glColor4f(.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], visibility)
# if self.battleStatus[4]:
# glTranslatef(x, y + self.battleWhammyNow * .15, 0)
# else:
# glTranslatef(x, y, 0)
if self.keytex == True:
glColor4f(1,1,1,visibility)
if self.battleStatus[4]:
glTranslatef(x, y + self.battleWhammyNow * .15, 0)
else:
glTranslatef(x, y, 0)
glEnable(GL_TEXTURE_2D)
getattr(self,"keytex"+chr(97+n)).texture.bind()
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
glScalef(self.boardScaleX, self.boardScaleY, 1)
if f and not self.hit[n]:
self.keyMesh.render("Mesh_001")
elif self.hit[n]:
self.keyMesh.render("Mesh_002")
else:
self.keyMesh.render("Mesh")
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
else:
glColor4f(.1 + .8 * c[0] + f, .1 + .8 * c[1] + f, .1 + .8 * c[2] + f, visibility)
if self.battleStatus[4]:
glTranslatef(x, y + self.battleWhammyNow * .15 + v * 6, 0)
else:
glTranslatef(x, y + v * 6, 0)
key = self.keyMesh
if(key.find("Glow_001")) == True:
key.render("Mesh")
if(key.find("Key_001")) == True:
glColor3f(self.keyColor[0], self.keyColor[1], self.keyColor[2])
key.render("Key_001")
if(key.find("Key_002")) == True:
glColor3f(self.key2Color[0], self.key2Color[1], self.key2Color[2])
key.render("Key_002")
else:
key.render()
glDisable(GL_LIGHTING)
glDisable(GL_LIGHT0)
glDepthMask(0)
glPopMatrix()
######################
f = self.fretActivity[n]
if f and self.disableFretSFX != True:
if self.glowColor[0] == -1:
s = 1.0
else:
s = 0.0
while s < 1:
ms = s * (math.sin(self.time) * .25 + 1)
if self.glowColor[0] == -2:
glColor3f(c[0] * (1 - ms), c[1] * (1 - ms), c[2] * (1 - ms))
else:
glColor3f(self.glowColor[0] * (1 - ms), self.glowColor[1] * (1 - ms), self.glowColor[2] * (1 - ms))
glPushMatrix()
if self.battleStatus[4]:
glTranslatef(x, y + self.battleWhammyNow * .15, 0)
else:
glTranslatef(x, y, 0)
glScalef(.1 + .02 * ms * f, .1 + .02 * ms * f, .1 + .02 * ms * f)
glRotatef( 90, 0, 1, 0)
glRotatef(-90, 1, 0, 0)
glRotatef(-90, 0, 0, 1)
if self.twoDkeys == False and self.keytex == False:
if(self.keyMesh.find("Glow_001")) == True:
key.render("Glow_001")
else:
key.render()
glPopMatrix()
s += 0.2
#Hitglow color
if self.hitglow_color == 0:
glowcol = (c[0], c[1], c[2])#Same as fret
elif self.hitglow_color == 1:
glowcol = (1, 1, 1)#Actual color in .svg-file
f += 2
if self.battleStatus[4]:
self.engine.draw3Dtex(self.glowDrawing, coord = (x, y + self.battleWhammyNow * .15, 0.01), rot = (f * 90 + self.time, 0, 1, 0),
texcoord = (0.0, 0.0, 1.0, 1.0), vertex = (-size[0] * f, -size[1] * f, size[0] * f, size[1] * f),
multiples = True, alpha = True, color = glowcol)
else:
self.engine.draw3Dtex(self.glowDrawing, coord = (x, y, 0.01), rot = (f * 90 + self.time, 0, 1, 0),
texcoord = (0.0, 0.0, 1.0, 1.0), vertex = (-size[0] * f, -size[1] * f, size[0] * f, size[1] * f),
multiples = True, alpha = True, color = glowcol)
#self.hit[n] = False #MFH -- why? This prevents frets from being rendered under / before the notes...
glDisable(GL_DEPTH_TEST)
def renderFreestyleFlames(self, visibility, controls):
if self.flameColors[0][0][0] == -1:
return
w = self.boardWidth / self.strings
#track = song.track[self.player]
size = (.22, .22)
v = 1.0 - visibility
if self.disableFlameSFX != True:
flameLimit = 10.0
flameLimitHalf = round(flameLimit/2.0)
for fretNum in range(self.strings):
if controls.getState(self.keys[fretNum]) or controls.getState(self.keys[fretNum+5]):
if self.freestyleHitFlameCounts[fretNum] < flameLimit:
ms = math.sin(self.time) * .25 + 1
x = (self.strings / 2 - fretNum) * w
ff = 1 + 0.25
y = v + ff / 6
if self.theme == 2:
y -= 0.5
#flameSize = self.flameSizes[self.scoreMultiplier - 1][fretNum]
flameSize = self.flameSizes[self.cappedScoreMult - 1][fretNum]
if self.theme == 0 or self.theme == 1: #THIS SETS UP GH3 COLOR, ELSE ROCKBAND(which is DEFAULT in Theme.py)
flameColor = self.gh3flameColor
else: #MFH - fixing crash!
#try:
# flameColor = self.flameColors[self.scoreMultiplier - 1][fretNum]
#except IndexError:
flameColor = self.fretColors[fretNum]
if flameColor[0] == -2:
flameColor = self.fretColors[fretNum]
ff += 1.5 #ff first time is 2.75 after this
if self.freestyleHitFlameCounts[fretNum] < flameLimitHalf:
flamecol = tuple([flameColor[ifc] for ifc in range(3)])
rbStarColor = (.1, .1, .2, .3)
xOffset = (.0, - .005, .005, .0)
yOffset = (.20, .255, .255, .255)
scaleMod = .6 * ms * ff
scaleFix = (6.0, 5.5, 5.0, 4.7)
for step in range(4):
if self.starPowerActive and self.theme < 2:
flamecol = self.spColor
else: #Default starcolor (Rockband)
flamecol = (rbStarColor[step],)*3
hfCount = self.freestyleHitFlameCounts[fretNum]
if step == 0:
hfCount += 1
self.engine.draw3Dtex(self.hitflames2Drawing, coord = (x+xOffset[step], y+yOffset[step], 0), rot = (90, 1, 0, 0),
scale = (.25 + .05 * step + scaleMod, hfCount/scaleFix[step] + scaleMod, hfCount/scaleFix[step] + scaleMod),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
else:
flameColorMod = 0.1 * (flameLimit - self.freestyleHitFlameCounts[fretNum])
flamecol = tuple([flameColor[ifc]*flameColorMod for ifc in range(3)])
xOffset = (.0, - .005, .005, .005)
yOffset = (.35, .405, .355, .355)
scaleMod = .6 * ms * ff
scaleFix = (3.0, 2.5, 2.0, 1.7)
for step in range(4):
hfCount = self.freestyleHitFlameCounts[fretNum]
if step == 0:
hfCount += 1
else:
if self.starPowerActive and self.theme < 2:
flamecol = self.spColor
else: #Default starcolor (Rockband)
flamecol = (.4+.1*step,)*3
self.engine.draw3Dtex(self.hitflames1Drawing, coord = (x+xOffset[step], y+yOffset[step], 0), rot = (90, 1, 0, 0),
scale = (.25 + .05 * step + scaleMod, hfCount/scaleFix[step] + scaleMod, hfCount/scaleFix[step] + scaleMod),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
self.freestyleHitFlameCounts[fretNum] += 1
else: #MFH - flame count is done - reset it!
self.freestyleHitFlameCounts[fretNum] = 0 #MFH
def renderFlames(self, visibility, song, pos, controls):
if not song or self.flameColors[0][0][0] == -1:
return
w = self.boardWidth / self.strings
track = song.track[self.player]
size = (.22, .22)
v = 1.0 - visibility
if self.disableFlameSFX != True and (self.HCountAni == True and self.HCount2 > 12):
for n in range(self.strings):
f = self.fretWeight[n]
c = self.fretColors[n]
if f and (controls.getState(self.actions[0]) or controls.getState(self.actions[1])):
f += 0.25
y = v + f / 6
x = (self.strings / 2 - n) * w
f = self.fretActivity[n]
if f:
ms = math.sin(self.time) * .25 + 1
ff = f
ff += 1.2
#myfingershurt: need to cap flameSizes use of scoreMultiplier to 4x, the 5x and 6x bass groove mults cause crash:
self.cappedScoreMult = min(self.scoreMultiplier,4)
flameSize = self.flameSizes[self.cappedScoreMult - 1][n]
if self.theme == 0 or self.theme == 1: #THIS SETS UP GH3 COLOR, ELSE ROCKBAND(which is DEFAULT in Theme.py)
flameColor = self.gh3flameColor
else:
flameColor = self.flameColors[self.cappedScoreMult - 1][n]
flameColorMod = (1.19, 1.97, 10.59)
flamecol = tuple([flameColor[ifc]*flameColorMod[ifc] for ifc in range(3)])
if self.starPowerActive:
if self.theme == 0 or self.theme == 1: #GH3 starcolor
flamecol = self.spColor
else: #Default starcolor (Rockband)
flamecol = (.9,.9,.9)
if self.Hitanim != True:
self.engine.draw3Dtex(self.hitglowDrawing, coord = (x, y + .125, 0), rot = (90, 1, 0, 0),
scale = (0.5 + .6 * ms * ff, 1.5 + .6 * ms * ff, 1 + .6 * ms * ff),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
#Alarian: Animated hitflames
else:
self.HCount = self.HCount + 1
if self.HCount > self.Animspeed-1:
self.HCount = 0
HIndex = (self.HCount * 16 - (self.HCount * 16) % self.Animspeed) / self.Animspeed
if HIndex > 15:
HIndex = 0
texX = (HIndex*(1/16.0), HIndex*(1/16.0)+(1/16.0))
self.engine.draw3Dtex(self.hitglowAnim, coord = (x, y + .225, 0), rot = (90, 1, 0, 0), scale = (2.4, 1, 3.3),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (texX[0],0.0,texX[1],1.0), multiples = True, alpha = True, color = (1,1,1))
ff += .3
flameColorMod = (1.19, 1.78, 12.22)
flamecol = tuple([flameColor[ifc]*flameColorMod[ifc] for ifc in range(3)])
if self.starPowerActive:
if self.theme == 0 or self.theme == 1: #GH3 starcolor
flamecol = self.spColor
else: #Default starcolor (Rockband)
flamecol = (.8,.8,.8)
if self.Hitanim != True:
self.engine.draw3Dtex(self.hitglow2Drawing, coord = (x, y + .25, .05), rot = (90, 1, 0, 0),
scale = (.40 + .6 * ms * ff, 1.5 + .6 * ms * ff, 1 + .6 * ms * ff),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
if self.disableFlameSFX != True:
flameLimit = 10.0
flameLimitHalf = round(flameLimit/2.0)
renderedNotes = self.getRequiredNotesForRender(song,pos)
for time, event in renderedNotes:
if isinstance(event, Tempo):
continue
if not isinstance(event, Note):
continue
if (event.played or event.hopod) and event.flameCount < flameLimit:
ms = math.sin(self.time) * .25 + 1
x = (self.strings / 2 - event.number) * w
xlightning = (self.strings / 2 - event.number)*2.2*w
ff = 1 + 0.25
y = v + ff / 6
if self.theme == 2:
y -= 0.5
flameSize = self.flameSizes[self.cappedScoreMult - 1][event.number]
if self.theme == 0 or self.theme == 1: #THIS SETS UP GH3 COLOR, ELSE ROCKBAND(which is DEFAULT in Theme.py)
flameColor = self.gh3flameColor
else:
flameColor = self.flameColors[self.cappedScoreMult - 1][event.number]
if flameColor[0] == -2:
flameColor = self.fretColors[event.number]
ff += 1.5 #ff first time is 2.75 after this
if self.Hitanim2 == True:
self.HCount2 = self.HCount2 + 1
self.HCountAni = False
if self.HCount2 > 12:
if not event.length > (1.4 * (60000.0 / event.noteBpm) / 4):
self.HCount2 = 0
else:
self.HCountAni = True
if event.flameCount < flameLimitHalf:
HIndex = (self.HCount2 * 13 - (self.HCount2 * 13) % 13) / 13
if HIndex > 12 and self.HCountAni != True:
HIndex = 0
texX = (HIndex*(1/13.0), HIndex*(1/13.0)+(1/13.0))
self.engine.draw3Dtex(self.hitflamesAnim, coord = (x, y + .665, 0), rot = (90, 1, 0, 0), scale = (1.6, 1.6, 4.9),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (texX[0],0.0,texX[1],1.0), multiples = True, alpha = True, color = (1,1,1))
else:
flameColorMod = 0.1 * (flameLimit - event.flameCount)
flamecol = tuple([ifc*flameColorMod for ifc in flameColor])
scaleChange = (3.0,2.5,2.0,1.7)
yOffset = (.35, .405, .355, .355)
vtx = flameSize * ff
scaleMod = .6 * ms * ff
for step in range(4):
#draw lightning in GH themes on SP gain
if step == 0 and self.theme != 2 and event.finalStar and self.spEnabled:
self.engine.draw3Dtex(self.hitlightning, coord = (xlightning, y, 3.3), rot = (90, 1, 0, 0),
scale = (.15 + .5 * ms * ff, event.flameCount / 3.0 + .6 * ms * ff, 2), vertex = (.4,-2,-.4,2),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = (1,1,1))
continue
if step == 0:
yzscaleMod = event.flameCount/ scaleChange[step]
else:
yzscaleMod = (event.flameCount + 1)/ scaleChange[step]
if self.starPowerActive:
if self.theme == 0 or self.theme == 1:
spcolmod = .7+step*.1
flamecol = tuple([isp*spcolmod for isp in self.spColor])
else:
flamecol = (.4+step*.1,)*3#Default starcolor (Rockband)
if self.hitFlamesPresent == True:
self.engine.draw3Dtex(self.hitflames1Drawing, coord = (x - .005, y + yOffset[step], 0), rot = (90, 1, 0, 0),
scale = (.25 + step*.05 + scaleMod, yzscaleMod + scaleMod, yzscaleMod + scaleMod),
vertex = (-vtx,-vtx,vtx,vtx), texcoord = (0.0,0.0,1.0,1.0),
multiples = True, alpha = True, color = flamecol)
elif self.hitFlamesPresent == True and self.Hitanim2 == False:
self.HCount2 = 13
self.HCountAni = True
if event.flameCount < flameLimitHalf:
flamecol = flameColor
if self.starPowerActive:
if self.theme == 0 or self.theme == 1: #GH3 starcolor
spcolmod = .3
flamecol = tuple([isp*spcolmod for isp in self.spColor])
else: #Default starcolor (Rockband)
flamecol = (.1,.1,.1)
self.engine.draw3Dtex(self.hitflames2Drawing, coord = (x, y + .20, 0), rot = (90, 1, 0, 0),
scale = (.25 + .6 * ms * ff, event.flameCount/6.0 + .6 * ms * ff, event.flameCount / 6.0 + .6 * ms * ff),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff), texcoord = (0.0,0.0,1.0,1.0),
multiples = True, alpha = True, color = flamecol)
for i in range(3):
if self.starPowerActive:
if self.theme == 0 or self.theme == 1: #GH3 starcolor
spcolmod = 0.4+i*0.1
flamecol = tuple([isp*spcolmod for isp in self.spColor])
else: #Default starcolor (Rockband)
flamecol = (0.1+i*0.1,)*3
self.engine.draw3Dtex(self.hitflames2Drawing, coord = (x-.005, y + .255, 0), rot = (90, 1, 0, 0),
scale = (.30 + i*0.05 + .6 * ms * ff, event.flameCount/(5.5 - i*0.4) + .6 * ms * ff, event.flameCount / (5.5 - i*0.4) + .6 * ms * ff),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff), texcoord = (0.0,0.0,1.0,1.0),
multiples = True, alpha = True, color = flamecol)
else:
flameColorMod = 0.1 * (flameLimit - event.flameCount)
flamecol = tuple([ifc*flameColorMod for ifc in flameColor])
scaleChange = (3.0,2.5,2.0,1.7)
yOffset = (.35, .405, .355, .355)
vtx = flameSize * ff
scaleMod = .6 * ms * ff
for step in range(4):
#draw lightning in GH themes on SP gain
if step == 0 and self.theme != 2 and event.finalStar and self.spEnabled:
self.engine.draw3Dtex(self.hitlightning, coord = (xlightning, y, 3.3), rot = (90, 1, 0, 0),
scale = (.15 + .5 * ms * ff, event.flameCount / 3.0 + .6 * ms * ff, 2), vertex = (.4,-2,-.4,2),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = (1,1,1))
continue
if step == 0:
yzscaleMod = event.flameCount/ scaleChange[step]
else:
yzscaleMod = (event.flameCount + 1)/ scaleChange[step]
if self.starPowerActive:
if self.theme == 0 or self.theme == 1:
spcolmod = .7+step*.1
flamecol = tuple([isp*spcolmod for isp in self.spColor])
else:
flamecol = (.4+step*.1,)*3#Default starcolor (Rockband)
self.engine.draw3Dtex(self.hitflames1Drawing, coord = (x - .005, y + yOffset[step], 0), rot = (90, 1, 0, 0),
scale = (.25 + step*.05 + scaleMod, yzscaleMod + scaleMod, yzscaleMod + scaleMod),
vertex = (-vtx,-vtx,vtx,vtx), texcoord = (0.0,0.0,1.0,1.0),
multiples = True, alpha = True, color = flamecol)
event.flameCount += 1
def render(self, visibility, song, pos, controls, killswitch):
if shaders.turnon:
shaders.globals["dfActive"] = self.drumFillsActive
shaders.globals["breActive"] = self.freestyleActive
shaders.globals["rockLevel"] = self.rockLevel
if shaders.globals["killswitch"] != killswitch:
shaders.globals["killswitchPos"] = pos
shaders.globals["killswitch"] = killswitch
shaders.modVar("height",0.2,0.2,1.0,"tail")
if not self.starNotesSet:
self.totalNotes = 0
for time, event in song.track[self.player].getAllEvents():
if not isinstance(event, Note):
continue
self.totalNotes += 1
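# Fallback star-phrase marking: carve the song into totalNotes/120
# phrases, flag ten notes in each phrase as star notes, and flag the
# note right after them as that phrase's finalStar.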
stars = []
maxStars = []
maxPhrase = self.totalNotes/120
for q in range(0,maxPhrase):
for n in range(0,10):
stars.append(self.totalNotes/maxPhrase*(q)+n+maxPhrase/4)
maxStars.append(self.totalNotes/maxPhrase*(q)+10+maxPhrase/4)
i = 0
for time, event in song.track[self.player].getAllEvents():
if not isinstance(event, Note):
continue
for a in stars:
if i == a:
self.starNotes.append(time)
event.star = True
for a in maxStars:
if i == a:
self.maxStars.append(time)
event.finalStar = True
i += 1
for time, event in song.track[self.player].getAllEvents():
if not isinstance(event, Note):
continue
for q in self.starNotes:
if time == q:
event.star = True
for q in self.maxStars:
#if time == q and not event.finalStar:
# event.star = True
if time == q: #MFH - no need to mark only the final SP phrase note as the finalStar as in drums, they will be hit simultaneously here.
event.finalStar = True
self.starNotesSet = True
if not (self.coOpFailed and not self.coOpRestart):
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glEnable(GL_COLOR_MATERIAL)
if self.leftyMode:
if not self.battleStatus[6]:
glScalef(-1, 1, 1)
elif self.battleStatus[6]:
glScalef(-1, 1, 1)
if self.freestyleActive:
self.renderTails(visibility, song, pos, killswitch)
self.renderNotes(visibility, song, pos, killswitch)
self.renderFreestyleLanes(visibility, song, pos) #MFH - render the lanes on top of the notes.
self.renderFrets(visibility, song, controls)
if self.hitFlamesPresent: #MFH - only if present!
self.renderFreestyleFlames(visibility, controls) #MFH - freestyle hit flames
else:
self.renderTails(visibility, song, pos, killswitch)
if self.fretsUnderNotes: #MFH
if self.twoDnote == True:
self.renderFrets(visibility, song, controls)
self.renderNotes(visibility, song, pos, killswitch)
else:
self.renderNotes(visibility, song, pos, killswitch)
self.renderFrets(visibility, song, controls)
else:
self.renderNotes(visibility, song, pos, killswitch)
self.renderFrets(visibility, song, controls)
self.renderFreestyleLanes(visibility, song, pos) #MFH - render the lanes on top of the notes.
if self.hitFlamesPresent: #MFH - only if present!
self.renderFlames(visibility, song, pos, controls) #MFH - only when freestyle inactive!
if self.leftyMode:
if not self.battleStatus[6]:
glScalef(-1, 1, 1)
elif self.battleStatus[6]:
glScalef(-1, 1, 1)
#return notes
#MFH - corrected and optimized:
#def getRequiredNotesMFH(self, song, pos):
def getRequiredNotesMFH(self, song, pos, hopoTroubleCheck = False):
if self.battleStatus[2] and self.difficulty != 0:
if pos < self.battleStartTimes[2] + self.currentPeriod * self.beatsPerBoard or pos > self.battleStartTimes[2] - self.currentPeriod * self.beatsPerBoard + self.battleDiffUpLength:
song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue]
else:
song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue - 1]
track = song.track[self.player]
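# Normal mode returns unplayed notes inside the hit window
# [pos - lateMargin, pos + earlyMargin]; hopoTroubleCheck instead looks
# ahead by 2 * earlyMargin and drops the note at pos itself.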
if hopoTroubleCheck:
notes = [(time, event) for time, event in track.getEvents(pos, pos + (self.earlyMargin*2)) if isinstance(event, Note)]
notes = [(time, event) for time, event in notes if not time==pos] #MFH - filter out the problem note that caused this check!
else:
notes = [(time, event) for time, event in track.getEvents(pos - self.lateMargin, pos + self.earlyMargin) if isinstance(event, Note)]
notes = [(time, event) for time, event in notes if not (event.hopod or event.played or event.skipped)]
notes = [(time, event) for time, event in notes if (time >= (pos - self.lateMargin)) and (time <= (pos + self.earlyMargin))]
notes.sort(key=lambda x: x[0])
if self.battleStatus[7]:
notes = self.getDoubleNotes(notes)
return sorted(notes, key=lambda x: x[0]) #MFH - what the hell, this should be sorted by TIME not note number....
def getDoubleNotes(self, notes):
if self.battleStatus[7] and notes != []:
notes = sorted(notes, key=lambda x: x[0])
curTime = 0
tempnotes = []
tempnumbers = []
tempnote = None
curNumbers = []
noteCount = 0
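# Battle "double notes": each chord time is collapsed to a single note,
# and the repeated min/max blocks below choose the doubled fret: one
# above the chord's highest fret if possible, otherwise one below its
# lowest, otherwise fret 2. Chords of three or more notes get -1 and
# keep their original fret.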
for time, note in notes:
noteCount += 1
if not isinstance(note, Note):
if noteCount == len(notes) and len(curNumbers) < 3 and len(curNumbers) > 0:
maxNote = curNumbers[0]
minNote = curNumbers[0]
for i in range(0, len(curNumbers)):
if curNumbers[i] > maxNote:
maxNote = curNumbers[i]
if curNumbers[i] < minNote:
minNote = curNumbers[i]
curNumbers = []
if maxNote < 4:
tempnumbers.append(maxNote + 1)
elif minNote > 0:
tempnumbers.append(minNote - 1)
else:
tempnumbers.append(2)
elif noteCount == len(notes) and len(curNumbers) > 2:
tempnumbers.append(-1)
curNumbers = []
continue
if time != curTime:
if curTime != 0 and len(curNumbers) < 3:
maxNote = curNumbers[0]
minNote = curNumbers[0]
for i in range(0, len(curNumbers)):
if curNumbers[i] > maxNote:
maxNote = curNumbers[i]
if curNumbers[i] < minNote:
minNote = curNumbers[i]
curNumbers = []
if maxNote < 4:
tempnumbers.append(maxNote + 1)
elif minNote > 0:
tempnumbers.append(minNote - 1)
else:
tempnumbers.append(2)
elif (curTime != 0 or noteCount == len(notes)) and len(curNumbers) > 2:
tempnumbers.append(-1)
curNumbers = []
tempnotes.append((time,deepcopy(note)))
curTime = time
curNumbers.append(note.number)
if noteCount == len(notes) and len(curNumbers) < 3:
maxNote = curNumbers[0]
minNote = curNumbers[0]
for i in range(0, len(curNumbers)):
if curNumbers[i] > maxNote:
maxNote = curNumbers[i]
if curNumbers[i] < minNote:
minNote = curNumbers[i]
curNumbers = []
if maxNote < 4:
tempnumbers.append(maxNote + 1)
elif minNote > 0:
tempnumbers.append(minNote - 1)
else:
tempnumbers.append(2)
elif noteCount == len(notes) and len(curNumbers) > 2:
tempnumbers.append(-1)
curNumbers = []
else:
curNumbers.append(note.number)
if noteCount == len(notes) and len(curNumbers) < 3:
maxNote = max(curNumbers)
minNote = min(curNumbers)
curNumbers = []
if maxNote < 4:
tempnumbers.append(maxNote + 1)
elif minNote > 0:
tempnumbers.append(minNote - 1)
else:
tempnumbers.append(2)
elif noteCount == len(notes) and len(curNumbers) > 2:
tempnumbers.append(-1)
curNumbers = []
noteCount = 0
for time, note in tempnotes:
if tempnumbers[noteCount] != -1:
note.number = tempnumbers[noteCount]
noteCount += 1
if time > self.battleStartTimes[7] + self.currentPeriod * self.beatsPerBoard and time < self.battleStartTimes[7] - self.currentPeriod * self.beatsPerBoard + self.battleDoubleLength:
notes.append((time,note))
else:
noteCount += 1
return sorted(notes, key=lambda x: x[0])
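# Illustrative sketch (not part of the original code): getDoubleNotes widens
# 1- and 2-note groups into chords by adding one adjacent fret, chosen as in
# the branches above. A hypothetical helper mirroring that rule:
#
# def _extra_fret(numbers):
#     if max(numbers) < 4:
#         return max(numbers) + 1   # e.g. a lone green (0) gains fret 1
#     elif min(numbers) > 0:
#         return min(numbers) - 1   # e.g. a lone orange (4) gains fret 3
#     return 2                      # the group already spans frets 0 and 4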
def getRequiredNotesForRender(self, song, pos):
if self.battleStatus[2] and self.difficulty != 0:
Log.debug(self.battleDiffUpValue)
song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue]
track0 = song.track[self.player]
notes0 = [(time, event) for time, event in track0.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)]
song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue - 1]
track1 = song.track[self.player]
notes1 = [(time, event) for time, event in track1.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)]
notes = []
for time,note in notes0:
if time < self.battleStartTimes[2] + self.currentPeriod * self.beatsPerBoard or time > self.battleStartTimes[2] - self.currentPeriod * self.beatsPerBoard + self.battleDiffUpLength:
notes.append((time,note))
for time,note in notes1:
if time > self.battleStartTimes[2] + self.currentPeriod * self.beatsPerBoard and time < self.battleStartTimes[2] - self.currentPeriod * self.beatsPerBoard + self.battleDiffUpLength:
notes.append((time,note))
notes0 = None
notes1 = None
track0 = None
track1 = None
notes = sorted(notes, key=lambda x: x[0])
#Log.debug(notes)
else:
track = song.track[self.player]
notes = [(time, event) for time, event in track.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)]
if self.battleStatus[7]:
notes = self.getDoubleNotes(notes)
return notes
#MFH - corrected and optimized:
def getRequiredNotesForJurgenOnTime(self, song, pos):
track = song.track[self.player]
notes = [(time, event) for time, event in track.getEvents(pos - self.lateMargin, pos + 30) if isinstance(event, Note)]
notes = [(time, event) for time, event in notes if not (event.hopod or event.played or event.skipped)]
if self.battleStatus[7]:
notes = self.getDoubleNotes(notes)
return sorted(notes, key=lambda x: x[0]) #MFH - what the hell, this should be sorted by TIME not note number....
def controlsMatchNotes(self, controls, notes):
# no notes?
if not notes:
return False
# check each valid chord
chords = {}
for time, note in notes:
if not time in chords:
chords[time] = []
chords[time].append((time, note))
#Make sure the notes are in the right time order
chordlist = chords.values()
chordlist.sort(key=lambda a: a[0][0])
twochord = 0
for chord in chordlist:
# matching keys?
requiredKeys = [note.number for time, note in chord]
requiredKeys = self.uniqify(requiredKeys)
if len(requiredKeys) > 2 and self.twoChordMax == True:
twochord = 0
for k in self.keys:
if controls.getState(k):
twochord += 1
if twochord == 2:
skipped = len(requiredKeys) - 2
requiredKeys = [min(requiredKeys), max(requiredKeys)]
else:
twochord = 0
for n in range(self.strings):
if n in requiredKeys and not (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
return False
if not n in requiredKeys and (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
# The lower frets can be held down
if n > max(requiredKeys):
return False
if twochord != 0:
if twochord != 2:
for time, note in chord:
note.played = True
else:
self.twoChordApply = True
for time, note in chord:
note.skipped = True
chord[0][1].skipped = False
chord[-1][1].skipped = False
chord[0][1].played = True
chord[-1][1].played = True
if twochord == 2:
self.twoChord += skipped
return True
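# Illustrative note (added): with twoChordMax enabled, a required chord of
# three or more frets is accepted when exactly two frets are held: the
# required set collapses to its lowest and highest frets, the inner notes
# are marked skipped, and the skipped count accumulates in self.twoChord.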
def controlsMatchNotes2(self, controls, notes, hopo = False):
# no notes?
if not notes:
return False
# check each valid chord
chords = {}
for time, note in notes:
if note.hopod == True and (controls.getState(self.keys[note.number]) or controls.getState(self.keys[note.number + 5])):
#if hopo == True and controls.getState(self.keys[note.number]):
self.playedNotes = []
return True
if not time in chords:
chords[time] = []
chords[time].append((time, note))
#Make sure the notes are in the right time order
chordlist = chords.values()
chordlist.sort(key=lambda a: a[0][0])
twochord = 0
for chord in chordlist:
# matching keys?
requiredKeys = [note.number for time, note in chord]
requiredKeys = self.uniqify(requiredKeys)
if len(requiredKeys) > 2 and self.twoChordMax == True:
twochord = 0
for n, k in enumerate(self.keys):
if controls.getState(k):
twochord += 1
if twochord == 2:
skipped = len(requiredKeys) - 2
requiredKeys = [min(requiredKeys), max(requiredKeys)]
else:
twochord = 0
for n in range(self.strings):
if n in requiredKeys and not (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
return False
if not n in requiredKeys and (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
# The lower frets can be held down
if hopo == False and n >= min(requiredKeys):
return False
if twochord != 0:
if twochord != 2:
for time, note in chord:
note.played = True
else:
self.twoChordApply = True
for time, note in chord:
note.skipped = True
chord[0][1].skipped = False
chord[-1][1].skipped = False
chord[0][1].played = True
chord[-1][1].played = True
if twochord == 2:
self.twoChord += skipped
return True
def controlsMatchNotes3(self, controls, notes, hopo = False):
# no notes?
if not notes:
return False
# check each valid chord
chords = {}
for time, note in notes:
if note.hopod == True and (controls.getState(self.keys[note.number]) or controls.getState(self.keys[note.number + 5])):
#if hopo == True and controls.getState(self.keys[note.number]):
self.playedNotes = []
return True
if not time in chords:
chords[time] = []
chords[time].append((time, note))
#Make sure the notes are in the right time order
chordlist = chords.values()
#chordlist.sort(lambda a, b: cmp(a[0][0], b[0][0]))
chordlist.sort(key=lambda a: a[0][0])
self.missedNotes = []
self.missedNoteNums = []
twochord = 0
for chord in chordlist:
# matching keys?
requiredKeys = [note.number for time, note in chord]
requiredKeys = self.uniqify(requiredKeys)
if len(requiredKeys) > 2 and self.twoChordMax == True:
twochord = 0
for n, k in enumerate(self.keys):
if controls.getState(k):
twochord += 1
if twochord == 2:
skipped = len(requiredKeys) - 2
requiredKeys = [min(requiredKeys), max(requiredKeys)]
else:
twochord = 0
if (self.controlsMatchNote3(controls, chord, requiredKeys, hopo)):
if twochord != 2:
for time, note in chord:
note.played = True
else:
self.twoChordApply = True
for time, note in chord:
note.skipped = True
chord[0][1].skipped = False
chord[-1][1].skipped = False
chord[0][1].played = True
chord[-1][1].played = True
break
if hopo == True:
break
self.missedNotes.append(chord)
else:
self.missedNotes = []
self.missedNoteNums = []
for chord in self.missedNotes:
for time, note in chord:
if self.debugMode:
self.missedNoteNums.append(note.number)
note.skipped = True
note.played = False
if twochord == 2:
self.twoChord += skipped
return True
#MFH - special function for HOPO intentions checking
def controlsMatchNextChord(self, controls, notes):
# no notes?
if not notes:
return False
# check each valid chord
chords = {}
for time, note in notes:
if not time in chords:
chords[time] = []
chords[time].append((time, note))
#Make sure the notes are in the right time order
chordlist = chords.values()
chordlist.sort(key=lambda a: a[0][0])
twochord = 0
for chord in chordlist:
# matching keys?
self.requiredKeys = [note.number for time, note in chord]
self.requiredKeys = self.uniqify(self.requiredKeys)
if len(self.requiredKeys) > 2 and self.twoChordMax == True:
twochord = 0
self.twoChordApply = True
for n, k in enumerate(self.keys):
if controls.getState(k):
twochord += 1
if twochord == 2:
skipped = len(self.requiredKeys) - 2
self.requiredKeys = [min(self.requiredKeys), max(self.requiredKeys)]
else:
twochord = 0
if (self.controlsMatchNote3(controls, chord, self.requiredKeys, False)):
return True
else:
return False
def uniqify(self, seq, idfun=None):
# order preserving
if idfun is None:
def idfun(x): return x
seen = {}
result = []
for item in seq:
marker = idfun(item)
# in old Python versions:
# if seen.has_key(marker)
# but in new ones:
if marker in seen: continue
seen[marker] = 1
result.append(item)
return result
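# Illustrative examples (added):
#     uniqify([2, 0, 2, 1, 0])             -> [2, 0, 1]
#     uniqify(["b", "B"], idfun=str.lower) -> ["b"]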
def controlsMatchNote3(self, controls, chordTuple, requiredKeys, hopo):
if len(chordTuple) > 1:
#Chords must match exactly
for n in range(self.strings):
if (n in requiredKeys and not (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5]))) or (n not in requiredKeys and (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5]))):
return False
else:
#Single Note must match that note
requiredKey = requiredKeys[0]
if not controls.getState(self.keys[requiredKey]) and not controls.getState(self.keys[requiredKey+5]):
return False
#myfingershurt: this is where to filter out higher frets held when HOPOing:
if hopo == False or self.hopoStyle == 2 or self.hopoStyle == 3:
#Check for higher numbered frets if not a HOPO or if GH2 strict mode
for n, k in enumerate(self.keys):
if (n > requiredKey and n < 5) or (n > 4 and n > requiredKey + 5):
#higher numbered frets cannot be held
if controls.getState(k):
return False
return True
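# Illustrative note (added): holding frets {0, 2} for a single required
# note 0 fails the higher-fret check above when strumming (or in the strict
# hopoStyle 2/3 modes), but passes during a lenient HOPO since the loop is
# skipped.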
def areNotesTappable(self, notes):
if not notes:
return
for time, note in notes:
if note.tappable > 1:
return True
return False
def startPick(self, song, pos, controls, hopo = False):
if hopo == True:
res = self.startPick2(song, pos, controls, hopo) # startPick2 is a method of this class; the missing "self." was a NameError
return res
if not song:
return False
if not song.readyToGo:
return False
self.playedNotes = []
self.matchingNotes = self.getRequiredNotes(song, pos)
if self.controlsMatchNotes(controls, self.matchingNotes):
self.pickStartPos = pos
for time, note in self.matchingNotes:
if note.skipped == True:
continue
self.pickStartPos = max(self.pickStartPos, time)
note.played = True
self.playedNotes.append([time, note])
if self.guitarSolo:
self.currentGuitarSoloHitNotes += 1
return True
return False
def startPick2(self, song, pos, controls, hopo = False):
if not song:
return False
if not song.readyToGo:
return False
self.playedNotes = []
self.matchingNotes = self.getRequiredNotes2(song, pos, hopo)
if self.controlsMatchNotes2(controls, self.matchingNotes, hopo):
self.pickStartPos = pos
for time, note in self.matchingNotes:
if note.skipped == True:
continue
self.pickStartPos = max(self.pickStartPos, time)
if hopo:
note.hopod = True
else:
note.played = True
if note.tappable == 1 or note.tappable == 2:
self.hopoActive = time
self.wasLastNoteHopod = True
elif note.tappable == 3:
self.hopoActive = -time
self.wasLastNoteHopod = True
else:
self.hopoActive = 0
self.wasLastNoteHopod = False
self.playedNotes.append([time, note])
if self.guitarSolo:
self.currentGuitarSoloHitNotes += 1
self.hopoLast = note.number
return True
return False
def startPick3(self, song, pos, controls, hopo = False):
if not song:
return False
if not song.readyToGo:
return False
self.lastPlayedNotes = self.playedNotes
self.playedNotes = []
self.matchingNotes = self.getRequiredNotesMFH(song, pos)
self.controlsMatchNotes3(controls, self.matchingNotes, hopo)
#myfingershurt
for time, note in self.matchingNotes:
if note.played != True:
continue
if shaders.turnon:
shaders.var["fret"][self.player][note.number]=shaders.time()
shaders.var["fretpos"][self.player][note.number]=pos
self.pickStartPos = pos
self.pickStartPos = max(self.pickStartPos, time)
if hopo:
note.hopod = True
else:
note.played = True
#self.wasLastNoteHopod = False
if note.tappable == 1 or note.tappable == 2:
self.hopoActive = time
self.wasLastNoteHopod = True
elif note.tappable == 3:
self.hopoActive = -time
self.wasLastNoteHopod = True
if hopo: #MFH - you just tapped a 3 - make a note of it. (har har)
self.hopoProblemNoteNum = note.number
self.sameNoteHopoString = True
else:
self.hopoActive = 0
self.wasLastNoteHopod = False
self.hopoLast = note.number
self.playedNotes.append([time, note])
if self.guitarSolo:
self.currentGuitarSoloHitNotes += 1
#myfingershurt: be sure to catch when a chord is played
if len(self.playedNotes) > 1:
lastPlayedNote = None
for time, note in self.playedNotes:
if isinstance(lastPlayedNote, Note):
if note.tappable == 1 and lastPlayedNote.tappable == 1:
self.LastStrumWasChord = True
#self.sameNoteHopoString = False
else:
self.LastStrumWasChord = False
lastPlayedNote = note
elif len(self.playedNotes) > 0: #ensure at least that a note was played here
self.LastStrumWasChord = False
if len(self.playedNotes) != 0:
return True
return False
def soloFreestylePick(self, song, pos, controls):
numHits = 0
for theFret in range(5):
self.freestyleHit[theFret] = controls.getState(self.keys[theFret+5])
if self.freestyleHit[theFret]:
if shaders.turnon:
shaders.var["fret"][self.player][theFret]=shaders.time()
shaders.var["fretpos"][self.player][theFret]=pos
numHits += 1
return numHits
#MFH - TODO - handle freestyle picks here
def freestylePick(self, song, pos, controls):
numHits = 0
#if not song:
# return numHits
if not controls.getState(self.actions[0]) and not controls.getState(self.actions[1]):
return 0
for theFret in range(5):
self.freestyleHit[theFret] = controls.getState(self.keys[theFret])
if self.freestyleHit[theFret]:
if shaders.turnon:
shaders.var["fret"][self.player][theFret]=shaders.time()
shaders.var["fretpos"][self.player][theFret]=pos
numHits += 1
return numHits
def endPick(self, pos):
for time, note in self.playedNotes:
if time + note.length > pos + self.noteReleaseMargin:
self.playedNotes = []
return False
self.playedNotes = []
return True
def getPickLength(self, pos):
if not self.playedNotes:
return 0.0
# The pick length is limited by the played notes
pickLength = pos - self.pickStartPos
for time, note in self.playedNotes:
pickLength = min(pickLength, note.length)
return pickLength
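# Illustrative note (added): the pick length is clamped to the shortest
# played note, e.g. a 500-tick note held for 800 ticks past pickStartPos
# still yields a pick length of 500.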
def coOpRescue(self, pos):
self.coOpRestart = True #initializes Restart Timer
self.coOpRescueTime = pos
self.starPower = 0
Log.debug("Rescued at " + str(pos))
def run(self, ticks, pos, controls):
if not self.paused:
self.time += ticks
#MFH - Determine which frame to display for starpower notes
if self.starspin:
self.indexCount = self.indexCount + 1
if self.indexCount > self.Animspeed-1:
self.indexCount = 0
self.starSpinFrameIndex = (self.indexCount * self.starSpinFrames - (self.indexCount * self.starSpinFrames) % self.Animspeed) / self.Animspeed
if self.starSpinFrameIndex > self.starSpinFrames - 1:
self.starSpinFrameIndex = 0
#myfingershurt: must not decrease SP if paused.
if self.starPowerActive == True and self.paused == False:
self.starPower -= ticks/self.starPowerDecreaseDivisor
if self.starPower <= 0:
self.starPower = 0
self.starPowerActive = False
#MFH - call to play star power deactivation sound, if it exists (if not play nothing)
if self.engine.data.starDeActivateSoundFound:
#self.engine.data.starDeActivateSound.setVolume(self.sfxVolume)
self.engine.data.starDeActivateSound.play()
# update frets
if self.editorMode:
if (controls.getState(self.actions[0]) or controls.getState(self.actions[1])):
activeFrets = [] # must be initialized here before the loop below appends to it
for i in range(self.strings):
if controls.getState(self.keys[i]) or controls.getState(self.keys[i+5]):
activeFrets.append(i)
activeFrets = activeFrets or [self.selectedString]
else:
activeFrets = []
else:
activeFrets = [note.number for time, note in self.playedNotes]
for n in range(self.strings):
if controls.getState(self.keys[n]) or controls.getState(self.keys[n+5]) or (self.editorMode and self.selectedString == n):
self.fretWeight[n] = 0.5
else:
self.fretWeight[n] = max(self.fretWeight[n] - ticks / 64.0, 0.0)
if n in activeFrets:
self.fretActivity[n] = min(self.fretActivity[n] + ticks / 32.0, 1.0)
else:
self.fretActivity[n] = max(self.fretActivity[n] - ticks / 64.0, 0.0)
#MFH - THIS is where note sustains should be determined... NOT in renderNotes / renderFrets / renderFlames -.-
if self.fretActivity[n]:
self.hit[n] = True
else:
self.hit[n] = False
if self.vbpmLogicType == 0: #MFH - VBPM (old)
if self.currentBpm != self.targetBpm:
diff = self.targetBpm - self.currentBpm
if (round((diff * .03), 4) != 0):
self.currentBpm = round(self.currentBpm + (diff * .03), 4)
else:
self.currentBpm = self.targetBpm
self.setBPM(self.currentBpm) # glorandwarf: was setDynamicBPM(self.currentBpm)
for time, note in self.playedNotes:
if pos > time + note.length:
return False
return True
| cherbib/fofix | src/Guitar.py | Python | gpl-2.0 | 95,964 |
# Copyright 1999 by Jeffrey Chang. All rights reserved.
# Copyright 2000 by Jeffrey Chang. All rights reserved.
# Revisions Copyright 2007 by Peter Cock. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Module for working with Prosite files from ExPASy (DEPRECATED).
Most of the functionality in this module has moved to Bio.ExPASy.Prosite;
please see
Bio.ExPASy.Prosite.read To read a Prosite file containing one entry.
Bio.ExPASy.Prosite.parse Iterates over entries in a Prosite file.
Bio.ExPASy.Prosite.Record Holds Prosite data.
For
scan_sequence_expasy Scan a sequence for occurrences of Prosite patterns.
_extract_pattern_hits Extract Prosite patterns from a web page.
PatternHit Holds data from a hit against a Prosite pattern.
please see the new module Bio.ExPASy.ScanProsite.
The other functions and classes in Bio.Prosite (including
Bio.Prosite.index_file and Bio.Prosite.Dictionary) are considered deprecated,
and were not moved to Bio.ExPASy.Prosite. If you use this functionality,
please contact the Biopython developers at [email protected] to
avoid permanent removal of this module from Biopython.
This module provides code to work with the prosite dat file from
Prosite.
http://www.expasy.ch/prosite/
Tested with:
Release 15.0, July 1998
Release 16.0, July 1999
Release 17.0, Dec 2001
Release 19.0, Mar 2006
Functions:
parse Iterates over entries in a Prosite file.
scan_sequence_expasy Scan a sequence for occurrences of Prosite patterns.
index_file Index a Prosite file for a Dictionary.
_extract_record Extract Prosite data from a web page.
_extract_pattern_hits Extract Prosite patterns from a web page.
Classes:
Record Holds Prosite data.
PatternHit Holds data from a hit against a Prosite pattern.
Dictionary Accesses a Prosite file using a dictionary interface.
RecordParser Parses a Prosite record into a Record object.
_Scanner Scans Prosite-formatted data.
_RecordConsumer Consumes Prosite data to a Record object.
"""
import warnings
import Bio
warnings.warn("Bio.Prosite is deprecated, and will be removed in a"
" future release of Biopython. Most of the functionality"
" is now provided by Bio.ExPASy.Prosite. If you want to"
" continue to use Bio.Prosite, please get in contact"
" via the mailing lists to avoid its permanent removal from"
" Biopython.", Bio.BiopythonDeprecationWarning)
from types import *
import re
import sgmllib
from Bio import File
from Bio import Index
from Bio.ParserSupport import *
# There is probably a cleaner way to write the read/parse functions
# if we don't use the "parser = RecordParser(); parser.parse(handle)"
# approach. Leaving that for the next revision of Bio.Prosite.
def parse(handle):
import cStringIO
parser = RecordParser()
text = ""
for line in handle:
text += line
if line[:2]=='//':
handle = cStringIO.StringIO(text)
record = parser.parse(handle)
text = ""
if not record: # Then this was the copyright notice
continue
yield record
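# Example usage (illustrative; the file name is hypothetical):
#     handle = open("prosite.dat")
#     for record in parse(handle):
#         print record.name, record.accession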
def read(handle):
parser = RecordParser()
try:
record = parser.parse(handle)
except ValueError, error:
if error.message=="There doesn't appear to be a record":
raise ValueError("No Prosite record found")
else:
raise error
# We should have reached the end of the record by now
remainder = handle.read()
if remainder:
raise ValueError("More than one Prosite record found")
return record
class Record:
"""Holds information from a Prosite record.
Members:
name ID of the record. e.g. ADH_ZINC
type Type of entry. e.g. PATTERN, MATRIX, or RULE
accession e.g. PS00387
created Date the entry was created. (MMM-YYYY)
data_update Date the 'primary' data was last updated.
info_update Date data other than 'primary' data was last updated.
pdoc ID of the PROSITE DOCumentation.
description Free-format description.
pattern The PROSITE pattern. See docs.
matrix List of strings that describes a matrix entry.
rules List of rule definitions (from RU lines). (strings)
prorules List of prorules (from PR lines). (strings)
NUMERICAL RESULTS
nr_sp_release SwissProt release.
nr_sp_seqs Number of seqs in that release of Swiss-Prot. (int)
nr_total Number of hits in Swiss-Prot. tuple of (hits, seqs)
nr_positive True positives. tuple of (hits, seqs)
nr_unknown Could be positives. tuple of (hits, seqs)
nr_false_pos False positives. tuple of (hits, seqs)
nr_false_neg False negatives. (int)
nr_partial False negatives, because they are fragments. (int)
COMMENTS
cc_taxo_range Taxonomic range. See docs for format
cc_max_repeat Maximum number of repetitions in a protein
cc_site Interesting site. list of tuples (pattern pos, desc.)
cc_skip_flag Can this entry be ignored?
cc_matrix_type
cc_scaling_db
cc_author
cc_ft_key
cc_ft_desc
cc_version version number (introduced in release 19.0)
DATA BANK REFERENCES - The following are all
lists of tuples (swiss-prot accession,
swiss-prot name)
dr_positive
dr_false_neg
dr_false_pos
dr_potential Potential hits, but fingerprint region not yet available.
dr_unknown Could possibly belong
pdb_structs List of PDB entries.
"""
def __init__(self):
self.name = ''
self.type = ''
self.accession = ''
self.created = ''
self.data_update = ''
self.info_update = ''
self.pdoc = ''
self.description = ''
self.pattern = ''
self.matrix = []
self.rules = []
self.prorules = []
self.postprocessing = []
self.nr_sp_release = ''
self.nr_sp_seqs = ''
self.nr_total = (None, None)
self.nr_positive = (None, None)
self.nr_unknown = (None, None)
self.nr_false_pos = (None, None)
self.nr_false_neg = None
self.nr_partial = None
self.cc_taxo_range = ''
self.cc_max_repeat = ''
self.cc_site = []
self.cc_skip_flag = ''
self.dr_positive = []
self.dr_false_neg = []
self.dr_false_pos = []
self.dr_potential = []
self.dr_unknown = []
self.pdb_structs = []
class PatternHit:
"""Holds information from a hit against a Prosite pattern.
Members:
name ID of the record. e.g. ADH_ZINC
accession e.g. PS00387
pdoc ID of the PROSITE DOCumentation.
description Free-format description.
matches List of tuples (start, end, sequence) where
start and end are indexes of the match, and sequence is
the sequence matched.
"""
def __init__(self):
self.name = None
self.accession = None
self.pdoc = None
self.description = None
self.matches = []
def __str__(self):
lines = []
lines.append("%s %s %s" % (self.accession, self.pdoc, self.name))
lines.append(self.description)
lines.append('')
if len(self.matches) > 1:
lines.append("Number of matches: %s" % len(self.matches))
for i in range(len(self.matches)):
start, end, seq = self.matches[i]
range_str = "%d-%d" % (start, end)
if len(self.matches) > 1:
lines.append("%7d %10s %s" % (i+1, range_str, seq))
else:
lines.append("%7s %10s %s" % (' ', range_str, seq))
return "\n".join(lines)
class Dictionary:
"""Accesses a Prosite file using a dictionary interface.
"""
__filename_key = '__filename'
def __init__(self, indexname, parser=None):
"""__init__(self, indexname, parser=None)
Open a Prosite Dictionary. indexname is the name of the
index for the dictionary. The index should have been created
using the index_file function. parser is an optional Parser
object to change the results into another form. If set to None,
then the raw contents of the file will be returned.
"""
self._index = Index.Index(indexname)
self._handle = open(self._index[Dictionary.__filename_key])
self._parser = parser
def __len__(self):
return len(self._index)
def __getitem__(self, key):
start, length = self._index[key]
self._handle.seek(start)
data = self._handle.read(length)
if self._parser is not None:
return self._parser.parse(File.StringHandle(data))
return data
def __getattr__(self, name):
return getattr(self._index, name)
class RecordParser(AbstractParser):
"""Parses Prosite data into a Record object.
"""
def __init__(self):
self._scanner = _Scanner()
self._consumer = _RecordConsumer()
def parse(self, handle):
self._scanner.feed(handle, self._consumer)
return self._consumer.data
class _Scanner:
"""Scans Prosite-formatted data.
Tested with:
Release 15.0, July 1998
"""
def feed(self, handle, consumer):
"""feed(self, handle, consumer)
Feed in Prosite data for scanning. handle is a file-like
object that contains prosite data. consumer is a
Consumer object that will receive events as the report is scanned.
"""
if isinstance(handle, File.UndoHandle):
uhandle = handle
else:
uhandle = File.UndoHandle(handle)
consumer.finished = False
while not consumer.finished:
line = uhandle.peekline()
if not line:
break
elif is_blank_line(line):
# Skip blank lines between records
uhandle.readline()
continue
elif line[:2] == 'ID':
self._scan_record(uhandle, consumer)
elif line[:2] == 'CC':
self._scan_copyrights(uhandle, consumer)
else:
raise ValueError("There doesn't appear to be a record")
def _scan_copyrights(self, uhandle, consumer):
consumer.start_copyrights()
self._scan_line('CC', uhandle, consumer.copyright, any_number=1)
self._scan_terminator(uhandle, consumer)
consumer.end_copyrights()
def _scan_record(self, uhandle, consumer):
consumer.start_record()
for fn in self._scan_fns:
fn(self, uhandle, consumer)
# In Release 15.0, C_TYPE_LECTIN_1 has the DO line before
# the 3D lines, instead of the other way around.
# Thus, I'll give the 3D lines another chance after the DO lines
# are finished.
if fn is self._scan_do.im_func:
self._scan_3d(uhandle, consumer)
consumer.end_record()
def _scan_line(self, line_type, uhandle, event_fn,
exactly_one=None, one_or_more=None, any_number=None,
up_to_one=None):
# Callers must set exactly one of exactly_one, one_or_more, or
# any_number to a true value. I do not explicitly check to
# make sure this function is called correctly.
# This does not guarantee any parameter safety, but I
# like the readability. The other strategy I tried was have
# parameters min_lines, max_lines.
if exactly_one or one_or_more:
read_and_call(uhandle, event_fn, start=line_type)
if one_or_more or any_number:
while 1:
if not attempt_read_and_call(uhandle, event_fn,
start=line_type):
break
if up_to_one:
attempt_read_and_call(uhandle, event_fn, start=line_type)
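# For example, the AC line of a record appears exactly once, so the
# scanner below calls:
#     self._scan_line('AC', uhandle, consumer.accession, exactly_one=1)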
def _scan_id(self, uhandle, consumer):
self._scan_line('ID', uhandle, consumer.identification, exactly_one=1)
def _scan_ac(self, uhandle, consumer):
self._scan_line('AC', uhandle, consumer.accession, exactly_one=1)
def _scan_dt(self, uhandle, consumer):
self._scan_line('DT', uhandle, consumer.date, exactly_one=1)
def _scan_de(self, uhandle, consumer):
self._scan_line('DE', uhandle, consumer.description, exactly_one=1)
def _scan_pa(self, uhandle, consumer):
self._scan_line('PA', uhandle, consumer.pattern, any_number=1)
def _scan_ma(self, uhandle, consumer):
self._scan_line('MA', uhandle, consumer.matrix, any_number=1)
## # ZN2_CY6_FUNGAL_2, DNAJ_2 in Release 15
## # contain a CC line buried within an 'MA' line. Need to check
## # for that.
## while 1:
## if not attempt_read_and_call(uhandle, consumer.matrix, start='MA'):
## line1 = uhandle.readline()
## line2 = uhandle.readline()
## uhandle.saveline(line2)
## uhandle.saveline(line1)
## if line1[:2] == 'CC' and line2[:2] == 'MA':
## read_and_call(uhandle, consumer.comment, start='CC')
## else:
## break
def _scan_pp(self, uhandle, consumer):
#New PP line, PostProcessing, just after the MA line
self._scan_line('PP', uhandle, consumer.postprocessing, any_number=1)
def _scan_ru(self, uhandle, consumer):
self._scan_line('RU', uhandle, consumer.rule, any_number=1)
def _scan_nr(self, uhandle, consumer):
self._scan_line('NR', uhandle, consumer.numerical_results,
any_number=1)
def _scan_cc(self, uhandle, consumer):
self._scan_line('CC', uhandle, consumer.comment, any_number=1)
def _scan_dr(self, uhandle, consumer):
self._scan_line('DR', uhandle, consumer.database_reference,
any_number=1)
def _scan_3d(self, uhandle, consumer):
self._scan_line('3D', uhandle, consumer.pdb_reference,
any_number=1)
def _scan_pr(self, uhandle, consumer):
#New PR line, ProRule, between 3D and DO lines
self._scan_line('PR', uhandle, consumer.prorule, any_number=1)
def _scan_do(self, uhandle, consumer):
self._scan_line('DO', uhandle, consumer.documentation, exactly_one=1)
def _scan_terminator(self, uhandle, consumer):
self._scan_line('//', uhandle, consumer.terminator, exactly_one=1)
#This is a list of scan functions in the order expected in the file.
#The function definitions define how many times each line type is expected
#(or if optional):
_scan_fns = [
_scan_id,
_scan_ac,
_scan_dt,
_scan_de,
_scan_pa,
_scan_ma,
_scan_pp,
_scan_ru,
_scan_nr,
_scan_cc,
# This is a really dirty hack, and should be fixed properly at
# some point. ZN2_CY6_FUNGAL_2, DNAJ_2 in Rel 15 and PS50309
# in Rel 17 have lines out of order. Thus, I have to rescan
# these, which decreases performance.
_scan_ma,
_scan_nr,
_scan_cc,
_scan_dr,
_scan_3d,
_scan_pr,
_scan_do,
_scan_terminator
]
class _RecordConsumer(AbstractConsumer):
"""Consumer that converts a Prosite record to a Record object.
Members:
data Record with Prosite data.
"""
def __init__(self):
self.data = None
def start_record(self):
self.data = Record()
def end_record(self):
self._clean_record(self.data)
def identification(self, line):
cols = line.split()
if len(cols) != 3:
raise ValueError("I don't understand identification line\n%s" \
% line)
self.data.name = self._chomp(cols[1]) # don't want ';'
self.data.type = self._chomp(cols[2]) # don't want '.'
def accession(self, line):
cols = line.split()
if len(cols) != 2:
raise ValueError("I don't understand accession line\n%s" % line)
self.data.accession = self._chomp(cols[1])
def date(self, line):
uprline = line.upper()
cols = uprline.split()
# Release 15.0 contains both 'INFO UPDATE' and 'INF UPDATE'
if cols[2] != '(CREATED);' or \
cols[4] != '(DATA' or cols[5] != 'UPDATE);' or \
cols[7][:4] != '(INF' or cols[8] != 'UPDATE).':
raise ValueError("I don't understand date line\n%s" % line)
self.data.created = cols[1]
self.data.data_update = cols[3]
self.data.info_update = cols[6]
def description(self, line):
self.data.description = self._clean(line)
def pattern(self, line):
self.data.pattern = self.data.pattern + self._clean(line)
def matrix(self, line):
self.data.matrix.append(self._clean(line))
def postprocessing(self, line):
postprocessing = self._clean(line).split(";")
self.data.postprocessing.extend(postprocessing)
def rule(self, line):
self.data.rules.append(self._clean(line))
def numerical_results(self, line):
cols = self._clean(line).split(";")
for col in cols:
if not col:
continue
qual, data = [word.lstrip() for word in col.split("=")]
if qual == '/RELEASE':
release, seqs = data.split(",")
self.data.nr_sp_release = release
self.data.nr_sp_seqs = int(seqs)
elif qual == '/FALSE_NEG':
self.data.nr_false_neg = int(data)
elif qual == '/PARTIAL':
self.data.nr_partial = int(data)
elif qual in ['/TOTAL', '/POSITIVE', '/UNKNOWN', '/FALSE_POS']:
m = re.match(r'(\d+)\((\d+)\)', data)
if not m:
raise Exception("Broken data %s in comment line\n%s" \
% (repr(data), line))
hits = tuple(map(int, m.groups()))
if(qual == "/TOTAL"):
self.data.nr_total = hits
elif(qual == "/POSITIVE"):
self.data.nr_positive = hits
elif(qual == "/UNKNOWN"):
self.data.nr_unknown = hits
elif(qual == "/FALSE_POS"):
self.data.nr_false_pos = hits
else:
raise ValueError("Unknown qual %s in comment line\n%s" \
% (repr(qual), line))
def comment(self, line):
#Expect CC lines like this:
#CC /TAXO-RANGE=??EPV; /MAX-REPEAT=2;
#Can (normally) split on ";" and then on "="
cols = self._clean(line).split(";")
for col in cols:
if not col or col[:17] == 'Automatic scaling':
# DNAJ_2 in Release 15 has a non-standard comment line:
# CC Automatic scaling using reversed database
# Throw it away. (Should I keep it?)
continue
if col.count("=") == 0:
#Missing qualifier! Can we recover gracefully?
#For example, from Bug 2403, in PS50293 have:
#CC /AUTHOR=K_Hofmann; N_Hulo
continue
qual, data = [word.lstrip() for word in col.split("=")]
if qual == '/TAXO-RANGE':
self.data.cc_taxo_range = data
elif qual == '/MAX-REPEAT':
self.data.cc_max_repeat = data
elif qual == '/SITE':
pos, desc = data.split(",")
self.data.cc_site.append((int(pos), desc))
elif qual == '/SKIP-FLAG':
self.data.cc_skip_flag = data
elif qual == '/MATRIX_TYPE':
self.data.cc_matrix_type = data
elif qual == '/SCALING_DB':
self.data.cc_scaling_db = data
elif qual == '/AUTHOR':
self.data.cc_author = data
elif qual == '/FT_KEY':
self.data.cc_ft_key = data
elif qual == '/FT_DESC':
self.data.cc_ft_desc = data
elif qual == '/VERSION':
self.data.cc_version = data
else:
raise ValueError("Unknown qual %s in comment line\n%s" \
% (repr(qual), line))
def database_reference(self, line):
refs = self._clean(line).split(";")
for ref in refs:
if not ref:
continue
acc, name, type = [word.strip() for word in ref.split(",")]
if type == 'T':
self.data.dr_positive.append((acc, name))
elif type == 'F':
self.data.dr_false_pos.append((acc, name))
elif type == 'N':
self.data.dr_false_neg.append((acc, name))
elif type == 'P':
self.data.dr_potential.append((acc, name))
elif type == '?':
self.data.dr_unknown.append((acc, name))
else:
raise ValueError("I don't understand type flag %s" % type)
def pdb_reference(self, line):
cols = line.split()
for id in cols[1:]: # get all but the '3D' col
self.data.pdb_structs.append(self._chomp(id))
def prorule(self, line):
#Assume that each PR line can contain multiple ";" separated rules
rules = self._clean(line).split(";")
self.data.prorules.extend(rules)
def documentation(self, line):
self.data.pdoc = self._chomp(self._clean(line))
def terminator(self, line):
self.finished = True
def _chomp(self, word, to_chomp='.,;'):
# Remove the punctuation at the end of a word.
if word[-1] in to_chomp:
return word[:-1]
return word
def _clean(self, line, rstrip=1):
# Clean up a line.
if rstrip:
return line[5:].rstrip()
return line[5:]
def scan_sequence_expasy(seq=None, id=None, exclude_frequent=None):
"""scan_sequence_expasy(seq=None, id=None, exclude_frequent=None) ->
list of PatternHit's
Search a sequence for occurrences of Prosite patterns. You can
specify either a sequence in seq or a SwissProt/trEMBL ID or accession
in id. Only one of those should be given. If exclude_frequent
is true, then the patterns with the high probability of occurring
will be excluded.
"""
from Bio import ExPASy
if (seq and id) or not (seq or id):
raise ValueError("Please specify either a sequence or an id")
handle = ExPASy.scanprosite1(seq, id, exclude_frequent)
return _extract_pattern_hits(handle)
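# Example (illustrative; performs a live ExPASy query, and the accession is
# hypothetical):
#     hits = scan_sequence_expasy(id="P00280")
#     for hit in hits:
#         print hit.accession, len(hit.matches)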
def _extract_pattern_hits(handle):
"""_extract_pattern_hits(handle) -> list of PatternHit's
Extract hits from a web page. Raises a ValueError if there
was an error in the query.
"""
class parser(sgmllib.SGMLParser):
def __init__(self):
sgmllib.SGMLParser.__init__(self)
self.hits = []
self.broken_message = 'Some error occurred'
self._in_pre = 0
self._current_hit = None
self._last_found = None # Save state of parsing
def handle_data(self, data):
if data.find('try again') >= 0:
self.broken_message = data
return
elif data == 'illegal':
self.broken_message = 'Sequence contains illegal characters'
return
if not self._in_pre:
return
elif not data.strip():
return
if self._last_found is None and data[:4] == 'PDOC':
self._current_hit.pdoc = data
self._last_found = 'pdoc'
elif self._last_found == 'pdoc':
if data[:2] != 'PS':
raise ValueError("Expected accession but got:\n%s" % data)
self._current_hit.accession = data
self._last_found = 'accession'
elif self._last_found == 'accession':
self._current_hit.name = data
self._last_found = 'name'
elif self._last_found == 'name':
self._current_hit.description = data
self._last_found = 'description'
elif self._last_found == 'description':
m = re.findall(r'(\d+)-(\d+) (\w+)', data)
for start, end, seq in m:
self._current_hit.matches.append(
(int(start), int(end), seq))
def do_hr(self, attrs):
# <HR> inside a <PRE> section means a new hit.
if self._in_pre:
self._current_hit = PatternHit()
self.hits.append(self._current_hit)
self._last_found = None
def start_pre(self, attrs):
self._in_pre = 1
self.broken_message = None # Probably not broken
def end_pre(self):
self._in_pre = 0
p = parser()
p.feed(handle.read())
if p.broken_message:
raise ValueError(p.broken_message)
return p.hits
def index_file(filename, indexname, rec2key=None):
"""index_file(filename, indexname, rec2key=None)
Index a Prosite file. filename is the name of the file.
indexname is the name of the dictionary. rec2key is an
optional callback that takes a Record and generates a unique key
(e.g. the accession number) for the record. If not specified,
the id name will be used.
"""
import os
if not os.path.exists(filename):
raise ValueError("%s does not exist" % filename)
index = Index.Index(indexname, truncate=1)
index[Dictionary._Dictionary__filename_key] = filename
handle = open(filename)
records = parse(handle)
end = 0L
for record in records:
start = end
end = handle.tell()
length = end - start
if rec2key is not None:
key = rec2key(record)
else:
key = record.name
if not key:
raise KeyError("empty key was produced")
elif key in index:
raise KeyError("duplicate key %s found" % key)
index[key] = start, length
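# Example (illustrative; file names hypothetical) of indexing a Prosite data
# file and retrieving one record through the Dictionary interface:
#
#     index_file("prosite.dat", "prosite.idx")
#     d = Dictionary("prosite.idx", parser=RecordParser())
#     record = d["ADH_ZINC"]   # keyed by record.name unless rec2key is given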
| BlogomaticProject/Blogomatic | opt/blog-o-matic/usr/lib/python/Bio/Prosite/__init__.py | Python | gpl-2.0 | 27,006 |
import pathlib
import h5py
import numpy as np
import pytest
from scipy.ndimage.filters import gaussian_filter
import dclab
from dclab.rtdc_dataset.feat_anc_plugin.plugin_feature import (
PlugInFeature, import_plugin_feature_script,
remove_plugin_feature, remove_all_plugin_features,
PluginImportError)
from dclab.rtdc_dataset.feat_anc_core.ancillary_feature import (
BadFeatureSizeWarning)
from helper_methods import retrieve_data
data_dir = pathlib.Path(__file__).parent / "data"
@pytest.fixture(autouse=True)
def cleanup_plugin_features():
"""Fixture used to cleanup plugin feature tests"""
# code run before the test
pass
# then the test is run
yield
# code run after the test
# remove our test plugin examples
remove_all_plugin_features()
def compute_single_plugin_feature(rtdc_ds):
"""Basic plugin method"""
circ_per_area = rtdc_ds["circ"] / rtdc_ds["area_um"]
return circ_per_area
def compute_multiple_plugin_features(rtdc_ds):
"""Basic plugin method with dictionary returned"""
circ_per_area = rtdc_ds["circ"] / rtdc_ds["area_um"]
circ_times_area = rtdc_ds["circ"] * rtdc_ds["area_um"]
return {"circ_per_area": circ_per_area, "circ_times_area": circ_times_area}
def compute_non_scalar_plugin_feature(rtdc_ds):
"""Basic non-scalar plugin method"""
image_gauss_filter = gaussian_filter(rtdc_ds["image"], sigma=(0, 1, 1))
return {"image_gauss_filter": image_gauss_filter}
def example_plugin_info_single_feature():
"""plugin info for a single feature"""
info = {
"method": compute_single_plugin_feature,
"description": "This plugin will compute a feature",
"long description": "Even longer description that "
"can span multiple lines",
"feature names": ["circ_per_area"],
"feature labels": ["Circularity per Area"],
"features required": ["circ", "area_um"],
"config required": [],
"method check required": lambda x: True,
"scalar feature": [True],
"version": "0.1.0",
}
return info
def example_plugin_info_multiple_feature():
"""plugin info for multiple features"""
info = {
"method": compute_multiple_plugin_features,
"description": "This plugin will compute some features",
"long description": "Even longer description that "
"can span multiple lines",
"feature names": ["circ_per_area", "circ_times_area"],
"feature labels": ["Circularity per Area", "Circularity times Area"],
"features required": ["circ", "area_um"],
"config required": [],
"method check required": lambda x: True,
"scalar feature": [True, True],
"version": "0.1.0",
}
return info
def example_plugin_info_non_scalar_feature():
"""plugin info for non-scalar feature"""
info = {
"method": compute_non_scalar_plugin_feature,
"description": "This plugin will compute a non-scalar feature",
"long description": "This non-scalar feature is a Gaussian filter of "
"the image",
"feature names": ["image_gauss_filter"],
"feature labels": ["Gaussian Filtered Image"],
"features required": ["image"],
"config required": [],
"method check required": lambda x: True,
"scalar feature": [False],
"version": "0.1.0",
}
return info
def compute_with_user_section(rtdc_ds):
"""setup a plugin method that uses user config section
The "user:n_constrictions" metadata must be set
"""
nc = rtdc_ds.config["user"]["n_constrictions"]
assert isinstance(nc, int), (
'"n_constrictions" should be an integer value.')
area_of_region = rtdc_ds["area_um"] * nc
return {"area_of_region": area_of_region}
def test_pf_attribute_ancill_info():
"""Check the plugin feature attribute input to AncillaryFeature"""
info = example_plugin_info_single_feature()
pf = PlugInFeature("circ_per_area", info)
assert pf.plugin_feature_info["feature name"] == "circ_per_area"
assert pf.plugin_feature_info["method"] is compute_single_plugin_feature
assert pf.plugin_feature_info["config required"] == []
assert pf.plugin_feature_info["features required"] == ["circ", "area_um"]
def test_pf_attribute_plugin_feature_info():
"""Check the plugin feature info attribute"""
info = example_plugin_info_single_feature()
# comparing lambda functions fails due to differing memory locations
info.pop("method check required")
pf = PlugInFeature("circ_per_area", info)
pf.plugin_feature_info.pop("method check required")
plugin_feature_info = {
"method": compute_single_plugin_feature,
"description": "This plugin will compute a feature",
"long description": "Even longer description that "
"can span multiple lines",
"feature name": "circ_per_area",
"feature label": "Circularity per Area",
"feature shape": (1,),
"features required": ["circ", "area_um"],
"config required": [],
"scalar feature": True,
"version": "0.1.0",
"plugin path": None,
"identifier": "3a3e72c4cb015424ebbe6d4af63f2170",
}
assert pf.plugin_feature_info == plugin_feature_info
def test_pf_attributes():
"""Check the plugin feature attributes"""
plugin_path = data_dir / "feat_anc_plugin_creative.py"
plugin_list = dclab.load_plugin_feature(plugin_path)
pf1, pf2 = plugin_list
plugin_file_info = import_plugin_feature_script(plugin_path)
assert pf1.feature_name == pf1.plugin_feature_info["feature name"] == \
plugin_file_info["feature names"][0]
assert pf2.feature_name == pf2.plugin_feature_info["feature name"] == \
plugin_file_info["feature names"][1]
assert plugin_path.samefile(pf1.plugin_path)
assert plugin_path.samefile(pf1.plugin_feature_info["plugin path"])
assert plugin_path.samefile(pf2.plugin_path)
assert plugin_path.samefile(pf2.plugin_feature_info["plugin path"])
assert pf1._original_info == plugin_file_info
assert pf2._original_info == plugin_file_info
def test_pf_attributes_af_inherited():
"""Check the plugin feature attributes inherited from AncillaryFeature"""
plugin_path = data_dir / "feat_anc_plugin_creative.py"
plugin_list = dclab.load_plugin_feature(plugin_path)
pf, _ = plugin_list
plugin_file_info = import_plugin_feature_script(plugin_path)
assert pf.feature_name == plugin_file_info["feature names"][0]
assert pf.method == plugin_file_info["method"]
assert pf.req_config == plugin_file_info["config required"]
assert pf.req_features == plugin_file_info["features required"]
assert pf.req_func == plugin_file_info["method check required"]
assert pf.priority == 0
def test_pf_bad_plugin_feature_name_list():
"""Basic test of a bad feature name for PlugInFeature"""
info = example_plugin_info_single_feature()
info["feature names"] = "Peter-Pan's Best Friend!"
with pytest.raises(ValueError, match="must be a list, got"):
PlugInFeature("Peter-Pan's Best Friend!", info)
def test_pf_bad_plugin_feature_name():
"""Basic test of a bad feature name for PlugInFeature"""
info = example_plugin_info_single_feature()
info["feature names"] = ["Peter-Pan's Best Friend!"]
with pytest.raises(ValueError, match="only contain lower-case characters"):
PlugInFeature("Peter-Pan's Best Friend!", info)
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_exists_in_hierarchy():
"""Test that RTDCHierarchy works with PlugInFeature"""
info = example_plugin_info_single_feature()
pf = PlugInFeature("circ_per_area", info)
h5path = retrieve_data("fmt-hdf5_fl_2018.zip")
with dclab.new_dataset(h5path) as ds:
assert pf.feature_name in ds
assert dclab.dfn.feature_exists(pf.feature_name)
child = dclab.new_dataset(ds)
assert pf.feature_name in child
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_export_and_load():
"""Check that exported and loaded hdf5 file will keep a plugin feature"""
h5path = retrieve_data("fmt-hdf5_fl_2018.zip")
# initialize PlugInFeature instance
info = example_plugin_info_single_feature()
pf = PlugInFeature("circ_per_area", info)
with dclab.new_dataset(h5path) as ds:
# extract the feature information from the dataset
assert pf in PlugInFeature.features
circ_per_area = ds[pf.feature_name]
# export the data to a new file
expath = h5path.with_name("exported.rtdc")
ds.export.hdf5(expath, features=ds.features_innate + [pf.feature_name])
# make sure that worked
with h5py.File(expath, "r") as h5:
assert pf.feature_name in h5["events"]
assert np.allclose(h5["events"][pf.feature_name], circ_per_area)
# now check again with dclab
with dclab.new_dataset(expath) as ds2:
assert pf in PlugInFeature.features
assert pf.feature_name in ds2
assert pf.feature_name in ds2.features_innate
assert np.allclose(ds2[pf.feature_name], circ_per_area)
# and a control check
remove_plugin_feature(pf)
assert pf.feature_name not in ds2
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_export_non_scalar():
h5path = retrieve_data("fmt-hdf5_image-bg_2020.zip")
# initialize PlugInFeature instance
info = example_plugin_info_non_scalar_feature()
pf = PlugInFeature("image_gauss_filter", info)
with dclab.new_dataset(h5path) as ds:
# extract the feature information from the dataset
assert pf in PlugInFeature.features
image_gauss_filter = ds[pf.feature_name]
# export the data to a new file
expath = h5path.with_name("exported.rtdc")
with pytest.warns(UserWarning, match="out on a limb"):
ds.export.hdf5(expath,
features=[pf.feature_name])
# make sure that worked
with h5py.File(expath, "r") as h5:
assert pf.feature_name in h5["events"]
assert np.allclose(h5["events"][pf.feature_name], image_gauss_filter)
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_export_non_scalar_single_event():
h5path = retrieve_data("fmt-hdf5_image-bg_2020.zip")
# initialize PlugInFeature instance
info = example_plugin_info_non_scalar_feature()
info["feature shapes"] = [(80, 250)]
pf = PlugInFeature("image_gauss_filter", info)
with dclab.new_dataset(h5path) as ds:
# extract the feature information from the dataset
assert pf in PlugInFeature.features
image_gauss_filter = ds[pf.feature_name]
# export the data to a new file
expath = h5path.with_name("exported.rtdc")
ds.export.hdf5(expath, features=["image", pf.feature_name])
# write another single event
with dclab.RTDCWriter(expath) as hw:
hw.store_feature(pf.feature_name, ds["image"][0])
hw.store_feature("image", ds["image"][0])
# make sure that worked
with h5py.File(expath, "r") as h5:
assert pf.feature_name in h5["events"]
assert np.allclose(h5["events"][pf.feature_name][:-1],
image_gauss_filter)
assert np.allclose(h5["events"][pf.feature_name][-1],
h5["events/image"][0])
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_export_non_scalar_no_warning():
h5path = retrieve_data("fmt-hdf5_image-bg_2020.zip")
# initialize PlugInFeature instance
info = example_plugin_info_non_scalar_feature()
info["feature shapes"] = [(80, 250)]
pf = PlugInFeature("image_gauss_filter", info)
with dclab.new_dataset(h5path) as ds:
# extract the feature information from the dataset
assert pf in PlugInFeature.features
image_gauss_filter = ds[pf.feature_name]
# export the data to a new file
expath = h5path.with_name("exported.rtdc")
ds.export.hdf5(expath, features=[pf.feature_name])
# make sure that worked
with h5py.File(expath, "r") as h5:
assert pf.feature_name in h5["events"]
assert np.allclose(h5["events"][pf.feature_name], image_gauss_filter)
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_export_non_scalar_bad_shape():
h5path = retrieve_data("fmt-hdf5_image-bg_2020.zip")
# initialize PlugInFeature instance
info = example_plugin_info_non_scalar_feature()
info["feature shapes"] = [(42, 27)]
pf = PlugInFeature("image_gauss_filter", info)
with dclab.new_dataset(h5path) as ds:
# extract the feature information from the dataset
assert pf in PlugInFeature.features
# export the data to a new file
expath = h5path.with_name("exported.rtdc")
with pytest.raises(ValueError, match="Bad shape"):
ds.export.hdf5(expath, features=[pf.feature_name])
def test_pf_feature_exists():
"""Basic check that the plugin feature name exists in definitions"""
plugin_path = data_dir / "feat_anc_plugin_creative.py"
plugin_list = dclab.load_plugin_feature(plugin_path)
assert dclab.dfn.feature_exists(plugin_list[0].feature_name)
assert dclab.dfn.feature_exists(plugin_list[1].feature_name)
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_filtering_with_plugin_feature():
"""Filtering with plugin feature"""
h5path = retrieve_data("fmt-hdf5_fl_2018.zip")
with dclab.new_dataset(h5path) as ds:
info = example_plugin_info_single_feature()
pf = PlugInFeature("circ_per_area", info)
ds.config["filtering"][f"{pf.feature_name} min"] = 0.030
ds.config["filtering"][f"{pf.feature_name} max"] = 0.031
ds.apply_filter()
assert np.sum(ds.filter.all) == 1
assert ds.filter.all[4]
def test_pf_import_plugin_info():
"""Check the plugin test example info is a dict"""
plugin_path = data_dir / "feat_anc_plugin_creative.py"
info = import_plugin_feature_script(plugin_path)
assert isinstance(info, dict)
def test_pf_import_plugin_info_bad_path():
"""Raise error when a bad pathname is given"""
bad_plugin_path = "not/a/real/path/plugin.py"
with pytest.raises(PluginImportError, match="could be not be found"):
import_plugin_feature_script(bad_plugin_path)
def test_pf_incorrect_input_info():
"""Raise error when info is not a dictionary"""
info = ["this", "is", "not", "a", "dict"]
with pytest.raises(ValueError, match="must be a dict"):
PlugInFeature("feature_1", info)
def test_pf_incorrect_input_feature_name():
"""Raise error when the feature_name doesn't match info feature name"""
info = example_plugin_info_single_feature()
# `feature_name` is "circ_per_area" in info
with pytest.raises(ValueError, match="is not defined"):
PlugInFeature("not_the_correct_name", info)
def test_pf_incorrect_input_method():
"""Raise error when method is not callable"""
info = example_plugin_info_single_feature()
# set `info["method"]` to something that isn't callable
info["method"] = "this_is_a_string"
with pytest.raises(ValueError, match="is not callable"):
PlugInFeature("circ_per_area", info)
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_initialize_plugin_after_loading():
"""plugin feature loads correctly after feature added to hdf5 file"""
h5path = retrieve_data("fmt-hdf5_fl_2018.zip")
with dclab.new_dataset(h5path) as ds:
circ_per_area = compute_single_plugin_feature(ds)
with h5py.File(h5path, "a") as h5:
h5["events"]["circ_per_area"] = circ_per_area
with dclab.new_dataset(h5path) as ds:
assert "circ_per_area" not in ds
info = example_plugin_info_single_feature()
PlugInFeature("circ_per_area", info)
assert "circ_per_area" in ds
assert "circ_per_area" in ds.features_innate
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_initialize_plugin_feature_single():
"""Check that single plugin feature exists independant of loaded dataset"""
ds = dclab.new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip"))
info = example_plugin_info_single_feature()
PlugInFeature("circ_per_area", info)
assert "circ_per_area" in ds
circ_per_area = ds["circ_per_area"]
assert np.allclose(circ_per_area, ds["circ"] / ds["area_um"])
# check that PlugInFeature exists independent of loaded ds
ds2 = dclab.new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip"))
assert "circ_per_area" in ds2
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_initialize_plugin_feature_non_scalar():
"""Check that the non-scalar plugin feature works"""
ds = dclab.new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip"))
info = example_plugin_info_non_scalar_feature()
PlugInFeature("image_gauss_filter", info)
assert "image_gauss_filter" in ds
image_gauss_filter = ds["image_gauss_filter"]
assert np.allclose(image_gauss_filter,
gaussian_filter(ds["image"], sigma=(0, 1, 1)))
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_initialize_plugin_features_multiple():
"""Check multiple plugin features exist independant of loaded dataset"""
ds = dclab.new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip"))
assert "circ_per_area" not in ds.features_innate
assert "circ_times_area" not in ds.features_innate
info = example_plugin_info_multiple_feature()
PlugInFeature("circ_per_area", info)
PlugInFeature("circ_times_area", info)
assert "circ_per_area" in ds
assert "circ_times_area" in ds
assert dclab.dfn.feature_exists("circ_per_area")
assert dclab.dfn.feature_exists("circ_times_area")
circ_per_area = ds["circ_per_area"]
circ_times_area = ds["circ_times_area"]
assert np.allclose(circ_per_area, ds["circ"] / ds["area_um"])
assert np.allclose(circ_times_area, ds["circ"] * ds["area_um"])
def test_pf_input_no_feature_labels():
"""Check that feature labels are populated even if not given"""
info = example_plugin_info_single_feature()
info.pop("feature labels")
feature_name = "circ_per_area"
pf = PlugInFeature(feature_name, info)
assert dclab.dfn.feature_exists(feature_name)
label = dclab.dfn.get_feature_label(feature_name)
assert label == "Plugin feature {}".format(feature_name)
assert label == pf.plugin_feature_info["feature label"]
def test_pf_input_no_scalar_feature():
"""Check that scalar feature bools are populated even if not given"""
info = example_plugin_info_single_feature()
info.pop("scalar feature")
pf = PlugInFeature("circ_per_area", info)
assert pf.plugin_feature_info["scalar feature"]
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_load_plugin():
"""Basic check for loading a plugin feature via a script"""
ds = dclab.new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip"))
assert "circ_per_area" not in ds.features_innate
assert "circ_times_area" not in ds.features_innate
plugin_path = data_dir / "feat_anc_plugin_creative.py"
plugin_list = dclab.load_plugin_feature(plugin_path)
assert isinstance(plugin_list[0], PlugInFeature)
assert isinstance(plugin_list[1], PlugInFeature)
assert "circ_per_area" in ds
assert "circ_times_area" in ds
circ_per_area = ds["circ_per_area"]
circ_times_area = ds["circ_times_area"]
assert np.allclose(circ_per_area, ds["circ"] / ds["area_um"])
assert np.allclose(circ_times_area, ds["circ"] * ds["area_um"])
def test_pf_minimum_info_input():
"""Only method and feature names are required to create PlugInFeature"""
info = {"method": compute_single_plugin_feature,
"feature names": ["circ_per_area"]}
pf = PlugInFeature("circ_per_area", info)
# check that all other plugin_feature_info is populated
assert "method" in pf.plugin_feature_info
assert callable(pf.plugin_feature_info["method"])
assert "description" in pf.plugin_feature_info
assert "long description" in pf.plugin_feature_info
assert "feature name" in pf.plugin_feature_info
assert "feature label" in pf.plugin_feature_info
assert "features required" in pf.plugin_feature_info
assert "config required" in pf.plugin_feature_info
assert "method check required" in pf.plugin_feature_info
assert "scalar feature" in pf.plugin_feature_info
assert "version" in pf.plugin_feature_info
assert "plugin path" in pf.plugin_feature_info
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_remove_all_plugin_features():
"""Remove all plugin features at once"""
ds = dclab.new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip"))
assert "circ_per_area" not in ds.features_innate
assert "circ_times_area" not in ds.features_innate
plugin_path = data_dir / "feat_anc_plugin_creative.py"
dclab.load_plugin_feature(plugin_path)
assert "circ_per_area" in ds
assert "circ_times_area" in ds
assert dclab.dfn.feature_exists("circ_per_area")
assert dclab.dfn.feature_exists("circ_times_area")
remove_all_plugin_features()
assert "circ_per_area" not in ds
assert "circ_times_area" not in ds
assert not dclab.dfn.feature_exists("circ_per_area")
assert not dclab.dfn.feature_exists("circ_times_area")
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_remove_plugin_feature():
"""Remove individual plugin features"""
ds = dclab.new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip"))
assert "circ_per_area" not in ds
assert "circ_times_area" not in ds
plugin_path = data_dir / "feat_anc_plugin_creative.py"
plugin_list = dclab.load_plugin_feature(plugin_path)
assert len(plugin_list) == 2
assert "circ_per_area" in ds
assert "circ_per_area" not in ds.features_innate
assert "circ_times_area" in ds
assert "circ_times_area" not in ds.features_innate
assert dclab.dfn.feature_exists("circ_per_area")
assert dclab.dfn.feature_exists("circ_times_area")
remove_plugin_feature(plugin_list[0])
remove_plugin_feature(plugin_list[1])
assert "circ_per_area" not in ds
assert "circ_times_area" not in ds
assert not dclab.dfn.feature_exists("circ_per_area")
assert not dclab.dfn.feature_exists("circ_times_area")
with pytest.raises(TypeError,
match="hould be an instance of PlugInFeature"):
not_a_plugin_instance = [4, 6, 5]
remove_plugin_feature(not_a_plugin_instance)
def test_pf_try_existing_feature_fails():
"""An existing feature name is not allowed"""
info = example_plugin_info_single_feature()
info["feature names"] = ["deform"]
with pytest.raises(ValueError, match="Feature 'deform' already exists"):
PlugInFeature("deform", info)
def test_pf_with_empty_feature_label_string():
"""An empty string is replaced with a real feature label
Show that an empty `feature_label` will still give a descriptive
feature label. See `dclab.dfn._add_feature_to_definitions` for details.
"""
info = example_plugin_info_single_feature()
info["feature labels"] = [""]
feature_name = "circ_per_area"
PlugInFeature(feature_name, info)
assert dclab.dfn.feature_exists("circ_per_area")
label = dclab.dfn.get_feature_label("circ_per_area")
assert label != ""
assert label == "Plugin feature {}".format(feature_name)
def test_pf_with_feature_label():
"""Check that a plugin feature label is added to definitions"""
info = example_plugin_info_single_feature()
info["feature labels"] = ["Circ / Area [1/µm²]"]
feature_name = "circ_per_area"
PlugInFeature(feature_name, info)
assert dclab.dfn.feature_exists("circ_per_area")
label = dclab.dfn.get_feature_label("circ_per_area")
assert label == "Circ / Area [1/µm²]"
def test_pf_with_no_feature_label():
"""A feature label of None is replaced with a real feature label
Show that `feature_label=None` will still give a descriptive
feature label. See `dclab.dfn._add_feature_to_definitions` for details.
"""
info = example_plugin_info_single_feature()
info["feature labels"] = [None]
feature_name = "circ_per_area"
PlugInFeature(feature_name, info)
assert dclab.dfn.feature_exists("circ_per_area")
label = dclab.dfn.get_feature_label("circ_per_area")
assert label is not None
assert label == "Plugin feature {}".format(feature_name)
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_with_user_config_section():
"""Use a plugin feature with the user defined config section"""
info = {"method": compute_with_user_section,
"feature names": ["area_of_region"],
"config required": [["user", ["n_constrictions"]]]}
PlugInFeature("area_of_region", info)
ds = dclab.new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip"))
assert "area_of_region" not in ds, "not available b/c missing metadata"
# add some metadata to the user config section
metadata = {"channel": True,
"n_constrictions": 3}
ds.config["user"].update(metadata)
assert ds.config["user"] == metadata
assert "area_of_region" in ds, "available b/c metadata is set"
area_of_region1 = ds["area_of_region"]
area_of_region1_calc = (ds["area_um"] *
ds.config["user"]["n_constrictions"])
assert np.allclose(area_of_region1, area_of_region1_calc)
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_with_user_config_section_fails():
"""Use a plugin feature with the user defined config section"""
info = {"method": compute_with_user_section,
"feature names": ["area_of_region"],
"config required": [["user", ["n_constrictions"]]]}
PlugInFeature("area_of_region", info)
ds = dclab.new_dataset(retrieve_data("fmt-hdf5_fl_2018.zip"))
# show that the plugin feature is not available before setting the
# user metadata
ds.config["user"].clear()
with pytest.raises(KeyError,
match=r"Feature \'area_of_region\' does not exist"):
ds["area_of_region"]
# show that the plugin fails when the user metadata type is wrong
ds.config["user"]["n_constrictions"] = 4.99
with pytest.raises(AssertionError, match="should be an integer value"):
ds["area_of_region"]
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_wrong_data_shape_1():
h5path = retrieve_data("fmt-hdf5_fl_2018.zip")
with dclab.new_dataset(h5path) as ds:
info = example_plugin_info_single_feature()
info["scalar feature"] = [False]
pf = PlugInFeature("circ_per_area", info)
with pytest.raises(ValueError, match="is not a scalar feature"):
ds[pf.feature_name]
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_wrong_data_shape_2():
h5path = retrieve_data("fmt-hdf5_fl_2018.zip")
with dclab.new_dataset(h5path) as ds:
info = example_plugin_info_single_feature()
info["scalar feature"] = [True]
info["method"] = lambda x: np.arange(len(ds) * 2).reshape(-1, 2)
pf = PlugInFeature("circ_per_area", info)
with pytest.raises(ValueError, match="is a scalar feature"):
ds[pf.feature_name]
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_wrong_length_1():
"""plugin feature should have same length"""
h5path = retrieve_data("fmt-hdf5_fl_2018.zip")
with dclab.new_dataset(h5path) as ds:
info = example_plugin_info_single_feature()
info["method"] = lambda x: np.arange(len(ds) // 2)
pf = PlugInFeature("circ_per_area", info)
with pytest.warns(BadFeatureSizeWarning,
match="to match event number"):
ds[pf.feature_name]
@pytest.mark.filterwarnings(
"ignore::dclab.rtdc_dataset.config.WrongConfigurationTypeWarning")
def test_pf_wrong_length_2():
"""plugin feature should have same length"""
h5path = retrieve_data("fmt-hdf5_fl_2018.zip")
with dclab.new_dataset(h5path) as ds:
info = example_plugin_info_single_feature()
info["method"] = lambda x: np.arange(len(ds) * 2)
pf = PlugInFeature("circ_per_area", info)
with pytest.warns(BadFeatureSizeWarning,
match="to match event number"):
ds[pf.feature_name]
if __name__ == "__main__":
# Run all tests
loc = locals()
for key in list(loc.keys()):
if key.startswith("test_") and hasattr(loc[key], "__call__"):
loc[key]()
remove_all_plugin_features()
| ZellMechanik-Dresden/dclab | tests/test_rtdc_feat_anc_plugin.py | Python | gpl-2.0 | 29,760 |
"""
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urllib
import urlparse
import kodi
import dom_parser
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import QUALITIES
from salts_lib.constants import VIDEO_TYPES
import scraper
BASE_URL = 'http://watch8now.me'
class Scraper(scraper.Scraper):
base_url = BASE_URL
def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
self.timeout = timeout
self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))
@classmethod
def provides(cls):
return frozenset([VIDEO_TYPES.TVSHOW, VIDEO_TYPES.EPISODE])
@classmethod
def get_name(cls):
return 'Watch8Now'
def resolve_link(self, link):
html = self._http_get(link, cache_limit=.5)
match = re.search('<iframe[^>]*src="([^"]+)', html, re.I)
if match:
return match.group(1)
else:
match = re.search('Nothing in HERE<br>([^<]+)', html, re.I)
if match:
return match.group(1).strip()
return link
def get_sources(self, video):
source_url = self.get_url(video)
hosters = []
if source_url and source_url != FORCE_NO_MATCH:
url = urlparse.urljoin(self.base_url, source_url)
html = self._http_get(url, cache_limit=.5)
for table_cell in dom_parser.parse_dom(html, 'td', {'class': 'domain'}):
match = re.search('href="([^"]+)(?:[^>]+>){2}\s*([^<]+)', table_cell)
if match:
link, host = match.groups()
hoster = {'multi-part': False, 'host': host, 'class': self, 'quality': scraper_utils.get_quality(video, host, QUALITIES.HIGH), 'views': None, 'rating': None, 'url': link, 'direct': False}
hosters.append(hoster)
return hosters
def _get_episode_url(self, show_url, video):
episode_pattern = 'href="([^"]+[sS]%s[eE]%s\.html)"' % (video.season, video.episode)
title_pattern = 'href="(?P<url>[^"]+[sS]\d+[eE]\d+\.html)"(?:[^>]+>){6}(?P<title>[^<]+)'
return self._default_get_episode_url(show_url, video, episode_pattern, title_pattern)
def search(self, video_type, title, year, season=''):
search_url = urlparse.urljoin(self.base_url, '/search?q=')
search_url += urllib.quote_plus(title)
html = self._http_get(search_url, cache_limit=8)
results = []
for item in dom_parser.parse_dom(html, 'h4', {'class': 'media-heading'}):
match = re.search('href="([^"]+)">([^<]+)', item)
if match:
url, match_title = match.groups()
result = {'url': scraper_utils.pathify_url(url), 'title': scraper_utils.cleanse_title(match_title), 'year': ''}
results.append(result)
return results
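# Usage sketch (illustrative; `video` would be a SALTS video object carrying
# video_type/title/season/episode, as expected by get_url above):
# s = Scraper(timeout=30)
# for result in s.search(VIDEO_TYPES.TVSHOW, 'some show', '2015'):
#     print result['title'], result['url']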
| felipenaselva/repo.felipe | plugin.video.salts/scrapers/watch8now_scraper.py | Python | gpl-2.0 | 3,576 |
import mainUi.main
mainUi.main.main()
| MiiRaGe/cryptanalysis-tools | run.py | Python | gpl-2.0 | 39 |
# -*- coding: utf-8 -*-
import sys
sys.path.append(sys.argv[1])
from scriptlib import *
"""Файлы пакета"""
FILES = (
'7z920-x64.msi',
'7z920.msi'
)
"""Имена исполняемых файлов"""
if ARCH == '64':
INSTALLER = os.path.join('', DIR, FILES[0])
else:
INSTALLER = os.path.join('', DIR, FILES[1])
def install():
run_msi('/i', INSTALLER)
def remove():
run_msi('/x', INSTALLER)
check_files(FILES)
if ACTION == 'install':
install()
elif ACTION == 'remove':
    remove()
| kuchiman/wpm-pkg | 7zip/script.py | Python | gpl-2.0 | 529 |
from unittest import mock
from flask import request
from routersploit.modules.exploits.routers.tplink.wdr740nd_wdr740n_backdoor import Exploit
def apply_response(*args, **kwargs):
cmd = request.args["cmd"]
data = 'TEST; var cmdResult = new Array(\n"' + cmd + '",\n0,0 ); TEST'
return data, 200
@mock.patch("routersploit.modules.exploits.routers.tplink.wdr740nd_wdr740n_backdoor.shell")
def test_check_success(mocked_shell, target):
""" Test scenario - successful check """
route_mock = target.get_route_mock("/userRpm/DebugResultRpm.htm", methods=["GET"])
route_mock.side_effect = apply_response
exploit = Exploit()
assert exploit.target == ""
assert exploit.port == 80
assert exploit.username == "admin"
assert exploit.password == "admin"
exploit.target = target.host
exploit.port = target.port
assert exploit.check()
assert exploit.run() is None
| dasseclab/dasseclab | clones/routersploit/tests/exploits/routers/tplink/test_wdr740nd_wdr740n_backdoor.py | Python | gpl-2.0 | 917 |
"""
USAGE:
twitter [action] [options]
ACTIONS:
follow add the specified user to your follow list
friends get latest tweets from your friends (default action)
help print this help text that you are currently reading
leave remove the specified user from your following list
public get latest public tweets
replies get latest replies
set set your twitter status
shell login the twitter shell
OPTIONS:
-e --email <email> your email to login to twitter
-p --password <password> your twitter password
-r --refresh run this command forever, polling every once
                             in a while (default: every 10 minutes)
-R --refresh-rate <rate> set the refresh rate (in seconds)
-f --format <format> specify the output format for status updates
-c --config <filename> read username and password from given config
file (default ~/.twitter)
-l --length <count> specify number of status updates shown
(default: 20, max: 200)
-t --timestamp show time before status lines
 -d --datestamp             show date before status lines
FORMATS for the --format option
default one line per status
verbose multiple lines per status, more verbose status info
urls nothing but URLs
ansi ansi colour (rainbow mode)
CONFIG FILES
The config file should contain a [twitter] header, and all the desired options
you wish to set, like so:
[twitter]
email: <username>
password: <password>
format: <desired_default_format_for_output>
prompt: <twitter_shell_prompt e.g. '[cyan]twitter[R]> '>
"""
import sys
import time
from getopt import gnu_getopt as getopt, GetoptError
from getpass import getpass
import re
import os.path
from ConfigParser import SafeConfigParser
import datetime
from api import Twitter, TwitterError
import ansi
# Please don't change this, it was provided by the fine folks at Twitter.
# If you change it, it will not work.
AGENT_STR = "twittercommandlinetoolpy"
OPTIONS = {
'email': None,
'password': None,
'action': 'friends',
'refresh': False,
'refresh_rate': 600,
'format': 'default',
'prompt': '[cyan]twitter[R]> ',
'config_filename': os.environ.get('HOME', '') + os.sep + '.twitter',
'length': 20,
'timestamp': False,
'datestamp': False,
'extra_args': []
}
def parse_args(args, options):
long_opts = ['email', 'password', 'help', 'format', 'refresh',
'refresh-rate', 'config', 'length', 'timestamp', 'datestamp']
short_opts = "e:p:f:h?rR:c:l:td"
opts, extra_args = getopt(args, short_opts, long_opts)
for opt, arg in opts:
if opt in ('-e', '--email'):
options['email'] = arg
elif opt in ('-p', '--password'):
options['password'] = arg
elif opt in ('-f', '--format'):
options['format'] = arg
elif opt in ('-r', '--refresh'):
options['refresh'] = True
elif opt in ('-R', '--refresh-rate'):
options['refresh_rate'] = int(arg)
elif opt in ('-l', '--length'):
options["length"] = int(arg)
elif opt in ('-t', '--timestamp'):
options["timestamp"] = True
elif opt in ('-d', '--datestamp'):
options["datestamp"] = True
elif opt in ('-?', '-h', '--help'):
options['action'] = 'help'
elif opt in ('-c', '--config'):
options['config_filename'] = arg
if extra_args and not ('action' in options and options['action'] == 'help'):
options['action'] = extra_args[0]
options['extra_args'] = extra_args[1:]
def get_time_string(status, options):
timestamp = options["timestamp"]
datestamp = options["datestamp"]
t = time.strptime(status['created_at'], "%a %b %d %H:%M:%S +0000 %Y")
i_hate_timezones = time.timezone
if (time.daylight):
i_hate_timezones = time.altzone
dt = datetime.datetime(*t[:-3]) - datetime.timedelta(
seconds=i_hate_timezones)
t = dt.timetuple()
if timestamp and datestamp:
return time.strftime("%Y-%m-%d %H:%M:%S ", t)
elif timestamp:
return time.strftime("%H:%M:%S ", t)
elif datestamp:
return time.strftime("%Y-%m-%d ", t)
return ""
class StatusFormatter(object):
    def __call__(self, status, options):
        return (u"%s%s %s" %(
            get_time_string(status, options),
            status['user']['screen_name'], status['text']))
class AnsiStatusFormatter(object):
def __init__(self):
self._colourMap = ansi.ColourMap()
def __call__(self, status, options):
colour = self._colourMap.colourFor(status['user']['screen_name'])
return (u"%s%s%s%s %s" %(
get_time_string(status, options),
ansi.cmdColour(colour), status['user']['screen_name'],
ansi.cmdReset(), status['text']))
class VerboseStatusFormatter(object):
def __call__(self, status, options):
return (u"-- %s (%s) on %s\n%s\n" %(
status['user']['screen_name'],
status['user']['location'],
status['created_at'],
status['text']))
class URLStatusFormatter(object):
urlmatch = re.compile(r'https?://\S+')
def __call__(self, status, options):
urls = self.urlmatch.findall(status['text'])
return u'\n'.join(urls) if urls else ""
class AdminFormatter(object):
def __call__(self, action, user):
user_str = u"%s (%s)" %(user['screen_name'], user['name'])
if action == "follow":
return u"You are now following %s.\n" %(user_str)
else:
return u"You are no longer following %s.\n" %(user_str)
class VerboseAdminFormatter(object):
def __call__(self, action, user):
return(u"-- %s: %s (%s): %s" % (
"Following" if action == "follow" else "Leaving",
user['screen_name'],
user['name'],
user['url']))
status_formatters = {
'default': StatusFormatter,
'verbose': VerboseStatusFormatter,
'urls': URLStatusFormatter,
'ansi': AnsiStatusFormatter
}
admin_formatters = {
'default': AdminFormatter,
'verbose': VerboseAdminFormatter,
'urls': AdminFormatter,
'ansi': AdminFormatter
}
def get_status_formatter(options):
sf = status_formatters.get(options['format'])
if (not sf):
raise TwitterError(
"Unknown formatter '%s'" %(options['format']))
return sf()
def get_admin_formatter(options):
sf = admin_formatters.get(options['format'])
if (not sf):
raise TwitterError(
"Unknown formatter '%s'" %(options['format']))
return sf()
class Action(object):
def ask(self, subject='perform this action', careful=False):
'''
Requests fromt he user using `raw_input` if `subject` should be
performed. When `careful`, the default answer is NO, otherwise YES.
Returns the user answer in the form `True` or `False`.
'''
sample = '(y/N)'
if not careful:
sample = '(Y/n)'
prompt = 'You really want to %s %s? ' %(subject, sample)
try:
answer = raw_input(prompt).lower()
if careful:
return answer in ('yes', 'y')
else:
return answer not in ('no', 'n')
except EOFError:
print >>sys.stderr # Put Newline since Enter was never pressed
# TODO:
# Figure out why on OS X the raw_input keeps raising
# EOFError and is never able to reset and get more input
# Hint: Look at how IPython implements their console
default = True
if careful:
default = False
return default
def __call__(self, twitter, options):
action = actions.get(options['action'], NoSuchAction)()
try:
doAction = lambda : action(twitter, options)
if (options['refresh'] and isinstance(action, StatusAction)):
while True:
doAction()
time.sleep(options['refresh_rate'])
else:
doAction()
except KeyboardInterrupt:
print >>sys.stderr, '\n[Keyboard Interrupt]'
pass
class NoSuchActionError(Exception):
pass
class NoSuchAction(Action):
def __call__(self, twitter, options):
raise NoSuchActionError("No such action: %s" %(options['action']))
def printNicely(string):
if sys.stdout.encoding:
print string.encode(sys.stdout.encoding, 'replace')
else:
print string.encode('utf-8')
class StatusAction(Action):
def __call__(self, twitter, options):
statuses = self.getStatuses(twitter, options)
sf = get_status_formatter(options)
for status in statuses:
statusStr = sf(status, options)
if statusStr.strip():
printNicely(statusStr)
class AdminAction(Action):
def __call__(self, twitter, options):
if not (options['extra_args'] and options['extra_args'][0]):
raise TwitterError("You need to specify a user (screen name)")
af = get_admin_formatter(options)
try:
user = self.getUser(twitter, options['extra_args'][0])
except TwitterError, e:
print "There was a problem following or leaving the specified user."
print "You may be trying to follow a user you are already following;"
print "Leaving a user you are not currently following;"
print "Or the user may not exist."
print "Sorry."
print
print e
else:
printNicely(af(options['action'], user))
class FriendsAction(StatusAction):
def getStatuses(self, twitter, options):
return reversed(twitter.statuses.friends_timeline(count=options["length"]))
class PublicAction(StatusAction):
def getStatuses(self, twitter, options):
return reversed(twitter.statuses.public_timeline(count=options["length"]))
class RepliesAction(StatusAction):
def getStatuses(self, twitter, options):
return reversed(twitter.statuses.replies(count=options["length"]))
class FollowAction(AdminAction):
def getUser(self, twitter, user):
return twitter.friendships.create(id=user)
class LeaveAction(AdminAction):
def getUser(self, twitter, user):
return twitter.friendships.destroy(id=user)
class SetStatusAction(Action):
def __call__(self, twitter, options):
statusTxt = (u" ".join(options['extra_args'])
if options['extra_args']
else unicode(raw_input("message: ")))
status = (statusTxt.encode('utf8', 'replace'))
twitter.statuses.update(status=status)
class TwitterShell(Action):
def render_prompt(self, prompt):
'''Parses the `prompt` string and returns the rendered version'''
prompt = prompt.strip("'").replace("\\'","'")
for colour in ansi.COLOURS_NAMED:
if '[%s]' %(colour) in prompt:
prompt = prompt.replace(
'[%s]' %(colour), ansi.cmdColourNamed(colour))
prompt = prompt.replace('[R]', ansi.cmdReset())
return prompt
def __call__(self, twitter, options):
prompt = self.render_prompt(options.get('prompt', 'twitter> '))
while True:
options['action'] = ""
try:
args = raw_input(prompt).split()
parse_args(args, options)
if not options['action']:
continue
elif options['action'] == 'exit':
raise SystemExit(0)
elif options['action'] == 'shell':
print >>sys.stderr, 'Sorry Xzibit does not work here!'
continue
elif options['action'] == 'help':
print >>sys.stderr, '''\ntwitter> `action`\n
The shell accepts all the command line actions along with:
 exit    Leave the twitter shell (^D may also be used)
 Full command line help is appended below for your convenience.'''
Action()(twitter, options)
options['action'] = ''
except NoSuchActionError, e:
print >>sys.stderr, e
except KeyboardInterrupt:
print >>sys.stderr, '\n[Keyboard Interrupt]'
except EOFError:
print >>sys.stderr
leaving = self.ask(subject='Leave')
if not leaving:
print >>sys.stderr, 'Excellent!'
else:
raise SystemExit(0)
class HelpAction(Action):
def __call__(self, twitter, options):
print __doc__
actions = {
'follow' : FollowAction,
'friends' : FriendsAction,
'help' : HelpAction,
'leave' : LeaveAction,
'public' : PublicAction,
'replies' : RepliesAction,
'set' : SetStatusAction,
'shell' : TwitterShell,
}
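# Dispatch sketch: Action() resolves options['action'] through this table, e.g.
#   options['action'] = 'public'
#   Action()(twitter, options)   # instantiates PublicAction and runs it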
def loadConfig(filename):
options = dict(OPTIONS)
if os.path.exists(filename):
cp = SafeConfigParser()
cp.read([filename])
for option in ('email', 'password', 'format', 'prompt'):
if cp.has_option('twitter', option):
options[option] = cp.get('twitter', option)
return options
def main(args=sys.argv[1:]):
arg_options = {}
try:
parse_args(args, arg_options)
except GetoptError, e:
print >> sys.stderr, "I can't do that, %s." %(e)
print >> sys.stderr
raise SystemExit(1)
config_options = loadConfig(
arg_options.get('config_filename') or OPTIONS.get('config_filename'))
# Apply the various options in order, the most important applied last.
# Defaults first, then what's read from config file, then command-line
# arguments.
options = dict(OPTIONS)
for d in config_options, arg_options:
for k,v in d.items():
if v: options[k] = v
if options['refresh'] and options['action'] not in (
'friends', 'public', 'replies'):
print >> sys.stderr, "You can only refresh the friends, public, or replies actions."
print >> sys.stderr, "Use 'twitter -h' for help."
raise SystemExit(1)
if options['email'] and not options['password']:
options['password'] = getpass("Twitter password: ")
twitter = Twitter(options['email'], options['password'], agent=AGENT_STR)
try:
Action()(twitter, options)
except NoSuchActionError, e:
print >>sys.stderr, e
raise SystemExit(1)
except TwitterError, e:
print >> sys.stderr, e.args[0]
print >> sys.stderr, "Use 'twitter -h' for help."
raise SystemExit(1)
| avsm/lifedb-plugins | Twitter/twitter/cmdline.py | Python | gpl-2.0 | 15,027 |
#! /usr/bin/python
######
# Copyright 2007-2009 Sun Microsystems, Inc. All Rights Reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER
#
# This code is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2
# only, as published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License version 2 for more details (a copy is
# included in the LICENSE file that accompanied this code).
#
# You should have received a copy of the GNU General Public License
# version 2 along with this work; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
#
# Please contact Sun Microsystems, Inc., 16 Network Circle, Menlo
# Park, CA 94025 or visit www.sun.com if you need additional
# information or have any questions.
#####
import os
import cPickle as C
import pyaura.bridge as B
from pyaura.lib import j2py
DATA_PREFIX= "out500k"
class SimpleStats():
"""
Generate simple statistics and plots for a set of tags.
"""
def __init__(self, prefix=DATA_PREFIX, regHost="brannigan"):
self.tags = C.load(open(os.path.join(prefix, "alltags.dump")))
self._dataFileCache = DataFileCache(prefix, self.tags)
self._is_plot_init = False
if regHost!=None:
self._aB = B.AuraBridge(regHost=regHost)
else:
self._aB = None
def _init_plotting(self, backend="MacOSX"):
if not self._is_plot_init:
self._is_plot_init = True
import matplotlib as M
M.use(backend)
def plot_taglen_hist(self):
self._init_plotting()
import pylab
tag_len = [len(x) for x in self.tags.keys() ]
pylab.hist(tag_len)
def plot_tagpop_itemcnt(self, itemtype="a"):
"""
        Plot the histogram of the number of tagged items for each tag
"""
self._init_plotting()
import pylab
itemcnt = [x.get_itemcount(itemtype) for x in self.tags.values()]
pylab.hist(itemcnt, bins=1000)
def plot_tagpop_appcnt(self, itemtype="a"):
"""
        Plot the histogram of application counts for each tag
"""
self._init_plotting()
import pylab
appcnt = [x.get_totals(itemtype) for x in self.tags.values()]
pylab.hist(appcnt, bins=1000)
def get_average_len(self):
w_running_tot=0
running_tot=0
tot_cnt=0
nbr_cnt=0
for t in self.tags.values():
w_running_tot+=len(t.name)*t.totals['artist']
running_tot+=len(t.name)
tot_cnt+=t.totals['artist']
nbr_cnt+=1
print "Weighted avg:\t%0.4f" % (float(w_running_tot)/tot_cnt)
print "Avg:\t\t%0.4f" % (float(running_tot)/nbr_cnt)
def find_most_co_ocurr(self, tagname, n=10):
vals = {}
for t in self.tags:
vals[t] = self.co_ocurr(tagname, t)
return sorted(vals.items(), key=lambda (k,v): (v,k), reverse=True)
def find_similar(self, tagname, n=10):
return j2py( self._aB.mdb.find_similar_rawtags(tagname, n) )
def co_ocurr(self, tagname1, tagname2):
"""
        Get relative co-occurrence (Jaccard coefficient)
"""
tagdata1 = self._dataFileCache.get(tagname1)
tagdata2 = self._dataFileCache.get(tagname2)
kt1 = frozenset(tagdata1['artist'].keys())
kt2 = frozenset(tagdata2['artist'].keys())
return float(len(kt1.intersection(kt2))) / len(kt1.union(kt2))
class DataFileCache():
def __init__(self, prefix, taginfo):
self._prefix = prefix
self._taginfo = taginfo
self._datafiles = {}
def get(self, tagname):
if tagname not in self._taginfo:
raise KeyError("Tag '%s' is not in the db" % tagname)
file_id = self._taginfo[tagname].file_location
if not file_id in self._datafiles:
print " > Loading datafile %s" % file_id
path = os.path.join(self._prefix, "tagcut-%s.tagdata.dump" % file_id)
self._datafiles[file_id] = C.load(open(path))
return self._datafiles[file_id][tagname]
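# Usage sketch (assumes the pickled dumps exist under DATA_PREFIX; regHost=None
# skips the Aura registry connection for offline analysis):
# stats = SimpleStats(regHost=None)
# print "rock/metal overlap: %0.4f" % stats.co_ocurr("rock", "metal")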
| SunLabsAST/AURA | Bridge/pyaura/tagclustering/simplestats.py | Python | gpl-2.0 | 4,439 |
import codecs
import collections
import os
import shutil
from PyQt4 import QtCore, QtGui, QtSvg
from Code.QT import Colocacion
from Code.QT import Controles
from Code.QT import FormLayout
from Code.QT import Iconos
from Code.QT import QTVarios
from Code import TrListas
from Code import VarGen
from Code.Constantes import *
class ConjuntoPiezas:
def __init__(self, nombre):
self.nombre = nombre
self.dicPiezas = self.leePiezas(nombre)
def leePiezas(self, nombre):
try:
dic = {}
for pieza in "rnbqkpRNBQKP":
fich = os.path.join("Pieces", nombre, "%s%s.svg" % ("w" if pieza.isupper() else "b", pieza.lower()))
f = codecs.open(fich, "r", 'utf-8', 'ignore')
qb = QtCore.QByteArray(f.read())
f.close()
dic[pieza] = qb
return dic
except:
return self.leePiezas("Cburnett")
def render(self, pieza):
return QtSvg.QSvgRenderer(self.dicPiezas[pieza])
def widget(self, pieza):
w = QtSvg.QSvgWidget()
w.load(self.dicPiezas[pieza])
return w
def pixmap(self, pieza, tam=24):
pm = QtGui.QPixmap(tam, tam)
pm.fill(QtCore.Qt.transparent)
render = self.render(pieza)
painter = QtGui.QPainter()
painter.begin(pm)
render.render(painter)
painter.end()
return pm
def label(self, owner, pieza, tam):
pm = self.pixmap(pieza, tam)
lb = Controles.LB(owner)
lb.ponImagen(pm)
lb.pieza = pieza
lb.tam_pieza = tam
return lb
def change_label(self, lb, tam):
        if lb.tam_pieza != tam:
pm = self.pixmap(lb.pieza, tam)
lb.ponImagen(pm)
def icono(self, pieza):
icon = QtGui.QIcon(self.pixmap(pieza, 32))
return icon
def cursor(self, pieza):
return QtGui.QCursor(self.pixmap(pieza))
class TodasPiezas:
def __init__(self):
self.dicConjuntos = {}
def selecciona(self, nombre):
if nombre in self.dicConjuntos:
return self.dicConjuntos[nombre]
else:
return self.nuevo(nombre)
def nuevo(self, nombre):
self.dicConjuntos[nombre] = ConjuntoPiezas(nombre)
return self.dicConjuntos[nombre]
def icono(self, pieza, nombre):
fich = os.path.join("Pieces", nombre, "%s%s.svg" % ("w" if pieza.isupper() else "b", pieza.lower()))
f = codecs.open(fich, "r", 'utf-8', 'ignore')
qb = QtCore.QByteArray(f.read())
f.close()
pm = QtGui.QPixmap(32, 32)
pm.fill(QtCore.Qt.transparent)
render = QtSvg.QSvgRenderer(qb)
painter = QtGui.QPainter()
painter.begin(pm)
render.render(painter)
painter.end()
icon = QtGui.QIcon(pm)
return icon
def iconoDefecto(self, pieza):
return self.icono(pieza, "Cburnett")
def saveAllPNG(self, nombre, px):
for pieza in "pnbrqk":
for color in "wb":
fich = os.path.join("Pieces", nombre, "%s%s.svg" % (color, pieza))
f = codecs.open(fich, "r", 'utf-8', 'ignore')
qb = QtCore.QByteArray(f.read())
f.close()
pm = QtGui.QPixmap(px, px)
pm.fill(QtCore.Qt.transparent)
render = QtSvg.QSvgRenderer(qb)
painter = QtGui.QPainter()
painter.begin(pm)
render.render(painter)
painter.end()
pm.save("IntFiles/Figs/%s%s.png" % (color, pieza), "PNG")
HIDE, GREY, CHECKER, SHOW = range(4)
class BlindfoldConfig:
def __init__(self, nomPiezasOri, dicPiezas=None):
self.nomPiezasOri = nomPiezasOri
if dicPiezas is None:
self.restore()
else:
self.dicPiezas = dicPiezas
def ficheroBase(self, pz, siWhite):
pz = pz.lower()
if siWhite:
pzT = pz.upper()
else:
pzT = pz
tipo = self.dicPiezas[pzT]
if tipo == SHOW:
pz = ("w" if siWhite else "b") + pz
return os.path.join("Pieces", self.nomPiezasOri, pz + ".svg")
if tipo == HIDE:
fich = "h"
elif tipo == GREY:
fich = "g"
elif tipo == CHECKER:
fich = "w" if siWhite else "b"
return os.path.join("././IntFiles", "blind_%s.svg" % fich)
def restore(self):
self.dicPiezas = VarGen.configuracion.leeVariables("BLINDFOLD")
if not self.dicPiezas:
for pieza in "rnbqkpRNBQKP":
self.dicPiezas[pieza] = HIDE
def save(self):
VarGen.configuracion.escVariables("BLINDFOLD", self.dicPiezas)
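# Config sketch: dicPiezas maps piece letters (uppercase = white) to a display
# mode, e.g. hiding everything except the two kings:
# cfg = BlindfoldConfig("Cburnett", dicPiezas={pz: (SHOW if pz in "kK" else HIDE)
#                                              for pz in "rnbqkpRNBQKP"})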
class Blindfold(ConjuntoPiezas):
def __init__(self, nomPiezasOri, tipo=kBlindfoldConfig):
self.nombre = "BlindFold"
self.carpetaBF = os.path.join(VarGen.configuracion.carpeta, "BlindFoldPieces")
self.carpetaPZ = "./IntFiles"
self.tipo = tipo
self.reset(nomPiezasOri)
    def leePiezas(self, nombre=None):  # nombre kept for backwards compatibility
dic = {}
for pieza in "rnbqkpRNBQKP":
fich = os.path.join(self.carpetaBF, "%s%s.svg" % ("w" if pieza.isupper() else "b", pieza.lower()))
f = codecs.open(fich, "r", 'utf-8', 'ignore')
qb = QtCore.QByteArray(f.read())
f.close()
dic[pieza] = qb
return dic
def reset(self, nomPiezasOri):
if self.tipo == kBlindfoldConfig:
dicTPiezas = None
else:
w = b = HIDE
if self.tipo == kBlindfoldWhite:
b = SHOW
elif self.tipo == kBlindfoldBlack:
w = SHOW
dicTPiezas = {}
for pieza in "rnbqkp":
dicTPiezas[pieza] = b
dicTPiezas[pieza.upper()] = w
self.configBF = BlindfoldConfig(nomPiezasOri, dicPiezas=dicTPiezas)
if not os.path.isdir(self.carpetaBF):
os.mkdir(self.carpetaBF)
for siWhite in (True, False):
for pieza in "rnbqkp":
ori = self.configBF.ficheroBase(pieza, siWhite)
bs = "w" if siWhite else "b"
dest = os.path.join(self.carpetaBF, "%s%s.svg" % (bs, pieza))
shutil.copy(ori, dest)
self.dicPiezas = self.leePiezas()
class WBlindfold(QTVarios.WDialogo):
def __init__(self, owner, nomPiezasOri):
titulo = _("Blindfold") + " - " + _("Configuration")
icono = Iconos.Ojo()
extparam = "wblindfold"
QTVarios.WDialogo.__init__(self, owner, titulo, icono, extparam)
self.config = BlindfoldConfig(nomPiezasOri)
self.nomPiezasOri = nomPiezasOri
lbWhite = Controles.LB(self, _("White")).ponTipoLetra(peso=75, puntos=10)
lbBlack = Controles.LB(self, _("Black")).ponTipoLetra(peso=75, puntos=10)
self.dicWidgets = collections.OrderedDict()
self.dicImgs = {}
liOpciones = (
(_("Hide"), HIDE),
(_("Green"), GREY),
(_("Checker"), CHECKER),
(_("Show"), SHOW),
)
dicNomPiezas = TrListas.dicNomPiezas()
def haz(pz):
tpW = self.config.dicPiezas[pz.upper()]
tpB = self.config.dicPiezas[pz]
lbPZw = Controles.LB(self)
cbPZw = Controles.CB(self, liOpciones, tpW).capturaCambiado(self.reset)
lbPZ = Controles.LB(self, dicNomPiezas[pz.upper()]).ponTipoLetra(peso=75, puntos=10)
lbPZb = Controles.LB(self)
cbPZb = Controles.CB(self, liOpciones, tpB).capturaCambiado(self.reset)
self.dicWidgets[pz] = [lbPZw, cbPZw, lbPZ, lbPZb, cbPZb, None, None]
for pz in "kqrbnp":
haz(pz)
btAllW = Controles.PB(self, _("All White"), self.allWhite, plano=False)
self.cbAll = Controles.CB(self, liOpciones, HIDE)
btAllB = Controles.PB(self, _("All Black"), self.allBlack, plano=False)
btSwap = Controles.PB(self, _("Swap"), self.swap, plano=False)
liAcciones = ((_("Save"), Iconos.Grabar(), "grabar"), None,
(_("Cancel"), Iconos.Cancelar(), "cancelar"), None,
(_("Configurations"), Iconos.Opciones(), "configurations"), None,
)
tb = Controles.TB(self, liAcciones)
ly = Colocacion.G()
ly.controlc(lbWhite, 0, 1).controlc(lbBlack, 0, 3)
fila = 1
for pz in "kqrbnp":
lbPZw, cbPZw, lbPZ, lbPZb, cbPZb, tipoW, tipoB = self.dicWidgets[pz]
ly.control(cbPZw, fila, 0)
ly.controlc(lbPZw, fila, 1)
ly.controlc(lbPZ, fila, 2)
ly.controlc(lbPZb, fila, 3)
ly.control(cbPZb, fila, 4)
fila += 1
ly.filaVacia(fila, 20)
fila += 1
ly.controld(btAllW, fila, 0, 1, 2)
ly.control(self.cbAll, fila, 2)
ly.control(btAllB, fila, 3, 1, 2)
ly.controlc(btSwap, fila + 1, 0, 1, 5)
ly.margen(20)
layout = Colocacion.V().control(tb).otro(ly)
self.setLayout(layout)
self.reset()
def procesarTB(self):
getattr(self, self.sender().clave)()
def closeEvent(self):
self.guardarVideo()
def grabar(self):
self.guardarVideo()
self.config.save()
self.accept()
def cancelar(self):
self.guardarVideo()
self.reject()
def configurations(self):
dic = VarGen.configuracion.leeVariables("BLINDFOLD")
dicConf = collections.OrderedDict()
for k in dic:
if k.startswith("_"):
cl = k[1:]
dicConf[cl] = dic[k]
menu = QTVarios.LCMenu(self)
for k in dicConf:
menu.opcion((True, k), k, Iconos.PuntoAzul())
menu.separador()
menu.opcion((True, None), _("Save current configuration"), Iconos.PuntoVerde())
if dicConf:
menu.separador()
menudel = menu.submenu(_("Remove"), Iconos.Delete())
for k in dicConf:
menudel.opcion((False, k), k, Iconos.PuntoNegro())
resp = menu.lanza()
if resp is None:
return
si, cual = resp
if si:
if cual:
dpz = dic["_" + cual]
for pz in "kqrbnp":
lbPZw, cbPZw, lbPZ, lbPZb, cbPZb, tipoW, tipoB = self.dicWidgets[pz]
cbPZw.ponValor(dpz[pz.upper()])
cbPZb.ponValor(dpz[pz])
self.reset()
else:
liGen = [(None, None)]
liGen.append((_("Name") + ":", ""))
resultado = FormLayout.fedit(liGen, title=_("Save current configuration"), parent=self, anchoMinimo=460,
icon=Iconos.TutorialesCrear())
if resultado is None:
return None
accion, liResp = resultado
name = liResp[0].strip()
if not name:
return None
dic["_%s" % name] = self.config.dicPiezas
VarGen.configuracion.escVariables("BLINDFOLD", dic)
else:
del dic["_%s" % cual]
VarGen.configuracion.escVariables("BLINDFOLD", dic)
def allWhite(self):
tp = self.cbAll.valor()
for pzB in "rnbqkp":
lbPZw, cbPZw, lbPZ, lbPZb, cbPZb, tipoW, tipoB = self.dicWidgets[pzB]
cbPZw.ponValor(tp)
self.reset()
def allBlack(self):
tp = self.cbAll.valor()
for pzB in "rnbqkp":
lbPZw, cbPZw, lbPZ, lbPZb, cbPZb, tipoW, tipoB = self.dicWidgets[pzB]
cbPZb.ponValor(tp)
self.reset()
def swap(self):
for pzB in "rnbqkp":
lbPZw, cbPZw, lbPZ, lbPZb, cbPZb, tipoW, tipoB = self.dicWidgets[pzB]
tpB = cbPZb.valor()
tpW = cbPZw.valor()
cbPZb.ponValor(tpW)
cbPZw.ponValor(tpB)
self.reset()
def reset(self):
for pzB in "kqrbnp":
lbPZw, cbPZw, lbPZ, lbPZb, cbPZb, tipoW, tipoB = self.dicWidgets[pzB]
tipoNv = cbPZw.valor()
if tipoW != tipoNv:
pzW = pzB.upper()
self.config.dicPiezas[pzW] = tipoNv
                self.dicWidgets[pzB][5] = tipoNv  # must be the lowercase pzB key
fich = self.config.ficheroBase(pzB, True)
if fich in self.dicImgs:
pm = self.dicImgs[fich]
else:
pm = QTVarios.fsvg2pm(fich, 32)
self.dicImgs[fich] = pm
lbPZw.ponImagen(pm)
tipoNv = cbPZb.valor()
if tipoB != tipoNv:
self.config.dicPiezas[pzB] = tipoNv
self.dicWidgets[pzB][6] = tipoNv
fich = self.config.ficheroBase(pzB, False)
if fich in self.dicImgs:
pm = self.dicImgs[fich]
else:
pm = QTVarios.fsvg2pm(fich, 32)
self.dicImgs[fich] = pm
lbPZb.ponImagen(pm)
| lukasmonk/lucaschess | Code/QT/Piezas.py | Python | gpl-2.0 | 13,281 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from __future__ import absolute_import, print_function
from datetime import date
from invenio.base.i18n import _
from invenio.testsuite import make_test_suite, run_test_suite
from flask import url_for
from .helpers import DepositionTestCase
class SimpleRecordTest(DepositionTestCase):
def setUp(self):
self.clear('simple')
from invenio.modules.deposit.form import WebDepositForm
from invenio.modules.deposit import fields
from invenio.modules.deposit import field_widgets
from invenio.modules.deposit.types import SimpleRecordDeposition
class SimpleRecordTestForm(WebDepositForm):
keywords = fields.DynamicFieldList(
fields.TextField(
widget_classes='form-control',
widget=field_widgets.ColumnInput(class_="col-xs-10"),
),
label='Keywords',
add_label='Add another keyword',
icon='fa fa-tags fa-fw',
widget_classes='',
min_entries=1,
)
publication_date = fields.Date(
label=_('Publication date'),
icon='fa fa-calendar fa-fw',
description='Required. Format: YYYY-MM-DD.',
default=date.today(),
validators=[],
widget=field_widgets.date_widget,
widget_classes='input-sm',
export_key='imprint.date',
)
class simple(SimpleRecordDeposition):
name = "Simple Test"
name_plural = "Simple Tests"
group = "Tests"
draft_definitions = {
'default': SimpleRecordTestForm,
}
@classmethod
def process_sip_metadata(cls, deposition, metadata):
self.assert_process_metadata(deposition, metadata)
self.register(simple)
def tearDown(self):
self.unregister()
def assert_process_metadata(self, deposition, metadata):
pass
def test_registration(self):
self.assert401(self.client.get(url_for('webdeposit.index'),
follow_redirects=True))
self.login("admin", "")
res = self.client.get(url_for('webdeposit.index'))
self.assert200(res)
assert "Tests" in res.data
assert "Simple Test" in res.data
self.assert200(self.client.get(url_for(
'webdeposit.deposition_type_index', deposition_type='simple'
)))
def test_create_delete(self):
self.login("admin", "")
dep_id = self.create('simple')
self.assert200(self.client.get(url_for(
'webdeposit.run', deposition_type='simple', uuid=dep_id
)))
self.assert200(self.client.get(
url_for('webdeposit.delete',
deposition_type='simple', uuid=dep_id),
follow_redirects=True)
)
TEST_SUITE = make_test_suite(
SimpleRecordTest,
)
if __name__ == "__main__":
run_test_suite(TEST_SUITE)
| MSusik/invenio | invenio/modules/deposit/testsuite/test_type_simplerecord.py | Python | gpl-2.0 | 3,865 |
# vim:fileencoding=utf-8:noet
try:
import pygit2 as git
class Repository(object):
		__slots__ = ('directory',)
def __init__(self, directory):
self.directory = directory
def _repo(self):
return git.Repository(self.directory)
def status(self, path=None):
'''Return status of repository or file.
Without file argument: returns status of the repository:
:First column: working directory status (D: dirty / space)
:Second column: index status (I: index dirty / space)
			:Third column: presence of untracked files (U: untracked files / space)
:None: repository clean
With file argument: returns status of this file. Output is
equivalent to the first two columns of "git status --porcelain"
(except for merge statuses as they are not supported by libgit2).
'''
if path:
try:
status = self._repo().status_file(path)
except (KeyError, ValueError):
return None
if status == git.GIT_STATUS_CURRENT:
return None
else:
if status & git.GIT_STATUS_WT_NEW:
return '??'
if status & git.GIT_STATUS_IGNORED:
return '!!'
if status & git.GIT_STATUS_INDEX_NEW:
index_status = 'A'
elif status & git.GIT_STATUS_INDEX_DELETED:
index_status = 'D'
elif status & git.GIT_STATUS_INDEX_MODIFIED:
index_status = 'M'
else:
index_status = ' '
if status & git.GIT_STATUS_WT_DELETED:
wt_status = 'D'
elif status & git.GIT_STATUS_WT_MODIFIED:
wt_status = 'M'
else:
wt_status = ' '
return index_status + wt_status
else:
wt_column = ' '
index_column = ' '
untracked_column = ' '
for status in self._repo().status().values():
if status & git.GIT_STATUS_WT_NEW:
untracked_column = 'U'
continue
if status & (git.GIT_STATUS_WT_DELETED
| git.GIT_STATUS_WT_MODIFIED):
wt_column = 'D'
if status & (git.GIT_STATUS_INDEX_NEW
| git.GIT_STATUS_INDEX_MODIFIED
| git.GIT_STATUS_INDEX_DELETED):
index_column = 'I'
r = wt_column + index_column + untracked_column
				return r if r != '   ' else None
def branch(self):
try:
ref = self._repo().lookup_reference('HEAD')
except KeyError:
return None
try:
target = ref.target
except ValueError:
return '[DETACHED HEAD]'
if target.startswith('refs/heads/'):
return target[11:]
else:
return '[DETACHED HEAD]'
except ImportError:
from subprocess import Popen, PIPE
def readlines(cmd, cwd):
p = Popen(cmd, shell=False, stdout=PIPE, stderr=PIPE, cwd=cwd)
p.stderr.close()
with p.stdout:
for line in p.stdout:
yield line[:-1].decode('utf-8')
class Repository(object):
__slots__ = ('directory',)
def __init__(self, directory):
self.directory = directory
def _gitcmd(self, *args):
return readlines(('git',) + args, self.directory)
def status(self, path=None):
if path:
try:
return next(self._gitcmd('status', '--porcelain', '--ignored', '--', path))[:2]
except StopIteration:
return None
else:
wt_column = ' '
index_column = ' '
untracked_column = ' '
for line in self._gitcmd('status', '--porcelain'):
if line[0] == '?':
untracked_column = 'U'
continue
elif line[0] == '!':
continue
if line[0] != ' ':
index_column = 'I'
if line[1] != ' ':
wt_column = 'D'
r = wt_column + index_column + untracked_column
				return r if r != '   ' else None
def branch(self):
for line in self._gitcmd('branch', '-l'):
if line[0] == '*':
return line[2:]
return None
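# Usage sketch (hypothetical path; either backend above behaves the same):
# repo = Repository('/path/to/clone')
# repo.branch()   # e.g. 'master', or '[DETACHED HEAD]'
# repo.status()   # e.g. 'DIU', ' I ', or None for a clean tree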
| DaneelOliwan/dotfiles-vim | .vim/bundle/powerline/powerline/lib/vcs/git.py | Python | gpl-2.0 | 3,605 |
from random import randint, shuffle, choice
from time import clock
from _utils import InvalidTableException, copy_table
def solve(table):
table = _check_table(table)
if _is_done(table):
return table
else:
coordinates = _get_minimum_array(table)
trials = table[coordinates[0]][coordinates[1]]["possibilities"][:]
shuffle(trials)
for elem in trials:
try:
ctable = copy_table(table)
ctable[coordinates[0]][coordinates[1]]["possibilities"] = [elem]
# print_table(ctable)
# print
return solve(ctable)
except InvalidTableException:
pass
# print "subiu"
raise InvalidTableException("Nao ha solucao")
def _is_done(table):
for line in table:
for elem in line:
if len(elem["possibilities"]) > 1:
return False
elif len(elem["possibilities"]) == 0:
raise InvalidTableException("Celula Vazia")
return True
def _check_table(table):
#find unchecked
#clean other possibilities
#raise exception if some possibility becomes void
while not _is_check_done(table):
unchecked_set = _find_unchecked(table)
for value,x,y in unchecked_set:
_fix_house(table,value,x,y)
_fix_column(table,value,x,y)
_fix_line(table,value,x,y)
table[y][x]["checked"] = True
return table
def _is_check_done(table):
for line in table:
for elem in line:
if len(elem["possibilities"]) == 1 and not elem["checked"]:
return False
return True
def _find_unchecked(table):
result = set()
for i, line in enumerate(table):
for j, elem in enumerate(line):
if len(elem["possibilities"]) == 1 and not elem["checked"]:
result.add((elem["possibilities"][0],j,i))
return result
def _fix_house(table,value,x,y):
x0 = (x/3)*3 + 1
y0 = (y/3)*3 + 1
to_fix = [(x0-1,y0-1),(x0-1,y0),(x0-1,y0+1),(x0,y0-1),(x0,y0),(x0,y0+1),(x0+1,y0-1),(x0+1,y0),(x0+1,y0+1)]
to_fix.remove((x,y))
for i,j in to_fix:
_fix(table,value,i,j)
def _fix_column(table,value,x,y):
columns = range(len(table))
columns.remove(y)
for j in columns:
_fix(table,value,x,j)
def _fix_line(table,value,x,y):
lines = range(len(table))
lines.remove(x)
for i in lines:
_fix(table,value,i,y)
def _fix(table, value, x, y):
try:
table[y][x]["possibilities"].remove(value)
except ValueError:
pass
if len(table[y][x]["possibilities"]) < 1:
raise InvalidTableException("Nao deu!")
def _get_minimum_array(table):
result = list()
size = len(table)
for i, line in enumerate(table):
for j, elem in enumerate(line):
if not elem["checked"]:
if not result:
result.append((i,j))
size = len(elem["possibilities"])
elif len(elem["possibilities"]) == size:
result.append((i,j))
elif len(elem["possibilities"]) < size:
result = list()
result.append((i,j))
size = len(elem["possibilities"])
return choice(result)
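# Usage sketch (assumed cell format, matching the accesses above): each cell is
# a dict {"possibilities": <list of candidates>, "checked": <bool>}, e.g. an
# empty 9x9 grid:
# table = [[{"possibilities": range(1, 10), "checked": False}
#           for _ in range(9)] for _ in range(9)]
# solved = solve(table)   # raises InvalidTableException when unsolvable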
| jonasrla/web_sudoku | helper/solver.py | Python | gpl-2.0 | 3,362 |
# -*- coding: utf-8 -*-
from sqlalchemy import text
from listenbrainz import db
class User(object):
""" User class required by the api-compat """
def __init__(self, id, created, name, api_key):
self.id = id
self.created = created
self.name = name
self.api_key = api_key
@staticmethod
def get_id(mb_id):
with db.engine.connect() as connection:
result = connection.execute(text(""" SELECT id FROM "user" WHERE
musicbrainz_id = :mb_id """), {"mb_id": mb_id})
row = result.fetchone()
if row:
return row[0]
return None
@staticmethod
def load_by_name(mb_id):
with db.engine.connect() as connection:
result = connection.execute(text(""" SELECT id, created, musicbrainz_id, auth_token \
FROM "user" \
WHERE musicbrainz_id = :mb_id """), {"mb_id": mb_id})
row = result.fetchone()
if row:
return User(row['id'], row['created'], row['musicbrainz_id'], row['auth_token'])
return None
@staticmethod
def load_by_id(serial):
with db.engine.connect() as connection:
result = connection.execute(text(""" SELECT id, created, musicbrainz_id, auth_token \
FROM "user"
WHERE id=:id """), {"id": serial})
row = result.fetchone()
if row:
return User(row['id'], row['created'], row['musicbrainz_id'], row['auth_token'])
return None
@staticmethod
def load_by_sessionkey(session_key, api_key):
with db.engine.connect() as connection:
result = connection.execute(text("""
SELECT "user".id
, "user".created
, "user".musicbrainz_id
, "user".auth_token
FROM api_compat.session, "user"
WHERE api_key = :api_key AND sid = :sk AND "user".id = session.user_id
"""), {
"api_key": api_key,
"sk": session_key
})
row = result.fetchone()
if row:
return User(row['id'], row['created'], row['musicbrainz_id'], row['auth_token'])
return None
@staticmethod
def get_play_count(user_id, listenstore):
""" Get playcount from the given user name.
"""
user = User.load_by_id(user_id)
return listenstore.get_listen_count_for_user(user.id)
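# Usage sketch (requires an initialised db.engine and an existing user row):
# user = User.load_by_name('some_musicbrainz_id')
# if user is not None:
#     print(user.api_key)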
| metabrainz/listenbrainz-server | listenbrainz/db/lastfm_user.py | Python | gpl-2.0 | 2,716 |
# datepicker-1.py
from wax import *
from wax.tools.datepicker import DatePicker
import datetime
import time
class MainFrame(VerticalFrame):
def Body(self):
p1 = HorizontalPanel(self)
dp1 = DatePicker(p1)
p1.AddComponent(dp1)
p1.AddSpace(10)
b1 = Button(p1, "Add 1 day", event=self.AddOneDay)
p1.AddComponent(b1)
p1.Pack()
self.AddComponent(p1, expand='h', border=4)
p2 = HorizontalPanel(self)
dp2 = DatePicker(p2, style='dropdown', show_century=1)
p2.AddComponent(dp2)
p2.AddSpace(10)
b2 = Button(p2, "Yesterday", event=self.SetToYesterday)
p2.AddComponent(b2)
p2.Pack()
self.AddComponent(p2, expand='h', border=4)
self.Pack()
self.BackgroundColor = p1.BackgroundColor
self.dp1 = dp1
self.dp2 = dp2
# restrict dp2's range to current year
thisyear = time.localtime(time.time())[0]
dp2.SetRange((thisyear, 1, 1), (thisyear, 12, 31))
def AddOneDay(self, event):
self.dp1.Inc()
print "Date set to:", self.dp1.Value
def SetToYesterday(self, event):
now = time.localtime(time.time())
self.dp2.Value = now[:3] # tuple: (year, month, day)
self.dp2.Dec()
app = Application(MainFrame, title='datepicker-1')
app.Run()
| MSMBA/msmba-workflow | msmba-workflow/srclib/wax/examples/datepicker-1.py | Python | gpl-2.0 | 1,480 |
# -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
#
# Copyright (C) 2006 Lukáš Lalinský
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
| Sophist-UK/Sophist_picard | picard/browser/__init__.py | Python | gpl-2.0 | 823 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class User(object):
def __init__(self):
self.first_name = None
self.last_name = None
self.username = None
self.password = None
def set_full_name(self, name):
        # Split on whitespace: the first token becomes the first name,
        # the remainder (if any) becomes the last name.
        parts = name.split()
        if parts:
            self.first_name = parts[0]
            self.last_name = " ".join(parts[1:]) or None
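# e.g.:
# u = User()
# u.set_full_name("Ada Lovelace")  # first_name -> "Ada", last_name -> "Lovelace"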
| mtpajula/ijonmap | core/users/user.py | Python | gpl-2.0 | 308 |
# UPDATE THIS SECRET INFORMATION ! #
# UNCOMMENT THIS FILE IN .gitignore BEFORE YOU COMMIT! #
# SuperUser Default Password
SUPER_USER_PASSWORD = 'CHANGEME'
# Log into your Loggly account, visit: https://<Username>.loggly.com/tokens and copy the token here
LOGGLY_TOKEN = 'CHANGEME'
# Generate a very secure Django Secret to replace this one
DJANGO_SECRET = 'CHANGEME'
# OPTIONAL #
# Replace the following with a copy of your environment variables if you wish to run the code locally
# The variables will only be available after you first deploy an app to Bluemix, whether the deployment succeeds or not.
LOCALDEV_VCAP = {
"cloudamqp": [
{
"name": "CloudAMQP-sa",
"label": "cloudamqp",
"plan": "lemur",
"credentials": {
"uri": "amqp://CHANGEME:CHANGEME/CHANGEME",
"http_api_uri": "https://CHANGEME:CHANGEME/api/"
}
}
],
"user-provided": [
{
"name": "PostgreSQL by Compose-lj",
"label": "user-provided",
"credentials": {
"username": "CHANGEME",
"password": "CHANGEME",
"public_hostname": "localhost:5432"
}
}
]
}
| Chaffleson/blupy | settings_local.py | Python | gpl-2.0 | 1,195 |
from getdist import loadMCSamples,plots,covmat
import numpy as np
import os,fnmatch
#filenames = fnmatch.filter(os.listdir("../output/chains/"),"mcmc_*.txt")
#for index in range(len(filenames)):
# os.rename("../output/chains/"+str(filenames[index]),"../output/chains/mcmc_final_output_"+str(index+1)+".txt")
number_of_parameters = 10
samples = loadMCSamples('../output/chains/NC-run4/mcmc_final_output',settings={'ignore_rows':0.})
p = samples.getParams()
samples.addDerived(np.log(1.e1**10*p.A_s),name='ln1010As',label='\ln 10^{10}A_s')
samples.addDerived(np.log10(p.cs2_fld),name='logcs2fld',label='\log c_s^2')
bestfit = samples.getLikeStats()
means = samples.setMeans()
filebestfit = open("../output/chains/bestfit.txt",'w')
filemeans = open("../output/chains/means.txt",'w')
for index in range(number_of_parameters) :
filebestfit.write(str(bestfit.names[index].bestfit_sample)+"\n")
filemeans.write(str(means[index])+"\n")
filebestfit.close()
filemeans.close()
covariance_matrix = samples.getCov(pars=[0, 1, 2, 10, 4, 5, 6, 11, 8, 9])  # nparam=number_of_parameters
covariance_matrix_2 = covmat.CovMat(matrix=covariance_matrix)
covariance_matrix_2.saveToFile('../output/chains/covariance_matrix.txt')
print 'COVARIANCE MATRIX CREATED'
exit()
| wilmarcardonac/fisher-mcmc | analyzer/compute_cov.py | Python | gpl-2.0 | 1,275 |
import os
from core import aleinst
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
class Formula():
def __init__(self, request):
self.request = request
def search(self):
package = aleinst.Aleinst(request=self.request[0:])
package.search()
def main(self):
        self.search()
| darker0n/ale | core/Formula/install.py | Python | gpl-2.0 | 326 |
# -*- coding: utf-8 -*-
#
# papyon - a python client library for Msn
#
# Copyright (C) 2009 Collabora Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
class MediaRelay(object):
def __init__(self):
self.username = None
self.password = None
self.ip = ""
self.port = 0
def __repr__(self):
return "<Media Relay: %s %i username=\"%s\" password=\"%s\">" % (self.ip,
self.port, self.username, self.password)
| Kjir/papyon | papyon/media/relay.py | Python | gpl-2.0 | 1,127 |
#!/usr/bin/env python
# Copyright (C) 2007--2016 the X-ray Polarimetry Explorer (XPE) team.
#
# For the license terms see the file LICENSE, distributed along with this
# software.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import struct
import numpy
import os
import logging as logger
# python2/3 compatibility fix
try:
xrange
except NameError:
xrange = range
# color codes for screen printout; works only on POSIX
class ixpeAnsiColors:
HEADER = '\033[95m'
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
#
# Useful constants
#
XPOL_NUM_COLUMNS = 300
XPOL_NUM_ROWS = 352
XPOL_NUM_PIXELS = XPOL_NUM_COLUMNS*XPOL_NUM_ROWS
#
# Class for a windowed event
#
class ixpeEventWindowed:
"""Basic class representing an event aquired in windowed mode.
"""
HEADER_MARKER = 65535
HEADER_LENGTH = 20
def __init__(self, xmin, xmax, ymin, ymax, buffer_id, t1, t2, s1, s2,
adc_values):
"""Constructor.
"""
self.xmin = xmin
self.xmax = xmax
self.ymin = ymin
self.ymax = ymax
self.buffer_id = buffer_id
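        # event time in microseconds from the two 16-bit counter words
        # (0.8 us per clock tick); the spare words s1 and s2 are not stored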
self.microseconds = (t1 + t2*65534)*0.8
self.adc_values = adc_values
def size(self):
"""Return the total number of bytes in the event.
"""
return self.HEADER_LENGTH + 2*self.num_pixels()
def num_columns(self):
"""Return the number of columns.
"""
return (self.xmax - self.xmin + 1)
def num_rows(self):
"""Return the number of rows.
"""
return (self.ymax - self.ymin + 1)
def num_pixels(self):
"""Return the total number of pixels in the window.
"""
return self.num_rows()*self.num_columns()
def adc_value(self, col, row):
"""Return the pulse height for a given pixel in the window.
"""
return self.adc_values[col, row]
def highest_pixel(self):
"""Return the coordinats of the pixel with the maximum value of
ADC counts.
"""
return numpy.unravel_index(numpy.argmax(self.adc_values),
self.adc_values.shape)
def highest_adc_value(self):
"""Return the maximum value of ADC counts for the pixels in the event.
"""
return self.adc_values.max()
def ascii(self, zero_suppression=5, max_threshold=0.75, width=4,
color=True):
"""Return a pretty-printed ASCII representation of the event.
"""
if os.name != 'posix':
color = False
_fmt = '%%%dd' % width
_max = self.highest_adc_value()
text = ''
text += ' '*(2*width + 2)
for col in xrange(self.num_columns()):
text += _fmt % (col + self.xmin)
text += '\n'
text += ' '*(2*width + 2)
for col in xrange(self.num_columns()):
text += _fmt % col
text += '\n'
text += ' '*(2*width + 1) + '+' + '-'*(width*self.num_columns()) + '\n'
for row in xrange(self.num_rows()):
text += (_fmt % (row + self.ymin)) + ' ' + (_fmt % row) + '|'
for col in xrange(self.num_columns()):
adc = self.adc_value(col, row)
pix = _fmt % adc
if color and adc == _max:
pix = '%s%s%s' %\
(ixpeAnsiColors.RED, pix, ixpeAnsiColors.ENDC)
elif color and adc >= max_threshold*_max:
pix = '%s%s%s' %\
(ixpeAnsiColors.YELLOW, pix, ixpeAnsiColors.ENDC)
elif color and adc > zero_suppression:
pix = '%s%s%s' %\
(ixpeAnsiColors.GREEN, pix, ixpeAnsiColors.ENDC)
text += pix
text += '\n%s|\n' % (' '*(2*width + 1))
return text
def draw_ascii(self, zero_suppression=5):
"""Print the ASCII representation of the event.
"""
print(self.ascii(zero_suppression))
def __str__(self):
"""String representation.
"""
text = 'buffer %5d, w(%3d, %3d)--(%3d, %3d), %d px, t = %d us' %\
(self.buffer_id, self.xmin, self.ymin, self.xmax, self.ymax,
self.num_pixels(), self.microseconds)
return text
#
# Class for a windowed file
#
class ixpeBinaryFileWindowed:
"""Binary file acquired in windowed mode.
"""
def __init__(self, filePath):
"""Constructor.
"""
logger.info('Opening input binary file %s...' % filePath)
self.__file = open(filePath, 'rb')
def seek(self, offset):
""" redefine seek
"""
self.__file.seek(offset)
def read(self, n):
""" redefine read
"""
return self.__file.read(n)
    def close(self):
        """ redefine close
        """
        self.__file.close()
    def tell(self):
        """ redefine tell (next() uses it when reporting header synch issues)
        """
        return self.__file.tell()
def read_word(self):
"""Read and byte-swap a single 2-bytes binary word from file.
Note that struct.unpack returns a tuple even when we read a single
number, and here we're returning the first (and only) element of the
tuple.
"""
return struct.unpack('H', self.read(2))[0]
def read_words(self, num_words):
"""Read and byte-swap a fixed number of 2-bytes binary words from file.
Args
----
num_words : int
The number of words to be read from the input file.
"""
return struct.unpack('%dH' % num_words, self.read(2*num_words))
def read_adc_word(self):
"""Read and byte-swap a single 2-bytes binary word from file.
        Same as read_word, but the ADC values are now signed.
"""
return struct.unpack('h', self.read(2))[0]
def read_adc_words(self, num_words):
"""Read and byte-swap a fixed number of 2-bytes binary words from file.
Same as read_words, but adc values are now signed.
Args
----
num_words : int
The number of words to be read from the input file.
"""
return struct.unpack('%dh' % num_words, self.read(2*num_words))
def __iter__(self):
"""Basic iterator implementation.
"""
return self
def next(self):
"""Read the next event in the file.
"""
try:
header = self.read_word()
except Exception:
raise StopIteration()
if header != ixpeEventWindowed.HEADER_MARKER:
msg = 'Event header mismatch at byte %d' % self.tell()
msg += ' (expected %s, got %s).' %\
(hex(ixpeEventWindowed.HEADER_MARKER), hex(header))
logger.error(msg)
logger.info('Moving ahead to the next event header...')
while header != ixpeEventWindowed.HEADER_MARKER:
header = self.read_word()
logger.info('Got back in synch at byte %d.' % self.tell())
xmin, xmax, ymin, ymax, buf_id, t1, t2, s1, s2 = self.read_words(9)
num_columns = (xmax - xmin + 1)
num_rows = (ymax - ymin + 1)
data = self.read_adc_words(num_rows*num_columns)
adc = numpy.array(data).reshape((num_rows, num_columns)).T
        return ixpeEventWindowed(xmin, xmax, ymin, ymax, buf_id, t1, t2, s1, s2,
                                 adc)
    # alias so that iteration also works under Python 3
    __next__ = next
if __name__ == '__main__':
import argparse
formatter = argparse.ArgumentDefaultsHelpFormatter
parser = argparse.ArgumentParser(formatter_class=formatter)
parser.add_argument('infile', type=str,
help='the input binary file')
parser.add_argument('-n', '--num_events', type=int, default=10,
help = 'number of events to be processed')
args = parser.parse_args()
#test_windowed
input_file = ixpeBinaryFileWindowed(args.infile)
for i in xrange(args.num_events):
event = input_file.next()
print (event)
event.draw_ascii()
    # pause between events: raw_input() on Python 2, input() on Python 3
    try:
        raw_input("e2g")
    except NameError:
        input("e2g")
| lucabaldini/xpedaq | scripts/ixpe_evt_lib.py | Python | gpl-2.0 | 7,540 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010-2016 Red Hat, Inc.
#
# Authors:
# Thomas Woerner <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os.path
import copy
from firewall.core.base import SHORTCUTS, DEFAULT_ZONE_TARGET
from firewall.core.prog import runProg
from firewall.core.logger import log
from firewall.functions import tempFile, readfile, splitArgs, check_mac, portStr, \
check_single_address
from firewall import config
from firewall.errors import FirewallError, INVALID_PASSTHROUGH, INVALID_RULE, UNKNOWN_ERROR
from firewall.core.rich import Rich_Accept, Rich_Reject, Rich_Drop, Rich_Mark, \
Rich_Masquerade, Rich_ForwardPort, Rich_IcmpBlock
import string
BUILT_IN_CHAINS = {
"security": [ "INPUT", "OUTPUT", "FORWARD" ],
"raw": [ "PREROUTING", "OUTPUT" ],
"mangle": [ "PREROUTING", "POSTROUTING", "INPUT", "OUTPUT", "FORWARD" ],
"nat": [ "PREROUTING", "POSTROUTING", "OUTPUT" ],
"filter": [ "INPUT", "OUTPUT", "FORWARD" ],
}
DEFAULT_REJECT_TYPE = {
"ipv4": "icmp-host-prohibited",
"ipv6": "icmp6-adm-prohibited",
}
ICMP = {
"ipv4": "icmp",
"ipv6": "ipv6-icmp",
}
# the ebtables backend also uses this
#
def common_reverse_rule(args):
""" Inverse valid rule """
replace_args = {
# Append
"-A": "-D",
"--append": "--delete",
# Insert
"-I": "-D",
"--insert": "--delete",
# New chain
"-N": "-X",
"--new-chain": "--delete-chain",
}
ret_args = args[:]
for arg in replace_args:
try:
idx = ret_args.index(arg)
except Exception:
continue
if arg in [ "-I", "--insert" ]:
# With insert rulenum, then remove it if it is a number
# Opt at position idx, chain at position idx+1, [rulenum] at
# position idx+2
try:
int(ret_args[idx+2])
except Exception:
pass
else:
ret_args.pop(idx+2)
ret_args[idx] = replace_args[arg]
return ret_args
def common_reverse_passthrough(args):
""" Reverse valid passthough rule """
replace_args = {
# Append
"-A": "-D",
"--append": "--delete",
# Insert
"-I": "-D",
"--insert": "--delete",
# New chain
"-N": "-X",
"--new-chain": "--delete-chain",
}
ret_args = args[:]
for x in replace_args:
try:
idx = ret_args.index(x)
except ValueError:
continue
if x in [ "-I", "--insert" ]:
# With insert rulenum, then remove it if it is a number
# Opt at position idx, chain at position idx+1, [rulenum] at
# position idx+2
try:
int(ret_args[idx+2])
except ValueError:
pass
else:
ret_args.pop(idx+2)
ret_args[idx] = replace_args[x]
return ret_args
raise FirewallError(INVALID_PASSTHROUGH,
"no '-A', '-I' or '-N' arg")
# the ebtables backend also uses this
#
def common_check_passthrough(args):
""" Check if passthough rule is valid (only add, insert and new chain
rules are allowed) """
args = set(args)
not_allowed = set(["-C", "--check", # check rule
"-D", "--delete", # delete rule
"-R", "--replace", # replace rule
"-L", "--list", # list rule
"-S", "--list-rules", # print rules
"-F", "--flush", # flush rules
"-Z", "--zero", # zero rules
"-X", "--delete-chain", # delete chain
"-P", "--policy", # policy
"-E", "--rename-chain"]) # rename chain)
# intersection of args and not_allowed is not empty, i.e.
# something from args is not allowed
if len(args & not_allowed) > 0:
raise FirewallError(INVALID_PASSTHROUGH,
"arg '%s' is not allowed" %
list(args & not_allowed)[0])
# args need to contain one of -A, -I, -N
needed = set(["-A", "--append",
"-I", "--insert",
"-N", "--new-chain"])
# empty intersection of args and needed, i.e.
# none from args contains any needed command
if len(args & needed) == 0:
raise FirewallError(INVALID_PASSTHROUGH,
"no '-A', '-I' or '-N' arg")
class ip4tables(object):
ipv = "ipv4"
name = "ip4tables"
zones_supported = True
def __init__(self, fw):
self._fw = fw
self._command = config.COMMANDS[self.ipv]
self._restore_command = config.COMMANDS["%s-restore" % self.ipv]
self.wait_option = self._detect_wait_option()
self.restore_wait_option = self._detect_restore_wait_option()
self.fill_exists()
self.available_tables = []
self.rich_rule_priority_counts = {}
self.our_chains = {} # chains created by firewalld
def fill_exists(self):
self.command_exists = os.path.exists(self._command)
self.restore_command_exists = os.path.exists(self._restore_command)
def __run(self, args):
# convert to string list
if self.wait_option and self.wait_option not in args:
_args = [self.wait_option] + ["%s" % item for item in args]
else:
_args = ["%s" % item for item in args]
log.debug2("%s: %s %s", self.__class__, self._command, " ".join(_args))
(status, ret) = runProg(self._command, _args)
if status != 0:
raise ValueError("'%s %s' failed: %s" % (self._command,
" ".join(_args), ret))
return ret
def split_value(self, rules, opts=None):
"""Split values combined with commas for options in opts"""
if opts is None:
return rules
out_rules = [ ]
for rule in rules:
processed = False
for opt in opts:
try:
i = rule.index(opt)
except ValueError:
pass
else:
                    if len(rule) > i+1 and "," in rule[i+1]:
# For all items in the comma separated list in index
# i of the rule, a new rule is created with a single
# item from this list
processed = True
items = rule[i+1].split(",")
for item in items:
_rule = rule[:]
_rule[i+1] = item
out_rules.append(_rule)
if not processed:
out_rules.append(rule)
return out_rules
def _rule_replace(self, rule, pattern, replacement):
try:
i = rule.index(pattern)
except ValueError:
return False
else:
rule[i:i+1] = replacement
return True
def is_chain_builtin(self, ipv, table, chain):
return table in BUILT_IN_CHAINS and \
chain in BUILT_IN_CHAINS[table]
def build_chain_rules(self, add, table, chain):
rule = [ "-t", table ]
if add:
rule.append("-N")
else:
rule.append("-X")
rule.append(chain)
return [rule]
def build_rule(self, add, table, chain, index, args):
rule = [ "-t", table ]
if add:
rule += [ "-I", chain, str(index) ]
else:
rule += [ "-D", chain ]
rule += args
return rule
def reverse_rule(self, args):
return common_reverse_rule(args)
def check_passthrough(self, args):
common_check_passthrough(args)
def reverse_passthrough(self, args):
return common_reverse_passthrough(args)
def passthrough_parse_table_chain(self, args):
table = "filter"
try:
i = args.index("-t")
except ValueError:
pass
else:
            if len(args) > i+1:
                table = args[i+1]
chain = None
for opt in [ "-A", "--append",
"-I", "--insert",
"-N", "--new-chain" ]:
try:
i = args.index(opt)
except ValueError:
pass
else:
                if len(args) > i+1:
                    chain = args[i+1]
return (table, chain)
def _set_rule_replace_rich_rule_priority(self, rule, rich_rule_priority_counts):
"""
Change something like
-t filter -I public_IN %%RICH_RULE_PRIORITY%% 123
or
-t filter -A public_IN %%RICH_RULE_PRIORITY%% 321
into
-t filter -I public_IN 4
or
-t filter -I public_IN
"""
try:
i = rule.index("%%RICH_RULE_PRIORITY%%")
except ValueError:
pass
else:
rule_add = True
insert = False
insert_add_index = -1
rule.pop(i)
priority = rule.pop(i)
if type(priority) != int:
raise FirewallError(INVALID_RULE, "rich rule priority must be followed by a number")
table = "filter"
for opt in [ "-t", "--table" ]:
try:
j = rule.index(opt)
except ValueError:
pass
else:
                    if len(rule) > j+1:
                        table = rule[j+1]
for opt in [ "-A", "--append",
"-I", "--insert",
"-D", "--delete" ]:
try:
insert_add_index = rule.index(opt)
except ValueError:
pass
else:
                    if len(rule) > insert_add_index+1:
                        chain = rule[insert_add_index+1]
if opt in [ "-I", "--insert" ]:
insert = True
if opt in [ "-D", "--delete" ]:
rule_add = False
chain = (table, chain)
# Add the rule to the priority counts. We don't need to store the
# rule, just bump the ref count for the priority value.
if not rule_add:
if chain not in rich_rule_priority_counts or \
priority not in rich_rule_priority_counts[chain] or \
rich_rule_priority_counts[chain][priority] <= 0:
raise FirewallError(UNKNOWN_ERROR, "nonexistent or underflow of rich rule priority count")
rich_rule_priority_counts[chain][priority] -= 1
else:
if chain not in rich_rule_priority_counts:
rich_rule_priority_counts[chain] = {}
if priority not in rich_rule_priority_counts[chain]:
rich_rule_priority_counts[chain][priority] = 0
# calculate index of new rule
index = 1
for p in sorted(rich_rule_priority_counts[chain].keys()):
if p == priority and insert:
break
index += rich_rule_priority_counts[chain][p]
if p == priority:
break
rich_rule_priority_counts[chain][priority] += 1
rule[insert_add_index] = "-I"
rule.insert(insert_add_index+2, "%d" % index)
def set_rules(self, rules, log_denied):
temp_file = tempFile()
table_rules = { }
rich_rule_priority_counts = copy.deepcopy(self.rich_rule_priority_counts)
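        # work on a copy: the counts are committed to self only after
        # iptables-restore has succeeded (see the end of this method)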
for _rule in rules:
rule = _rule[:]
# replace %%REJECT%%
self._rule_replace(rule, "%%REJECT%%", \
["REJECT", "--reject-with", DEFAULT_REJECT_TYPE[self.ipv]])
# replace %%ICMP%%
self._rule_replace(rule, "%%ICMP%%", [ICMP[self.ipv]])
# replace %%LOGTYPE%%
try:
i = rule.index("%%LOGTYPE%%")
except ValueError:
pass
else:
if log_denied == "off":
continue
if log_denied in [ "unicast", "broadcast", "multicast" ]:
rule[i:i+1] = [ "-m", "pkttype", "--pkt-type", log_denied ]
else:
rule.pop(i)
self._set_rule_replace_rich_rule_priority(rule, rich_rule_priority_counts)
table = "filter"
            # get table from rule
for opt in [ "-t", "--table" ]:
try:
i = rule.index(opt)
except ValueError:
pass
else:
                    if len(rule) > i+1:
                        rule.pop(i)
                        table = rule.pop(i)
# we can not use joinArgs here, because it would use "'" instead
# of '"' for the start and end of the string, this breaks
# iptables-restore
for i in range(len(rule)):
for c in string.whitespace:
if c in rule[i] and not (rule[i].startswith('"') and
rule[i].endswith('"')):
rule[i] = '"%s"' % rule[i]
table_rules.setdefault(table, []).append(rule)
for table in table_rules:
rules = table_rules[table]
rules = self.split_value(rules, [ "-s", "--source" ])
rules = self.split_value(rules, [ "-d", "--destination" ])
temp_file.write("*%s\n" % table)
for rule in rules:
temp_file.write(" ".join(rule) + "\n")
temp_file.write("COMMIT\n")
temp_file.close()
stat = os.stat(temp_file.name)
log.debug2("%s: %s %s", self.__class__, self._restore_command,
"%s: %d" % (temp_file.name, stat.st_size))
args = [ ]
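        # build the restore command line: optional wait flag plus -n
        # (--noflush), so only the batched rules are applied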
if self.restore_wait_option:
args.append(self.restore_wait_option)
args.append("-n")
(status, ret) = runProg(self._restore_command, args,
stdin=temp_file.name)
if log.getDebugLogLevel() > 2:
lines = readfile(temp_file.name)
if lines is not None:
i = 1
for line in lines:
log.debug3("%8d: %s" % (i, line), nofmt=1, nl=0)
if not line.endswith("\n"):
log.debug3("", nofmt=1)
i += 1
os.unlink(temp_file.name)
if status != 0:
raise ValueError("'%s %s' failed: %s" % (self._restore_command,
" ".join(args), ret))
self.rich_rule_priority_counts = rich_rule_priority_counts
return ret
def set_rule(self, rule, log_denied):
# replace %%REJECT%%
self._rule_replace(rule, "%%REJECT%%", \
["REJECT", "--reject-with", DEFAULT_REJECT_TYPE[self.ipv]])
# replace %%ICMP%%
self._rule_replace(rule, "%%ICMP%%", [ICMP[self.ipv]])
# replace %%LOGTYPE%%
try:
i = rule.index("%%LOGTYPE%%")
except ValueError:
pass
else:
if log_denied == "off":
return ""
if log_denied in [ "unicast", "broadcast", "multicast" ]:
rule[i:i+1] = [ "-m", "pkttype", "--pkt-type", log_denied ]
else:
rule.pop(i)
rich_rule_priority_counts = copy.deepcopy(self.rich_rule_priority_counts)
self._set_rule_replace_rich_rule_priority(rule, self.rich_rule_priority_counts)
output = self.__run(rule)
self.rich_rule_priority_counts = rich_rule_priority_counts
return output
def get_available_tables(self, table=None):
ret = []
tables = [ table ] if table else BUILT_IN_CHAINS.keys()
for table in tables:
if table in self.available_tables:
ret.append(table)
else:
try:
self.__run(["-t", table, "-L", "-n"])
self.available_tables.append(table)
ret.append(table)
except ValueError:
log.debug1("%s table '%s' does not exist (or not enough permission to check)." % (self.ipv, table))
return ret
def _detect_wait_option(self):
wait_option = ""
ret = runProg(self._command, ["-w", "-L", "-n"]) # since iptables-1.4.20
if ret[0] == 0:
wait_option = "-w" # wait for xtables lock
ret = runProg(self._command, ["-w10", "-L", "-n"]) # since iptables > 1.4.21
if ret[0] == 0:
wait_option = "-w10" # wait max 10 seconds
log.debug2("%s: %s will be using %s option.", self.__class__, self._command, wait_option)
return wait_option
def _detect_restore_wait_option(self):
temp_file = tempFile()
temp_file.write("#foo")
temp_file.close()
wait_option = ""
for test_option in ["-w", "--wait=2"]:
ret = runProg(self._restore_command, [test_option], stdin=temp_file.name)
if ret[0] == 0 and "invalid option" not in ret[1] \
and "unrecognized option" not in ret[1]:
wait_option = test_option
break
log.debug2("%s: %s will be using %s option.", self.__class__, self._restore_command, wait_option)
os.unlink(temp_file.name)
return wait_option
def build_flush_rules(self):
self.rich_rule_priority_counts = {}
rules = []
for table in BUILT_IN_CHAINS.keys():
# Flush firewall rules: -F
# Delete firewall chains: -X
# Set counter to zero: -Z
for flag in [ "-F", "-X", "-Z" ]:
rules.append(["-t", table, flag])
return rules
def build_set_policy_rules(self, policy):
rules = []
for table in BUILT_IN_CHAINS.keys():
if table == "nat":
continue
for chain in BUILT_IN_CHAINS[table]:
rules.append(["-t", table, "-P", chain, policy])
return rules
def supported_icmp_types(self):
"""Return ICMP types that are supported by the iptables/ip6tables command and kernel"""
ret = [ ]
output = ""
try:
output = self.__run(["-p",
"icmp" if self.ipv == "ipv4" else "ipv6-icmp",
"--help"])
except ValueError as ex:
if self.ipv == "ipv4":
log.debug1("iptables error: %s" % ex)
else:
log.debug1("ip6tables error: %s" % ex)
lines = output.splitlines()
in_types = False
for line in lines:
#print(line)
if in_types:
line = line.strip().lower()
splits = line.split()
for split in splits:
if split.startswith("(") and split.endswith(")"):
x = split[1:-1]
else:
x = split
if x not in ret:
ret.append(x)
if self.ipv == "ipv4" and line.startswith("Valid ICMP Types:") or \
self.ipv == "ipv6" and line.startswith("Valid ICMPv6 Types:"):
in_types = True
return ret
def build_default_tables(self):
# nothing to do, they always exist
return []
def build_default_rules(self, log_denied="off"):
default_rules = {}
default_rules["security"] = [ ]
self.our_chains["security"] = set()
for chain in BUILT_IN_CHAINS["security"]:
default_rules["security"].append("-N %s_direct" % chain)
default_rules["security"].append("-A %s -j %s_direct" % (chain, chain))
self.our_chains["security"].add("%s_direct" % chain)
default_rules["raw"] = [ ]
self.our_chains["raw"] = set()
for chain in BUILT_IN_CHAINS["raw"]:
default_rules["raw"].append("-N %s_direct" % chain)
default_rules["raw"].append("-A %s -j %s_direct" % (chain, chain))
self.our_chains["raw"].add("%s_direct" % chain)
if chain == "PREROUTING":
default_rules["raw"].append("-N %s_ZONES_SOURCE" % chain)
default_rules["raw"].append("-N %s_ZONES" % chain)
default_rules["raw"].append("-A %s -j %s_ZONES_SOURCE" % (chain, chain))
default_rules["raw"].append("-A %s -j %s_ZONES" % (chain, chain))
self.our_chains["raw"].update(set(["%s_ZONES_SOURCE" % chain, "%s_ZONES" % chain]))
default_rules["mangle"] = [ ]
self.our_chains["mangle"] = set()
for chain in BUILT_IN_CHAINS["mangle"]:
default_rules["mangle"].append("-N %s_direct" % chain)
default_rules["mangle"].append("-A %s -j %s_direct" % (chain, chain))
self.our_chains["mangle"].add("%s_direct" % chain)
if chain == "PREROUTING":
default_rules["mangle"].append("-N %s_ZONES_SOURCE" % chain)
default_rules["mangle"].append("-N %s_ZONES" % chain)
default_rules["mangle"].append("-A %s -j %s_ZONES_SOURCE" % (chain, chain))
default_rules["mangle"].append("-A %s -j %s_ZONES" % (chain, chain))
self.our_chains["mangle"].update(set(["%s_ZONES_SOURCE" % chain, "%s_ZONES" % chain]))
default_rules["nat"] = [ ]
self.our_chains["nat"] = set()
for chain in BUILT_IN_CHAINS["nat"]:
default_rules["nat"].append("-N %s_direct" % chain)
default_rules["nat"].append("-A %s -j %s_direct" % (chain, chain))
self.our_chains["nat"].add("%s_direct" % chain)
if chain in [ "PREROUTING", "POSTROUTING" ]:
default_rules["nat"].append("-N %s_ZONES_SOURCE" % chain)
default_rules["nat"].append("-N %s_ZONES" % chain)
default_rules["nat"].append("-A %s -j %s_ZONES_SOURCE" % (chain, chain))
default_rules["nat"].append("-A %s -j %s_ZONES" % (chain, chain))
self.our_chains["nat"].update(set(["%s_ZONES_SOURCE" % chain, "%s_ZONES" % chain]))
default_rules["filter"] = [
"-N INPUT_direct",
"-N INPUT_ZONES_SOURCE",
"-N INPUT_ZONES",
"-A INPUT -m conntrack --ctstate RELATED,ESTABLISHED -j ACCEPT",
"-A INPUT -i lo -j ACCEPT",
"-A INPUT -j INPUT_direct",
"-A INPUT -j INPUT_ZONES_SOURCE",
"-A INPUT -j INPUT_ZONES",
]
if log_denied != "off":
default_rules["filter"].append("-A INPUT -m conntrack --ctstate INVALID %%LOGTYPE%% -j LOG --log-prefix 'STATE_INVALID_DROP: '")
default_rules["filter"].append("-A INPUT -m conntrack --ctstate INVALID -j DROP")
if log_denied != "off":
default_rules["filter"].append("-A INPUT %%LOGTYPE%% -j LOG --log-prefix 'FINAL_REJECT: '")
default_rules["filter"].append("-A INPUT -j %%REJECT%%")
default_rules["filter"] += [
"-N FORWARD_direct",
"-N FORWARD_IN_ZONES_SOURCE",
"-N FORWARD_IN_ZONES",
"-N FORWARD_OUT_ZONES_SOURCE",
"-N FORWARD_OUT_ZONES",
"-A FORWARD -m conntrack --ctstate RELATED,ESTABLISHED -j ACCEPT",
"-A FORWARD -i lo -j ACCEPT",
"-A FORWARD -j FORWARD_direct",
"-A FORWARD -j FORWARD_IN_ZONES_SOURCE",
"-A FORWARD -j FORWARD_IN_ZONES",
"-A FORWARD -j FORWARD_OUT_ZONES_SOURCE",
"-A FORWARD -j FORWARD_OUT_ZONES",
]
if log_denied != "off":
default_rules["filter"].append("-A FORWARD -m conntrack --ctstate INVALID %%LOGTYPE%% -j LOG --log-prefix 'STATE_INVALID_DROP: '")
default_rules["filter"].append("-A FORWARD -m conntrack --ctstate INVALID -j DROP")
if log_denied != "off":
default_rules["filter"].append("-A FORWARD %%LOGTYPE%% -j LOG --log-prefix 'FINAL_REJECT: '")
default_rules["filter"].append("-A FORWARD -j %%REJECT%%")
default_rules["filter"] += [
"-N OUTPUT_direct",
"-A OUTPUT -o lo -j ACCEPT",
"-A OUTPUT -j OUTPUT_direct",
]
self.our_chains["filter"] = set(["INPUT_direct", "INPUT_ZONES_SOURCE", "INPUT_ZONES",
"FORWARD_direct", "FORWARD_IN_ZONES_SOURCE",
"FORWARD_IN_ZONES", "FORWARD_OUT_ZONES_SOURCE",
"FORWARD_OUT_ZONES", "OUTPUT_direct"])
final_default_rules = []
for table in default_rules:
if table not in self.get_available_tables():
continue
for rule in default_rules[table]:
final_default_rules.append(["-t", table] + splitArgs(rule))
return final_default_rules
def get_zone_table_chains(self, table):
if table == "filter":
return { "INPUT", "FORWARD_IN", "FORWARD_OUT" }
if table == "mangle":
if "mangle" in self.get_available_tables() and \
"nat" in self.get_available_tables():
return { "PREROUTING" }
if table == "nat":
if "nat" in self.get_available_tables():
return { "PREROUTING", "POSTROUTING" }
if table == "raw":
if "raw" in self.get_available_tables():
return { "PREROUTING" }
return {}
def build_zone_source_interface_rules(self, enable, zone, zone_target,
interface, table, chain,
append=False):
# handle all zones in the same way here, now
# trust and block zone targets are handled now in __chain
opt = {
"PREROUTING": "-i",
"POSTROUTING": "-o",
"INPUT": "-i",
"FORWARD_IN": "-i",
"FORWARD_OUT": "-o",
"OUTPUT": "-o",
}[chain]
target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS[chain], zone=zone)
if zone_target == DEFAULT_ZONE_TARGET:
action = "-g"
else:
action = "-j"
if enable and not append:
rule = [ "-I", "%s_ZONES" % chain, "1" ]
elif enable:
rule = [ "-A", "%s_ZONES" % chain ]
else:
rule = [ "-D", "%s_ZONES" % chain ]
rule += [ "-t", table, opt, interface, action, target ]
return [rule]
def build_zone_source_address_rules(self, enable, zone, zone_target,
address, table, chain):
add_del = { True: "-A", False: "-D" }[enable]
opt = {
"PREROUTING": "-s",
"POSTROUTING": "-d",
"INPUT": "-s",
"FORWARD_IN": "-s",
"FORWARD_OUT": "-d",
"OUTPUT": "-d",
}[chain]
target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS[chain], zone=zone)
if zone_target == DEFAULT_ZONE_TARGET:
action = "-g"
else:
action = "-j"
if address.startswith("ipset:"):
name = address[6:]
if opt == "-d":
opt = "dst"
else:
opt = "src"
flags = ",".join([opt] * self._fw.ipset.get_dimension(name))
rule = [ add_del,
"%s_ZONES_SOURCE" % chain, "-t", table,
"-m", "set", "--match-set", name,
flags, action, target ]
else:
if check_mac(address):
# outgoing can not be set
if opt == "-d":
return ""
rule = [ add_del,
"%s_ZONES_SOURCE" % chain, "-t", table,
"-m", "mac", "--mac-source", address.upper(),
action, target ]
else:
rule = [ add_del,
"%s_ZONES_SOURCE" % chain, "-t", table,
opt, address, action, target ]
return [rule]
def build_zone_chain_rules(self, zone, table, chain):
_zone = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS[chain], zone=zone)
self.our_chains[table].update(set([_zone,
"%s_log" % _zone,
"%s_deny" % _zone,
"%s_pre" % _zone,
"%s_post" % _zone,
"%s_allow" % _zone]))
rules = []
rules.append([ "-N", _zone, "-t", table ])
rules.append([ "-N", "%s_pre" % _zone, "-t", table ])
rules.append([ "-N", "%s_log" % _zone, "-t", table ])
rules.append([ "-N", "%s_deny" % _zone, "-t", table ])
rules.append([ "-N", "%s_allow" % _zone, "-t", table ])
rules.append([ "-N", "%s_post" % _zone, "-t", table ])
rules.append([ "-A", _zone, "-t", table, "-j", "%s_pre" % _zone ])
rules.append([ "-A", _zone, "-t", table, "-j", "%s_log" % _zone ])
rules.append([ "-A", _zone, "-t", table, "-j", "%s_deny" % _zone ])
rules.append([ "-A", _zone, "-t", table, "-j", "%s_allow" % _zone ])
rules.append([ "-A", _zone, "-t", table, "-j", "%s_post" % _zone ])
target = self._fw.zone._zones[zone].target
if self._fw.get_log_denied() != "off":
if table == "filter" and \
chain in [ "INPUT", "FORWARD_IN", "FORWARD_OUT", "OUTPUT" ]:
if target in [ "REJECT", "%%REJECT%%" ]:
rules.append([ "-A", _zone, "-t", table, "%%LOGTYPE%%",
"-j", "LOG", "--log-prefix",
"\"%s_REJECT: \"" % _zone ])
if target == "DROP":
rules.append([ "-A", _zone, "-t", table, "%%LOGTYPE%%",
"-j", "LOG", "--log-prefix",
"\"%s_DROP: \"" % _zone ])
# Handle trust, block and drop zones:
# Add an additional rule with the zone target (accept, reject
# or drop) to the base zone only in the filter table.
# Otherwise it is not be possible to have a zone with drop
# target, that is allowing traffic that is locally initiated
# or that adds additional rules. (RHBZ#1055190)
if table == "filter" and \
target in [ "ACCEPT", "REJECT", "%%REJECT%%", "DROP" ] and \
chain in [ "INPUT", "FORWARD_IN", "FORWARD_OUT", "OUTPUT" ]:
rules.append([ "-A", _zone, "-t", table, "-j", target ])
return rules
def _rule_limit(self, limit):
if limit:
return [ "-m", "limit", "--limit", limit.value ]
return []
def _rich_rule_chain_suffix(self, rich_rule):
if type(rich_rule.element) in [Rich_Masquerade, Rich_ForwardPort, Rich_IcmpBlock]:
# These are special and don't have an explicit action
pass
elif rich_rule.action:
if type(rich_rule.action) not in [Rich_Accept, Rich_Reject, Rich_Drop, Rich_Mark]:
raise FirewallError(INVALID_RULE, "Unknown action %s" % type(rich_rule.action))
else:
raise FirewallError(INVALID_RULE, "No rule action specified.")
if rich_rule.priority == 0:
if type(rich_rule.element) in [Rich_Masquerade, Rich_ForwardPort] or \
type(rich_rule.action) in [Rich_Accept, Rich_Mark]:
return "allow"
elif type(rich_rule.element) in [Rich_IcmpBlock] or \
type(rich_rule.action) in [Rich_Reject, Rich_Drop]:
return "deny"
elif rich_rule.priority < 0:
return "pre"
else:
return "post"
def _rich_rule_chain_suffix_from_log(self, rich_rule):
if not rich_rule.log and not rich_rule.audit:
raise FirewallError(INVALID_RULE, "Not log or audit")
if rich_rule.priority == 0:
return "log"
elif rich_rule.priority < 0:
return "pre"
else:
return "post"
def _rich_rule_priority_fragment(self, rich_rule):
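        # non-zero priorities emit a placeholder that is resolved to a concrete
        # rule index in _set_rule_replace_rich_rule_priority()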
if rich_rule.priority == 0:
return []
return ["%%RICH_RULE_PRIORITY%%", rich_rule.priority]
def _rich_rule_log(self, rich_rule, enable, table, target, rule_fragment):
if not rich_rule.log:
return []
add_del = { True: "-A", False: "-D" }[enable]
chain_suffix = self._rich_rule_chain_suffix_from_log(rich_rule)
rule = ["-t", table, add_del, "%s_%s" % (target, chain_suffix)]
rule += self._rich_rule_priority_fragment(rich_rule)
rule += rule_fragment + [ "-j", "LOG" ]
if rich_rule.log.prefix:
rule += [ "--log-prefix", "'%s'" % rich_rule.log.prefix ]
if rich_rule.log.level:
rule += [ "--log-level", "%s" % rich_rule.log.level ]
rule += self._rule_limit(rich_rule.log.limit)
return rule
def _rich_rule_audit(self, rich_rule, enable, table, target, rule_fragment):
if not rich_rule.audit:
return []
add_del = { True: "-A", False: "-D" }[enable]
chain_suffix = self._rich_rule_chain_suffix_from_log(rich_rule)
rule = ["-t", table, add_del, "%s_%s" % (target, chain_suffix)]
rule += self._rich_rule_priority_fragment(rich_rule)
rule += rule_fragment
if type(rich_rule.action) == Rich_Accept:
_type = "accept"
elif type(rich_rule.action) == Rich_Reject:
_type = "reject"
elif type(rich_rule.action) == Rich_Drop:
_type = "drop"
else:
_type = "unknown"
rule += [ "-j", "AUDIT", "--type", _type ]
rule += self._rule_limit(rich_rule.audit.limit)
return rule
def _rich_rule_action(self, zone, rich_rule, enable, table, target, rule_fragment):
if not rich_rule.action:
return []
add_del = { True: "-A", False: "-D" }[enable]
chain_suffix = self._rich_rule_chain_suffix(rich_rule)
chain = "%s_%s" % (target, chain_suffix)
if type(rich_rule.action) == Rich_Accept:
rule_action = [ "-j", "ACCEPT" ]
elif type(rich_rule.action) == Rich_Reject:
rule_action = [ "-j", "REJECT" ]
if rich_rule.action.type:
rule_action += [ "--reject-with", rich_rule.action.type ]
elif type(rich_rule.action) == Rich_Drop:
rule_action = [ "-j", "DROP" ]
elif type(rich_rule.action) == Rich_Mark:
target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["PREROUTING"],
zone=zone)
table = "mangle"
chain = "%s_%s" % (target, chain_suffix)
rule_action = [ "-j", "MARK", "--set-xmark", rich_rule.action.set ]
else:
raise FirewallError(INVALID_RULE,
"Unknown action %s" % type(rich_rule.action))
rule = ["-t", table, add_del, chain]
rule += self._rich_rule_priority_fragment(rich_rule)
rule += rule_fragment + rule_action
rule += self._rule_limit(rich_rule.action.limit)
return rule
def _rich_rule_destination_fragment(self, rich_dest):
if not rich_dest:
return []
rule_fragment = []
if rich_dest.invert:
rule_fragment.append("!")
rule_fragment += [ "-d", rich_dest.addr ]
return rule_fragment
def _rich_rule_source_fragment(self, rich_source):
if not rich_source:
return []
rule_fragment = []
if rich_source.addr:
if rich_source.invert:
rule_fragment.append("!")
rule_fragment += [ "-s", rich_source.addr ]
elif hasattr(rich_source, "mac") and rich_source.mac:
rule_fragment += [ "-m", "mac" ]
if rich_source.invert:
rule_fragment.append("!")
rule_fragment += [ "--mac-source", rich_source.mac ]
elif hasattr(rich_source, "ipset") and rich_source.ipset:
rule_fragment += [ "-m", "set" ]
if rich_source.invert:
rule_fragment.append("!")
flags = self._fw.zone._ipset_match_flags(rich_source.ipset, "src")
rule_fragment += [ "--match-set", rich_source.ipset, flags ]
return rule_fragment
def build_zone_ports_rules(self, enable, zone, proto, port, destination=None, rich_rule=None):
add_del = { True: "-A", False: "-D" }[enable]
table = "filter"
target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["INPUT"],
zone=zone)
rule_fragment = [ "-p", proto ]
if port:
rule_fragment += [ "--dport", "%s" % portStr(port) ]
if destination:
rule_fragment += [ "-d", destination ]
if rich_rule:
rule_fragment += self._rich_rule_destination_fragment(rich_rule.destination)
rule_fragment += self._rich_rule_source_fragment(rich_rule.source)
        if not rich_rule or type(rich_rule.action) != Rich_Mark:
rule_fragment += [ "-m", "conntrack", "--ctstate", "NEW,UNTRACKED" ]
rules = []
if rich_rule:
rules.append(self._rich_rule_log(rich_rule, enable, table, target, rule_fragment))
rules.append(self._rich_rule_audit(rich_rule, enable, table, target, rule_fragment))
rules.append(self._rich_rule_action(zone, rich_rule, enable, table, target, rule_fragment))
else:
rules.append([add_del, "%s_allow" % (target), "-t", table] +
rule_fragment + [ "-j", "ACCEPT" ])
return rules
def build_zone_protocol_rules(self, enable, zone, protocol, destination=None, rich_rule=None):
add_del = { True: "-A", False: "-D" }[enable]
table = "filter"
target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["INPUT"], zone=zone)
rule_fragment = [ "-p", protocol ]
if destination:
rule_fragment += [ "-d", destination ]
if rich_rule:
rule_fragment += self._rich_rule_destination_fragment(rich_rule.destination)
rule_fragment += self._rich_rule_source_fragment(rich_rule.source)
        if not rich_rule or type(rich_rule.action) != Rich_Mark:
rule_fragment += [ "-m", "conntrack", "--ctstate", "NEW,UNTRACKED" ]
rules = []
if rich_rule:
rules.append(self._rich_rule_log(rich_rule, enable, table, target, rule_fragment))
rules.append(self._rich_rule_audit(rich_rule, enable, table, target, rule_fragment))
rules.append(self._rich_rule_action(zone, rich_rule, enable, table, target, rule_fragment))
else:
rules.append([add_del, "%s_allow" % (target), "-t", table] +
rule_fragment + [ "-j", "ACCEPT" ])
return rules
def build_zone_source_ports_rules(self, enable, zone, proto, port,
destination=None, rich_rule=None):
add_del = { True: "-A", False: "-D" }[enable]
table = "filter"
target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["INPUT"], zone=zone)
rule_fragment = [ "-p", proto ]
if port:
rule_fragment += [ "--sport", "%s" % portStr(port) ]
if destination:
rule_fragment += [ "-d", destination ]
if rich_rule:
rule_fragment += self._rich_rule_destination_fragment(rich_rule.destination)
rule_fragment += self._rich_rule_source_fragment(rich_rule.source)
        if not rich_rule or type(rich_rule.action) != Rich_Mark:
rule_fragment += [ "-m", "conntrack", "--ctstate", "NEW,UNTRACKED" ]
rules = []
if rich_rule:
rules.append(self._rich_rule_log(rich_rule, enable, table, target, rule_fragment))
rules.append(self._rich_rule_audit(rich_rule, enable, table, target, rule_fragment))
rules.append(self._rich_rule_action(zone, rich_rule, enable, table, target, rule_fragment))
else:
rules.append([add_del, "%s_allow" % (target), "-t", table] +
rule_fragment + [ "-j", "ACCEPT" ])
return rules
def build_zone_helper_ports_rules(self, enable, zone, proto, port,
destination, helper_name):
add_del = { True: "-A", False: "-D" }[enable]
target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["PREROUTING"],
zone=zone)
rule = [ add_del, "%s_allow" % (target), "-t", "raw", "-p", proto ]
if port:
rule += [ "--dport", "%s" % portStr(port) ]
if destination:
rule += [ "-d", destination ]
rule += [ "-j", "CT", "--helper", helper_name ]
return [rule]
def build_zone_masquerade_rules(self, enable, zone, rich_rule=None):
add_del = { True: "-A", False: "-D" }[enable]
target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["POSTROUTING"],
zone=zone)
rule_fragment = []
if rich_rule:
chain_suffix = self._rich_rule_chain_suffix(rich_rule)
rule_fragment += self._rich_rule_priority_fragment(rich_rule)
rule_fragment += self._rich_rule_destination_fragment(rich_rule.destination)
rule_fragment += self._rich_rule_source_fragment(rich_rule.source)
else:
chain_suffix = "allow"
rules = []
rules.append(["-t", "nat", add_del, "%s_%s" % (target, chain_suffix)]
+ rule_fragment +
[ "!", "-o", "lo", "-j", "MASQUERADE" ])
# FORWARD_OUT
target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["FORWARD_OUT"],
zone=zone)
rule_fragment = []
if rich_rule:
chain_suffix = self._rich_rule_chain_suffix(rich_rule)
rule_fragment += self._rich_rule_priority_fragment(rich_rule)
rule_fragment += self._rich_rule_destination_fragment(rich_rule.destination)
rule_fragment += self._rich_rule_source_fragment(rich_rule.source)
else:
chain_suffix = "allow"
rules.append(["-t", "filter", add_del, "%s_%s" % (target, chain_suffix)]
+ rule_fragment +
["-m", "conntrack", "--ctstate", "NEW,UNTRACKED", "-j", "ACCEPT" ])
return rules
def build_zone_forward_port_rules(self, enable, zone, filter_chain, port,
protocol, toport, toaddr, mark_id, rich_rule=None):
add_del = { True: "-A", False: "-D" }[enable]
mark_str = "0x%x" % mark_id
mark = [ "-m", "mark", "--mark", mark_str ]
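        # forward-port pipeline: mark the packet in mangle/PREROUTING, DNAT on
        # that mark in nat/PREROUTING, then accept the marked flow in filter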
to = ""
if toaddr:
if check_single_address("ipv6", toaddr):
to += "[%s]" % toaddr
else:
to += toaddr
if toport and toport != "":
to += ":%s" % portStr(toport, "-")
target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["PREROUTING"],
zone=zone)
rule_fragment = [ "-p", protocol, "--dport", portStr(port) ]
rich_rule_priority_fragment = []
if rich_rule:
chain_suffix = self._rich_rule_chain_suffix(rich_rule)
rich_rule_priority_fragment = self._rich_rule_priority_fragment(rich_rule)
rule_fragment += self._rich_rule_destination_fragment(rich_rule.destination)
rule_fragment += self._rich_rule_source_fragment(rich_rule.source)
else:
chain_suffix = "allow"
rules = []
if rich_rule:
rules.append(self._rich_rule_log(rich_rule, enable, "mangle", target, rule_fragment))
rules.append(["-t", "mangle", add_del, "%s_%s" % (target, chain_suffix)]
+ rich_rule_priority_fragment + rule_fragment +
[ "-j", "MARK", "--set-mark", mark_str ])
# local and remote
rules.append(["-t", "nat", add_del, "%s_%s" % (target, chain_suffix)]
+ rich_rule_priority_fragment +
["-p", protocol ] + mark +
[ "-j", "DNAT", "--to-destination", to ])
target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS[filter_chain],
zone=zone)
rules.append(["-t", "filter", add_del, "%s_%s" % (target, chain_suffix)]
+ rich_rule_priority_fragment +
["-m", "conntrack", "--ctstate", "NEW,UNTRACKED" ]
+ mark +
[ "-j", "ACCEPT" ])
return rules
def build_zone_icmp_block_rules(self, enable, zone, ict, rich_rule=None):
table = "filter"
add_del = { True: "-A", False: "-D" }[enable]
if self.ipv == "ipv4":
proto = [ "-p", "icmp" ]
match = [ "-m", "icmp", "--icmp-type", ict.name ]
else:
proto = [ "-p", "ipv6-icmp" ]
match = [ "-m", "icmp6", "--icmpv6-type", ict.name ]
rules = []
for chain in ["INPUT", "FORWARD_IN"]:
target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS[chain],
zone=zone)
if self._fw.zone.query_icmp_block_inversion(zone):
final_chain = "%s_allow" % target
final_target = "ACCEPT"
else:
final_chain = "%s_deny" % target
final_target = "%%REJECT%%"
rule_fragment = []
if rich_rule:
rule_fragment += self._rich_rule_destination_fragment(rich_rule.destination)
rule_fragment += self._rich_rule_source_fragment(rich_rule.source)
rule_fragment += proto + match
if rich_rule:
rules.append(self._rich_rule_log(rich_rule, enable, table, target, rule_fragment))
rules.append(self._rich_rule_audit(rich_rule, enable, table, target, rule_fragment))
if rich_rule.action:
rules.append(self._rich_rule_action(zone, rich_rule, enable, table, target, rule_fragment))
else:
chain_suffix = self._rich_rule_chain_suffix(rich_rule)
rules.append(["-t", table, add_del, "%s_%s" % (target, chain_suffix)]
+ self._rich_rule_priority_fragment(rich_rule)
+ rule_fragment +
[ "-j", "%%REJECT%%" ])
else:
if self._fw.get_log_denied() != "off" and final_target != "ACCEPT":
rules.append([ add_del, final_chain, "-t", table ]
+ rule_fragment +
[ "%%LOGTYPE%%", "-j", "LOG",
"--log-prefix", "\"%s_ICMP_BLOCK: \"" % zone ])
rules.append([ add_del, final_chain, "-t", table ]
+ rule_fragment +
[ "-j", final_target ])
return rules
def build_zone_icmp_block_inversion_rules(self, enable, zone):
table = "filter"
rules = []
for chain in [ "INPUT", "FORWARD_IN" ]:
rule_idx = 6
_zone = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS[chain],
zone=zone)
if self._fw.zone.query_icmp_block_inversion(zone):
ibi_target = "%%REJECT%%"
if self._fw.get_log_denied() != "off":
if enable:
rule = [ "-I", _zone, str(rule_idx) ]
else:
rule = [ "-D", _zone ]
rule = rule + [ "-t", table, "-p", "%%ICMP%%",
"%%LOGTYPE%%",
"-j", "LOG", "--log-prefix",
"\"%s_ICMP_BLOCK: \"" % _zone ]
rules.append(rule)
rule_idx += 1
else:
ibi_target = "ACCEPT"
if enable:
rule = [ "-I", _zone, str(rule_idx) ]
else:
rule = [ "-D", _zone ]
rule = rule + [ "-t", table, "-p", "%%ICMP%%", "-j", ibi_target ]
rules.append(rule)
return rules
def build_zone_rich_source_destination_rules(self, enable, zone, rich_rule):
table = "filter"
target = DEFAULT_ZONE_TARGET.format(chain=SHORTCUTS["INPUT"],
zone=zone)
rule_fragment = []
rule_fragment += self._rich_rule_destination_fragment(rich_rule.destination)
rule_fragment += self._rich_rule_source_fragment(rich_rule.source)
rules = []
rules.append(self._rich_rule_log(rich_rule, enable, table, target, rule_fragment))
rules.append(self._rich_rule_audit(rich_rule, enable, table, target, rule_fragment))
rules.append(self._rich_rule_action(zone, rich_rule, enable, table, target, rule_fragment))
return rules
def is_ipv_supported(self, ipv):
return ipv == self.ipv
class ip6tables(ip4tables):
ipv = "ipv6"
name = "ip6tables"
    def build_rpfilter_rules(self, log_denied="off"):
rules = []
rules.append([ "-I", "PREROUTING", "-t", "raw",
"-m", "rpfilter", "--invert", "-j", "DROP" ])
if log_denied != "off":
rules.append([ "-I", "PREROUTING", "-t", "raw",
"-m", "rpfilter", "--invert",
"-j", "LOG",
"--log-prefix", "rpfilter_DROP: " ])
rules.append([ "-I", "PREROUTING", "-t", "raw",
"-p", "ipv6-icmp",
"--icmpv6-type=neighbour-solicitation",
"-j", "ACCEPT" ]) # RHBZ#1575431, kernel bug in 4.16-4.17
rules.append([ "-I", "PREROUTING", "-t", "raw",
"-p", "ipv6-icmp",
"--icmpv6-type=router-advertisement",
"-j", "ACCEPT" ]) # RHBZ#1058505
return rules
def build_rfc3964_ipv4_rules(self):
daddr_list = [
"::0.0.0.0/96", # IPv4 compatible
"::ffff:0.0.0.0/96", # IPv4 mapped
"2002:0000::/24", # 0.0.0.0/8 (the system has no address assigned yet)
"2002:0a00::/24", # 10.0.0.0/8 (private)
"2002:7f00::/24", # 127.0.0.0/8 (loopback)
"2002:ac10::/28", # 172.16.0.0/12 (private)
"2002:c0a8::/32", # 192.168.0.0/16 (private)
"2002:a9fe::/32", # 169.254.0.0/16 (IANA Assigned DHCP link-local)
"2002:e000::/19", # 224.0.0.0/4 (multicast), 240.0.0.0/4 (reserved and broadcast)
]
chain_name = "RFC3964_IPv4"
self.our_chains["filter"].add(chain_name)
rules = []
rules.append(["-t", "filter", "-N", chain_name])
for daddr in daddr_list:
rules.append(["-t", "filter", "-I", chain_name,
"-d", daddr, "-j", "REJECT", "--reject-with",
"addr-unreach"])
if self._fw._log_denied in ["unicast", "all"]:
rules.append(["-t", "filter", "-I", chain_name,
"-d", daddr, "-j", "LOG",
"--log-prefix", "\"RFC3964_IPv4_REJECT: \""])
# Inject into FORWARD and OUTPUT chains
rules.append(["-t", "filter", "-I", "OUTPUT", "3",
"-j", chain_name])
rules.append(["-t", "filter", "-I", "FORWARD", "4",
"-j", chain_name])
return rules
| hos7ein/firewalld | src/firewall/core/ipXtables.py | Python | gpl-2.0 | 53,449 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2009, 2013 Zuza Software Foundation
#
# This file is part of Pootle.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import json
import time
from translate.storage import factory, statsdb
from pootle.tests import PootleTestCase
from pootle_store.models import Store, Unit
class UnitTests(PootleTestCase):
def setUp(self):
super(UnitTests, self).setUp()
self.store = Store.objects.get(pootle_path="/af/tutorial/pootle.po")
def _update_translation(self, item, newvalues):
unit = self.store.getitem(item)
time.sleep(1)
if 'target' in newvalues:
unit.target = newvalues['target']
if 'fuzzy' in newvalues:
unit.markfuzzy(newvalues['fuzzy'])
if 'translator_comment' in newvalues:
unit.translator_comment = newvalues['translator_comment']
unit.save()
self.store.sync(update_translation=True)
return self.store.getitem(item)
def test_getorig(self):
for dbunit in self.store.units.iterator():
storeunit = dbunit.getorig()
self.assertEqual(dbunit.getid(), storeunit.getid())
def test_convert(self):
for dbunit in self.store.units.iterator():
if dbunit.hasplural() and not dbunit.istranslated():
# skip untranslated plural units, they will always look different
continue
storeunit = dbunit.getorig()
newunit = dbunit.convert(self.store.file.store.UnitClass)
self.assertEqual(str(newunit), str(storeunit))
def test_update_target(self):
dbunit = self._update_translation(0, {'target': u'samaka'})
storeunit = dbunit.getorig()
self.assertEqual(dbunit.target, u'samaka')
self.assertEqual(dbunit.target, storeunit.target)
pofile = factory.getobject(self.store.file.path)
self.assertEqual(dbunit.target, pofile.units[dbunit.index].target)
def test_empty_plural_target(self):
"""test we don't delete empty plural targets"""
dbunit = self._update_translation(2, {'target': [u'samaka']})
storeunit = dbunit.getorig()
self.assertEqual(len(storeunit.target.strings), 2)
dbunit = self._update_translation(2, {'target': u''})
self.assertEqual(len(storeunit.target.strings), 2)
def test_update_plural_target(self):
dbunit = self._update_translation(2, {'target': [u'samaka', u'samak']})
storeunit = dbunit.getorig()
self.assertEqual(dbunit.target.strings, [u'samaka', u'samak'])
self.assertEqual(dbunit.target.strings, storeunit.target.strings)
pofile = factory.getobject(self.store.file.path)
self.assertEqual(dbunit.target.strings, pofile.units[dbunit.index].target.strings)
self.assertEqual(dbunit.target, u'samaka')
self.assertEqual(dbunit.target, storeunit.target)
self.assertEqual(dbunit.target, pofile.units[dbunit.index].target)
def test_update_plural_target_dict(self):
dbunit = self._update_translation(2, {'target': {0: u'samaka', 1: u'samak'}})
storeunit = dbunit.getorig()
self.assertEqual(dbunit.target.strings, [u'samaka', u'samak'])
self.assertEqual(dbunit.target.strings, storeunit.target.strings)
pofile = factory.getobject(self.store.file.path)
self.assertEqual(dbunit.target.strings, pofile.units[dbunit.index].target.strings)
self.assertEqual(dbunit.target, u'samaka')
self.assertEqual(dbunit.target, storeunit.target)
self.assertEqual(dbunit.target, pofile.units[dbunit.index].target)
def test_update_fuzzy(self):
dbunit = self._update_translation(0, {'target': u'samaka', 'fuzzy': True})
storeunit = dbunit.getorig()
self.assertTrue(dbunit.isfuzzy())
self.assertEqual(dbunit.isfuzzy(), storeunit.isfuzzy())
pofile = factory.getobject(self.store.file.path)
self.assertEqual(dbunit.isfuzzy(), pofile.units[dbunit.index].isfuzzy())
time.sleep(1)
dbunit = self._update_translation(0, {'fuzzy': False})
storeunit = dbunit.getorig()
self.assertFalse(dbunit.isfuzzy())
self.assertEqual(dbunit.isfuzzy(), storeunit.isfuzzy())
pofile = factory.getobject(self.store.file.path)
self.assertEqual(dbunit.isfuzzy(), pofile.units[dbunit.index].isfuzzy())
def test_update_comment(self):
dbunit = self._update_translation(0, {'translator_comment': u'7amada'})
storeunit = dbunit.getorig()
self.assertEqual(dbunit.getnotes(origin="translator"), u'7amada')
self.assertEqual(dbunit.getnotes(origin="translator"), storeunit.getnotes(origin="translator"))
pofile = factory.getobject(self.store.file.path)
self.assertEqual(dbunit.getnotes(origin="translator"), pofile.units[dbunit.index].getnotes(origin="translator"))
class SuggestionTests(PootleTestCase):
def setUp(self):
super(SuggestionTests, self).setUp()
self.store = Store.objects.get(pootle_path="/af/tutorial/pootle.po")
def test_hash(self):
unit = self.store.getitem(0)
suggestion = unit.add_suggestion("gras")
first_hash = suggestion.target_hash
suggestion.translator_comment = "my nice comment"
second_hash = suggestion.target_hash
assert first_hash != second_hash
suggestion.target = "gras++"
assert first_hash != second_hash != suggestion.target_hash
class StoreTests(PootleTestCase):
def setUp(self):
super(StoreTests, self).setUp()
self.store = Store.objects.get(pootle_path="/af/tutorial/pootle.po")
def test_quickstats(self):
statscache = statsdb.StatsCache()
dbstats = self.store.getquickstats()
filestats = statscache.filetotals(self.store.file.path)
self.assertEqual(dbstats['total'], filestats['total'])
self.assertEqual(dbstats['totalsourcewords'], filestats['totalsourcewords'])
self.assertEqual(dbstats['untranslated'], filestats['untranslated'])
self.assertEqual(dbstats['untranslatedsourcewords'], filestats['untranslatedsourcewords'])
self.assertEqual(dbstats['fuzzy'], filestats['fuzzy'])
self.assertEqual(dbstats['fuzzysourcewords'], filestats['fuzzysourcewords'])
self.assertEqual(dbstats['translated'], filestats['translated'])
self.assertEqual(dbstats['translatedsourcewords'], filestats['translatedsourcewords'])
self.assertEqual(dbstats['translatedtargetwords'], filestats['translatedtargetwords'])
class XHRTestAnonymous(PootleTestCase):
"""
Base class for testing the XHR views.
"""
def setUp(self):
# FIXME: We should test on a bigger dataset (with a fixture maybe)
super(XHRTestAnonymous, self).setUp()
self.store = Store.objects.get(pootle_path="/af/tutorial/pootle.po")
self.unit = self.store.units[0]
self.uid = self.unit.id
self.bad_uid = 69696969
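        # an arbitrary uid assumed not to exist in the test data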
self.path = self.store.pootle_path
self.bad_path = "/foo/bar/baz.po"
self.post_data = {'id': self.uid,
'index': 1,
'path': self.path,
'pootle_path': self.path,
'store': self.path,
'source_f_0': 'fish',
'target_f_0': 'arraina'}
#
# Tests for the get_view_units() view.
#
def test_get_view_units_response_ok(self):
"""AJAX request, should return HTTP 200."""
r = self.client.get("%(pootle_path)s/view" %\
{'pootle_path': self.path},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 200)
def test_get_view_units_bad_store(self):
"""Checks for store correctness when passing an invalid path."""
r = self.client.get("%(pootle_path)s/view" %\
{'pootle_path': self.bad_path},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 404)
#
# Tests for the get_more_context() view.
#
def test_get_more_context_response_ok(self):
"""AJAX request, should return HTTP 200."""
r = self.client.get("/unit/context/%s" % self.uid,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 200)
def test_get_more_context_bad_unit(self):
"""Checks for store correctness when passing an invalid uid."""
r = self.client.get("/unit/context/%s" % self.bad_uid,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 404)
def test_get_more_context_bad_store_unit(self):
"""Checks for store/unit correctness when passing an invalid path/uid."""
r = self.client.get("/unit/context/%s" % self.bad_uid,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 404)
#
# Tests for the get_edit_unit() view.
#
def test_get_edit_unit_response_ok(self):
"""AJAX request, should return HTTP 200."""
r = self.client.get("/unit/edit/%s" % self.uid,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 200)
def test_get_edit_unit_bad_unit(self):
"""Checks for unit correctness when passing an invalid uid."""
r = self.client.get("/unit/edit/%s" % self.bad_uid,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 404)
def test_get_edit_unit_bad_store_unit(self):
"""Checks for store/unit correctness when passing an invalid path/uid."""
r = self.client.get("/unit/edit/%s" % self.bad_uid,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 404)
def test_get_edit_unit_good_response(self):
"""Checks for returned data correctness."""
r = self.client.get("/unit/edit/%s" % self.uid,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 200)
self.assertTemplateUsed(r, 'unit/edit.html')
#
# Tests for the get_failing_checks() view.
#
def test_get_failing_checks_response_ok(self):
"""AJAX request, should return HTTP 200."""
r = self.client.get("%(pootle_path)s/checks/" %\
{'pootle_path': self.path},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 200)
def test_get_failing_checks_bad_store(self):
"""Checks for store correctness when passing an invalid path."""
r = self.client.get("%(pootle_path)s/checks/" %\
{'pootle_path': self.bad_path},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 404)
#
# Tests for the process_submit() view.
#
def test_process_submit_response_ok(self):
"""AJAX request, should return HTTP 200."""
for m in ("submission", "suggestion"):
r = self.client.post("/unit/process/%(uid)s/%(method)s" %\
{'uid': self.uid, 'method': m},
self.post_data,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 200)
def test_process_submit_bad_unit(self):
"""Checks for unit correctness when passing an invalid uid."""
for m in ("submission", "suggestion"):
r = self.client.post("/unit/process/%(uid)s/%(method)s" %\
{'uid': self.bad_uid, 'method': m},
self.post_data,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 200)
j = json.loads(r.content)
self.assertTrue('captcha' in j.keys())
def test_process_submit_bad_store_unit(self):
"""Checks for store/unit correctness when passing an invalid path/uid."""
for m in ("submission", "suggestion"):
r = self.client.post("/unit/process/%(uid)s/%(method)s" %\
{'uid': self.bad_uid, 'method': m},
self.post_data,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 200)
j = json.loads(r.content)
self.assertTrue('captcha' in j.keys())
def test_process_submit_bad_form(self):
"""Checks for form correctness when bad POST data is passed."""
form_data = self.post_data
del(form_data['index'])
for m in ("submission", "suggestion"):
r = self.client.post("/unit/process/%(uid)s/%(method)s" %\
{'uid': self.uid, 'method': m},
form_data,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 200)
j = json.loads(r.content)
self.assertTrue('captcha' in j.keys())
def test_process_submit_good_response(self):
"""Checks for returned data correctness."""
for m in ("submission", "suggestion"):
r = self.client.post("/unit/process/%(uid)s/%(method)s" %\
{'uid': self.uid, 'method': m},
self.post_data,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 200)
j = json.loads(r.content)
self.assertTrue('captcha' in j.keys())
class XHRTestNobody(XHRTestAnonymous):
"""
Tests the XHR views as a non-privileged user.
"""
username = 'nonpriv'
password = 'nonpriv'
def setUp(self):
super(XHRTestNobody, self).setUp()
self.client.login(username=self.username, password=self.password)
def test_process_submit_bad_unit(self):
"""Checks for unit correctness when passing an invalid uid."""
for m in ("submission", "suggestion"):
r = self.client.post("/unit/process/%(uid)s/%(method)s" %\
{'uid': self.bad_uid, 'method': m},
self.post_data,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 404)
def test_process_submit_bad_store_unit(self):
"""Checks for store/unit correctness when passing an invalid path/uid."""
for m in ("submission", "suggestion"):
r = self.client.post("/unit/process/%(uid)s/%(method)s" %\
{'uid': self.bad_uid, 'method': m},
self.post_data,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 404)
def test_process_submit_bad_form(self):
"""Checks for form correctness when bad POST data is passed."""
form_data = self.post_data
        del form_data['index']
for m in ("submission", "suggestion"):
r = self.client.post("/unit/process/%(uid)s/%(method)s" %\
{'uid': self.uid, 'method': m},
form_data,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 400)
def test_process_submit_good_response(self):
"""Checks for returned data correctness."""
r = self.client.post("/unit/process/%(uid)s/%(method)s" %\
{'uid': self.uid, 'method': "suggestion"}, self.post_data,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 200)
unit = Unit.objects.get(id=self.uid)
sugg = unit.get_suggestions()[0]
self.assertEqual(sugg.target, self.post_data['target_f_0'])
r = self.client.post("/unit/process/%(uid)s/%(method)s" %\
{'uid': self.uid, 'method': "submission"}, self.post_data,
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(r.status_code, 200)
unit = Unit.objects.get(id=self.uid)
self.assertEqual(unit.target, self.post_data['target_f_0'])
class XHRTestAdmin(XHRTestNobody):
"""
Tests the XHR views as admin user.
"""
username = 'admin'
password = 'admin'
| arky/pootle-dev | pootle/apps/pootle_store/tests.py | Python | gpl-2.0 | 17,327 |
# -*- coding: utf-8 -*-
from openerp.osv import fields, osv
class partner_add_contact(osv.osv_memory):
_name = "partner.add.contact"
_columns = {
"name": fields.char("Nom", size=128, required=True),
"partner_id": fields.many2one("res.partner", u"Partenaire associé"),
"firstname": fields.char("Prénom", size=128, required=True),
"phone": fields.char(u"Numéro de téléphone", size=30),
"mobile": fields.char(u"Téléphone portable"),
"email": fields.char("Email", size=128),
"position": fields.char(u"Fonction dans l'entreprise", size=128),
'civilite': fields.selection([('mr', 'Monsieur'),('mme', 'Madame'),('mlle','Mademoiselle')], u'Civilité'),
}
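    # Added note: as an osv.osv_memory model this wizard is transient;
    # set_contact() below copies the entered values into a persistent
    # 'magellanes.contact' record.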
def set_contact(self, cr, uid, ids, context=None):
obj = self.browse(cr, uid, ids[0], context=context)
vals = {
"name": obj.name,
"partner_id": obj.partner_id.id,
"firstname": obj.firstname,
"phone": obj.phone,
"mobile": obj.mobile,
"email": obj.email,
"position": obj.position,
"civilite": obj.civilite
}
return self.pool.get('magellanes.contact').create(cr, uid, vals, context)
| ATSTI/administra | open_corretora/brokerage/wizard/partner_add_contact.py | Python | gpl-2.0 | 1,249 |
from settings import *
import pymysql
def getconn():
conn = pymysql.connect( charset = 'utf8',
host = DATABASES['default']['HOST'],
port = DATABASES['default']['PORT'],
user = DATABASES['default']['USER'],
passwd = DATABASES['default']['PASSWORD'],
db = DATABASES['default']['NAME'])
return conn
def loadSqlScript(sqlScript):
    # Use a context manager so the file handle is closed (the original left
    # it open).
    with open(sqlScript) as f:
        query = ''
        for line in f:
            query = query + line
    return query
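# Illustrative usage (not part of the original file; 'query.sql' is a
# made-up path and the script is assumed to hold a single statement):
#   conn = getconn()
#   cur = conn.cursor()
#   cur.execute(loadSqlScript('query.sql'))
#   conn.commit()
#   conn.close()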
| kassine/caparis2mysql | sqlTools.py | Python | gpl-2.0 | 602 |
#!/usr/bin/env python
#Mercurial extension to robustly integrate prompts with other processes
#Copyright (C) 2010-2011 Willem Verstraeten
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import urllib2
from mercurial import ui, util
import struct, socket
from mercurial.i18n import _
try:
from mercurial.url import passwordmgr
except:
from mercurial.httprepo import passwordmgr
def sendInt( client, number):
length = struct.pack('>L', number)
client.sendall( length )
def send( client, data ):
if data is None:
sendInt(client, 0)
else:
sendInt(client, len(data))
client.sendall( data )
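# Added for illustration: the framing used by sendInt()/send() is a
# big-endian 4-byte length prefix followed by the raw payload, so
# send(client, 'abc') transmits struct.pack('>L', 3) + 'abc'. The receive
# helpers below read the prefix first and then exactly that many bytes.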
def receiveIntWithMessage(client, message):
requiredLength = struct.calcsize('>L')
buffer = ''
while len(buffer)<requiredLength:
chunk = client.recv(requiredLength-len(buffer))
if chunk == '':
raise util.Abort( message )
buffer = buffer + chunk
# struct.unpack always returns a tuple, even if that tuple only contains a single
# item. The trailing , is to destructure the tuple into its first element.
intToReturn, = struct.unpack('>L', buffer)
return intToReturn
def receiveInt(client):
return receiveIntWithMessage(client, "could not get information from server")
def receive( client ):
    # The original dropped the result; return it so callers get the payload.
    return receiveWithMessage(client, "could not get information from server")
def receiveWithMessage( client, message ):
length = receiveIntWithMessage(client, message)
buffer = ''
while len(buffer) < length :
chunk = client.recv(length - len(buffer))
if chunk == '':
raise util.Abort( message)
buffer = buffer+chunk
return buffer
# decorator to cleanly monkey patch methods in mercurial
def monkeypatch_method(cls):
def decorator(func):
setattr(cls, func.__name__, func)
return func
return decorator
def sendchoicestoidea(ui, msg, choices, default):
port = int(ui.config( 'hg4ideaprompt', 'port', None, True))
if not port:
raise util.Abort("No port was specified")
numOfChoices = len(choices)
if not numOfChoices:
return default
client = socket.socket( socket.AF_INET, socket.SOCK_STREAM )
try:
client.connect( ('127.0.0.1', port) )
send( client, msg )
sendInt( client, numOfChoices )
for choice in choices:
send( client, choice )
sendInt( client, default )
answer = receiveInt( client )
if answer == -1:
raise util.Abort("User cancelled")
else:
return answer
except:
raise
# determine which method to monkey patch :
# in Mercurial 1.4 the prompt method was renamed to promptchoice
if getattr(ui.ui, 'promptchoice', None):
@monkeypatch_method(ui.ui)
def promptchoice(self, msg, choices=None, default=0):
return sendchoicestoidea(self, msg, choices, default)
else:
@monkeypatch_method(ui.ui)
def prompt(self, msg, choices=None, default="y"):
resps = [s[s.index('&')+1].lower() for s in choices]
defaultIndex = resps.index( default )
responseIndex = sendchoicestoidea( self, msg, choices, defaultIndex)
return resps[responseIndex]
original_warn = ui.ui.warn
@monkeypatch_method(ui.ui)
def warn(self, *msg):
original_warn(self, *msg)
port = int(self.config( 'hg4ideawarn', 'port', None, True))
if not port:
raise util.Abort("No port was specified")
self.debug( "hg4idea prompt server waiting on port %s" % port )
client = socket.socket( socket.AF_INET, socket.SOCK_STREAM )
self.debug( "connecting ..." )
client.connect( ('127.0.0.1', port) )
self.debug( "connected, sending data ..." )
sendInt( client, len(msg) )
for message in msg:
send( client, message )
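# Added note (assumed configuration): each handler in this extension reads
# its TCP port from the user's hgrc; the section names match the ui.config()
# calls above and below, while the port numbers here are placeholders:
#   [hg4ideaprompt]
#   port = 50001
#   [hg4ideawarn]
#   port = 50002
#   [hg4ideapass]
#   port = 50003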
def retrieve_pass_from_server(ui, uri,path, proposed_user):
port = int(ui.config('hg4ideapass', 'port', None, True))
if port is None:
raise util.Abort("No port was specified")
client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
ui.debug("connecting ...")
client.connect(('127.0.0.1', port))
ui.debug("connected, sending data ...")
send(client, "getpass")
send(client, uri)
send(client, path)
send(client, proposed_user)
user = receiveWithMessage(client, "http authorization required")
password = receiveWithMessage(client, "http authorization required")
return user, password
original_retrievepass=passwordmgr.find_user_password
@monkeypatch_method(passwordmgr)
def find_user_password(self, realm, authuri):
try:
return original_retrievepass(self, realm, authuri)
except util.Abort:
# In mercurial 1.8 the readauthtoken method was replaced with
# the readauthforuri method, which has different semantics
if getattr(self, 'readauthtoken', None):
def read_hgrc_authtoken(ui, authuri):
return self.readauthtoken(authuri)
else:
def read_hgrc_authtoken(ui, authuri):
# hg 1.8
from mercurial.url import readauthforuri
res = readauthforuri(self.ui, authuri)
if res:
group, auth = res
return auth
else:
return None
user, password = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password(self, realm, authuri)
if user is None:
auth = read_hgrc_authtoken(self.ui, authuri)
if auth:
user = auth.get("username")
reduced_uri, path= self.reduce_uri(authuri, False)
retrievedPass = retrieve_pass_from_server(self.ui, reduced_uri, path, user)
if retrievedPass is None:
raise util.Abort(_('http authorization required'))
user, passwd = retrievedPass
self.add_password(realm, authuri, user, passwd)
        return retrievedPass
| willemv/mercurial_prompthooks | prompthooks.py | Python | gpl-2.0 | 6,665 |
# -*- coding: utf-8 -*-
#
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.conf import settings
from django.utils.translation import gettext_lazy as _
def jumpserver_processor(request):
# Setting default pk
context = {
'DEFAULT_PK': '00000000-0000-0000-0000-000000000000',
'LOGO_URL': static('img/logo.png'),
'LOGO_TEXT_URL': static('img/logo_text.png'),
'LOGIN_IMAGE_URL': static('img/login_image.png'),
'FAVICON_URL': static('img/facio.ico'),
'JMS_TITLE': 'Jumpserver',
'VERSION': settings.VERSION,
'COPYRIGHT': 'FIT2CLOUD 飞致云' + ' © 2014-2019',
'SECURITY_COMMAND_EXECUTION': settings.SECURITY_COMMAND_EXECUTION,
'SECURITY_MFA_VERIFY_TTL': settings.SECURITY_MFA_VERIFY_TTL,
}
return context
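# Added note (assumed wiring): Django only invokes this function if it is
# registered in the TEMPLATES setting, e.g.
#   'OPTIONS': {'context_processors': [
#       ...,
#       'jumpserver.context_processor.jumpserver_processor',
#   ]}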
| eli261/jumpserver | apps/jumpserver/context_processor.py | Python | gpl-2.0 | 835 |
# replaces the '%' symbol with '+', leaving the return values of actions usable for other things, such as type information,
# and leaving whitespace intact.
from dparser import Parser
# turn a tree of strings into a single string (slowly):
def stringify(s):
if not isinstance(s, str):
return ''.join(map(stringify, s))
return s
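# Added example: stringify flattens arbitrarily nested sequences of strings,
# e.g. stringify(['1', [' + ', ['2']]]) == '1 + 2'.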
def d_add1(t, s):
"add : add '%' exp"
s[1] = '+ ' # replace the % with +
def d_add2(t, s):
"add : exp"
def d_exp(t):
'exp : "[0-9]+" '
# If an action specifies the 's' argument (as d_add1 does above), the parser
# passes in the string tree for that production, which the action may
# rewrite in place.
parser = Parser()
parsedmessage = parser.parse('1 % 2 % 3')
if stringify(parsedmessage.getStringLeft()) != '1 + 2 + 3':
print 'error'
| charlesDGY/coflo | coflo-0.0.4/third_party/d/python/tests/test6.py | Python | gpl-2.0 | 743 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class stock_incoterms(osv.Model):
"""
stock_incoterm
"""
_inherit = 'stock.incoterms'
_columns = {
'description': fields.text('Description',
help='Formal description for this incoterm.'),
}
| 3dfxsoftware/cbss-addons | incoterm_ext/incoterm.py | Python | gpl-2.0 | 1,298 |
import os
from unipath import Path
try:
# expect at ~/source/tshilo-dikotla/etc/default.cnf
# etc folder is not in the git repo
PATH = Path(os.path.dirname(os.path.realpath(__file__))).ancestor(
1).child('etc')
if not os.path.exists(PATH):
raise TypeError(
'Path to database credentials at \'{}\' does not exist'.format(PATH))
with open(os.path.join(PATH, 'secret_key.txt')) as f:
PRODUCTION_SECRET_KEY = f.read().strip()
PRODUCTION_POSTGRES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'OPTIONS': {
'read_default_file': os.path.join(PATH, 'default.cnf'),
},
'HOST': '',
'PORT': '',
'ATOMIC_REQUESTS': True,
},
# 'lab_api': {
# 'ENGINE': 'django.db.backends.mysql',
# 'OPTIONS': {
# 'read_default_file': os.path.join(PATH, 'lab_api.cnf'),
# },
# 'HOST': '',
# 'PORT': '',
# 'ATOMIC_REQUESTS': True,
# },
}
except TypeError:
PRODUCTION_POSTGRES = None
PRODUCTION_SECRET_KEY = None
print('Path to production database credentials does not exist')
TRAVIS_POSTGRES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'td',
'USER': 'travis',
'HOST': '',
'PORT': '',
'ATOMIC_REQUESTS': True,
},
'lab_api': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'td_lab',
'USER': 'travis',
'HOST': '',
'PORT': '',
'ATOMIC_REQUESTS': True,
},
'test_server': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'td_test',
'USER': 'travis',
'HOST': '',
'PORT': '',
'ATOMIC_REQUESTS': True,
},
}
TEST_HOSTS_POSTGRES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'td',
'USER': 'django',
'PASSWORD': 'django',
'HOST': '',
'PORT': '',
'ATOMIC_REQUESTS': True,
},
'lab_api': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'td_lab',
'USER': 'django',
'PASSWORD': 'django',
'HOST': '',
'PORT': '',
'ATOMIC_REQUESTS': True,
},
}
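# Illustrative only (the real selection logic lives in the project settings,
# which are not part of this file):
#   DATABASES = PRODUCTION_POSTGRES if PRODUCTION_POSTGRES else TRAVIS_POSTGRES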
| botswana-harvard/tshilo-dikotla | tshilo_dikotla/databases.py | Python | gpl-2.0 | 2,422 |
""" This is script doc string. """
g = 1 # block 1 at level 0
# this is a comment that starts a line after indent
# this comment starts in column 1
h=2 # another inline comment
# see if this comment is preceded by whitespace
def f(a,b,c): # function with multiple returns
""" This is multi-line
function doc string.
"""
if a > b: # block 2 at level 1
if b > c: # block 3 at level 2
a = c # block 4 at level 3
else: # block 3 at level 2
return # block 4 at level 3 - explicit return
print a # block 2 at level 1
# implicit return
f(1,2,3) # block 1 at level 0
[f(i,3,5) for i in range(5)]
def g(a,b):
a += 1
if a > 'c': print "a is larger"
def h(b):
return b*b
return h(b)
g(3,2)
v = 123
# this function definition shows that parameter names are not
# defined until after the parameter list is completed. That is,
# it is invalid to define: def h(a,b=a): ...
def h(a, b=v,c=(v,),d={'a':(2,4,6)},e=[3,5,7]):
print "b: a=%s b=%s" % (a,b)
def k( a , b ) :
c = a
d = b
if c > d: return d
def kk( c ):
return c*c
return kk(c+d)
class C:
""" This is class doc string."""
def __init__( self ):
""" This is a member function doc string"""
if 1:
return
elif 0: return
class D (object):
def dd( self, *args, **kwds ):
return args, kwds
def main():
"This is single line doc string"
h(3)
c = C()
g(3,2)
f(7,5,3)
# next, test if whitespace affects token sequences
h ( 'a' , 'z' ) [ 1 : ]
# next, test tokenization for statement that crosses lines
h (
'b'
,
'y'
) [
1
:
]
if __name__ == "__main__":
main()
| ipmb/PyMetrics | PyMetrics/examples/sample.py | Python | gpl-2.0 | 1,928 |
from re import compile as re_compile
from os import path as os_path, listdir
from MenuList import MenuList
from Components.Harddisk import harddiskmanager
from Tools.Directories import SCOPE_CURRENT_SKIN, resolveFilename, fileExists
from enigma import RT_HALIGN_LEFT, eListboxPythonMultiContent, \
eServiceReference, eServiceCenter, gFont
from Tools.LoadPixmap import LoadPixmap
EXTENSIONS = {
"m4a": "music",
"mp2": "music",
"mp3": "music",
"wav": "music",
"ogg": "music",
"flac": "music",
"jpg": "picture",
"jpeg": "picture",
"png": "picture",
"bmp": "picture",
"ts": "movie",
"avi": "movie",
"divx": "movie",
"m4v": "movie",
"mpg": "movie",
"mpeg": "movie",
"mkv": "movie",
"mp4": "movie",
"mov": "movie",
"m2ts": "movie",
}
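# Added note: the values above are icon base names; FileEntryComponent below
# resolves them to pixmaps such as "extensions/music.png" under the current
# skin directory.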
def FileEntryComponent(name, absolute = None, isDir = False):
res = [ (absolute, isDir) ]
res.append((eListboxPythonMultiContent.TYPE_TEXT, 35, 1, 470, 20, 0, RT_HALIGN_LEFT, name))
if isDir:
png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "extensions/directory.png"))
else:
extension = name.split('.')
extension = extension[-1].lower()
if EXTENSIONS.has_key(extension):
png = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "extensions/" + EXTENSIONS[extension] + ".png"))
else:
png = None
if png is not None:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 10, 2, 20, 20, png))
return res
class FileList(MenuList):
def __init__(self, directory, showDirectories = True, showFiles = True, showMountpoints = True, matchingPattern = None, useServiceRef = False, inhibitDirs = False, inhibitMounts = False, isTop = False, enableWrapAround = False, additionalExtensions = None):
MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
self.additional_extensions = additionalExtensions
self.mountpoints = []
self.current_directory = None
self.current_mountpoint = None
self.useServiceRef = useServiceRef
self.showDirectories = showDirectories
self.showMountpoints = showMountpoints
self.showFiles = showFiles
self.isTop = isTop
# example: matching .nfi and .ts files: "^.*\.(nfi|ts)"
self.matchingPattern = matchingPattern
self.inhibitDirs = inhibitDirs or []
self.inhibitMounts = inhibitMounts or []
self.refreshMountpoints()
self.changeDir(directory)
self.l.setFont(0, gFont("Regular", 18))
self.l.setItemHeight(23)
self.serviceHandler = eServiceCenter.getInstance()
def refreshMountpoints(self):
self.mountpoints = [os_path.join(p.mountpoint, "") for p in harddiskmanager.getMountedPartitions()]
self.mountpoints.sort(reverse = True)
def getMountpoint(self, file):
file = os_path.join(os_path.realpath(file), "")
for m in self.mountpoints:
if file.startswith(m):
return m
return False
def getMountpointLink(self, file):
if os_path.realpath(file) == file:
return self.getMountpoint(file)
else:
if file[-1] == "/":
file = file[:-1]
mp = self.getMountpoint(file)
last = file
file = os_path.dirname(file)
while last != "/" and mp == self.getMountpoint(file):
last = file
file = os_path.dirname(file)
return os_path.join(last, "")
def getSelection(self):
if self.l.getCurrentSelection() is None:
return None
return self.l.getCurrentSelection()[0]
def getCurrentEvent(self):
l = self.l.getCurrentSelection()
if not l or l[0][1] == True:
return None
else:
return self.serviceHandler.info(l[0][0]).getEvent(l[0][0])
def getFileList(self):
return self.list
def inParentDirs(self, dir, parents):
dir = os_path.realpath(dir)
for p in parents:
if dir.startswith(p):
return True
return False
def changeDir(self, directory, select = None):
self.list = []
# if we are just entering from the list of mount points:
if self.current_directory is None:
if directory and self.showMountpoints:
self.current_mountpoint = self.getMountpointLink(directory)
else:
self.current_mountpoint = None
self.current_directory = directory
directories = []
files = []
if directory is None and self.showMountpoints: # present available mountpoints
for p in harddiskmanager.getMountedPartitions():
path = os_path.join(p.mountpoint, "")
if path not in self.inhibitMounts and not self.inParentDirs(path, self.inhibitDirs):
self.list.append(FileEntryComponent(name = p.description, absolute = path, isDir = True))
files = [ ]
directories = [ ]
elif directory is None:
files = [ ]
directories = [ ]
elif self.useServiceRef:
# we should not use the 'eServiceReference(string)' constructor, because it doesn't allow ':' in the directoryname
root = eServiceReference(2, 0, directory)
if self.additional_extensions:
root.setName(self.additional_extensions)
serviceHandler = eServiceCenter.getInstance()
list = serviceHandler.list(root)
while 1:
s = list.getNext()
if not s.valid():
del list
break
if s.flags & s.mustDescent:
directories.append(s.getPath())
else:
files.append(s)
directories.sort()
files.sort()
else:
if fileExists(directory):
try:
files = listdir(directory)
except:
files = []
files.sort()
tmpfiles = files[:]
for x in tmpfiles:
if os_path.isdir(directory + x):
directories.append(directory + x + "/")
files.remove(x)
if directory is not None and self.showDirectories and not self.isTop:
if directory == self.current_mountpoint and self.showMountpoints:
self.list.append(FileEntryComponent(name = "<" +_("List of Storage Devices") + ">", absolute = None, isDir = True))
elif (directory != "/") and not (self.inhibitMounts and self.getMountpoint(directory) in self.inhibitMounts):
self.list.append(FileEntryComponent(name = "<" +_("Parent Directory") + ">", absolute = '/'.join(directory.split('/')[:-2]) + '/', isDir = True))
if self.showDirectories:
for x in directories:
if not (self.inhibitMounts and self.getMountpoint(x) in self.inhibitMounts) and not self.inParentDirs(x, self.inhibitDirs):
name = x.split('/')[-2]
self.list.append(FileEntryComponent(name = name, absolute = x, isDir = True))
if self.showFiles:
for x in files:
if self.useServiceRef:
path = x.getPath()
name = path.split('/')[-1]
else:
path = directory + x
name = x
if (self.matchingPattern is None) or re_compile(self.matchingPattern).search(path):
self.list.append(FileEntryComponent(name = name, absolute = x , isDir = False))
if self.showMountpoints and len(self.list) == 0:
self.list.append(FileEntryComponent(name = _("nothing connected"), absolute = None, isDir = False))
self.l.setList(self.list)
if select is not None:
i = 0
self.moveToIndex(0)
for x in self.list:
p = x[0][0]
if isinstance(p, eServiceReference):
p = p.getPath()
if p == select:
self.moveToIndex(i)
i += 1
def getCurrentDirectory(self):
return self.current_directory
def canDescent(self):
if self.getSelection() is None:
return False
return self.getSelection()[1]
def descent(self):
if self.getSelection() is None:
return
self.changeDir(self.getSelection()[0], select = self.current_directory)
def getFilename(self):
if self.getSelection() is None:
return None
x = self.getSelection()[0]
if isinstance(x, eServiceReference):
x = x.getPath()
return x
def getServiceRef(self):
if self.getSelection() is None:
return None
x = self.getSelection()[0]
if isinstance(x, eServiceReference):
return x
return None
def execBegin(self):
harddiskmanager.on_partition_list_change.append(self.partitionListChanged)
def execEnd(self):
harddiskmanager.on_partition_list_change.remove(self.partitionListChanged)
def refresh(self):
self.changeDir(self.current_directory, self.getFilename())
def partitionListChanged(self, action, device):
self.refreshMountpoints()
if self.current_directory is None:
self.refresh()
def MultiFileSelectEntryComponent(name, absolute = None, isDir = False, selected = False):
res = [ (absolute, isDir, selected, name) ]
res.append((eListboxPythonMultiContent.TYPE_TEXT, 55, 1, 470, 20, 0, RT_HALIGN_LEFT, name))
if isDir:
png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "extensions/directory.png"))
else:
extension = name.split('.')
extension = extension[-1].lower()
if EXTENSIONS.has_key(extension):
png = LoadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, "extensions/" + EXTENSIONS[extension] + ".png"))
else:
png = None
if png is not None:
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 30, 2, 20, 20, png))
if not name.startswith('<'):
if selected is False:
icon = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/lock_off.png"))
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 2, 0, 25, 25, icon))
else:
icon = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, "skin_default/icons/lock_on.png"))
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 2, 0, 25, 25, icon))
return res
class MultiFileSelectList(FileList):
def __init__(self, preselectedFiles, directory, showMountpoints = False, matchingPattern = None, showDirectories = True, showFiles = True, useServiceRef = False, inhibitDirs = False, inhibitMounts = False, isTop = False, enableWrapAround = False, additionalExtensions = None):
self.selectedFiles = preselectedFiles
if self.selectedFiles is None:
self.selectedFiles = []
FileList.__init__(self, directory, showMountpoints = showMountpoints, matchingPattern = matchingPattern, showDirectories = showDirectories, showFiles = showFiles, useServiceRef = useServiceRef, inhibitDirs = inhibitDirs, inhibitMounts = inhibitMounts, isTop = isTop, enableWrapAround = enableWrapAround, additionalExtensions = additionalExtensions)
self.changeDir(directory)
self.l.setItemHeight(25)
self.l.setFont(0, gFont("Regular", 20))
self.onSelectionChanged = [ ]
def selectionChanged(self):
for f in self.onSelectionChanged:
f()
def changeSelectionState(self):
idx = self.l.getCurrentSelectionIndex()
count = 0
newList = []
for x in self.list:
if idx == count:
if x[0][3].startswith('<'):
newList.append(x)
else:
if x[0][1] is True:
realPathname = x[0][0]
else:
realPathname = self.current_directory + x[0][0]
if x[0][2] == True:
SelectState = False
for entry in self.selectedFiles:
if entry == realPathname:
self.selectedFiles.remove(entry)
else:
SelectState = True
alreadyinList = False
for entry in self.selectedFiles:
if entry == realPathname:
alreadyinList = True
if not alreadyinList:
self.selectedFiles.append(realPathname)
newList.append(MultiFileSelectEntryComponent(name = x[0][3], absolute = x[0][0], isDir = x[0][1], selected = SelectState ))
else:
newList.append(x)
count += 1
self.list = newList
self.l.setList(self.list)
def getSelectedList(self):
return self.selectedFiles
def changeDir(self, directory, select = None):
self.list = []
# if we are just entering from the list of mount points:
if self.current_directory is None:
if directory and self.showMountpoints:
self.current_mountpoint = self.getMountpointLink(directory)
else:
self.current_mountpoint = None
self.current_directory = directory
directories = []
files = []
if directory is None and self.showMountpoints: # present available mountpoints
for p in harddiskmanager.getMountedPartitions():
path = os_path.join(p.mountpoint, "")
if path not in self.inhibitMounts and not self.inParentDirs(path, self.inhibitDirs):
self.list.append(MultiFileSelectEntryComponent(name = p.description, absolute = path, isDir = True))
files = [ ]
directories = [ ]
elif directory is None:
files = [ ]
directories = [ ]
elif self.useServiceRef:
root = eServiceReference("2:0:1:0:0:0:0:0:0:0:" + directory)
if self.additional_extensions:
root.setName(self.additional_extensions)
serviceHandler = eServiceCenter.getInstance()
list = serviceHandler.list(root)
while 1:
s = list.getNext()
if not s.valid():
del list
break
if s.flags & s.mustDescent:
directories.append(s.getPath())
else:
files.append(s)
directories.sort()
files.sort()
else:
if fileExists(directory):
try:
files = listdir(directory)
except:
files = []
files.sort()
tmpfiles = files[:]
for x in tmpfiles:
if os_path.isdir(directory + x):
directories.append(directory + x + "/")
files.remove(x)
if directory is not None and self.showDirectories and not self.isTop:
if directory == self.current_mountpoint and self.showMountpoints:
self.list.append(MultiFileSelectEntryComponent(name = "<" +_("List of Storage Devices") + ">", absolute = None, isDir = True))
elif (directory != "/") and not (self.inhibitMounts and self.getMountpoint(directory) in self.inhibitMounts):
self.list.append(MultiFileSelectEntryComponent(name = "<" +_("Parent Directory") + ">", absolute = '/'.join(directory.split('/')[:-2]) + '/', isDir = True))
if self.showDirectories:
for x in directories:
if not (self.inhibitMounts and self.getMountpoint(x) in self.inhibitMounts) and not self.inParentDirs(x, self.inhibitDirs):
name = x.split('/')[-2]
alreadySelected = False
for entry in self.selectedFiles:
if entry == x:
alreadySelected = True
if alreadySelected:
self.list.append(MultiFileSelectEntryComponent(name = name, absolute = x, isDir = True, selected = True))
else:
self.list.append(MultiFileSelectEntryComponent(name = name, absolute = x, isDir = True, selected = False))
if self.showFiles:
for x in files:
if self.useServiceRef:
path = x.getPath()
name = path.split('/')[-1]
else:
path = directory + x
name = x
if (self.matchingPattern is None) or re_compile(self.matchingPattern).search(path):
alreadySelected = False
for entry in self.selectedFiles:
if os_path.basename(entry) == x:
alreadySelected = True
if alreadySelected:
self.list.append(MultiFileSelectEntryComponent(name = name, absolute = x , isDir = False, selected = True))
else:
self.list.append(MultiFileSelectEntryComponent(name = name, absolute = x , isDir = False, selected = False))
self.l.setList(self.list)
if select is not None:
i = 0
self.moveToIndex(0)
for x in self.list:
p = x[0][0]
if isinstance(p, eServiceReference):
p = p.getPath()
if p == select:
self.moveToIndex(i)
i += 1
| openpli-arm/enigma2-arm | lib/python/Components/FileList.py | Python | gpl-2.0 | 14,837 |
'''
Copyright (C) 2015 Jacob Bieker, [email protected], www.jacobbieker.com
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
'''
__author__ = 'Jacob Bieker'
print("Starting Google Plus Parsing") | jacobbieker/Insights | insights/google/GPlus2SQLite.py | Python | gpl-2.0 | 825 |
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Submit, Button, Field, Hidden, HTML, Div
from crispy_forms.bootstrap import FormActions, AppendedText, StrictButton, InlineField
from django import forms
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import resolve, reverse
from django.db import models
from django.db.models import F, ExpressionWrapper, FloatField, IntegerField, CharField, Case, When, Sum, Func, Min, Q
from django.shortcuts import render, redirect
from django.shortcuts import render, redirect
from django.http import HttpResponse
from cooking.helpers import prepareContext
from cooking.models import Ingredient
from cooking.forms import ConfirmDeleteForm
class IngredientForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(IngredientForm, self).__init__(*args, **kwargs)
self.fields['cooked_weight'].required = False
self.helper = FormHelper()
self.helper.form_class = 'form-horizontal'
self.helper.form_method = 'post'
self.helper.form_action = ''
self.helper.label_class = 'col-lg-2'
self.helper.field_class = 'col-lg-4'
self.helper.layout = Layout(
Field('name'),
Field('buying_quantity'),
Field('buying_measurement'),
Field('calculation_quantity'),
Field('calculation_measurement'),
AppendedText('cooked_weight', 'Gram'),
AppendedText('price', '€'),
Field('cheapest_store'),
Field('remarks'),
Field('allergens'),
FormActions(
Submit('save', 'Save changes'),
HTML('<a href="' + reverse('cooking:ingredients') + '" class="btn btn-default" role="button">Cancel</a>'),
)
)
#self.helper.add_input(Submit('submit', 'Save'))
def clean_price(self):
if(self.cleaned_data.get('price') < 0):
raise forms.ValidationError("Price can't be negative")
return self.cleaned_data.get('price')
def clean_buying_quantity(self):
if(self.cleaned_data.get('buying_quantity') == 0):
raise forms.ValidationError("Buying Quantity can't be zero")
return self.cleaned_data.get('buying_quantity')
def clean_calculation_quantity(self):
if(self.cleaned_data.get('calculation_quantity') != None and self.cleaned_data.get('calculation_quantity') == 0):
raise forms.ValidationError("Calculation Quantity can not be zero, leave it out if you don't need it")
return self.cleaned_data.get('calculation_quantity')
def clean_calculation_measurement(self):
if(self.cleaned_data.get('calculation_measurement') == None and self.cleaned_data.get('calculation_quantity') != None):
raise forms.ValidationError('If calculation quantity is set, you also need to set the measurement')
elif(self.cleaned_data.get('calculation_quantity') == None and self.cleaned_data.get('calculation_measurement') != None):
raise forms.ValidationError('You can not set a measurement without a quantity')
else:
return self.cleaned_data.get('calculation_measurement')
def clean_cooked_weight(self):
if(self.cleaned_data.get('cooked_weight') == None):
self.cleaned_data['cooked_weight'] = 0
return self.cleaned_data.get('cooked_weight')
class Meta:
model = Ingredient
exclude = ['id']
@login_required
def list_ingredients(request):
context = prepareContext(request)
context['ingredient_list'] = Ingredient.objects.all()
context['pagetitle'] = 'Ingredients'
return render(request, 'listings/ingredients.html', context)
@login_required
def edit_ingredient(request, ingredient):
context = prepareContext(request)
if(request.POST and 'id' in request.POST):
ingredient = int(request.POST.get('id'))
ing = Ingredient.objects.get(id=ingredient)
form = IngredientForm(request.POST or None, instance=ing)
if(form.is_valid()):
form.save()
context['submitted'] = True
context['form'] = form
context['name'] = ing.name
context['pagetitle'] = 'Edit Ingredient'
return render(request, 'single/defaultform.html', context)
@login_required
def del_ingredient(request, ingredient):
context = prepareContext(request)
ing = Ingredient.objects.get(id=ingredient)
form = ConfirmDeleteForm(request.POST or None)
if(form.is_valid()):
ing.delete()
return redirect('cooking:ingredients')
context['object'] = ing
context['noaction'] = reverse('cooking:ingredients')
context['form'] = form
context['pagetitle'] = 'Delete Ingredient'
return render(request, 'single/confirmdelete.html', context)
@login_required
def new_ingredient(request):
context = prepareContext(request)
form = None
if(request.POST):
form = IngredientForm(data=request.POST or None)
if(form.is_valid()):
form.save()
return redirect('cooking:ingredients')
else:
form = IngredientForm()
context['form'] = form
context['name'] = 'New Ingredient'
context['pagetitle'] = 'New Ingredient'
return render(request, 'single/defaultform.html', context)
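# Added note: the CSV view below relies on HttpResponse (imported above) as
# well as UnicodeWriter and conv_measurement, which are assumed to live in
# cooking.helpers; neither is defined in this file.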
@login_required
def ingredients_csv(request):
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename="ingredients.csv"'
writer = UnicodeWriter(response)
writer.writerow(['Name', 'Buying unit', '', 'Calculation unit', '', 'Price', 'Remarks', 'Cheapest Store', 'Allergens'])
ingredients = Ingredient.objects.all()
for item in ingredients:
writer.writerow([item.name,
item.buying_quantity,
conv_measurement(item.buying_measurement, item.buying_quantity),
item.calculation_quantity,
conv_measurement(item.calculation_measurement, item.calculation_quantity),
item.price,
item.remarks,
item.cheapest_store,
', '.join([a.name for a in item.allergens.all()])])
return response
| blacksph3re/alastair | cooking/ingredient/ingredient.py | Python | gpl-2.0 | 5,596 |
class Solution:
# @param haystack, a string
# @param needle, a string
# @return an integer
def strStr(self, haystack, needle):
lenH = len(haystack)
lenN = len(needle)
if lenN == 0:
return 0
for i in range(lenH-lenN+1):
p = i
q = 0
while q < lenN and haystack[p] == needle[q]:
p += 1
q += 1
if q == lenN:
return i
return -1
if __name__ == '__main__':
sol = Solution()
haystack = 'a'
needle = 'a'
print sol.strStr(haystack, needle)
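    # A few extra illustrative checks (added; expected values follow the
    # usual strStr contract):
    print sol.strStr('hello', 'll')   # 2
    print sol.strStr('aaaaa', 'bba')  # -1
    print sol.strStr('abc', '')       # 0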
| Dectinc/leetcode | python/28 - Implement strStr().py | Python | gpl-2.0 | 612 |
from os import environ
from os import path
from time import sleep
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.support.ui import WebDriverWait, Select
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.action_chains import ActionChains
import atexit
class WDriverBase():
DEFAULT_TIMEOUT = 1
def __init__(self):
try:
self.base_url = environ['BASE_URL']
except KeyError:
raise BaseException('No BASE_URL environment variable')
self.implicitly_wait(WDriverBase.DEFAULT_TIMEOUT)
def module(self, name):
package = __import__('zentyal.' + name)
return getattr(package, name).WDriverModule()
def var(self, name, mandatory=False):
if not environ.has_key(name):
if mandatory:
raise Exception('Missing mandatory variable :' + name)
return None
return environ[name]
def var_as_list(self, name):
if not environ.has_key(name):
return []
return environ[name].split()
def open(self, url):
self.get(self.base_url + url)
def click(self, name=None, id=None, xpath=None, link=None, css=None):
if name:
print "CLICK name = " + name
self._wait_for_element_clickable(name, By.NAME)
self.find_element_by_name(name).click()
elif id:
print "CLICK id = " + id
self._wait_for_element_clickable(id, By.ID)
self.find_element_by_id(id).click()
elif xpath:
print "CLICK xpath = " + xpath
self._wait_for_element_clickable(xpath, By.XPATH)
self.find_element_by_xpath(xpath).click()
elif link:
print "CLICK link = " + link
self._wait_for_element_clickable(link, By.LINK_TEXT)
self.find_element_by_link_text(link).click()
elif css:
print "CLICK css = " + css
self._wait_for_element_clickable(css, By.CSS_SELECTOR)
self.find_element_by_css_selector(css).click()
else:
raise ValueError("No valid selector passed (name, id, xpath, link or css)")
def click_radio(self, name, value):
self.click(xpath=".//input[@type='radio' and @name='" + name + "' and contains(@value, '" + value + "')]")
def type(self, text, name=None, id=None, xpath=None, css=None):
text = str(text)
if name:
print "TYPE " + text + " IN name = " + name
self._type_text_in_element(text, name, By.NAME)
elif id:
print "TYPE " + text + " IN id = " + id
self._type_text_in_element(text, id, By.ID)
elif xpath:
print "TYPE " + text + " IN xpath = " + xpath
self._type_text_in_element(text, xpath, By.XPATH)
elif css:
print "TYPE " + text + " IN css = " + css
self._type_text_in_element(text, css, By.CSS_SELECTOR)
else:
raise ValueError("No valid selector passed (name, id, xpath or css)")
def type_var(self, var, name=None, id=None, xpath=None, css=None):
self.type(environ[var], name, id, xpath, css)
def select(self, option=None, value=None, name=None, id=None, xpath=None, css=None):
how = None
what = None
selector = None
if name:
what = name
how = By.NAME
selector = 'name'
elif id:
what = id
how = By.ID
selector = 'id'
elif xpath:
what = xpath
how = By.XPATH
selector = 'xpath'
elif css:
what = css
how = By.CSS_SELECTOR
selector = 'css'
else:
raise ValueError("No valid selector passed (name, id, xpath or css)")
elem = self.find_element(by=how, value=what)
select = Select(elem)
if value:
print "SELECT value = " + value + " IN " + selector + " = " + what
select.select_by_value(value)
elif option:
print "SELECT option = " + str(option) + " IN " + selector + " = " + what
select.select_by_visible_text(option)
else:
raise ValueError("No option or value passed")
def check(self, name, how=By.NAME):
elem = self.find_element(by=how, value=name)
if not elem.is_selected():
elem.click()
def uncheck(self, name, how=By.NAME):
elem = self.find_element(by=how, value=name)
if elem.is_selected():
elem.click()
def assert_true(self, expr, msg='assertion failed'):
if not expr:
raise Exception("Failed in driver assertion with error: " + msg)
def assert_present(self, name=None, id=None, xpath=None, text=None, css=None, timeout=10, msg='not present'):
self.assert_true(self.wait_for(name, id, xpath, text, css, timeout), msg)
def assert_value(self, value, name=None, id=None, xpath=None, css=None, timeout=10, msg='not present'):
self.assert_true(self.wait_for_value(value, name, id, xpath, css, timeout), msg)
def wait_for(self, name=None, id=None, xpath=None, text=None, css=None, timeout=10):
if name:
print "WAIT FOR name = " + name
return self._wait_for_element_present(name, By.NAME, timeout_in_seconds=timeout)
elif id:
print "WAIT FOR id = " + id
return self._wait_for_element_present(id, By.ID, timeout_in_seconds=timeout)
elif xpath:
print "WAIT FOR xpath = " + xpath
return self._wait_for_element_present(xpath, By.XPATH, timeout_in_seconds=timeout)
elif text:
print "WAIT FOR text = " + text
for i in range(timeout):
if self.find_element_by_tag_name('body').text.find(text) != -1:
return True
sleep(1)
return False
elif css:
print "WAIT FOR css = " + css
return self._wait_for_element_present(css, By.CSS_SELECTOR, timeout_in_seconds=timeout)
else:
raise ValueError("No valid selector passed (name, id, xpath or css)")
def wait_for_value(self, value, name=None, id=None, xpath=None, css=None, timeout=10):
if name:
print "WAIT FOR VALUE " + value + " IN name = " + name
return self._wait_for_value(name, value, By.NAME, timeout_in_seconds=timeout)
elif id:
print "WAIT FOR VALUE " + value + " IN id = " + id
return self._wait_for_value(id, value, By.ID, timeout_in_seconds=timeout)
elif xpath:
print "WAIT FOR VALUE " + value + " IN xpath = " + xpath
return self._wait_for_value(xpath, value, By.XPATH, timeout_in_seconds=timeout)
elif css:
print "WAIT FOR VALUE " + value + " IN css = " + css
return self._wait_for_value(css, value, By.CSS_SELECTOR, timeout_in_seconds=timeout)
else:
raise ValueError("No valid selector passed (name, id, xpath or css)")
def is_present(self, name=None, id=None, xpath=None, text=None, css=None):
if name:
print "IS PRESENT? name = " + name
return self._is_element_present(By.NAME, name)
elif id:
print "IS PRESENT? id = " + id
return self._is_element_present(By.ID, id)
elif xpath:
print "IS PRESENT? xpath = " + xpath
return self._is_element_present(By.XPATH, xpath)
elif text:
print "IS PRESENT? text = " + text
return self.find_element_by_tag_name('body').text.find(text) != -1
elif css:
print "IS PRESENT? css = " + css
            return self._is_element_present(By.CSS_SELECTOR, css)
else:
raise ValueError("No valid selector passed (name, id, xpath, text or css)")
    def drag_and_drop(self, xpath_drag, id_drop):
        # 'driver' was an undefined name in the original; the instance itself
        # is the WebDriver, so use self.
        drag = self.find_element_by_xpath(xpath_drag)
        drop = self.find_element_by_id(id_drop)
        ActionChains(self).drag_and_drop(drag, drop).perform()
def _is_element_present(self, how, what):
self.implicitly_wait(WDriverBase.DEFAULT_TIMEOUT)
try:
return self.find_element(by=how, value=what).is_displayed()
except NoSuchElementException:
return False
def _wait_for_value(self, name, value, how=By.NAME, timeout_in_seconds=10):
for i in range(timeout_in_seconds):
if self._is_element_present(how, name):
if self.find_element(by=how, value=name).get_attribute("value") == value:
return True
sleep(1)
return False
def _wait_for_element_present(self, element, how=By.NAME, timeout_in_seconds=10):
for i in range(timeout_in_seconds):
if self._is_element_present(how, element):
return True
sleep(1)
print "Timeout after " + str(timeout_in_seconds) + " seconds."
return False
def _type_text_in_element(self, text, element, how=By.NAME):
self._wait_for_element_present(element, how)
elem = self.find_element(how, element)
elem.click()
elem.clear()
elem.send_keys(text.decode('utf-8'))
def _wait_for_element_clickable(self, element, how=By.NAME, timeout_in_seconds=10):
wait = WebDriverWait(self, timeout_in_seconds)
element = wait.until(EC.element_to_be_clickable((how,element)))
return element
class WDriverFirefox(webdriver.Firefox, WDriverBase):
init_done = False
instance = None
def __new__(cls, *args, **kargs):
if cls.instance is None:
cls.instance = object.__new__(cls, *args, **kargs)
return cls.instance
def __init__(self):
if not WDriverFirefox.init_done:
webdriver.Firefox.__init__(self)
WDriverBase.__init__(self)
WDriverFirefox.init_done = True
atexit.register(self.quit)
class WDriverChrome(webdriver.Chrome, WDriverBase):
init_done = False
instance = None
default_ubuntu_path = '/usr/lib/chromium-browser/chromedriver'
def __new__(cls, *args, **kargs):
if cls.instance is None:
cls.instance = object.__new__(cls, *args, **kargs)
return cls.instance
def __init__(self):
if not WDriverChrome.init_done:
if (self.exists()):
webdriver.Chrome.__init__(
self, executable_path=self.default_ubuntu_path)
else:
webdriver.Chrome.__init__(self)
WDriverBase.__init__(self)
WDriverChrome.init_done = True
atexit.register(self.quit)
@classmethod
def exists(cls):
return path.exists(cls.default_ubuntu_path)
class WDriverPhantomJS(webdriver.PhantomJS, WDriverBase):
init_done = False
instance = None
def __new__(cls, *args, **kargs):
if cls.instance is None:
cls.instance = object.__new__(cls, *args, **kargs)
return cls.instance
def __init__(self):
if not WDriverPhantomJS.init_done:
webdriver.PhantomJS.__init__(self, service_args=['--ignore-ssl-errors=true'])
WDriverBase.__init__(self)
WDriverPhantomJS.init_done = True
atexit.register(self.quit)
@classmethod
def exists(cls):
default_phantomjs_path = '/usr/bin/phantomjs'
return path.exists(default_phantomjs_path)
def instance():
if environ.has_key("GLOBAL_browser"):
if environ["GLOBAL_browser"] == "Chrome":
return WDriverChrome()
elif environ["GLOBAL_browser"] == "Firefox":
return WDriverFirefox()
    # The original passed an undefined 'client_base' to these constructors,
    # which take no arguments; call them directly.
    if WDriverPhantomJS.exists():
        return WDriverPhantomJS()
    elif WDriverChrome.exists():
        return WDriverChrome()
return WDriverPhantomJS()
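# Illustrative usage sketch (added): BASE_URL and GLOBAL_browser are the
# environment variables read above; the element names are made up.
#   environ['BASE_URL'] = 'https://192.168.56.101'
#   environ['GLOBAL_browser'] = 'Firefox'
#   driver = instance()
#   driver.open('/Login/Index')
#   driver.type('admin', id='username')
#   driver.click(name='login')
#   driver.assert_present(text='Dashboard')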
| Zentyal/anste | lib/anste/wdriver.py | Python | gpl-2.0 | 12,084 |
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2008 Raphael Ackermann
# Copyright (C) 2010 Benny Malengier
# Copyright (C) 2010 Nick Hall
# Copyright (C) 2012 Doug Blank <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard python modules
#
#-------------------------------------------------------------------------
from __future__ import print_function
import random
import os
from xml.sax.saxutils import escape
import collections
#-------------------------------------------------------------------------
#
# GTK/Gnome modules
#
#-------------------------------------------------------------------------
from gi.repository import GObject
from gi.repository import Gdk
from gi.repository import Gtk
#-------------------------------------------------------------------------
#
# gramps modules
#
#-------------------------------------------------------------------------
from gramps.gen.config import config
from gramps.gen.const import GRAMPS_LOCALE as glocale
from gramps.gen.const import HOME_DIR, URL_WIKISTRING
from gramps.gen.datehandler import get_date_formats
from gramps.gen.display.name import displayer as _nd
from gramps.gen.display.name import NameDisplayError
from gramps.gen.utils.alive import update_constants
from gramps.gen.utils.keyword import (get_keywords, get_translation_from_keyword,
get_translations, get_keyword_from_translation)
from gramps.gen.lib import Date, FamilyRelType
from gramps.gen.lib import Name, Surname, NameOriginType
from gramps.gen.constfunc import conv_to_unicode
from .managedwindow import ManagedWindow
from .widgets import MarkupLabel, BasicLabel
from .dialog import ErrorDialog, QuestionDialog2, OkDialog
from .glade import Glade
from gramps.gen.plug.utils import available_updates
from .plug import PluginWindows
from gramps.gen.errors import WindowActiveError
from .spell import HAVE_GTKSPELL
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Constants
#
#-------------------------------------------------------------------------
_surname_styles = [
_("Father's surname"),
_("None"),
_("Combination of mother's and father's surname"),
_("Icelandic style"),
]
# column numbers for the 'name format' model
COL_NUM = 0
COL_NAME = 1
COL_FMT = 2
COL_EXPL = 3
#-------------------------------------------------------------------------
#
#
#
#-------------------------------------------------------------------------
class DisplayNameEditor(ManagedWindow):
def __init__(self, uistate, dbstate, track, dialog):
# Assumes that there are two methods: dialog.name_changed_check(),
# and dialog._build_custom_name_ui()
ManagedWindow.__init__(self, uistate, [], DisplayNameEditor)
self.dialog = dialog
self.dbstate = dbstate
self.set_window(
Gtk.Dialog(_('Display Name Editor'),
buttons=(Gtk.STOCK_CLOSE, Gtk.ResponseType.CLOSE)),
None, _('Display Name Editor'), None)
table = self.dialog._build_custom_name_ui()
label = Gtk.Label(label=_("""The following keywords are replaced with the appropriate name parts:
<tt>
<b>Given</b> - given name (first name) <b>Surname</b> - surnames (with prefix and connectors)
<b>Title</b> - title (Dr., Mrs.) <b>Suffix</b> - suffix (Jr., Sr.)
<b>Call</b> - call name <b>Nickname</b> - nick name
<b>Initials</b> - first letters of Given <b>Common</b> - nick name, otherwise first of Given
<b>Primary, Primary[pre] or [sur] or [con]</b>- full primary surname, prefix, surname only, connector
<b>Patronymic, or [pre] or [sur] or [con]</b> - full pa/matronymic surname, prefix, surname only, connector
<b>Familynick</b> - family nick name <b>Prefix</b> - all prefixes (von, de)
<b>Rest</b> - non primary surnames <b>Notpatronymic</b>- all surnames, except pa/matronymic & primary
<b>Rawsurnames</b>- surnames (no prefixes and connectors)
</tt>
UPPERCASE keyword forces uppercase. Extra parentheses, commas are removed. Other text appears literally.
<b>Example</b>: 'Dr. Edwin Jose von der Smith and Weston Wilson Sr ("Ed") - Underhills'
<i>Edwin Jose</i> is given name, <i>von der</i> is the prefix, <i>Smith</i> and <i>Weston</i> surnames,
<i>and</i> a connector, <i>Wilson</i> patronymic surname, <i>Dr.</i> title, <i>Sr</i> suffix, <i>Ed</i> nick name,
<i>Underhills</i> family nick name, <i>Jose</i> callname.
"""))
label.set_use_markup(True)
self.window.vbox.pack_start(label, False, True, 0)
self.window.vbox.pack_start(table, True, True, 0)
self.window.set_default_size(600, 550)
self.window.connect('response', self.close)
self.show()
def close(self, *obj):
self.dialog.name_changed_check()
ManagedWindow.close(self, *obj)
def build_menu_names(self, obj):
return (_(" Name Editor"), _("Preferences"))
#-------------------------------------------------------------------------
#
# ConfigureDialog
#
#-------------------------------------------------------------------------
class ConfigureDialog(ManagedWindow):
"""
Base class for configuration dialogs. They provide a Notebook, to which
pages are added with configuration options, and a Cancel and Save button.
On save, a config file on which the dialog works, is saved to disk, and
a callback called.
"""
def __init__(self, uistate, dbstate, configure_page_funcs, configobj,
configmanager,
dialogtitle=_("Preferences"), on_close=None):
"""
Set up a configuration dialog
:param uistate: a DisplayState instance
:param dbstate: a DbState instance
:param configure_page_funcs: a list of function that return a tuple
(str, Gtk.Widget). The string is used as label for the
configuration page, and the widget as the content of the
configuration page
:param configobj: the unique object that is configured, it must be
identifiable (id(configobj)). If the configure dialog of the
configobj is already open, a WindowActiveError will be
raised. Grab this exception in the calling method
:param configmanager: a configmanager object. Several convenience
methods are present in ConfigureDialog to set up widgets that
write changes directly via this configmanager.
:param dialogtitle: the title of the configuration dialog
:param on_close: callback that is called on close
"""
self.dbstate = dbstate
self.__config = configmanager
ManagedWindow.__init__(self, uistate, [], configobj)
self.set_window(
Gtk.Dialog(dialogtitle,
buttons=(Gtk.STOCK_CLOSE, Gtk.ResponseType.CLOSE)),
None, dialogtitle, None)
self.panel = Gtk.Notebook()
self.panel.set_scrollable(True)
self.window.vbox.pack_start(self.panel, True, True, 0)
self.__on_close = on_close
self.window.connect('response', self.done)
self.__setup_pages(configure_page_funcs)
self.window.show_all()
self.show()
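    # Illustrative sketch (not from the original source) of a page function
    # as expected by __setup_pages(): it receives this dialog and returns a
    # (label, widget) tuple; the config key used here is hypothetical.
    #
    #   def my_prefs_page(configdialog):
    #       table = Gtk.Table(n_rows=2, n_columns=9)
    #       table.set_border_width(12)
    #       configdialog.add_checkbox(table, _('Enable feature'), 0,
    #                                 'interface.enable-feature')
    #       return _('My page'), table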
def __setup_pages(self, configure_page_funcs):
"""
This method builds the notebookpages in the panel
"""
if isinstance(configure_page_funcs, collections.Callable):
pages = configure_page_funcs()
else:
pages = configure_page_funcs
for func in pages:
labeltitle, widget = func(self)
self.panel.append_page(widget, MarkupLabel(labeltitle))
def done(self, obj, value):
if self.__on_close:
self.__on_close()
self.close()
def update_int_entry(self, obj, constant):
"""
:param obj: an object with get_text method that should contain an
integer
:param constant: the config setting to which the integer value must be
saved
"""
try:
self.__config.set(constant, int(obj.get_text()))
except:
print("WARNING: ignoring invalid value for '%s'" % constant)
def update_markup_entry(self, obj, constant):
"""
:param obj: an object with get_text method
:param constant: the config setting to which the text value must be
saved
"""
try:
obj.get_text() % 'test_markup'
except TypeError:
print("WARNING: ignoring invalid value for '%s'" % constant)
ErrorDialog(_("Invalid or incomplete format definition."),
obj.get_text())
obj.set_text('<b>%s</b>')
except ValueError:
print("WARNING: ignoring invalid value for '%s'" % constant)
ErrorDialog(_("Invalid or incomplete format definition."),
obj.get_text())
obj.set_text('<b>%s</b>')
self.__config.set(constant, unicode(obj.get_text()))
def update_entry(self, obj, constant):
"""
:param obj: an object with get_text method
:param constant: the config setting to which the text value must be
saved
"""
self.__config.set(constant, conv_to_unicode(obj.get_text()))
def update_color(self, obj, constant, color_hex_label):
color = obj.get_color()
hexval = "#%02x%02x%02x" % (color.red/256,
color.green/256,
color.blue/256)
color_hex_label.set_text(hexval)
self.__config.set(constant, hexval)
def update_checkbox(self, obj, constant, config=None):
if not config:
config = self.__config
config.set(constant, obj.get_active())
def update_radiobox(self, obj, constant):
self.__config.set(constant, obj.get_active())
def update_combo(self, obj, constant):
"""
:param obj: the ComboBox object
:param constant: the config setting to which the value must be saved
"""
self.__config.set(constant, obj.get_active())
def update_slider(self, obj, constant):
"""
:param obj: the HScale object
:param constant: the config setting to which the value must be saved
"""
self.__config.set(constant, int(obj.get_value()))
def update_spinner(self, obj, constant):
"""
:param obj: the SpinButton object
:param constant: the config setting to which the value must be saved
"""
self.__config.set(constant, int(obj.get_value()))
def add_checkbox(self, table, label, index, constant, start=1, stop=9,
config=None, extra_callback=None):
if not config:
config = self.__config
checkbox = Gtk.CheckButton(label=label)
checkbox.set_active(config.get(constant))
checkbox.connect('toggled', self.update_checkbox, constant, config)
if extra_callback:
checkbox.connect('toggled', extra_callback)
table.attach(checkbox, start, stop, index, index+1, yoptions=0)
return checkbox
def add_radiobox(self, table, label, index, constant, group, column,
config=None):
if not config:
config = self.__config
radiobox = Gtk.RadioButton.new_with_mnemonic_from_widget(group, label)
if config.get(constant) == True:
radiobox.set_active(True)
radiobox.connect('toggled', self.update_radiobox, constant)
table.attach(radiobox, column, column+1, index, index+1, yoptions=0)
return radiobox
def add_text(self, table, label, index, config=None, line_wrap=True):
if not config:
config = self.__config
text = Gtk.Label()
text.set_line_wrap(line_wrap)
text.set_alignment(0.,0.)
text.set_text(label)
table.attach(text, 1, 9, index, index+1, yoptions=Gtk.AttachOptions.SHRINK)
def add_path_box(self, table, label, index, entry, path, callback_label,
callback_sel, config=None):
""" Add an entry to give in path and a select button to open a
dialog.
Changing entry calls callback_label
Clicking open button call callback_sel
"""
if not config:
config = self.__config
lwidget = BasicLabel("%s: " %label)
hbox = Gtk.HBox()
if path:
entry.set_text(path)
entry.connect('changed', callback_label)
btn = Gtk.Button()
btn.connect('clicked', callback_sel)
image = Gtk.Image()
image.set_from_stock(Gtk.STOCK_OPEN, Gtk.IconSize.BUTTON)
image.show()
btn.add(image)
hbox.pack_start(entry, True, True, 0)
hbox.pack_start(btn, False, False, 0)
table.attach(lwidget, 1, 2, index, index+1, yoptions=0,
xoptions=Gtk.AttachOptions.FILL)
table.attach(hbox, 2, 3, index, index+1, yoptions=0)
def add_entry(self, table, label, index, constant, callback=None,
config=None, col_attach=0):
if not config:
config = self.__config
if not callback:
callback = self.update_entry
if label:
lwidget = BasicLabel("%s: " % label)
entry = Gtk.Entry()
entry.set_text(config.get(constant))
entry.connect('changed', callback, constant)
if label:
table.attach(lwidget, col_attach, col_attach+1, index, index+1, yoptions=0,
xoptions=Gtk.AttachOptions.FILL)
table.attach(entry, col_attach+1, col_attach+2, index, index+1, yoptions=0)
else:
table.attach(entry, col_attach, col_attach+1, index, index+1, yoptions=0)
return entry
def add_pos_int_entry(self, table, label, index, constant, callback=None,
config=None, col_attach=1, helptext=''):
""" entry field for positive integers
"""
if not config:
config = self.__config
lwidget = BasicLabel("%s: " % label)
entry = Gtk.Entry()
entry.set_text(str(config.get(constant)))
entry.set_tooltip_markup(helptext)
if callback:
entry.connect('changed', callback, constant)
table.attach(lwidget, col_attach, col_attach+1, index, index+1,
yoptions=0, xoptions=Gtk.AttachOptions.FILL)
table.attach(entry, col_attach+1, col_attach+2, index, index+1,
yoptions=0)
def add_color(self, table, label, index, constant, config=None, col=0):
if not config:
config = self.__config
lwidget = BasicLabel("%s: " % label)
hexval = config.get(constant)
color = Gdk.color_parse(hexval)
entry = Gtk.ColorButton(color=color)
color_hex_label = BasicLabel(hexval)
entry.connect('color-set', self.update_color, constant, color_hex_label)
table.attach(lwidget, col, col+1, index, index+1, yoptions=0,
xoptions=Gtk.AttachOptions.FILL)
table.attach(entry, col+1, col+2, index, index+1, yoptions=0, xoptions=0)
table.attach(color_hex_label, col+2, col+3, index, index+1, yoptions=0)
return entry
def add_combo(self, table, label, index, constant, opts, callback=None,
config=None, valueactive=False, setactive=None):
"""
A drop-down list allowing selection from a number of fixed options.
:param opts: A list of options. Each option is a tuple containing an
integer code and a textual description.
If valueactive = True, the constant stores the value, not the position
in the list
"""
if not config:
config = self.__config
if not callback:
callback = self.update_combo
lwidget = BasicLabel("%s: " % label)
store = Gtk.ListStore(int, str)
for item in opts:
store.append(item)
combo = Gtk.ComboBox(model=store)
cell = Gtk.CellRendererText()
combo.pack_start(cell, True)
combo.add_attribute(cell, 'text', 1)
if valueactive:
val = config.get(constant)
pos = 0
for nr, item in enumerate(opts):
if item[-1] == val:
pos = nr
break
combo.set_active(pos)
else:
if setactive is None:
combo.set_active(config.get(constant))
else:
combo.set_active(setactive)
combo.connect('changed', callback, constant)
table.attach(lwidget, 1, 2, index, index+1, yoptions=0,
xoptions=Gtk.AttachOptions.FILL)
table.attach(combo, 2, 3, index, index+1, yoptions=0)
return combo
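    # A hedged usage sketch for add_combo (the option list and config key
    # below are illustrative, not taken from this file):
    #
    #   opts = [(0, _("Never")), (1, _("Always"))]
    #   combo = self.add_combo(table, _('Check mode'), row,
    #                          'behavior.check-mode', opts)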
def add_slider(self, table, label, index, constant, range, callback=None,
config=None):
"""
A slider allowing the selection of an integer within a specified range.
:param range: A tuple containing the minimum and maximum allowed values.
"""
if not config:
config = self.__config
if not callback:
callback = self.update_slider
lwidget = BasicLabel("%s: " % label)
adj = Gtk.Adjustment(config.get(constant), range[0], range[1], 1, 0, 0)
slider = Gtk.HScale(adjustment=adj)
slider.set_digits(0)
slider.set_value_pos(Gtk.PositionType.BOTTOM)
slider.connect('value-changed', callback, constant)
table.attach(lwidget, 1, 2, index, index+1, yoptions=0,
xoptions=Gtk.AttachOptions.FILL)
table.attach(slider, 2, 3, index, index+1, yoptions=0)
return slider
def add_spinner(self, table, label, index, constant, range, callback=None,
config=None):
"""
A spinner allowing the selection of an integer within a specified range.
:param range: A tuple containing the minimum and maximum allowed values.
"""
if not config:
config = self.__config
if not callback:
callback = self.update_spinner
lwidget = BasicLabel("%s: " % label)
adj = Gtk.Adjustment(config.get(constant), range[0], range[1], 1, 0, 0)
spinner = Gtk.SpinButton(adjustment=adj, climb_rate=0.0, digits=0)
spinner.connect('value-changed', callback, constant)
table.attach(lwidget, 1, 2, index, index+1, yoptions=0,
xoptions=Gtk.AttachOptions.FILL)
table.attach(spinner, 2, 3, index, index+1, yoptions=0)
return spinner
#-------------------------------------------------------------------------
#
# GrampsPreferences
#
#-------------------------------------------------------------------------
class GrampsPreferences(ConfigureDialog):
def __init__(self, uistate, dbstate):
page_funcs = (
self.add_behavior_panel,
self.add_famtree_panel,
self.add_formats_panel,
self.add_text_panel,
self.add_prefix_panel,
self.add_date_panel,
self.add_researcher_panel,
self.add_advanced_panel,
self.add_color_panel
)
ConfigureDialog.__init__(self, uistate, dbstate, page_funcs,
GrampsPreferences, config,
on_close=update_constants)
def add_researcher_panel(self, configdialog):
table = Gtk.Table(n_rows=3, n_columns=8)
table.set_border_width(12)
table.set_col_spacings(6)
table.set_row_spacings(6)
self.add_text(table, _('Enter your information so people can contact you when you'
' distribute your Family Tree'), 0, line_wrap=False)
self.add_entry(table, _('Name'), 1, 'researcher.researcher-name')
self.add_entry(table, _('Address'), 2, 'researcher.researcher-addr')
self.add_entry(table, _('Locality'), 3, 'researcher.researcher-locality')
self.add_entry(table, _('City'), 4, 'researcher.researcher-city')
self.add_entry(table, _('State/County'), 5, 'researcher.researcher-state')
self.add_entry(table, _('Country'), 6, 'researcher.researcher-country')
self.add_entry(table, _('ZIP/Postal Code'), 7, 'researcher.researcher-postal')
self.add_entry(table, _('Phone'), 8, 'researcher.researcher-phone')
self.add_entry(table, _('Email'), 9, 'researcher.researcher-email')
return _('Researcher'), table
def add_prefix_panel(self, configdialog):
"""
Add the ID prefix tab to the preferences.
"""
table = Gtk.Table(n_rows=3, n_columns=8)
table.set_border_width(12)
table.set_col_spacings(6)
table.set_row_spacings(6)
self.add_entry(table, _('Person'), 0, 'preferences.iprefix',
self.update_idformat_entry)
self.add_entry(table, _('Family'), 1, 'preferences.fprefix',
self.update_idformat_entry)
self.add_entry(table, _('Place'), 2, 'preferences.pprefix',
self.update_idformat_entry)
self.add_entry(table, _('Source'), 3, 'preferences.sprefix',
self.update_idformat_entry)
self.add_entry(table, _('Citation'), 4, 'preferences.cprefix',
self.update_idformat_entry)
self.add_entry(table, _('Media Object'), 5, 'preferences.oprefix',
self.update_idformat_entry)
self.add_entry(table, _('Event'), 6, 'preferences.eprefix',
self.update_idformat_entry)
self.add_entry(table, _('Repository'), 7, 'preferences.rprefix',
self.update_idformat_entry)
self.add_entry(table, _('Note'), 8, 'preferences.nprefix',
self.update_idformat_entry)
return _('ID Formats'), table
def add_color_panel(self, configdialog):
"""
Add the tab to set defaults colors for graph boxes
"""
table = Gtk.Table(n_rows=17, n_columns=8)
self.add_text(table, _('Set the colors used for boxes in the graphical views'),
0, line_wrap=False)
self.add_color(table, _('Gender Male Alive'), 1,
'preferences.color-gender-male-alive')
self.add_color(table, _('Border Male Alive'), 2,
'preferences.bordercolor-gender-male-alive')
self.add_color(table, _('Gender Male Death'), 3,
'preferences.color-gender-male-death')
self.add_color(table, _('Border Male Death'), 4,
'preferences.bordercolor-gender-male-death')
self.add_color(table, _('Gender Female Alive'), 1,
'preferences.color-gender-female-alive', col=4)
self.add_color(table, _('Border Female Alive'), 2,
'preferences.bordercolor-gender-female-alive', col=4)
self.add_color(table, _('Gender Female Death'), 3,
'preferences.color-gender-female-death', col=4)
self.add_color(table, _('Border Female Death'), 4,
'preferences.bordercolor-gender-female-death', col=4)
## self.add_color(table, _('Gender Other Alive'), 5,
## 'preferences.color-gender-other-alive')
## self.add_color(table, _('Border Other Alive'), 6,
## 'preferences.bordercolor-gender-other-alive')
## self.add_color(table, _('Gender Other Death'), 7,
## 'preferences.color-gender-other-death')
## self.add_color(table, _('Border Other Death'), 8,
## 'preferences.bordercolor-gender-other-death')
self.add_color(table, _('Gender Unknown Alive'), 5,
'preferences.color-gender-unknown-alive', col=4)
self.add_color(table, _('Border Unknown Alive'), 6,
'preferences.bordercolor-gender-unknown-alive', col=4)
self.add_color(table, _('Gender Unknown Death'), 7,
'preferences.color-gender-unknown-death', col=4)
self.add_color(table, _('Border Unknown Death'), 8,
'preferences.bordercolor-gender-unknown-death', col=4)
return _('Colors'), table
def add_advanced_panel(self, configdialog):
table = Gtk.Table(n_rows=4, n_columns=8)
table.set_border_width(12)
table.set_col_spacings(6)
table.set_row_spacings(6)
self.add_checkbox(
table, _('Suppress warning when adding parents to a child.'),
0, 'preferences.family-warn')
self.add_checkbox(
table, _('Suppress warning when canceling with changed data.'),
1, 'interface.dont-ask')
self.add_checkbox(
table, _('Suppress warning about missing researcher when'
' exporting to GEDCOM.'),
2, 'behavior.owner-warn')
self.add_checkbox(
table, _('Show plugin status dialog on plugin load error.'),
3, 'behavior.pop-plugin-status')
return _('Warnings'), table
def _build_name_format_model(self, active):
"""
Create a common model for ComboBox and TreeView
"""
name_format_model = Gtk.ListStore(GObject.TYPE_INT,
GObject.TYPE_STRING,
GObject.TYPE_STRING,
GObject.TYPE_STRING)
index = 0
the_index = 0
for num, name, fmt_str, act in _nd.get_name_format():
translation = fmt_str
for key in get_keywords():
if key in translation:
translation = translation.replace(key, get_translation_from_keyword(key))
self.examplename.set_display_as(num)
name_format_model.append(
row=[num, translation, fmt_str, _nd.display_name(self.examplename)])
if num == active: the_index = index
index += 1
return name_format_model, the_index
def __new_name(self, obj):
lyst = ["%s, %s %s (%s)" % (_("Surname"), _("Given"), _("Suffix"),
_("Common")),
"%s, %s %s (%s)" % (_("Surname"), _("Given"), _("Suffix"),
_("Nickname")),
"%s, %s %s (%s)" % (_("Surname"), _("Name|Common"), _("Suffix"),
_("Nickname")),
"%s, %s %s" % (_("Surname"), _("Name|Common"), _("Suffix")),
"%s, %s %s (%s)" % (_("SURNAME"), _("Given"), _("Suffix"),
_("Call")),
"%s, %s (%s)" % (_("Surname"), _("Given"), _("Name|Common")),
"%s, %s (%s)" % (_("Surname"), _("Name|Common"), _("Nickname")),
"%s %s" % (_("Given"), _("Surname")),
"%s %s, %s" % (_("Given"), _("Surname"), _("Suffix")),
"%s %s %s" % (_("Given"), _("NotPatronymic"), _("Patronymic")),
"%s, %s %s (%s)" % (_("SURNAME"), _("Given"), _("Suffix"),
_("Common")),
"%s, %s (%s)" % (_("SURNAME"), _("Given"), _("Name|Common")),
"%s, %s (%s)" % (_("SURNAME"), _("Given"), _("Nickname")),
"%s %s" % (_("Given"), _("SURNAME")),
"%s %s, %s" % (_("Given"), _("SURNAME"), _("Suffix")),
"%s /%s/" % (_("Given"), _("SURNAME")),
"%s %s, %s" % (_("Given"), _("Rawsurnames"), _("Suffix")),
]
#repeat above list, but not translated.
fmtlyst = ["%s, %s %s (%s)" % (("Surname"), ("Given"), ("Suffix"),
("Common")),
"%s, %s %s (%s)" % (("Surname"), ("Given"), ("Suffix"),
("Nickname")),
"%s, %s %s (%s)" % (("Surname"), ("Name|Common"), ("Suffix"),
("Nickname")),
"%s, %s %s" % (("Surname"), ("Name|Common"), ("Suffix")),
"%s, %s %s (%s)" % (("SURNAME"), ("Given"), ("Suffix"),
("Call")),
"%s, %s (%s)" % (("Surname"), ("Given"), ("Name|Common")),
"%s, %s (%s)" % (("Surname"), ("Name|Common"), ("Nickname")),
"%s %s" % (("Given"), ("Surname")),
"%s %s, %s" % (("Given"), ("Surname"), ("Suffix")),
"%s %s %s" % (("Given"), ("NotPatronymic"), ("Patronymic")),
"%s, %s %s (%s)" % (("SURNAME"), ("Given"), ("Suffix"),
("Common")),
"%s, %s (%s)" % (("SURNAME"), ("Given"), ("Name|Common")),
"%s, %s (%s)" % (("SURNAME"), ("Given"), ("Nickname")),
"%s %s" % (("Given"), ("SURNAME")),
"%s %s, %s" % (("Given"), ("SURNAME"), ("Suffix")),
"%s /%s/" % (("Given"), ("SURNAME")),
"%s %s, %s" % (("Given"), ("Rawsurnames"), ("Suffix")),
]
rand = int(random.random() * len(lyst))
f = lyst[rand]
fmt = fmtlyst[rand]
i = _nd.add_name_format(f, fmt)
node = self.fmt_model.append(row=[i, f, fmt,
_nd.format_str(self.examplename, fmt)])
path = self.fmt_model.get_path(node)
self.format_list.set_cursor(path, self.name_column, True)
self.edit_button.set_sensitive(False)
self.remove_button.set_sensitive(False)
self.insert_button.set_sensitive(False)
def __edit_name(self, obj):
store, node = self.format_list.get_selection().get_selected()
path = self.fmt_model.get_path(node)
self.edit_button.set_sensitive(False)
self.remove_button.set_sensitive(False)
self.insert_button.set_sensitive(False)
self.format_list.set_cursor(path, self.name_column, True)
def __check_for_name(self, name, oldnode):
"""
Check to see if there is another name the same as name
in the format list. Don't compare with self (oldnode).
"""
model = self.fmt_obox.get_model()
iter = model.get_iter_first()
while iter is not None:
othernum = model.get_value(iter, COL_NUM)
oldnum = model.get_value(oldnode, COL_NUM)
if othernum == oldnum:
                pass  # skip comparison with self
else:
othername = model.get_value(iter, COL_NAME)
if othername == name:
return True
iter = model.iter_next(iter)
return False
def __start_name_editing(self, dummy_renderer, dummy_editable, dummy_path):
"""
Method called at the start of editing a name format.
"""
self.format_list.set_tooltip_text(_("Enter to save, Esc to cancel "
"editing"))
def __cancel_change(self, dummy_renderer):
"""
Break off the editing of a name format.
"""
self.format_list.set_tooltip_text('')
num = self.selected_fmt[COL_NUM]
if any(fmt[COL_NUM] == num for fmt in self.dbstate.db.name_formats):
return
else: # editing a new format not yet in db, cleanup is needed
self.fmt_model.remove(self.iter)
_nd.del_name_format(num)
self.insert_button.set_sensitive(True)
def __change_name(self, text, path, new_text):
"""
Called when a name format changed and needs to be stored in the db.
"""
self.format_list.set_tooltip_text('')
if len(new_text) > 0 and text != new_text:
# build a pattern from translated pattern:
pattern = new_text
if (len(new_text) > 2 and
new_text[0] == '"' and
new_text[-1] == '"'):
pass
else:
for key in get_translations():
if key in pattern:
pattern = pattern.replace(key, get_keyword_from_translation(key))
# now build up a proper translation:
translation = pattern
if (len(new_text) > 2 and
new_text[0] == '"' and
new_text[-1] == '"'):
pass
else:
for key in get_keywords():
if key in translation:
translation = translation.replace(key, get_translation_from_keyword(key))
num, name, fmt = self.selected_fmt[COL_NUM:COL_EXPL]
node = self.fmt_model.get_iter(path)
oldname = self.fmt_model.get_value(node, COL_NAME)
# check to see if this pattern already exists
if self.__check_for_name(translation, node):
ErrorDialog(_("This format exists already."),
translation)
self.edit_button.emit('clicked')
return
# else, change the name
self.edit_button.set_sensitive(True)
self.remove_button.set_sensitive(True)
self.insert_button.set_sensitive(True)
exmpl = _nd.format_str(self.examplename, pattern)
self.fmt_model.set(self.iter, COL_NAME, translation,
COL_FMT, pattern,
COL_EXPL, exmpl)
self.selected_fmt = (num, translation, pattern, exmpl)
_nd.edit_name_format(num, translation, pattern)
self.dbstate.db.name_formats = _nd.get_name_format(only_custom=True,
only_active=False)
def __format_change(self, obj):
try:
t = (_nd.format_str(self.name, escape(obj.get_text())))
self.valid = True
except NameDisplayError:
t = _("Invalid or incomplete format definition.")
self.valid = False
self.fmt_model.set(self.iter, COL_EXPL, t)
def _build_custom_name_ui(self):
"""
UI to manage the custom name formats
"""
table = Gtk.Table(n_rows=2, n_columns=3)
table.set_border_width(6)
table.set_col_spacings(6)
table.set_row_spacings(6)
# make a treeview for listing all the name formats
format_tree = Gtk.TreeView(self.fmt_model)
name_renderer = Gtk.CellRendererText()
name_column = Gtk.TreeViewColumn(_('Format'),
name_renderer,
text=COL_NAME)
name_renderer.set_property('editable', False)
name_renderer.connect('editing-started', self.__start_name_editing)
name_renderer.connect('edited', self.__change_name)
name_renderer.connect('editing-canceled', self.__cancel_change)
self.name_renderer = name_renderer
format_tree.append_column(name_column)
example_renderer = Gtk.CellRendererText()
example_column = Gtk.TreeViewColumn(_('Example'),
example_renderer,
text=COL_EXPL)
format_tree.append_column(example_column)
format_tree.get_selection().connect('changed',
self.cb_format_tree_select)
format_tree.set_rules_hint(True)
# ... and put it into a scrolled win
format_sw = Gtk.ScrolledWindow()
format_sw.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.AUTOMATIC)
format_sw.add(format_tree)
format_sw.set_shadow_type(Gtk.ShadowType.IN)
table.attach(format_sw, 0, 3, 0, 1, yoptions=Gtk.AttachOptions.FILL|Gtk.AttachOptions.EXPAND)
# to hold the values of the selected row of the tree and the iter
self.selected_fmt = ()
self.iter = None
self.insert_button = Gtk.Button(stock=Gtk.STOCK_ADD)
self.insert_button.connect('clicked', self.__new_name)
self.edit_button = Gtk.Button(stock=Gtk.STOCK_EDIT)
self.edit_button.connect('clicked', self.__edit_name)
self.edit_button.set_sensitive(False)
self.remove_button = Gtk.Button(stock=Gtk.STOCK_REMOVE)
self.remove_button.connect('clicked', self.cb_del_fmt_str)
self.remove_button.set_sensitive(False)
table.attach(self.insert_button, 0, 1, 1, 2, yoptions=0)
table.attach(self.remove_button, 1, 2, 1, 2, yoptions=0)
table.attach(self.edit_button, 2, 3, 1, 2, yoptions=0)
self.format_list = format_tree
self.name_column = name_column
return table
def name_changed_check(self):
"""
Method to check for a name change. Called by Name Edit Dialog.
"""
obj = self.fmt_obox
the_list = obj.get_model()
the_iter = obj.get_active_iter()
format = the_list.get_value(the_iter, COL_FMT)
if format != self.old_format:
# Yes a change; call the callback
self.cb_name_changed(obj)
def cb_name_changed(self, obj):
"""
Preset name format ComboBox callback
"""
the_list = obj.get_model()
the_iter = obj.get_active_iter()
new_idx = the_list.get_value(the_iter, COL_NUM)
config.set('preferences.name-format', new_idx)
_nd.set_default_format(new_idx)
self.uistate.emit('nameformat-changed')
def cb_pa_sur_changed(self,*args):
"""
checkbox patronymic as surname changed, propagate to namedisplayer
"""
_nd.change_pa_sur()
self.uistate.emit('nameformat-changed')
def cb_format_tree_select(self, tree_selection):
"""
Name format editor TreeView callback
Remember the values of the selected row (self.selected_fmt, self.iter)
and set the Remove and Edit button sensitivity
"""
model, self.iter = tree_selection.get_selected()
if self.iter is None:
tree_selection.select_path(0)
model, self.iter = tree_selection.get_selected()
self.selected_fmt = model.get(self.iter, 0, 1, 2)
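        # Custom (user-defined) name formats carry negative numbers, so only
        # those rows are made editable and removable below.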
idx = self.selected_fmt[COL_NUM] < 0
self.remove_button.set_sensitive(idx)
self.edit_button.set_sensitive(idx)
self.name_renderer.set_property('editable', idx)
def cb_del_fmt_str(self, obj):
"""
Name format editor Remove button callback
"""
num = self.selected_fmt[COL_NUM]
if _nd.get_default_format() == num:
self.fmt_obox.set_active(0)
self.fmt_model.remove(self.iter)
_nd.set_format_inactive(num)
self.dbstate.db.name_formats = _nd.get_name_format(only_custom=True,
only_active=False)
def cb_grampletbar_close(self, obj):
"""
Gramplet bar close button preference callback
"""
self.uistate.emit('grampletbar-close-changed')
def add_formats_panel(self, configdialog):
row = 0
table = Gtk.Table(n_rows=4, n_columns=4)
table.set_border_width(12)
table.set_col_spacings(6)
table.set_row_spacings(6)
# Display name:
self.examplename = Name()
examplesurname = Surname()
examplesurnamesecond = Surname()
examplesurnamepat = Surname()
self.examplename.set_title('Dr.')
self.examplename.set_first_name('Edwin Jose')
examplesurname.set_prefix('von der')
examplesurname.set_surname('Smith')
examplesurname.set_connector('and')
self.examplename.add_surname(examplesurname)
examplesurnamesecond.set_surname('Weston')
self.examplename.add_surname(examplesurnamesecond)
examplesurnamepat.set_surname('Wilson')
examplesurnamepat.set_origintype(
NameOriginType(NameOriginType.PATRONYMIC))
self.examplename.add_surname(examplesurnamepat)
self.examplename.set_primary_surname(0)
self.examplename.set_suffix('Sr')
self.examplename.set_call_name('Jose')
self.examplename.set_nick_name('Ed')
self.examplename.set_family_nick_name('Underhills')
# get the model for the combo and the treeview
active = _nd.get_default_format()
self.fmt_model, active = self._build_name_format_model(active)
# set up the combo to choose the preset format
self.fmt_obox = Gtk.ComboBox()
cell = Gtk.CellRendererText()
self.fmt_obox.pack_start(cell, True)
self.fmt_obox.add_attribute(cell, 'text', 1)
self.fmt_obox.set_model(self.fmt_model)
# set the default value as active in the combo
self.fmt_obox.set_active(active)
self.fmt_obox.connect('changed', self.cb_name_changed)
# label for the combo
lwidget = BasicLabel("%s: " % _('Name format'))
lwidget.set_use_underline(True)
lwidget.set_mnemonic_widget(self.fmt_obox)
hbox = Gtk.HBox()
        btn = Gtk.Button("%s..." % _('Edit'))
btn.connect('clicked', self.cb_name_dialog)
hbox.pack_start(self.fmt_obox, True, True, 0)
hbox.pack_start(btn, False, False, 0)
table.attach(lwidget, 0, 1, row, row+1, yoptions=0)
table.attach(hbox, 1, 3, row, row+1, yoptions=0)
row += 1
# Pa/Matronymic surname handling
self.add_checkbox(table,
_("Consider single pa/matronymic as surname"),
row, 'preferences.patronimic-surname', stop=3,
extra_callback=self.cb_pa_sur_changed)
row += 1
# Date format:
obox = Gtk.ComboBoxText()
formats = get_date_formats()
list(map(obox.append_text, formats))
active = config.get('preferences.date-format')
if active >= len(formats):
active = 0
obox.set_active(active)
obox.connect('changed', self.date_format_changed)
lwidget = BasicLabel("%s: " % _('Date format'))
table.attach(lwidget, 0, 1, row, row+1, yoptions=0)
table.attach(obox, 1, 3, row, row+1, yoptions=0)
row += 1
# Age precision:
# precision=1 for "year", 2: "year, month" or 3: "year, month, days"
obox = Gtk.ComboBoxText()
age_precision = [_("Years"),
_("Years, Months"),
_("Years, Months, Days")]
list(map(obox.append_text, age_precision))
# Combo_box active index is from 0 to 2, we need values from 1 to 3
active = config.get('preferences.age-display-precision') - 1
if active >= 0 and active <= 2:
obox.set_active(active)
else:
obox.set_active(0)
obox.connect('changed',
lambda obj: config.set('preferences.age-display-precision',
obj.get_active() + 1))
lwidget = BasicLabel("%s: "
% _('Age display precision (requires restart)'))
table.attach(lwidget, 0, 1, row, row+1, yoptions=0)
table.attach(obox, 1, 3, row, row+1, yoptions=0)
row += 1
# Calendar format on report:
obox = Gtk.ComboBoxText()
list(map(obox.append_text, Date.ui_calendar_names))
active = config.get('preferences.calendar-format-report')
        if active >= len(Date.ui_calendar_names):
active = 0
obox.set_active(active)
obox.connect('changed', self.date_calendar_changed)
lwidget = BasicLabel("%s: " % _('Calendar on reports'))
table.attach(lwidget, 0, 1, row, row+1, yoptions=0)
table.attach(obox, 1, 3, row, row+1, yoptions=0)
row += 1
# Surname guessing:
obox = Gtk.ComboBoxText()
formats = _surname_styles
list(map(obox.append_text, formats))
obox.set_active(config.get('behavior.surname-guessing'))
obox.connect('changed',
lambda obj: config.set('behavior.surname-guessing',
obj.get_active()))
lwidget = BasicLabel("%s: " % _('Surname guessing'))
table.attach(lwidget, 0, 1, row, row+1, yoptions=0)
table.attach(obox, 1, 3, row, row+1, yoptions=0)
row += 1
# Default Family Relationship
obox = Gtk.ComboBoxText()
formats = FamilyRelType().get_standard_names()
list(map(obox.append_text, formats))
obox.set_active(config.get('preferences.family-relation-type'))
obox.connect('changed',
lambda obj: config.set('preferences.family-relation-type',
obj.get_active()))
lwidget = BasicLabel("%s: " % _('Default family relationship'))
table.attach(lwidget, 0, 1, row, row+1, yoptions=0)
table.attach(obox, 1, 3, row, row+1, yoptions=0)
row += 1
#height multiple surname table
self.add_pos_int_entry(table,
_('Height multiple surname box (pixels)'),
row, 'interface.surname-box-height', self.update_surn_height,
col_attach=0)
row += 1
# Status bar:
obox = Gtk.ComboBoxText()
formats = [_("Active person's name and ID"),
_("Relationship to home person")]
list(map(obox.append_text, formats))
active = config.get('interface.statusbar')
if active < 2:
obox.set_active(0)
else:
obox.set_active(1)
obox.connect('changed',
lambda obj: config.set('interface.statusbar', 2*obj.get_active()))
lwidget = BasicLabel("%s: " % _('Status bar'))
table.attach(lwidget, 0, 1, row, row+1, yoptions=0)
table.attach(obox, 1, 3, row, row+1, yoptions=0)
row += 1
# Text in sidebar:
self.add_checkbox(table,
_("Show text in sidebar buttons (requires restart)"),
row, 'interface.sidebar-text', stop=3)
row += 1
# Gramplet bar close buttons:
self.add_checkbox(table,
_("Show close button in gramplet bar tabs"),
row, 'interface.grampletbar-close', stop=3,
extra_callback=self.cb_grampletbar_close)
row += 1
return _('Display'), table
def add_text_panel(self, configdialog):
row = 0
table = Gtk.Table(n_rows=6, n_columns=8)
table.set_border_width(12)
table.set_col_spacings(6)
table.set_row_spacings(6)
self.add_entry(table, _('Missing surname'), row,
'preferences.no-surname-text')
row += 1
self.add_entry(table, _('Missing given name'), row,
'preferences.no-given-text')
row += 1
self.add_entry(table, _('Missing record'), row,
'preferences.no-record-text')
row += 1
self.add_entry(table, _('Private surname'), row,
'preferences.private-surname-text')
row += 1
self.add_entry(table, _('Private given name'), row,
'preferences.private-given-text')
row += 1
self.add_entry(table, _('Private record'), row,
'preferences.private-record-text')
row += 1
return _('Text'), table
def cb_name_dialog(self, obj):
the_list = self.fmt_obox.get_model()
the_iter = self.fmt_obox.get_active_iter()
self.old_format = the_list.get_value(the_iter, COL_FMT)
win = DisplayNameEditor(self.uistate, self.dbstate, self.track, self)
def check_for_type_changed(self, obj):
active = obj.get_active()
if active == 0: # update
config.set('behavior.check-for-update-types', ["update"])
        elif active == 1: # new
config.set('behavior.check-for-update-types', ["new"])
        elif active == 2: # update and new
config.set('behavior.check-for-update-types', ["update", "new"])
def toggle_hide_previous_addons(self, obj):
active = obj.get_active()
config.set('behavior.do-not-show-previously-seen-updates',
bool(active))
def toggle_tag_on_import(self, obj):
active = obj.get_active()
config.set('preferences.tag-on-import', bool(active))
self.tag_format_entry.set_sensitive(bool(active))
def check_for_updates_changed(self, obj):
active = obj.get_active()
config.set('behavior.check-for-updates', active)
def date_format_changed(self, obj):
config.set('preferences.date-format', obj.get_active())
OkDialog(_('Change is not immediate'),
                 _('Changing the date format will not take '
'effect until the next time Gramps is started.'))
def date_calendar_changed(self, obj):
config.set('preferences.calendar-format-report', obj.get_active())
def add_date_panel(self, configdialog):
table = Gtk.Table(n_rows=2, n_columns=7)
table.set_border_width(12)
table.set_col_spacings(6)
table.set_row_spacings(6)
self.add_spinner(table,
_('Date about range'),
0, 'behavior.date-about-range', (1, 9999))
self.add_spinner(table,
_('Date after range'),
1, 'behavior.date-after-range', (1, 9999))
self.add_spinner(table,
_('Date before range'),
2, 'behavior.date-before-range', (1, 9999))
self.add_spinner(table,
_('Maximum age probably alive'),
3, 'behavior.max-age-prob-alive', (80, 140))
self.add_spinner(table,
_('Maximum sibling age difference'),
4, 'behavior.max-sib-age-diff', (10, 30))
self.add_spinner(table,
_('Minimum years between generations'),
5, 'behavior.min-generation-years', (5, 20))
self.add_spinner(table,
_('Average years between generations'),
6, 'behavior.avg-generation-gap', (10, 30))
self.add_pos_int_entry(table,
_('Markup for invalid date format'),
7, 'preferences.invalid-date-format',
self.update_markup_entry,
helptext = _('Convenience markups are:\n'
'<b><b>Bold</b></b>\n'
'<big><big>Makes font relatively larger</big></big>\n'
'<i><i>Italic</i></i>\n'
'<s><s>Strikethrough</s></s>\n'
'<sub><sub>Subscript</sub></sub>\n'
'<sup><sup>Superscript</sup></sup>\n'
'<small><small>Makes font relatively smaller</small></small>\n'
'<tt><tt>Monospace font</tt></tt>\n'
'<u><u>Underline</u></u>\n\n'
'For example: <u><b>%s</b></u>\n'
'will display <u><b>Underlined bold date</b></u>.\n')
)
return _('Dates'), table
def add_behavior_panel(self, configdialog):
table = Gtk.Table(n_rows=2, n_columns=8)
table.set_border_width(12)
table.set_col_spacings(6)
table.set_row_spacings(6)
current_line = 0
self.add_checkbox(table,
_('Add default source on GEDCOM import'),
current_line, 'preferences.default-source')
current_line += 1
checkbutton = Gtk.CheckButton(label=_("Add tag on import"))
checkbutton.set_active(config.get('preferences.tag-on-import'))
checkbutton.connect("toggled", self.toggle_tag_on_import)
table.attach(checkbutton, 1, 2, current_line, current_line+1, yoptions=0)
self.tag_format_entry = self.add_entry(table, None, current_line,
'preferences.tag-on-import-format',
col_attach=2)
self.tag_format_entry.set_sensitive(config.get('preferences.tag-on-import'))
current_line += 1
obj = self.add_checkbox(table,
_('Enable spelling checker'),
current_line, 'behavior.spellcheck')
if not HAVE_GTKSPELL:
obj.set_sensitive(False)
spell_dict = { 'gramps_wiki_build_spell_url' :
URL_WIKISTRING +
"GEPS_029:_GTK3-GObject_introspection"
"_Conversion#Spell_Check_Install" }
obj.set_tooltip_text(
_("GtkSpell not loaded. "
"Spell checking will not be available.\n"
"To build it for Gramps see "
"%(gramps_wiki_build_spell_url)s") % spell_dict )
current_line += 1
self.add_checkbox(table,
_('Display Tip of the Day'),
current_line, 'behavior.use-tips')
current_line += 1
self.add_checkbox(table,
_('Remember last view displayed'),
current_line, 'preferences.use-last-view')
current_line += 1
self.add_spinner(table,
_('Max generations for relationships'),
current_line, 'behavior.generation-depth', (5, 50), self.update_gendepth)
current_line += 1
self.path_entry = Gtk.Entry()
self.add_path_box(table,
_('Base path for relative media paths'),
current_line, self.path_entry, self.dbstate.db.get_mediapath(),
self.set_mediapath, self.select_mediapath)
current_line += 1
# Check for updates:
obox = Gtk.ComboBoxText()
formats = [_("Never"),
_("Once a month"),
_("Once a week"),
_("Once a day"),
_("Always"), ]
list(map(obox.append_text, formats))
active = config.get('behavior.check-for-updates')
obox.set_active(active)
obox.connect('changed', self.check_for_updates_changed)
lwidget = BasicLabel("%s: " % _('Check for updates'))
table.attach(lwidget, 1, 2, current_line, current_line+1, yoptions=0)
table.attach(obox, 2, 3, current_line, current_line+1, yoptions=0)
current_line += 1
self.whattype_box = Gtk.ComboBoxText()
formats = [_("Updated addons only"),
_("New addons only"),
_("New and updated addons"),]
list(map(self.whattype_box.append_text, formats))
whattype = config.get('behavior.check-for-update-types')
if "new" in whattype and "update" in whattype:
self.whattype_box.set_active(2)
elif "new" in whattype:
self.whattype_box.set_active(1)
elif "update" in whattype:
self.whattype_box.set_active(0)
self.whattype_box.connect('changed', self.check_for_type_changed)
lwidget = BasicLabel("%s: " % _('What to check'))
table.attach(lwidget, 1, 2, current_line, current_line+1, yoptions=0)
table.attach(self.whattype_box, 2, 3, current_line, current_line+1, yoptions=0)
current_line += 1
self.add_entry(table, _('Where to check'), current_line, 'behavior.addons-url', col_attach=1)
current_line += 1
checkbutton = Gtk.CheckButton(
label=_("Do not ask about previously notified addons"))
checkbutton.set_active(config.get('behavior.do-not-show-previously-seen-updates'))
checkbutton.connect("toggled", self.toggle_hide_previous_addons)
table.attach(checkbutton, 0, 3, current_line, current_line+1, yoptions=0)
button = Gtk.Button(_("Check now"))
button.connect("clicked", self.check_for_updates)
table.attach(button, 3, 4, current_line, current_line+1, yoptions=0)
return _('General'), table
def check_for_updates(self, button):
try:
addon_update_list = available_updates()
except:
OkDialog(_("Checking Addons Failed"),
_("The addon repository appears to be unavailable. "
"Please try again later."),
self.window)
return
if len(addon_update_list) > 0:
try:
PluginWindows.UpdateAddons(self.uistate, [], addon_update_list)
except WindowActiveError:
pass
else:
check_types = config.get('behavior.check-for-update-types')
OkDialog(_("There are no available addons of this type"),
_("Checked for '%s'") %
_("' and '").join([_(t) for t in check_types]),
self.window)
# List of translated strings used here
# Dead code for l10n
_('new'), _('update')
self.uistate.viewmanager.do_reg_plugins(self.dbstate, self.uistate)
def add_famtree_panel(self, configdialog):
table = Gtk.Table(n_rows=2, n_columns=2)
table.set_border_width(12)
table.set_col_spacings(6)
table.set_row_spacings(6)
self.dbpath_entry = Gtk.Entry()
self.add_path_box(table,
_('Family Tree Database path'),
0, self.dbpath_entry, config.get('behavior.database-path'),
self.set_dbpath, self.select_dbpath)
#self.add_entry(table,
# _('Family Tree Database path'),
# 0, 'behavior.database-path')
self.add_checkbox(table,
_('Automatically load last Family Tree'),
1, 'behavior.autoload')
return _('Family Tree'), table
def set_mediapath(self, *obj):
if self.path_entry.get_text().strip():
self.dbstate.db.set_mediapath(self.path_entry.get_text())
else:
self.dbstate.db.set_mediapath(None)
def select_mediapath(self, *obj):
f = Gtk.FileChooserDialog(
_("Select media directory"),
action=Gtk.FileChooserAction.SELECT_FOLDER,
buttons=(Gtk.STOCK_CANCEL,
Gtk.ResponseType.CANCEL,
Gtk.STOCK_APPLY,
Gtk.ResponseType.OK))
mpath = self.dbstate.db.get_mediapath()
if not mpath:
mpath = HOME_DIR
f.set_current_folder(os.path.dirname(mpath))
status = f.run()
if status == Gtk.ResponseType.OK:
val = conv_to_unicode(f.get_filename())
if val:
self.path_entry.set_text(val)
f.destroy()
def set_dbpath(self, *obj):
path = conv_to_unicode(self.dbpath_entry.get_text().strip())
config.set('behavior.database-path', path)
def select_dbpath(self, *obj):
f = Gtk.FileChooserDialog(
_("Select database directory"),
action=Gtk.FileChooserAction.SELECT_FOLDER,
buttons=(Gtk.STOCK_CANCEL,
Gtk.ResponseType.CANCEL,
Gtk.STOCK_APPLY,
Gtk.ResponseType.OK))
dbpath = config.get('behavior.database-path')
if not dbpath:
dbpath = os.path.join(HOME_DIR,'grampsdb')
f.set_current_folder(os.path.dirname(dbpath))
status = f.run()
if status == Gtk.ResponseType.OK:
val = conv_to_unicode(f.get_filename())
if val:
self.dbpath_entry.set_text(val)
f.destroy()
def update_idformat_entry(self, obj, constant):
config.set(constant, conv_to_unicode(obj.get_text()))
self.dbstate.db.set_prefixes(
config.get('preferences.iprefix'),
config.get('preferences.oprefix'),
config.get('preferences.fprefix'),
config.get('preferences.sprefix'),
config.get('preferences.cprefix'),
config.get('preferences.pprefix'),
config.get('preferences.eprefix'),
config.get('preferences.rprefix'),
config.get('preferences.nprefix') )
def update_gendepth(self, obj, constant):
"""
Called when the generation depth setting is changed.
"""
intval = int(obj.get_value())
config.set(constant, intval)
#immediately use this value in displaystate.
self.uistate.set_gendepth(intval)
def update_surn_height(self, obj, constant):
ok = True
if not obj.get_text():
return
try:
intval = int(obj.get_text())
        except ValueError:
intval = config.get(constant)
ok = False
if intval < 0 :
intval = config.get(constant)
ok = False
if ok:
config.set(constant, intval)
else:
obj.set_text(str(intval))
def build_menu_names(self, obj):
return (_('Preferences'), None)
# FIXME: is this needed?
def _set_button(self, stock):
button = Gtk.Button()
image = Gtk.Image()
image.set_from_stock(stock, Gtk.IconSize.BUTTON)
image.show()
button.add(image)
button.show()
return button
| pmghalvorsen/gramps_branch | gramps/gui/configure.py | Python | gpl-2.0 | 62,972 |
"""
Wilson - Implement the Wilson model for stratified and heterogeneous regimes, and a 'framework'
that takes the lesser of the two results
"""
| rcriii42/DHLLDV | src/Wilson/__init__.py | Python | gpl-2.0 | 145 |
from preprocessing.tests import TestMetabolicStandardScaler
from preprocessing.tests import TestMetabolicChangeScaler
from preprocessing.tests import TestMetabolicSolutionScaler
from preprocessing.tests import TestMostActivePathwayScaler
from classifiers.tests import TestMetaboliteLevelDiseaseClassifier
from classifiers.tests import TestSolutionLevelDiseaseClassifier
from classifiers.tests import TestFromSolutionSolutionLevelDiseaseClassifier
from classifiers.tests import TestDummyClassifier
from clustering.tests import TestMetaboliteLevelDiseaseClustering
from metrics.test import TestExtendedJaccard
from services.tests import TestSolutionService
from services.tests import TestNamingService
from services.tests import TestDataReader
import unittest
if __name__ == "__main__":
unittest.main()
| MuhammedHasan/disease-diagnosis | src/tests.py | Python | gpl-3.0 | 812 |
# coding: utf-8
from __future__ import unicode_literals
import uuid
from django.db import models
from django.conf import settings
class PricePercentageChange(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4)
    done = models.BooleanField('already reminded', default=False)
    threshold = models.FloatField(null=True, db_index=True)
    increase = models.BooleanField('rise or fall')
    owner = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name='creator',
                              related_name='percent_change', on_delete=models.CASCADE)
    period = models.IntegerField('minutes', default=60, db_index=True)
class Meta:
ordering = ["-threshold"]
db_table = 'eosram_price_percent_change'
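# A hedged usage sketch (assumes a configured Django project and a `user`
# instance; not part of the original module):
#
#   PricePercentageChange.objects.create(
#       owner=user, threshold=5.0, increase=True, period=60)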
| polyrabbit/WeCron | WeCron/eosram/models/percent.py | Python | gpl-3.0 | 763 |
####################################################################################################
#
# PyDvi - A Python Library to Process DVI Stream.
# Copyright (C) 2009 Salvaire Fabrice
#
####################################################################################################
####################################################################################################
#
# Audit
#
# - 16/01/2010 fabrice
#
####################################################################################################
####################################################################################################
__all__ = ['mime_extensions', 'mime_copyright']
####################################################################################################
from Logging import format_card, remove_enclosing_new_line
####################################################################################################
mime_extensions = (
('sh', ('py', 'sh')),
('c', ('c', 'h', 'cpp', 'hpp', 'cxx', 'hxx', 'i')),
)
###################################################
copyright_text = remove_enclosing_new_line('''
PyDvi - A Python Library to Process DVI Stream.
Copyright (C) 2009 Salvaire Fabrice
''')
width = 100
mime_copyright = {}
###################################################
mime_copyright['sh'] = format_card(copyright_text,
centered=True,
width=width,
rule_char='#',
border = True,
)
###################################################
def format_c(text):
text += '\\'
formated_text = ''
first_line = True
for line in text.split('\n'):
if first_line:
formated_text += '\\'
first_line = False
else:
formated_text += ' '
formated_text += line + '\n'
return formated_text
mime_copyright['c'] = format_c(format_card(copyright_text,
centered=True,
width=width,
rule_char='*',
border = True,
))
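###################################################
# A hedged helper sketch (not in the original module): resolve the copyright
# block for a file extension using the tables defined above.
def copyright_for_extension(extension):
    for mime, extensions in mime_extensions:
        if extension in extensions:
            return mime_copyright[mime]
    return None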
####################################################################################################
#
# End
#
####################################################################################################
| FabriceSalvaire/PyDVI | tools/CopyrightConfig.py | Python | gpl-3.0 | 2,557 |
#!/usr/bin/env python
#coding=utf-8
# Nathive (and this file) is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or newer.
#
# You should have received a copy of the GNU General Public License along with
# this file. If not, see <http://www.gnu.org/licenses/>.
import gtk
import webbrowser
from nathive.lib.plugin import *
class Home(PluginLauncher):
def __init__(self):
# Subclass it.
PluginLauncher.__init__(self)
# Common attributes.
self.name = 'home'
self.author = 'nathive-dev'
self.type = 'launcher'
self.menu = 'help'
self.label = _('Nathive website')
self.icon = 'gtk-home'
def callback(self):
"""To do when the plugin is called."""
webbrowser.open('http://www.nathive.org')
| johnnyLadders/Nathive_CITA | nathive/plugins/home.py | Python | gpl-3.0 | 933 |
import os
import sys
import numpy as np
from copy import deepcopy
import argparse
sys.path.append(os.path.join(os.path.dirname(__file__),"../projects/tools"))
import msh
import executable_paths as exe
def parse():
parser = argparse.ArgumentParser(description="Creates mandible and masseter files for the database creation")
parser.add_argument("-i", "--input", help="input .mesh object", type=str, required=True)
parser.add_argument("-t", "--template", help="template mandible in full size", type=str, required=True)
return parser.parse_args()
def checkArgs(args):
if not os.path.isfile(args.input):
print args.input + " is not a valid file"
sys.exit()
if not os.path.splitext(args.input)[1] == ".mesh":
print args.input + " is not a .mesh file"
sys.exit()
if not os.path.isfile(args.template):
print args.template + " is not a valid file"
sys.exit()
if not os.path.splitext(args.template)[1] == ".mesh":
print args.template + " is not a .mesh file"
sys.exit()
def command(cmd, displayOutput=False):
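    """Run a shell command and exit on failure.
    Unless displayOutput is True, stdout and stderr go to tmp_out.txt and
    tmp_err.txt, which are removed again after a successful run.
    """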
err = 1
print "Running the command '" + cmd + "'"
if displayOutput:
err = os.system(cmd)
else:
err = os.system(cmd + " > tmp_out.txt 2>tmp_err.txt")
if err:
print "An error happened while executing:\n"+cmd+"\nLook in tmp_out.txt or tmp_err.txt for info\nExiting..."
sys.exit()
else:
os.system("rm tmp_out.txt tmp_err.txt >/dev/null 2>&1")
if __name__=="__main__":
args = parse()
checkArgs(args)
    # 3 - Align to the template full mandible
command(exe.align + " -i " + args.input + " " + args.template + " -d 50 -o 0.95", displayOutput=True)
command(exe.pythonICP + " -s " + args.input + " -t " + args.template + " -m mat_PythonICP.txt")
# 1 - Reading the input file
fullMandible = msh.Mesh(args.input)
fullMandible.applyMatrix(matFile="mat_Super4PCS.txt")
fullMandible.applyMatrix(matFile="mat_PythonICP.txt")
# 2 - Scale to [0,1]
MAT = fullMandible.toUnitMatrix()
np.savetxt("mat_toUnit.txt",MAT)
fullMandible.applyMatrix(mat=MAT)
fullMandible.write("mandible.mesh")
# 4 - Cut the mandible in two
rightMandible = deepcopy(fullMandible)
leftMandible = fullMandible
#Generate the mask
mask = [1 for i in range(len(leftMandible.tris))]
mid = np.mean(leftMandible.verts,axis=0)[0]
print mid
for i,t in enumerate(leftMandible.tris):
for v in t:
x = leftMandible.verts[v][0]
if x < mid:
mask[i] = 0
#Create the left mandible
leftMandible.tris = np.array([t for i,t in enumerate(leftMandible.tris) if mask[i]==1])
print len(leftMandible.tris)
leftMandible.discardUnused()
leftMAT = leftMandible.toUnitMatrix()
np.savetxt("1_leftMandibleToUnit.txt", leftMAT)
leftMandible.applyMatrix(mat=leftMAT)
leftMandible.write("leftMandible.mesh")
#And the right one, symetrized
rightMandible.tris = np.array([t for i,t in enumerate(rightMandible.tris) if mask[i]==0])
rightMandible.discardUnused()
rightMAT = rightMandible.toUnitMatrix()
np.savetxt("1_rightMandibleToUnit.txt", rightMAT)
rightMandible.applyMatrix(mat=rightMAT)
rightMandible.verts[:,0] = 1-rightMandible.verts[:,0]
rightMandible.write("rightMandible.mesh")
# 5 - Create the shells for left and right mandibles
#command(exe.boundingMesh + " leftMandible.mesh", displayOutput=True)
command(exe.shell + " -i leftMandible.mesh -o leftShell.mesh -c", displayOutput=True)
command(exe.shell + " -i rightMandible.mesh -o rightShell.mesh -c", displayOutput=True)
sys.exit()
# 6 - Warp the shell to the mandibles
command(exe.warping + " leftMandible.shell.mesh leftMandible.mesh")
command(exe.warping + " rightMandible.shell.mesh rightMandible.mesh")
# 7 - Create a domain for mshdist computation
#Right mandible
cube=msh.Mesh(cube=[0,1,0,1,0,1])
cube.write("rightBox.mesh")
command( "tetgen -pgANEF rightBox.mesh")
command( "mmg3d_O3 rightBox.1.mesh -hausd " + str(np.max(mesh.dims)/25) + " -hmax " + str(np.max(mesh.dims)/25))
command( "mshdist -ncpu 4 -noscale rightBox.1.o.mesh rightMandible.warped.mesh")
    # 9 - Morphing the template mandible surface to the computed boxes
command(morphing + " template_halfMandible_volume.mesh rightBox.1.o.mesh")
# 10 - Extract the surface from the morphing results
morphed = msh.Mesh("morphed.mesh")
morphed.readSol()
    morphed.extractSurfaces  # Placeholder
| ISCDtoolbox/FaciLe | pipeline/processMandibleAndMasseter.py | Python | gpl-3.0 | 4,598 |
# -*- coding: utf-8 -*-
"""
Created on Thu Jun 23 13:52:07 2016
@author: huliqun
"""
import unittest
import json
import binascii
from falcon import testing
import sys
sys.path.append('..')
from workserver.util.AES_PKCS7_extension import Cryptor
from MainServer import app
class TestAuth(testing.TestCase):
def setUp(self):
self.api = app
# def test_regUser(self):
# headers = {'content-type':'application/json'}
# body = '{"username":"[email protected]","displayname":"wahaha","email":"[email protected]","password":"123456","mobile":"18698729476"}'
# result =self.simulate_post('/api/users',headers=headers,body=body )
# self.assertEqual(result.status_code, 201)
def test_auth(self):
import hashlib
def md5(s):
m = hashlib.md5()
m.update(s.encode("utf-8"))
return m.hexdigest()
# print("pwd : %s" % md5('123456'))
iv, encrypted = Cryptor.encrypt('aaa', md5('12345678'))
print(md5('12345678'))
# print("iv : %s" % iv.decode())
# print("encrypted : %s" % binascii.b2a_base64(encrypted).rstrip())
print(Cryptor.decrypt(encrypted,md5('12345678'), iv))
# headers = {'content-type':'application/json'}
# body = {
# 'oid': 2,
# 'username':'admin',
# 'identifyCode':encrypted,
# 'magicNo':iv.decode()
# }
# result =self.simulate_post('/api/auth',headers=headers,body=json.dumps(body) )
# self.assertEqual(result.status_code, 200)
# print(result.text)
# should raise an exception for an immutable sequence
# self.assertRaises(TypeError, random.shuffle, (1,2,3))
if __name__ == '__main__':
unittest.main() | LiqunHu/MVPN | testing/testAuth.py | Python | gpl-3.0 | 1,873 |
from nltk.chat.util import Chat, reflections
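# Each pair below is (regex pattern, list of responses); nltk's Chat picks a
# response at random and substitutes %1..%9 with the pattern's captured groups.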
pairs = [
[
r"My name is (.*)",
['hello %1', '%1 mabuhay ka'],
],
[
r'hi',
['hello', 'kamusta', 'mabuhay',],
],
[
r'(.*) (hungry|sleepy|groot)',
[
"%1 %2"
]
],
[
r'(.*)(mahal|love)(.*)',
[
"https://goo.gl/ndTZVq",
"I always thought Love was a static class until you made an instance of it.",
"I love user interfaces it's because that's where U and I are always together.",
],
],
[
r'(.*)(relationship)(.*)',
[
"Mabuti pa sa database may relationship. Eh tayo, wala.",
],
],
[
r'(meron|mayron|ano|does|is there|what) (.*) (forever)(.*)',
[
"Loading...",
"None",
"while True: pass",
],
],
[
r'(.*)', # default response if no patterns from above is found
[
"http://lmgtfy.com/?q=%1",
"Sorry I don't know what `%1` is?",
],
],
]
def hugot_bot():
print("Hi what's your name?")
chat = Chat(pairs, reflections)
chat.converse()
if __name__ == "__main__":
hugot_bot()
| davidam/python-examples | nlp/nltk/hugotbot.py | Python | gpl-3.0 | 1,251 |
from urlparse import urlparse
import sys
import socket
import os
import re
class HttpClient(object):
def __init__(self,proxy=None,logfile='headers.log'):
self.proxy = proxy
self.LOGFILE = logfile
self.parsed_url = None # Instance of class urlparse
self.http_version = "HTTP/1.1"
self.buffer = 4096
        self.separador = '\r\n\r\n' # Separator between header and content in the HTTP response
self.download_file = 'download.part'
self._header_detected = False
self._url = None
try:
            # If a truncated download was left behind, clean it up
with open(self.download_file):
os.remove(self.download_file)
except IOError:
pass
def _get_host(self,use_proxy=False):
"""Devuelve el hostname de la url de forma inteligente(?)"""
if use_proxy:
return urlparse(self.proxy).hostname
else:
if self.parsed_url is None:
return 'localhost'
else:
if self.parsed_url.hostname in (None,''):
return 'localhost'
else:
return self.parsed_url.hostname
def _get_port(self,use_proxy=False):
"""Devuelve el puerto de la url de forma inteligente(?)"""
if use_proxy:
return urlparse(self.proxy).port
else:
if self.parsed_url is None:
return 80
else:
if self.parsed_url.port in (None,''):
return 80
else:
return self.parsed_url.port
def _get_path(self):
"""Devuelve el path de la url de forma inteligente(?)"""
if self.proxy is not None:
return self.parsed_url.scheme + '://' + self.parsed_url.netloc + self.parsed_url.path
else:
if self.parsed_url is None:
return '/'
else:
if self.parsed_url.path in (None,''):
return '/'
else:
return self.parsed_url.path
def retrieve(self,url=None,method="GET"):
"""Punto de acceso del cliente, crea la peticion, la envia al servidor, y guarda la respuesta.
Maneja redireccion 301 (movido permanente)."""
if url:
self._retrieve(url=url,method=method)
            #~ Supports infinite redirection, which is a problem. It should have a counter. Maximum?
while self.headers["status"] == "301":
self._retrieve(url=self.headers["Location"],method=method)
else:
raise Exception("Expect parameter url")
def _retrieve(self,url=None,method="GET"):
"""Metodo de alto nivel que recupera la url solicitada"""
if url:
self._url = url
self.parsed_url = urlparse(url)
            if self.parsed_url.scheme == '':
                raise Exception("Incorrect url format. Expected format: (http|ftp|https)://url[:port][/path_to_resource]")
            self.method = method # GET or HEAD
            self._connect() # self.s socket created
            self._build_request() # self.request string created
            self._send_request() # Sends the request and manages the resource download
else:
raise Exception("Expect parameter url")
    def _connect(self):
        """Creates the socket to the server"""
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
if self.proxy:
self.s.connect((self._get_host(use_proxy=True) , self._get_port(use_proxy=True)))
else:
self.s.connect((self._get_host() , self._get_port()))
except socket.error, msg:
sys.stderr.write("[ERROR] %s\n" % msg[1])
sys.exit(2)
def _build_request(self):
"""Construye el str de request para el servidor"""
self.request = "%(method)s %(path)s %(http_version)s\r\n"
self.request += "Host: %(host)s\r\n"
self.request += "User-Agent: Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:23.0) Gecko/20100101 Firefox/23.0\r\n"
self.request += "Accept: text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8\r\n"
self.request += "Accept-Language: es-ar,es;q=0.8,en-us;q=0.5,en;q=0.3\r\n"
        #~ self.request += "Accept-Encoding: gzip, deflate\r\n" # Encoding not supported in this version
self.request += "Connection: keep-alive\r\n\r\n"
self.request = self.request % { 'method':self.method, \
'path':self._get_path(), \
'http_version':self.http_version, \
'host':self._get_host()}
def _send_request(self):
"""Envia el request y recibe la respuesta"""
self.s.sendall(self.request)
response = self.s.recv(self.buffer)
self.data = ""
self._header_detected = False
while len(response):
self.data += response
            # Make sure the headers are only detected the first time
if not self._header_detected:
self._header_detect()
if not self.method == "HEAD":
self._sync_data()
if self.headers.has_key("status"):
if self.headers["status"] == "301":
break
response = self.s.recv(self.buffer)
if not self.method == "HEAD" and not self.headers["status"] == "301":
self._sync_data()
            self._save_file() # Save the file
        self._log_headers() # Log the headers to a file
def _sync_data(self):
""" Este metodo se encarga de descargar la memoria si el archivo
que se descarga es demasiado grande"""
if len(self.data) > 100000:
f = open(self.download_file,'a')
f.write(self.data)
self.data = ""
f.close()
def _header_detect(self):
"""Metodo que detecta si en la descarga se encuentra el header.
En caso afirmativo, lo carga en la instancia y lo elimina del
stream de descarga."""
headers = self.data.split(self.separador)
# Si len es mayor a 1, el header ya esta completo
if len(headers) > 1:
self.data = self.separador.join(headers[1:]) # Arma la informacion de descarga sin el header
self.str_headers = headers[0]
self.headers = dict(re.findall(r"(?P<name>.*?): (?P<value>.*?)\r\n", self.str_headers)) # Arma un dic con los headers
# Primer linea del header HTTP/1.1
self.headers["http"] = headers[0].split('\r\n')[0]
self.headers["http_version"] = self.headers["http"].split(' ')[0]
self.headers["status"] = self.headers["http"].split(' ')[1]
self.headers["status_message"] = ' '.join(self.headers["http"].split(' ')[2:])
self._header_detected = True
def _log_headers(self):
"""Descarga las cabeceras de response a un archivo de log"""
if self.LOGFILE is not None:
f = open(self.LOGFILE,'a')
f.write("== HEADER: Response from %s\n" % self._url)
f.write("== Method: %s\n" % self.method)
f.write("%s\n" % self.str_headers)
f.close()
def _save_file(self):
"""Guarda el archivo a disco, teniendo en cuenta si la descarga ya lo hizo o no"""
file_in_disk = self._saved_file()
filename = self._filename()
if file_in_disk:
os.rename(self.download_file, filename)
else:
f = open(filename,'w')
f.write(self.data)
f.close()
def _content_encoding(self):
"""Soporte para encoding de contenido con gzip. No soportado"""
if self.headers.has_key('Content-Encoding'):
return self.headers['Content-Encoding']
else:
return None
def _file_type(self):
"""Retorna la extension segun el tipo de archivo"""
if self.headers.has_key('Content-Type'):
return '.' + self.headers['Content-Type'].split('; ')[0].split('/')[1]
else:
            return '.html' # What should the default be? Empty?
def _filename(self):
"""Retorna el mejor nombre de archivo en funcion de la informacion disponible"""
if self.headers["status"] == "404":
return "error_page_404.html"
else:
extension = self._file_type()
if self.proxy is not None:
resource_name = self._get_path().split('/')
            if resource_name[-1] != '':
return resource_name[-1] + extension
else:
return resource_name[-2] + extension
else:
if self._get_path() in ('/', ''):
return self._get_host() + extension
else:
return self._get_path().split('/')[-1]
def _saved_file(self):
"""Controla si durante la descarga el archivo fue bajado temporalmente a disco"""
try:
open(self.download_file)
except:
return False
return True
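# A hedged usage sketch (assumed, not part of the original module):
#
# if __name__ == '__main__':
#     client = HttpClient(logfile='headers.log')
#     client.retrieve('http://www.example.com/', method='GET')
#     print client.headers['status'], client.headers['status_message']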
| tomasdelvechio/py-net-dev | Protocolos/3 - http client/http_client_object.py | Python | gpl-3.0 | 9,464 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016-2018 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import yaml
from testtools.matchers import Equals
import snapcraft.internal
from tests import unit
class StageStateBaseTestCase(unit.TestCase):
def setUp(self):
super().setUp()
class Project:
pass
self.project = Project()
self.files = {"foo"}
self.directories = {"bar"}
self.part_properties = {
"filesets": {"qux": "quux"},
"override-stage": "touch override-stage",
"stage": ["baz"],
}
self.state = snapcraft.internal.states.StageState(
self.files, self.directories, self.part_properties, self.project
)
class StateStageTestCase(StageStateBaseTestCase):
def test_yaml_conversion(self):
state_from_yaml = yaml.load(yaml.dump(self.state))
self.assertThat(state_from_yaml, Equals(self.state))
def test_comparison(self):
other = snapcraft.internal.states.StageState(
self.files, self.directories, self.part_properties, self.project
)
self.assertTrue(self.state == other, "Expected states to be identical")
def test_properties_of_interest(self):
properties = self.state.properties_of_interest(self.part_properties)
self.assertThat(len(properties), Equals(3))
self.assertThat(properties["filesets"], Equals({"qux": "quux"}))
self.assertThat(properties["override-stage"], Equals("touch override-stage"))
self.assertThat(properties["stage"], Equals(["baz"]))
def test_project_options_of_interest(self):
self.assertFalse(self.state.project_options_of_interest(self.project))
class StageStateNotEqualTestCase(StageStateBaseTestCase):
scenarios = [
("no files", dict(other_property="files", other_value=set())),
("no directories", dict(other_property="directories", other_value=set())),
(
"no part properties",
dict(other_property="part_properties", other_value=None),
),
]
def test_comparison_not_equal(self):
setattr(self, self.other_property, self.other_value)
other_state = snapcraft.internal.states.StageState(
self.files, self.directories, self.part_properties, self.project
)
self.assertFalse(self.state == other_state, "Expected states to be different")
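# Note: the `scenarios` list above follows the testscenarios convention wired
# into snapcraft's test base classes: each (name, dict) pair re-runs
# test_comparison_not_equal with other_property/other_value applied via
# setattr, so each listed attribute change is expected to break equality.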
| cprov/snapcraft | tests/unit/states/test_stage.py | Python | gpl-3.0 | 3,005 |
"""
Author: Tom Daniels, Kaitlin Keenan
Purpose: Creates a force graph of IP addresses and any connections they made
"""
import igraph
import json
import plotly
import plotly.graph_objs as pg
import sys
GEN_INFO = 0 # General information
PHYS = 1 # Physical layer information
DATA = 2 # Data link layer information
NET = 3 # Network layer information
TRANS = 4 # Transport layer information
SRC_IP = 12           # Index of the source IP field within the network-layer proto
DST_IP = 14           # Index of the destination IP field within the network-layer proto
LOWER_MULTICAST = 224 # First octet of the lowest multicast addresses
UPPER_MULTICAST = 240 # First octet of the highest multicast addresses
BROADCAST = 255       # Last octet of a limited broadcast address
f = open('jsonOut.json', 'rb')
jsonData = json.loads(f.read())
packets = jsonData['pdml']['packet']
nodeID = {} # Maps: Node -> ID
nodes = [] # List of nodes
mapping = {} # Maps: Node interation with Node -> # times
nodeNumber = 0 # Give the Nodes an ID
nodeCounter = {} # Maps: Node -> # of times we've seen it
for packet in packets:
if(len(packet['proto']) > NET and packet['proto'][NET]['@name'] == 'ip'):
src = packet['proto'][NET]['field'][SRC_IP]['@show'].encode('ascii')
dst = packet['proto'][NET]['field'][DST_IP]['@show'].encode('ascii')
# If either address is multi/broadcast address, ignore it
if(LOWER_MULTICAST <= int(src.split('.')[0]) < UPPER_MULTICAST or
LOWER_MULTICAST <= int(dst.split('.')[0]) < UPPER_MULTICAST or
           int(dst.split('.')[3]) == BROADCAST):
continue
# Add the address(es) to our database if they're not in them
if(not(nodeID.has_key(src))):
nodes.append(src)
nodeID[src] = nodeNumber
nodeNumber = nodeNumber + 1
if(not(nodeID.has_key(dst))):
nodes.append(dst)
nodeID[dst] = nodeNumber
nodeNumber = nodeNumber + 1
# Increment the counter for the number of times we've seen this node
if(nodeCounter.has_key(src)):
nodeCounter[src] += 1
else:
nodeCounter[src] = 1
if(nodeCounter.has_key(dst)):
nodeCounter[dst] += 1
else:
nodeCounter[dst] = 1
# Replace string IP addresses with numbers
src = str(nodeID[src])
dst = str(nodeID[dst])
        # Record the edge once per unordered pair of nodes
        if(not(mapping.has_key(src + ':' + dst) or mapping.has_key(dst + ':' + src))):
            mapping[src + ':' + dst] = 1
totalPackets = len(packets)
colors = []
# Set the colors for each node based on how many packets they sent out
for node in nodes:
colors.append(nodeCounter[node] / float(totalPackets))
edges = [(int(key.split(':')[0]), int(key.split(':')[1])) for key in mapping.keys()]
graph = igraph.Graph(edges, directed=False)
layout = graph.layout('kk', dim=3)
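# 'kk' is igraph's Kamada-Kawai force-directed layout; with dim=3, layout[i]
# is the (x, y, z) position of vertex i, unpacked below into the node and
# edge scatter coordinates.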
length = len(nodes)
Xn = [layout[i][0] for i in range(length)]
Yn = [layout[i][1] for i in range(length)]
Zn = [layout[i][2] for i in range(length)]
Xe = []
Ye = []
Ze = []
for e in edges:
Xe+=[layout[e[0]][0], layout[e[1]][0], None]
Ye+=[layout[e[0]][1], layout[e[1]][1], None]
Ze+=[layout[e[0]][2], layout[e[1]][2], None]
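# Each edge contributes its two endpoint coordinates plus a None separator;
# plotly breaks the line at None values, so the single trace renders as
# disconnected segments rather than one continuous path.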
# Makes the edges
trace1 = pg.Scatter3d(x=Xe, y=Ye, z=Ze, mode='lines',
line=pg.Line(color='rgb(125,125,125)', width=1),
hoverinfo='none')
# Makes the nodes
trace2 = pg.Scatter3d(x=Xn, y=Yn, z=Zn, mode='markers',
name='IP Addresses',
marker=pg.Marker(symbol='dot',size=6,
line=pg.Line(color='rgb(50,50,50)',
width=0.5), colorscale=[[0, 'rgb(0,0,255)'], [1, 'rgb(255,0,0)']],
color=colors),
text=nodes, hoverinfo='text')
axis = dict(showbackground=False, showline=False, zeroline=False, showgrid=False,
showticklabels=False, title='')
trueLayout = pg.Layout(title='test', width=1000, height=1000, showlegend=False,
scene=pg.Scene(xaxis=pg.XAxis(axis),
yaxis=pg.YAxis(axis),
zaxis=pg.ZAxis(axis)),
margin=pg.Margin(t=100), hovermode='closest')
plotly.offline.plot(pg.Figure(data=pg.Data([trace1, trace2]), layout=trueLayout),
filename=sys.argv[1] + '/' + 'forceGraph.html')
| TRDan6577/networkStatistics | networkNodes.py | Python | gpl-3.0 | 4,386 |
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def invertTree(self, root: TreeNode) -> TreeNode:
def recurse(node):
if not node:
return node
node.left, node.right = recurse(node.right), recurse(node.left)
return node
return recurse(root)
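# Runs in O(n) time (every node is visited exactly once) and O(h) extra space
# for the recursion stack, where h is the height of the tree.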
| 1337/yesterday-i-learned | leetcode/226e.py | Python | gpl-3.0 | 474 |
import requests
import random
import time
user_agent_generic="Mozilla/5.0 (X11; Linux x86_64; rv:52.0) Gecko/20100101 Firefox/52.0"
# For providers whose requests do not follow a regular pattern, you can use code here to form them
############################################################################################################
# list of affected provider_codes
custom_url_list=('DK','DOP40','NIB','Here')
custom_url_list = custom_url_list+tuple([x + '_NAIP' for x in (
'AL','AR','AZ','CA','CO','CT','DE','FL','GA','IA','ID','IL',
'IN','KS','KY','LA','MA','MD','ME','MI','MN','MO','MS','MT',
'NC','ND','NE','NH','NJ','NM','NV','NY','OH','OK','OR','PA',
'RI','SC','SD','TN','TX','UT','VA','VT','WA','WI','WV','WY')])
############################################################################################################
############################################################################################################
# might get some session tokens here
############################################################################################################
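# Each getter below caches its token/ticket/cookie in a module-level global
# and refreshes it once it is older than the provider's expiry window; the
# "loading" sentinel makes concurrent download threads wait for the refresh
# instead of all re-requesting a token at once.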
# Denmark
DK_time=time.time()
DK_ticket=None
def get_DK_ticket():
global DK_time, DK_ticket
while DK_ticket=="loading":
print(" Waiting for DK ticket to be updated.")
time.sleep(3)
if (not DK_ticket) or (time.time()-DK_time)>=3600:
DK_ticket="loading"
tmp=requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS
requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS='HIGH:!DH:!aNULL'
DK_ticket=requests.get("https://sdfekort.dk/spatialmap?").content.decode().split('ticket=')[1].split("'")[0]
requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS=tmp
DK_time=time.time()
return DK_ticket
# Germany DOP40
DOP40_time=time.time()
DOP40_cookie=None
def get_DOP40_cookie():
global DOP40_time, DOP40_cookie
while DOP40_cookie=="loading":
print(" Waiting for DOP40 cookie to be updated.")
time.sleep(3)
if (not DOP40_cookie) or (time.time()-DOP40_time)>=3600:
DOP40_cookie="loading"
DOP40_cookie=requests.Session().get('https://sg.geodatenzentrum.de/web_bkg_webmap/lib/bkgwebmap-0.12.4.all.min.js?bkg_appid=4cc455dc-a595-bbcf-0d00-c1d81caab5c3').headers['Set-Cookie'].split(';')[0]
DOP40_time=time.time()
return DOP40_cookie
# NorgeIbilder
NIB_time=time.time()
NIB_token=None
def get_NIB_token():
global NIB_time, NIB_token
while NIB_token=="loading":
print(" Waiting for NIB token to be updated.")
time.sleep(3)
if (not NIB_token) or (time.time()-NIB_time)>=3600:
NIB_token="loading"
NIB_token=str(requests.get('http://www.norgeibilder.no').content).split('nibToken')[1].split("'")[1][:-1]
NIB_time=time.time()
return NIB_token
# Here
Here_time=time.time()
Here_value=None
def get_Here_value():
global Here_time, Here_value
while Here_value=="loading":
print(" Waiting for Here value to be updated.")
time.sleep(3)
if (not Here_value) or (time.time()-Here_time)>=10000:
Here_value="loading"
Here_value=str(requests.get('https://wego.here.com').content).split('aerial.maps.api.here.com/maptile/2.1')[1][:100].split('"')[4]
Here_time=time.time()
return Here_value
############################################################################################################
def custom_wms_request(bbox,width,height,provider):
if provider['code']=='DK':
(xmin,ymax,xmax,ymin)=bbox
bbox_string=str(xmin)+','+str(ymin)+','+str(xmax)+','+str(ymax)
url="http://kortforsyningen.kms.dk/orto_foraar?TICKET="+get_DK_ticket()+"&SERVICE=WMS&VERSION=1.1.1&FORMAT=image/jpeg&REQUEST=GetMap&LAYERS=orto_foraar&STYLES=&SRS=EPSG:3857&WIDTH="+str(width)+"&HEIGHT="+str(height)+"&BBOX="+bbox_string
return (url,None)
elif provider['code']=='DOP40':
(xmin,ymax,xmax,ymin)=bbox
bbox_string=str(xmin)+','+str(ymin)+','+str(xmax)+','+str(ymax)
url="http://sg.geodatenzentrum.de/wms_dop40?&SERVICE=WMS&VERSION=1.1.1&FORMAT=image/jpeg&REQUEST=GetMap&LAYERS=rgb&STYLES=&SRS=EPSG:25832&WIDTH="+str(width)+"&HEIGHT="+str(height)+"&BBOX="+bbox_string
fake_headers={'User-Agent':user_agent_generic,'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8','Connection':'keep-alive','Accept-Encoding':'gzip, deflate','Cookie':get_DOP40_cookie(),'Referer':'http://sg.geodatenzentrum.de/web_bkg_webmap/applications/dop/dop_viewer.html'}
return (url,fake_headers)
elif '_NAIP' in provider['code']:
(xmin,ymax,xmax,ymin)=bbox
url="https://gis.apfo.usda.gov/arcgis/rest/services/NAIP_Historical/"+provider['code']+"/ImageServer/exportImage?f=image&bbox="+str(xmin)+"%2C"+str(ymin)+"%2C"+str(xmax)+"%2C"+str(ymax)+"&imageSR=102100&bboxSR=102100&size="+str(width)+"%2C"+str(height)
return (url,None)
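# Hedged example (editor's illustration; the bbox numbers are made up):
# custom_wms_request((668000.0, 6101000.0, 669000.0, 6100000.0), 2048, 2048,
#                    {'code': 'DK'})
# returns ("http://kortforsyningen.kms.dk/orto_foraar?TICKET=...", None).
# Note the unpacking order used above: bbox = (xmin, ymax, xmax, ymin).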
def custom_tms_request(tilematrix,til_x,til_y,provider):
if provider['code']=='NIB':
NIB_token=get_NIB_token()
url="http://agsservices.norgeibilder.no/arcgis/rest/services/Nibcache_UTM33_EUREF89_v2/MapServer/tile/"+str(tilematrix)+"/"+str(til_y)+"/"+str(til_x)+"?token="+NIB_token
return (url,None)
elif provider['code']=='Here':
Here_value=get_Here_value()
url="https://"+random.choice(['1','2','3','4'])+".aerial.maps.api.here.com/maptile/2.1/maptile/"+Here_value+"/satellite.day/"+str(tilematrix)+"/"+str(til_x)+"/"+str(til_y)+"/256/jpg?app_id=bC4fb9WQfCCZfkxspD4z&app_code=K2Cpd_EKDzrZb1tz0zdpeQ"
return (url,None)
| oscarpilote/Ortho4XP | Providers/O4_Custom_URL.py | Python | gpl-3.0 | 5,644 |
#!/usr/bin/env python
import argparse
import errno
import glob
import os
import subprocess
import sys
import zipfile
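# Typical invocation (editor's illustration; paths are examples only):
# python mapcrafter_textures.py -f ~/.minecraft/versions/1.12.2/1.12.2.jar textures/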
dirs = ("", "entity", "entity/chest", "colormap", "blocks", "entity/shulker", "entity/bed")
assets = "assets/minecraft/textures/"
files = [
("entity/chest/normal.png", assets + "entity/chest/normal.png"),
("entity/chest/normal_double.png", assets + "entity/chest/normal_double.png"),
("entity/chest/ender.png", assets + "entity/chest/ender.png"),
("entity/chest/trapped.png", assets + "entity/chest/trapped.png"),
("entity/chest/trapped_double.png", assets + "entity/chest/trapped_double.png"),
("colormap/foliage.png", assets + "colormap/foliage.png"),
("colormap/grass.png", assets + "colormap/grass.png"),
("entity/shulker/shulker_black.png", assets + "entity/shulker/shulker_black.png"),
("entity/shulker/shulker_blue.png", assets + "entity/shulker/shulker_blue.png"),
("entity/shulker/shulker_brown.png", assets + "entity/shulker/shulker_brown.png"),
("entity/shulker/shulker_cyan.png", assets + "entity/shulker/shulker_cyan.png"),
("entity/shulker/shulker_gray.png", assets + "entity/shulker/shulker_gray.png"),
("entity/shulker/shulker_green.png", assets + "entity/shulker/shulker_green.png"),
("entity/shulker/shulker_light_blue.png", assets + "entity/shulker/shulker_light_blue.png"),
("entity/shulker/shulker_lime.png", assets + "entity/shulker/shulker_lime.png"),
("entity/shulker/shulker_magenta.png", assets + "entity/shulker/shulker_magenta.png"),
("entity/shulker/shulker_orange.png", assets + "entity/shulker/shulker_orange.png"),
("entity/shulker/shulker_pink.png", assets + "entity/shulker/shulker_pink.png"),
("entity/shulker/shulker_purple.png", assets + "entity/shulker/shulker_purple.png"),
("entity/shulker/shulker_red.png", assets + "entity/shulker/shulker_red.png"),
("entity/shulker/shulker_silver.png", assets + "entity/shulker/shulker_silver.png"),
("entity/shulker/shulker_white.png", assets + "entity/shulker/shulker_white.png"),
("entity/shulker/shulker_yellow.png", assets + "entity/shulker/shulker_yellow.png"),
("entity/bed/black.png", assets + "entity/bed/black.png"),
("entity/bed/blue.png", assets + "entity/bed/blue.png"),
("entity/bed/brown.png", assets + "entity/bed/brown.png"),
("entity/bed/cyan.png", assets + "entity/bed/cyan.png"),
("entity/bed/gray.png", assets + "entity/bed/gray.png"),
("entity/bed/green.png", assets + "entity/bed/green.png"),
("entity/bed/light_blue.png", assets + "entity/bed/light_blue.png"),
("entity/bed/lime.png", assets + "entity/bed/lime.png"),
("entity/bed/magenta.png", assets + "entity/bed/magenta.png"),
("entity/bed/orange.png", assets + "entity/bed/orange.png"),
("entity/bed/pink.png", assets + "entity/bed/pink.png"),
("entity/bed/purple.png", assets + "entity/bed/purple.png"),
("entity/bed/red.png", assets + "entity/bed/red.png"),
("entity/bed/silver.png", assets + "entity/bed/silver.png"),
("entity/bed/white.png", assets + "entity/bed/white.png"),
("entity/bed/yellow.png", assets + "entity/bed/yellow.png"),
]
def has_imagemagick():
try:
# try to call convert command
subprocess.check_output("convert")
return True
except subprocess.CalledProcessError:
# command exited with error status, probably because we didn't specify any files to convert
return True
except OSError as e:
# return False if command not found
if e.errno == errno.ENOENT:
return False
        raise  # re-raise, preserving the original traceback
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Extracts the textures required for mapcrafter from a Minecraft Jar file.")
parser.add_argument("-f", "--force",
help="forces overwriting eventually already existing textures",
action="store_true")
parser.add_argument("jarfile",
help="the Minecraft Jar file to use",
metavar="<jarfile>")
parser.add_argument("outdir",
help="the output texture directory",
metavar="<outdir>")
args = vars(parser.parse_args())
jar = zipfile.ZipFile(args["jarfile"])
for dir in dirs:
if not os.path.exists(os.path.join(args["outdir"], dir)):
os.mkdir(os.path.join(args["outdir"], dir))
print("Extracting block images:")
found, extracted, skipped = 0, 0, 0
for info in jar.infolist():
if info.filename.startswith("assets/minecraft/textures/blocks/") and info.filename != "assets/minecraft/textures/blocks/":
filename = info.filename.replace("assets/minecraft/textures/", "")
# unpack only PNGs, no other files (or directory entries)
if not filename.endswith(".png"):
continue
# make sure to not unpack subdirectories
base_path = os.path.dirname(filename)
            if base_path != "blocks":
continue
filename = os.path.join(args["outdir"], filename)
found += 1
if os.path.exists(filename) and not args["force"]:
skipped += 1
continue
fin = jar.open(info)
fout = open(filename, "wb")
fout.write(fin.read())
fin.close()
fout.close()
extracted += 1
print(" - Found %d block images." % found)
print(" - Extracted %d." % extracted)
print(" - Skipped %d (Use -f to force overwrite)." % skipped)
print("")
print("Extracting other textures:")
for filename, zipname in files:
try:
info = jar.getinfo(zipname)
filename = os.path.join(args["outdir"], filename)
if os.path.exists(filename) and not args["force"]:
print(" - Extracting %s ... skipped." % filename)
else:
fin = jar.open(info)
fout = open(filename, "wb")
fout.write(fin.read())
fin.close()
fout.close()
print(" - Extracting %s ... extracted." % filename)
except KeyError:
print(" - Extracting %s ... not found!" % filename)
if not has_imagemagick():
print("")
print("Warning: imagemagick is not installed (command 'convert' not found).")
print("Install imagemagick to enable automatic texture fixes (to prevent libpng warnings).")
else:
for filename in glob.glob(os.path.join(args["outdir"], "blocks", "hardened_clay*.png")):
if os.path.exists(filename):
subprocess.check_call(["convert", "-strip", filename, filename])
filename = os.path.join(args["outdir"], "blocks", "red_sand.png")
if os.path.exists(filename):
subprocess.check_call(["convert", "-strip", filename, filename])
filename = os.path.join(args["outdir"], "blocks", "glass_pane_top_white.png")
if os.path.exists(filename):
subprocess.check_call(["convert", "-strip", filename, "-type", "TrueColorMatte", "-define", "png:color-type=6", filename])
| mapcrafter/mapcrafter | src/tools/mapcrafter_textures.py | Python | gpl-3.0 | 6,484 |
from database import Database
import argparse
class Analyzer():
ignores = ['id', 'time', 'browser_fingerprint', 'computer_fingerprint_1', "fonts"]
db = Database('uniquemachine')
cols = db.run_sql("SHOW COLUMNS FROM features")
def __init__(self):
pass
def check_imgs_difference_by_str(self, str_1, str_2):
"""
check the differences of two gpu rendering result strs
vars: str_1, str_2
return: the differences
"""
imgs_1 = str_1.split(',')
imgs_2 = str_2.split(',')
length = len(imgs_1)
if len(imgs_2) != length:
return "different number of imgs"
imgs_1 = sorted(imgs_1, key=lambda img: int(img.split('_')[0]))
imgs_2 = sorted(imgs_2, key=lambda img: int(img.split('_')[0]))
res = {}
for i in range(length):
img_1 = imgs_1[i].split('_')[2]
img_2 = imgs_2[i].split('_')[2]
if img_1 != img_2:
res[i] = (img_1, img_2)
return res
def check_fonts_difference_by_str(self, str_1, str_2):
"""
check the differences of two font lists
vars: str_1, str_2
return: the differences
"""
if str_1 == None or str_2 == None:
return ([], [])
fonts_1 = str_1.split('_')
fonts_2 = str_2.split('_')
f1 = []
f2 = []
for f in fonts_1:
if f not in fonts_2:
f1.append(f)
for f in fonts_2:
if f not in fonts_1:
f2.append(f)
return (f1, f2)
def output_diff(self, keys, values):
length = len(keys)
for i in range(length):
print '\t'+ str(keys[i]) + ': \t' + str(values[i])
def check_difference_by_id(self, base_id, entry_id, detail):
"""
check the difference of two entries based on the ids
vars: id1, id2, print details or not
return: the array of differences
"""
base_entry = self.db.get_entry_by_id('features', base_id)
compare_entry = self.db.get_entry_by_id('features', entry_id)
length = len(base_entry)
res = {}
for i in range(length):
if self.cols[i][0] in self.ignores:
continue
if base_entry[i] != compare_entry[i]:
if self.cols[i][0] == 'gpuimgs':
diff = self.check_imgs_difference_by_str(base_entry[i], compare_entry[i])
if len(diff) == 0:
continue
res[self.cols[i][0]] = diff
                    if detail:
print self.cols[i][0]
self.output_diff(diff.keys(), diff.values())
elif self.cols[i][0] == 'flashFonts':
diff = self.check_fonts_difference_by_str(base_entry[i], compare_entry[i])
res[self.cols[i][0]] = diff
                    if detail:
print self.cols[i][0]
self.output_diff([base_id, entry_id], diff)
else:
res[self.cols[i][0]] = [base_entry[i], compare_entry[i]]
                    if detail:
print self.cols[i][0]
self.output_diff([base_id, entry_id], [base_entry[i], compare_entry[i]])
return res
def cal_gpuimgs_distance(self, diff):
return (1, "video==================================")
def cal_flashFonts_distance(self, diff):
return (1, len(diff[0]) + len(diff[1]))
def cal_agent_distance(self, diff):
return (1, "agent")
def cal_distance(self, diff):
dis = 0
way = ""
for feature in diff:
if feature == "gpuimgs":
gpuimgs_change = self.cal_gpuimgs_distance(diff[feature])
dis += gpuimgs_change[0]
way += gpuimgs_change[1]
elif feature == "agent":
agent_change = self.cal_agent_distance(diff[feature])
dis += agent_change[0]
way += agent_change[1]
elif feature == "flashFonts":
flashFonts_change = self.cal_flashFonts_distance(diff[feature])
dis += flashFonts_change[0]
way += str(flashFonts_change[1]) + " fonts ===================="
elif feature == "label":
dis += 0
way += diff[feature][1]
else:
dis += 1
way += feature
way += '~~'
return (dis, way)
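    # The "distance" computed above is essentially a count of differing
    # features: GPU images, the user agent and the flash fonts get dedicated
    # handlers, every other changed feature adds 1, and `way` accumulates a
    # '~~'-separated trail of which features changed.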
def check_difference_by_group(self, firefox_version, base_group, compare_group, detail):
"""
check the difference of two groups
"""
sql_str = "SELECT id FROM features WHERE agent like '%" + str(firefox_version) + "%' and label like '%" + base_group + "%'"
base_id = self.db.run_sql(sql_str)[0][0]
sql_str = "SELECT id FROM features WHERE agent like '%" + str(firefox_version) + "%' and label like '%" + compare_group + "%'"
compare_id = self.db.run_sql(sql_str)[0][0]
diff = self.check_difference_by_id(base_id, compare_id, detail)
return diff
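    # Note: label and firefox_version are interpolated straight into the SQL
    # above, which is injection-prone for untrusted input; parameterized
    # queries through the Database layer would be the safer pattern.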
def cal_all_distances(self, aim, detail):
"""
calculate the distance between aim and all other entries
"""
sql_str = "SELECT id FROM features"
all_ids = self.db.run_sql(sql_str)
length = len(all_ids)
distances = []
if aim == 0:
for i in range(1, length):
distances.append(self.cal_all_distances(all_ids[i][0], detail))
else:
for i in range(1, length):
dis = self.cal_distance(self.check_difference_by_id(aim, all_ids[i][0], detail))
if dis[0] != 0:
distances.append((all_ids[i][0], dis))
return distances
def check_change(self):
"""
check if there is any changes for same cookie/ip (We can decide it later)
"""
sql_str = "SELECT DISTINCT(label) FROM features"
all_cookies = self.db.run_sql(sql_str)
num_cookies = len(all_cookies)
for cookie in all_cookies:
sql_str = "SELECT IP FROM features WHERE label='" + cookie[0] + "'"
records = self.db.run_sql(sql_str)
if len(records) > 10:
print len(records)
print records[0]
def check_unique(self):
for i in range(1, 10):
            print self.db.run_sql('select count(browser_fingerprint) from ( select browser_fingerprint from features GROUP BY browser_fingerprint HAVING count(*) = ' + str(i) + ' ) AS only_once')
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--change", action = "store_true", help = "Check if there is any change for a single computer")
parser.add_argument("-g", "--group", nargs = '*', action="store", help="Input the key word of two groups")
parser.add_argument("-v", "--firefox_version", type=int, action="store", help = "Input the firefox version")
parser.add_argument("-a", "--all", type=int, action = "store", help = "Compare all data pairs in database")
parser.add_argument("-d", "--detail", action = "store_true", help = "Compare all data pairs in database")
parser.add_argument("-i", "--id", type=int, nargs = '*', action = "store", help = "Compare all data pairs in database")
args = parser.parse_args()
analyzer = Analyzer()
analyzer.check_unique()
if args.change:
analyzer.check_change()
    elif args.all is not None:
distance = analyzer.cal_all_distances(args.all, args.detail)
if args.all == 0:
for i in distance:
string = ""
for j in i:
string += str(j[0]) + '\t'
print string
else:
for i in distance:
print i
    elif args.id is not None:
ids = args.id
diff = analyzer.check_difference_by_id(ids[0], ids[1], args.detail)
distance = analyzer.cal_distance(diff)
print distance
else:
groups = args.group
firefox_version = args.firefox_version
        if firefox_version is None:
firefox_version = 0
        if groups is None:
print "Please use -h to see the usage. Key words needed here"
return 0
diff = analyzer.check_difference_by_group(firefox_version, groups[0], groups[1], args.detail)
distance = analyzer.cal_distance(diff)
print distance
if __name__ == "__main__":
main()
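# Example invocations (editor's illustration):
# python analyze.py -i 3 17 -d          # diff two rows by id, with details
# python analyze.py -g base compare -v 52  # diff two labeled groups, Firefox 52
# python analyze.py -a 0 -d             # pairwise distances over the whole table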
| Song-Li/dynamic_fingerprinting | research/analyze/analyze.py | Python | gpl-3.0 | 8,676 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Brian Coca <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
module: systemd
author:
- "Ansible Core Team"
version_added: "2.2"
short_description: Manage services.
description:
- Controls systemd services on remote hosts.
options:
name:
required: true
description:
- Name of the service.
aliases: ['unit', 'service']
state:
required: false
default: null
choices: [ 'started', 'stopped', 'restarted', 'reloaded' ]
description:
- C(started)/C(stopped) are idempotent actions that will not run commands unless necessary.
C(restarted) will always bounce the service. C(reloaded) will always reload.
enabled:
required: false
choices: [ "yes", "no" ]
default: null
description:
            - Whether the service should start on boot. B(At least one of state and enabled is required.)
masked:
required: false
choices: [ "yes", "no" ]
default: null
description:
- Whether the unit should be masked or not, a masked unit is impossible to start.
daemon_reload:
required: false
default: no
choices: [ "yes", "no" ]
description:
- run daemon-reload before doing any other operations, to make sure systemd has read any changes.
aliases: ['daemon-reload']
user:
required: false
default: no
choices: [ "yes", "no" ]
description:
- run systemctl talking to the service manager of the calling user, rather than the service manager
of the system.
notes:
- One option other than name is required.
requirements:
- A system managed by systemd
'''
EXAMPLES = '''
# Example action to start service httpd, if not running
- systemd: state=started name=httpd
# Example action to stop service cron on debian, if running
- systemd: name=cron state=stopped
# Example action to restart service cron on centos, in all cases, also issue daemon-reload to pick up config changes
- systemd:
state: restarted
daemon_reload: yes
name: crond
# Example action to reload service httpd, in all cases
- systemd:
name: httpd
state: reloaded
# Example action to enable service httpd and ensure it is not masked
- systemd:
name: httpd
enabled: yes
masked: no
# Example action to enable a timer for dnf-automatic
- systemd:
name: dnf-automatic.timer
state: started
enabled: True
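# Example action to mask service httpd so that it cannot be started
- systemd:
    name: httpd
    masked: yes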
'''
RETURN = '''
status:
description: A dictionary with the key=value pairs returned from `systemctl show`
returned: success
type: complex
sample: {
"ActiveEnterTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"ActiveEnterTimestampMonotonic": "8135942",
"ActiveExitTimestampMonotonic": "0",
"ActiveState": "active",
"After": "auditd.service systemd-user-sessions.service time-sync.target systemd-journald.socket basic.target system.slice",
"AllowIsolate": "no",
"Before": "shutdown.target multi-user.target",
"BlockIOAccounting": "no",
"BlockIOWeight": "1000",
"CPUAccounting": "no",
"CPUSchedulingPolicy": "0",
"CPUSchedulingPriority": "0",
"CPUSchedulingResetOnFork": "no",
"CPUShares": "1024",
"CanIsolate": "no",
"CanReload": "yes",
"CanStart": "yes",
"CanStop": "yes",
"CapabilityBoundingSet": "18446744073709551615",
"ConditionResult": "yes",
"ConditionTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"ConditionTimestampMonotonic": "7902742",
"Conflicts": "shutdown.target",
"ControlGroup": "/system.slice/crond.service",
"ControlPID": "0",
"DefaultDependencies": "yes",
"Delegate": "no",
"Description": "Command Scheduler",
"DevicePolicy": "auto",
"EnvironmentFile": "/etc/sysconfig/crond (ignore_errors=no)",
"ExecMainCode": "0",
"ExecMainExitTimestampMonotonic": "0",
"ExecMainPID": "595",
"ExecMainStartTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"ExecMainStartTimestampMonotonic": "8134990",
"ExecMainStatus": "0",
"ExecReload": "{ path=/bin/kill ; argv[]=/bin/kill -HUP $MAINPID ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"ExecStart": "{ path=/usr/sbin/crond ; argv[]=/usr/sbin/crond -n $CRONDARGS ; ignore_errors=no ; start_time=[n/a] ; stop_time=[n/a] ; pid=0 ; code=(null) ; status=0/0 }",
"FragmentPath": "/usr/lib/systemd/system/crond.service",
"GuessMainPID": "yes",
"IOScheduling": "0",
"Id": "crond.service",
"IgnoreOnIsolate": "no",
"IgnoreOnSnapshot": "no",
"IgnoreSIGPIPE": "yes",
"InactiveEnterTimestampMonotonic": "0",
"InactiveExitTimestamp": "Sun 2016-05-15 18:28:49 EDT",
"InactiveExitTimestampMonotonic": "8135942",
"JobTimeoutUSec": "0",
"KillMode": "process",
"KillSignal": "15",
"LimitAS": "18446744073709551615",
"LimitCORE": "18446744073709551615",
"LimitCPU": "18446744073709551615",
"LimitDATA": "18446744073709551615",
"LimitFSIZE": "18446744073709551615",
"LimitLOCKS": "18446744073709551615",
"LimitMEMLOCK": "65536",
"LimitMSGQUEUE": "819200",
"LimitNICE": "0",
"LimitNOFILE": "4096",
"LimitNPROC": "3902",
"LimitRSS": "18446744073709551615",
"LimitRTPRIO": "0",
"LimitRTTIME": "18446744073709551615",
"LimitSIGPENDING": "3902",
"LimitSTACK": "18446744073709551615",
"LoadState": "loaded",
"MainPID": "595",
"MemoryAccounting": "no",
"MemoryLimit": "18446744073709551615",
"MountFlags": "0",
"Names": "crond.service",
"NeedDaemonReload": "no",
"Nice": "0",
"NoNewPrivileges": "no",
"NonBlocking": "no",
"NotifyAccess": "none",
"OOMScoreAdjust": "0",
"OnFailureIsolate": "no",
"PermissionsStartOnly": "no",
"PrivateNetwork": "no",
"PrivateTmp": "no",
"RefuseManualStart": "no",
"RefuseManualStop": "no",
"RemainAfterExit": "no",
"Requires": "basic.target",
"Restart": "no",
"RestartUSec": "100ms",
"Result": "success",
"RootDirectoryStartOnly": "no",
"SameProcessGroup": "no",
"SecureBits": "0",
"SendSIGHUP": "no",
"SendSIGKILL": "yes",
"Slice": "system.slice",
"StandardError": "inherit",
"StandardInput": "null",
"StandardOutput": "journal",
"StartLimitAction": "none",
"StartLimitBurst": "5",
"StartLimitInterval": "10000000",
"StatusErrno": "0",
"StopWhenUnneeded": "no",
"SubState": "running",
"SyslogLevelPrefix": "yes",
"SyslogPriority": "30",
"TTYReset": "no",
"TTYVHangup": "no",
"TTYVTDisallocate": "no",
"TimeoutStartUSec": "1min 30s",
"TimeoutStopUSec": "1min 30s",
"TimerSlackNSec": "50000",
"Transient": "no",
"Type": "simple",
"UMask": "0022",
"UnitFileState": "enabled",
"WantedBy": "multi-user.target",
"Wants": "system.slice",
"WatchdogTimestampMonotonic": "0",
"WatchdogUSec": "0",
}
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.service import sysv_exists, sysv_is_enabled, fail_if_missing
from ansible.module_utils._text import to_native
# ===========================================
# Main control flow
def main():
# initialize
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True, type='str', aliases=['unit', 'service']),
state = dict(choices=[ 'started', 'stopped', 'restarted', 'reloaded'], type='str'),
enabled = dict(type='bool'),
masked = dict(type='bool'),
daemon_reload= dict(type='bool', default=False, aliases=['daemon-reload']),
user= dict(type='bool', default=False),
),
supports_check_mode=True,
required_one_of=[['state', 'enabled', 'masked', 'daemon_reload']],
)
systemctl = module.get_bin_path('systemctl')
if module.params['user']:
systemctl = systemctl + " --user"
unit = module.params['name']
rc = 0
out = err = ''
result = {
'name': unit,
'changed': False,
'status': {},
'warnings': [],
}
# Run daemon-reload first, if requested
if module.params['daemon_reload']:
(rc, out, err) = module.run_command("%s daemon-reload" % (systemctl))
if rc != 0:
module.fail_json(msg='failure %d during daemon-reload: %s' % (rc, err))
# check service data
(rc, out, err) = module.run_command("%s show '%s'" % (systemctl, unit))
if rc != 0:
module.fail_json(msg='failure %d running systemctl show for %r: %s' % (rc, unit, err))
found = False
is_initd = sysv_exists(unit)
is_systemd = False
# load return of systemctl show into dictionary for easy access and return
multival = []
if out:
k = None
for line in to_native(out).split('\n'): # systemd can have multiline values delimited with {}
if line.strip():
if k is None:
if '=' in line:
k,v = line.split('=', 1)
if v.lstrip().startswith('{'):
if not v.rstrip().endswith('}'):
multival.append(line)
continue
result['status'][k] = v.strip()
k = None
else:
if line.rstrip().endswith('}'):
result['status'][k] = '\n'.join(multival).strip()
multival = []
k = None
else:
multival.append(line)
is_systemd = 'LoadState' in result['status'] and result['status']['LoadState'] != 'not-found'
# Check for loading error
if is_systemd and 'LoadError' in result['status']:
module.fail_json(msg="Error loading unit file '%s': %s" % (unit, result['status']['LoadError']))
# Does service exist?
found = is_systemd or is_initd
if is_initd and not is_systemd:
result['warnings'].append('The service (%s) is actually an init script but the system is managed by systemd' % unit)
# mask/unmask the service, if requested, can operate on services before they are installed
if module.params['masked'] is not None:
# state is not masked unless systemd affirms otherwise
masked = ('LoadState' in result['status'] and result['status']['LoadState'] == 'masked')
if masked != module.params['masked']:
result['changed'] = True
if module.params['masked']:
action = 'mask'
else:
action = 'unmask'
if not module.check_mode:
(rc, out, err) = module.run_command("%s %s '%s'" % (systemctl, action, unit))
if rc != 0:
                    # some versions of systemd CAN mask/unmask non-existing services; we only fail on missing if they don't
fail_if_missing(module, found, unit, "cannot %s" % (action))
module.fail_json(msg="Unable to %s service %s: %s" % (action, unit, err))
# Enable/disable service startup at boot if requested
if module.params['enabled'] is not None:
if module.params['enabled']:
action = 'enable'
else:
action = 'disable'
fail_if_missing(module, found, unit, "cannot %s" % (action))
# do we need to enable the service?
enabled = False
(rc, out, err) = module.run_command("%s is-enabled '%s'" % (systemctl, unit))
# check systemctl result or if it is a init script
if rc == 0:
enabled = True
elif rc == 1:
# if both init script and unit file exist stdout should have enabled/disabled, otherwise use rc entries
if is_initd and (not out.startswith('disabled') or sysv_is_enabled(unit)):
enabled = True
# default to current state
result['enabled'] = enabled
# Change enable/disable if needed
if enabled != module.params['enabled']:
result['changed'] = True
if not module.check_mode:
(rc, out, err) = module.run_command("%s %s '%s'" % (systemctl, action, unit))
if rc != 0:
module.fail_json(msg="Unable to %s service %s: %s" % (action, unit, out + err))
result['enabled'] = not enabled
# set service state if requested
if module.params['state'] is not None:
fail_if_missing(module, found, unit, "cannot check nor set state")
# default to desired state
result['state'] = module.params['state']
# What is current service state?
if 'ActiveState' in result['status']:
action = None
if module.params['state'] == 'started':
if result['status']['ActiveState'] != 'active':
action = 'start'
elif module.params['state'] == 'stopped':
if result['status']['ActiveState'] == 'active':
action = 'stop'
else:
action = module.params['state'][:-2] # remove 'ed' from restarted/reloaded
result['state'] = 'started'
if action:
result['changed'] = True
if not module.check_mode:
(rc, out, err) = module.run_command("%s %s '%s'" % (systemctl, action, unit))
if rc != 0:
module.fail_json(msg="Unable to %s service %s: %s" % (action, unit, err))
else:
# this should not happen?
module.fail_json(msg="Service is in unknown state", status=result['status'])
module.exit_json(**result)
if __name__ == '__main__':
main()
| jtyr/ansible-modules-core | system/systemd.py | Python | gpl-3.0 | 15,567 |
#!./python_link
# -*- coding: utf-8 -*-
################################################################################
# DChars-FE Copyright (C) 2008 Xavier Faure
# Contact: faure dot epistulam dot mihi dot scripsisti at orange dot fr
#
# This file is part of DChars-FE.
# DChars-FE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DChars-FE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with DChars-FE. If not, see <http://www.gnu.org/licenses/>.
################################################################################
"""
❏DChars-FE❏ config_ini.py
"""
import configparser, os, codecs
CONFIG_INI = None
#///////////////////////////////////////////////////////////////////////////////
def read_configuration_file():
"""
function read_configuration_file()
read the config.ini and return the result.
"""
DATA = configparser.ConfigParser()
# about the following line : why not simply DATA.read( "dchars-fr", "config.ini") ?
# -> once installed, DChars have to know the exact path to config.ini,
# hence the following line :
config_ini_filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), "config.ini" )
    # note : configparser's read() expects a filename (or a list of filenames),
    # not a file object, so we cannot simply write
    # DATA.read( open(config_ini_filename, "r", encoding="utf-8") ) ;
    # readfp() with codecs.open() forces the file to be decoded as UTF-8 :
    DATA.readfp( codecs.open(config_ini_filename, "r", "utf-8") )
return DATA
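# Hedged usage sketch (editor's illustration; the section and option names are
# assumptions — read them from your own config.ini):
# CONFIG_INI = read_configuration_file()
# language = CONFIG_INI["DEFAULT"].get("language")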
| suizokukan/dchars-fe | config_ini.py | Python | gpl-3.0 | 1,966 |
# -*- coding: utf-8 -*-
#-----------------------------------------------------------------------------
# OpenModes - An eigenmode solver for open electromagnetic resonators
# Copyright (C) 2013 David Powell
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#-----------------------------------------------------------------------------
import setuptools
from distutils.util import get_platform
import os.path as osp
import os
from pkg_resources import parse_version
try:
import numpy
except ImportError:
numpy_installed = False
else:
numpy_installed = True
if not numpy_installed or (parse_version(numpy.__version__) < parse_version('1.10.0')):
raise ValueError("Numpy 1.10.0 or greater required")
from numpy.distutils.core import Extension, setup
import platform
if platform.system() == 'Darwin':
os.environ["CC"] = "gcc-7"
os.environ["CXX"] = "gcc-7"
# Ideally would like to perform static linking under mingw32 to avoid
# packaging a whole bunch of dlls. However, static linking is not supported
# for the openmp libraries.
ccompiler_dependent_options = {
'mingw32': {
# 'extra_link_args' : ['-static']
}
}
# The following options are required to enable openmp to be used in the fortran
# code, which is entirely compiler dependent
fcompiler_dependent_options = {
# gnu gfortran (including under mingw)
'gnu95': {
        # -O3 is most desirable, but generates NaNs under mingw32
'extra_f90_compile_args': ["-g", "-fimplicit-none", "-fopenmp", "-O3"],
'libraries': ["gomp"]
},
'intel': {
# Currently ifort gives NaNs in impedance matrix derivative
# on -O2, but not on -O3. To be investigated!
#'extra_f90_compile_args': ['/debug', '-openmp', '-O3', '/fpe:0', '/fp:precise']#, '/traceback'],
'extra_f90_compile_args': ['-openmp', '-O2', '/fpe:0', '/fp:fast=2']#, '/traceback'],
#'extra_link_args' : ['-openmp']
#'extra_f77_compile_args' : ['-openmp', '-O3'],
#'extra_compile_args' : ['-openmp', '-O3', '-static'],
#'extra_link_args' : ['-nodefaultlib:msvcrt']
}
}
# Intel fortran compiler goes by several names depending on the version
# and target platform. Here the settings are all the same
fcompiler_dependent_options['intelem'] = fcompiler_dependent_options['intel']
fcompiler_dependent_options['intelvem'] = fcompiler_dependent_options['intel']
core = Extension(name='openmodes.core',
sources=[osp.join('src', 'core.pyf'),
osp.join('src', 'common.f90'),
osp.join('src', 'rwg.f90')],
)
dunavant = Extension(name='openmodes.dunavant',
sources=[osp.join('src', 'dunavant.pyf'),
osp.join('src', 'dunavant.f90')])
from numpy.distutils.command.build_ext import build_ext
class compiler_dependent_build_ext(build_ext):
"""A build extension which allows compiler-dependent options for
compilation, linking etc. Options can depend on either the C or FORTRAN
compiler which is actually used (as distinct from the default compilers,
which are much easier to detect)
Based on http://stackoverflow.com/a/5192738/482420
"""
def build_extensions(self):
ccompiler = self.compiler.compiler_type
fcompiler = self._f77_compiler.compiler_type
# add the compiler dependent options to each extension
for extension in self.extensions:
try:
modification = ccompiler_dependent_options[ccompiler]
for key, val in modification.items():
getattr(extension, key).extend(val)
except (KeyError, AttributeError):
pass
try:
modification = fcompiler_dependent_options[fcompiler]
for key, val in modification.items():
getattr(extension, key).extend(val)
except (KeyError, AttributeError):
pass
build_ext.build_extensions(self)
# Find library files which must be included, which should be placed in the
# appropriate subdirectory of the redist directory. This must be done manually,
# as this code cannot detect which compiler will be used.
redist_path = osp.join("redist", get_platform())
redist_data = []
if osp.exists(redist_path):
redist_data.append(redist_path)
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
# run the script to find the version
exec(open(osp.join("openmodes", "version.py")).read())
setup(name='OpenModes',
description="An eigenmode solver for open electromagnetic resonantors",
author="David Powell",
author_email='[email protected]',
license='GPLv3+',
url='http://davidpowell.github.io/OpenModes',
packages=setuptools.find_packages(),
package_data={'openmodes': [osp.join("geometry", "*.geo"),
osp.join("external", "three.js", "*"),
osp.join("templates", "*"),
osp.join("static", "*")]},
ext_modules=[dunavant, core],
version=__version__,
install_requires=['numpy >= 1.10.0', 'scipy >= 0.18.0', 'matplotlib', 'jinja2',
'six', 'ipywidgets', 'meshio', 'dill'],
long_description=long_description,
long_description_content_type="text/markdown",
platforms="Windows, Linux",
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Fortran',
'Topic :: Scientific/Engineering'
],
cmdclass={'build_ext': compiler_dependent_build_ext},
# Include any required library files
data_files=[('openmodes', redist_data+["RELEASE-VERSION"])]
)
| DavidPowell/OpenModes | setup.py | Python | gpl-3.0 | 7,161 |