desc (stringlengths 3–26.7k) | decl (stringlengths 11–7.89k) | bodies (stringlengths 8–553k)
---|---|---
'Internal: process a STAT, NEXT or LAST command.'
|
def statcmd(self, line):
|
resp = self.shortcmd(line)
return self.statparse(resp)
|
'Process a STAT command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: the article number
- id: the message id'
|
def stat(self, id):
|
return self.statcmd(('STAT ' + id))
|
'Process a NEXT command. No arguments. Return as for STAT.'
|
def next(self):
|
return self.statcmd('NEXT')
|
'Process a LAST command. No arguments. Return as for STAT.'
|
def last(self):
|
return self.statcmd('LAST')
|
'Internal: process a HEAD, BODY or ARTICLE command.'
|
def artcmd(self, line, file=None):
|
(resp, list) = self.longcmd(line, file)
(resp, nr, id) = self.statparse(resp)
return (resp, nr, id, list)
|
'Process a HEAD command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article\'s header'
|
def head(self, id):
|
return self.artcmd(('HEAD ' + id))
|
'Process a BODY command. Argument:
- id: article number or message id
- file: Filename string or file object to store the article in
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article\'s body or an empty list
if file was used'
|
def body(self, id, file=None):
|
return self.artcmd(('BODY ' + id), file)
|
'Process an ARTICLE command. Argument:
- id: article number or message id
Returns:
- resp: server response if successful
- nr: article number
- id: message id
- list: the lines of the article'
|
def article(self, id):
|
return self.artcmd(('ARTICLE ' + id))
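A minimal usage sketch for the STAT/HEAD/ARTICLE family above, assuming the Python 2 nntplib.NNTP client; the server name and group are hypothetical.

from nntplib import NNTP

s = NNTP('news.example.com')                      # hypothetical server
resp, count, first, last, name = s.group('comp.lang.python')
resp, nr, msgid = s.stat(first)                   # STAT by article number
resp, nr, msgid, head_lines = s.head(first)       # header lines only
resp, nr, msgid, art_lines = s.article(msgid)     # full article by message-id
s.quit()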
|
'Process a SLAVE command. Returns:
- resp: server response if successful'
|
def slave(self):
|
return self.shortcmd('SLAVE')
|
'Process an XHDR command (optional server extension). Arguments:
- hdr: the header type (e.g. \'subject\')
- str: an article nr, a message id, or a range nr1-nr2
Returns:
- resp: server response if successful
- list: list of (nr, value) strings'
|
def xhdr(self, hdr, str, file=None):
|
pat = re.compile('^([0-9]+) ?(.*)\n?')
(resp, lines) = self.longcmd(((('XHDR ' + hdr) + ' ') + str), file)
for i in range(len(lines)):
line = lines[i]
m = pat.match(line)
if m:
lines[i] = m.group(1, 2)
return (resp, lines)
|
'Process an XOVER command (optional server extension) Arguments:
- start: start of range
- end: end of range
Returns:
- resp: server response if successful
- list: list of (art-nr, subject, poster, date,
id, references, size, lines)'
|
def xover(self, start, end, file=None):
|
(resp, lines) = self.longcmd(((('XOVER ' + start) + '-') + end), file)
xover_lines = []
for line in lines:
elem = line.split('\t')
try:
xover_lines.append((elem[0], elem[1], elem[2], elem[3], elem[4], elem[5].split(), elem[6], elem[7]))
except IndexError:
raise NNTPDataError(line)
return (resp, xover_lines)
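A hedged sketch of reading overview data with the method above; each entry unpacks into the tuple documented for xover(), and the server and group names are assumptions.

from nntplib import NNTP

s = NNTP('news.example.com')                      # hypothetical server
resp, count, first, last, name = s.group('comp.lang.python')
resp, overviews = s.xover(first, last)
for (art_nr, subject, poster, date, msgid, refs, size, line_count) in overviews:
    print('%s  %s' % (art_nr, subject))
s.quit()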
|
'Process an XGTITLE command (optional server extension) Arguments:
- group: group name wildcard (i.e. news.*)
Returns:
- resp: server response if successful
- list: list of (name,title) strings'
|
def xgtitle(self, group, file=None):
|
line_pat = re.compile('^([^ \t]+)[ \t]+(.*)$')
(resp, raw_lines) = self.longcmd(('XGTITLE ' + group), file)
lines = []
for raw_line in raw_lines:
match = line_pat.search(raw_line.strip())
if match:
lines.append(match.group(1, 2))
return (resp, lines)
|
'Process an XPATH command (optional server extension) Arguments:
- id: Message id of article
Returns:
resp: server response if successful
path: directory path to article'
|
def xpath(self, id):
|
resp = self.shortcmd(('XPATH ' + id))
if (resp[:3] != '223'):
raise NNTPReplyError(resp)
try:
[resp_num, path] = resp.split()
except ValueError:
raise NNTPReplyError(resp)
else:
return (resp, path)
|
'Process the DATE command. Arguments:
None
Returns:
resp: server response if successful
date: Date suitable for newnews/newgroups commands etc.
time: Time suitable for newnews/newgroups commands etc.'
|
def date(self):
|
resp = self.shortcmd('DATE')
if (resp[:3] != '111'):
raise NNTPReplyError(resp)
elem = resp.split()
if (len(elem) != 2):
raise NNTPDataError(resp)
date = elem[1][2:8]
time = elem[1][(-6):]
if ((len(date) != 6) or (len(time) != 6)):
raise NNTPDataError(resp)
return (resp, date, time)
|
'Process a POST command. Arguments:
- f: file containing the article
Returns:
- resp: server response if successful'
|
def post(self, f):
|
resp = self.shortcmd('POST')
if (resp[0] != '3'):
raise NNTPReplyError(resp)
while 1:
line = f.readline()
if (not line):
break
if (line[(-1)] == '\n'):
line = line[:(-1)]
if (line[:1] == '.'):
line = ('.' + line)
self.putline(line)
self.putline('.')
return self.getresp()
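A hedged sketch of posting with the method above; the file must contain RFC 822-style headers, a blank line, and then the body. The server name and filename are hypothetical.

from nntplib import NNTP

s = NNTP('news.example.com')   # hypothetical server
f = open('article.txt')        # headers, blank line, body
try:
    print(s.post(f))           # raises NNTPReplyError if the server refuses
finally:
    f.close()
    s.quit()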
|
'Process an IHAVE command. Arguments:
- id: message-id of the article
- f: file containing the article
Returns:
- resp: server response if successful
Note that if the server refuses the article an exception is raised.'
|
def ihave(self, id, f):
|
resp = self.shortcmd(('IHAVE ' + id))
if (resp[0] != '3'):
raise NNTPReplyError(resp)
while 1:
line = f.readline()
if (not line):
break
if (line[(-1)] == '\n'):
line = line[:(-1)]
if (line[:1] == '.'):
line = ('.' + line)
self.putline(line)
self.putline('.')
return self.getresp()
|
'Process a QUIT command and close the socket. Returns:
- resp: server response if successful'
|
def quit(self):
|
resp = self.shortcmd('QUIT')
self.file.close()
self.sock.close()
del self.file, self.sock
return resp
|
'Serve a GET request.'
|
def do_GET(self):
|
f = self.send_head()
if f:
self.copyfile(f, self.wfile)
f.close()
|
'Serve a HEAD request.'
|
def do_HEAD(self):
|
f = self.send_head()
if f:
f.close()
|
'Common code for GET and HEAD commands.
This sends the response code and MIME headers.
Return value is either a file object (which has to be copied
to the outputfile by the caller unless the command was HEAD,
and must be closed by the caller under all circumstances), or
None, in which case the caller has nothing further to do.'
|
def send_head(self):
|
path = self.translate_path(self.path)
f = None
if os.path.isdir(path):
if (not self.path.endswith('/')):
self.send_response(301)
self.send_header('Location', (self.path + '/'))
self.end_headers()
return None
for index in ('index.html', 'index.htm'):
index = os.path.join(path, index)
if os.path.exists(index):
path = index
break
else:
return self.list_directory(path)
ctype = self.guess_type(path)
try:
f = open(path, 'rb')
except IOError:
self.send_error(404, 'File not found')
return None
self.send_response(200)
self.send_header('Content-type', ctype)
fs = os.fstat(f.fileno())
self.send_header('Content-Length', str(fs[6]))
self.send_header('Last-Modified', self.date_time_string(fs.st_mtime))
self.end_headers()
return f
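A minimal sketch, assuming this handler is Python 2's SimpleHTTPServer.SimpleHTTPRequestHandler: serving the current directory exercises do_GET(), do_HEAD(), and send_head() above. The port is arbitrary.

import SocketServer
import SimpleHTTPServer

httpd = SocketServer.TCPServer(('', 8000), SimpleHTTPServer.SimpleHTTPRequestHandler)
httpd.serve_forever()          # GET /some/dir/ returns index.html or a directory listing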
|
'Helper to produce a directory listing (absent index.html).
Return value is either a file object, or None (indicating an
error). In either case, the headers are sent, making the
interface the same as for send_head().'
|
def list_directory(self, path):
|
try:
list = os.listdir(path)
except os.error:
self.send_error(404, 'No permission to list directory')
return None
list.sort(key=(lambda a: a.lower()))
f = StringIO()
displaypath = cgi.escape(urllib.unquote(self.path))
f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
f.write(('<html>\n<title>Directory listing for %s</title>\n' % displaypath))
f.write(('<body>\n<h2>Directory listing for %s</h2>\n' % displaypath))
f.write('<hr>\n<ul>\n')
for name in list:
fullname = os.path.join(path, name)
displayname = linkname = name
if os.path.isdir(fullname):
displayname = (name + '/')
linkname = (name + '/')
if os.path.islink(fullname):
displayname = (name + '@')
f.write(('<li><a href="%s">%s</a>\n' % (urllib.quote(linkname), cgi.escape(displayname))))
f.write('</ul>\n<hr>\n</body>\n</html>\n')
length = f.tell()
f.seek(0)
self.send_response(200)
encoding = sys.getfilesystemencoding()
self.send_header('Content-type', ('text/html; charset=%s' % encoding))
self.send_header('Content-Length', str(length))
self.end_headers()
return f
|
'Translate a /-separated PATH to the local filename syntax.
Components that mean special things to the local file system
(e.g. drive or directory names) are ignored. (XXX They should
probably be diagnosed.)'
|
def translate_path(self, path):
|
path = path.split('?', 1)[0]
path = path.split('#', 1)[0]
path = posixpath.normpath(urllib.unquote(path))
words = path.split('/')
words = filter(None, words)
path = os.getcwd()
for word in words:
(drive, word) = os.path.splitdrive(word)
(head, word) = os.path.split(word)
if (word in (os.curdir, os.pardir)):
continue
path = os.path.join(path, word)
return path
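A hypothetical worked example of the translation above, assuming the server was started from /srv/www (paths are made up for illustration).

# With os.getcwd() == '/srv/www':
#   translate_path('/docs/index.html?x=1')  ->  '/srv/www/docs/index.html'   (query string dropped)
#   translate_path('/a/../b/%41.txt')       ->  '/srv/www/b/A.txt'           (unquoted, then normalized)
#   translate_path('/../../etc/passwd')     ->  '/srv/www/etc/passwd'        (cannot escape the cwd)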
|
'Copy all data between two file objects.
The SOURCE argument is a file object open for reading
(or anything with a read() method) and the DESTINATION
argument is a file object open for writing (or
anything with a write() method).
The only reason for overriding this would be to change
the block size or perhaps to replace newlines by CRLF
-- note however that the default server uses this
to copy binary data as well.'
|
def copyfile(self, source, outputfile):
|
shutil.copyfileobj(source, outputfile)
|
'Guess the type of a file.
Argument is a PATH (a filename).
Return value is a string of the form type/subtype,
usable for a MIME Content-type header.
The default implementation looks the file\'s extension
up in the table self.extensions_map, using application/octet-stream
as a default; however it would be permissible (if
slow) to look inside the data to make a better guess.'
|
def guess_type(self, path):
|
(base, ext) = posixpath.splitext(path)
if (ext in self.extensions_map):
return self.extensions_map[ext]
ext = ext.lower()
if (ext in self.extensions_map):
return self.extensions_map[ext]
else:
return self.extensions_map['']
|
'Returns a dialect (or None) corresponding to the sample'
|
def sniff(self, sample, delimiters=None):
|
(quotechar, doublequote, delimiter, skipinitialspace) = self._guess_quote_and_delimiter(sample, delimiters)
if (not delimiter):
(delimiter, skipinitialspace) = self._guess_delimiter(sample, delimiters)
if (not delimiter):
raise Error, 'Could not determine delimiter'
class dialect(Dialect):
_name = 'sniffed'
lineterminator = '\r\n'
quoting = QUOTE_MINIMAL
dialect.doublequote = doublequote
dialect.delimiter = delimiter
dialect.quotechar = (quotechar or '"')
dialect.skipinitialspace = skipinitialspace
return dialect
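A minimal sketch of the sniffing API above, using Python's csv module on an in-memory sample (the sample data is made up).

import csv

sample = 'name;age;city\nalice;30;Oslo\nbob;25;Bergen\n'
dialect = csv.Sniffer().sniff(sample, delimiters=';,')
print(dialect.delimiter)                      # ';'
for row in csv.reader(sample.splitlines(), dialect):
    print(row)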
|
'Looks for text enclosed between two identical quotes
(the probable quotechar) which are preceded and followed
by the same character (the probable delimiter).
For example:
,\'some text\',
The quote with the most wins, same with the delimiter.
If there is no quotechar the delimiter can\'t be determined
this way.'
|
def _guess_quote_and_delimiter(self, data, delimiters):
|
matches = []
for restr in ('(?P<delim>[^\\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?P=delim)', '(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?P<delim>[^\\w\n"\'])(?P<space> ?)', '(?P<delim>>[^\\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?:$|\n)', '(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?:$|\n)'):
regexp = re.compile(restr, (re.DOTALL | re.MULTILINE))
matches = regexp.findall(data)
if matches:
break
if (not matches):
return ('', False, None, 0)
quotes = {}
delims = {}
spaces = 0
for m in matches:
n = (regexp.groupindex['quote'] - 1)
key = m[n]
if key:
quotes[key] = (quotes.get(key, 0) + 1)
try:
n = (regexp.groupindex['delim'] - 1)
key = m[n]
except KeyError:
continue
if (key and ((delimiters is None) or (key in delimiters))):
delims[key] = (delims.get(key, 0) + 1)
try:
n = (regexp.groupindex['space'] - 1)
except KeyError:
continue
if m[n]:
spaces += 1
quotechar = reduce((lambda a, b, quotes=quotes: (((quotes[a] > quotes[b]) and a) or b)), quotes.keys())
if delims:
delim = reduce((lambda a, b, delims=delims: (((delims[a] > delims[b]) and a) or b)), delims.keys())
skipinitialspace = (delims[delim] == spaces)
if (delim == '\n'):
delim = ''
else:
delim = ''
skipinitialspace = 0
dq_regexp = re.compile(('((%(delim)s)|^)\\W*%(quote)s[^%(delim)s\\n]*%(quote)s[^%(delim)s\\n]*%(quote)s\\W*((%(delim)s)|$)' % {'delim': delim, 'quote': quotechar}), re.MULTILINE)
if dq_regexp.search(data):
doublequote = True
else:
doublequote = False
return (quotechar, doublequote, delim, skipinitialspace)
|
'The delimiter /should/ occur the same number of times on
each row. However, due to malformed data, it may not. We don\'t want
an all or nothing approach, so we allow for small variations in this
number.
1) build a table of the frequency of each character on every line.
2) build a table of frequencies of this frequency (meta-frequency?),
e.g. \'x occurred 5 times in 10 rows, 6 times in 1000 rows,
7 times in 2 rows\'
3) use the mode of the meta-frequency to determine the /expected/
frequency for that character
4) find out how often the character actually meets that goal
5) the character that best meets its goal is the delimiter
For performance reasons, the data is evaluated in chunks, so it can
try and evaluate the smallest portion of the data possible, evaluating
additional chunks as necessary.'
|
def _guess_delimiter(self, data, delimiters):
|
data = filter(None, data.split('\n'))
ascii = [chr(c) for c in range(127)]
chunkLength = min(10, len(data))
iteration = 0
charFrequency = {}
modes = {}
delims = {}
(start, end) = (0, min(chunkLength, len(data)))
while (start < len(data)):
iteration += 1
for line in data[start:end]:
for char in ascii:
metaFrequency = charFrequency.get(char, {})
freq = line.count(char)
metaFrequency[freq] = (metaFrequency.get(freq, 0) + 1)
charFrequency[char] = metaFrequency
for char in charFrequency.keys():
items = charFrequency[char].items()
if ((len(items) == 1) and (items[0][0] == 0)):
continue
if (len(items) > 1):
modes[char] = reduce((lambda a, b: (((a[1] > b[1]) and a) or b)), items)
items.remove(modes[char])
modes[char] = (modes[char][0], (modes[char][1] - reduce((lambda a, b: (0, (a[1] + b[1]))), items)[1]))
else:
modes[char] = items[0]
modeList = modes.items()
total = float((chunkLength * iteration))
consistency = 1.0
threshold = 0.9
while ((len(delims) == 0) and (consistency >= threshold)):
for (k, v) in modeList:
if ((v[0] > 0) and (v[1] > 0)):
if (((v[1] / total) >= consistency) and ((delimiters is None) or (k in delimiters))):
delims[k] = v
consistency -= 0.01
if (len(delims) == 1):
delim = delims.keys()[0]
skipinitialspace = (data[0].count(delim) == data[0].count(('%c ' % delim)))
return (delim, skipinitialspace)
start = end
end += chunkLength
if (not delims):
return ('', 0)
if (len(delims) > 1):
for d in self.preferred:
if (d in delims.keys()):
skipinitialspace = (data[0].count(d) == data[0].count(('%c ' % d)))
return (d, skipinitialspace)
items = [(v, k) for (k, v) in delims.items()]
items.sort()
delim = items[(-1)][1]
skipinitialspace = (data[0].count(delim) == data[0].count(('%c ' % delim)))
return (delim, skipinitialspace)
|
'Registers an instance to respond to XML-RPC requests.
Only one instance can be installed at a time.
If the registered instance has a _dispatch method then that
method will be called with the name of the XML-RPC method and
its parameters as a tuple
e.g. instance._dispatch(\'add\',(2,3))
If the registered instance does not have a _dispatch method
then the instance will be searched to find a matching method
and, if found, will be called. Methods beginning with an \'_\'
are considered private and will not be called by
SimpleXMLRPCServer.
If a registered function matches a XML-RPC request, then it
will be called instead of the registered instance.
If the optional allow_dotted_names argument is true and the
instance does not have a _dispatch method, method names
containing dots are supported and resolved, as long as none of
the name segments start with an \'_\'.
*** SECURITY WARNING: ***
Enabling the allow_dotted_names option allows intruders
to access your module\'s global variables and may allow
intruders to execute arbitrary code on your machine. Only
use this option on a secure, closed network.'
|
def register_instance(self, instance, allow_dotted_names=False):
|
self.instance = instance
self.allow_dotted_names = allow_dotted_names
|
'Registers a function to respond to XML-RPC requests.
The optional name argument can be used to set a Unicode name
for the function.'
|
def register_function(self, function, name=None):
|
if (name is None):
name = function.__name__
self.funcs[name] = function
|
'Registers the XML-RPC introspection methods in the system
namespace.
see http://xmlrpc.usefulinc.com/doc/reserved.html'
|
def register_introspection_functions(self):
|
self.funcs.update({'system.listMethods': self.system_listMethods, 'system.methodSignature': self.system_methodSignature, 'system.methodHelp': self.system_methodHelp})
|
'Registers the XML-RPC multicall method in the system
namespace.
see http://www.xmlrpc.com/discuss/msgReader$1208'
|
def register_multicall_functions(self):
|
self.funcs.update({'system.multicall': self.system_multicall})
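A minimal server-side sketch, assuming these registration methods belong to Python 2's SimpleXMLRPCServer.SimpleXMLRPCServer; the host, port, and add() function are arbitrary.

from SimpleXMLRPCServer import SimpleXMLRPCServer

def add(x, y):
    return x + y

server = SimpleXMLRPCServer(('localhost', 8000), logRequests=True)
server.register_function(add)                  # exposed as 'add'
server.register_function(add, 'math.add')      # exposed under an explicit name
server.register_introspection_functions()      # system.listMethods, system.methodHelp, ...
server.register_multicall_functions()          # system.multicall
server.serve_forever()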
|
'Dispatches an XML-RPC method from marshalled (XML) data.
XML-RPC methods are dispatched from the marshalled (XML) data
using the _dispatch method and the result is returned as
marshalled data. For backwards compatibility, a dispatch
function can be provided as an argument (see comment in
SimpleXMLRPCRequestHandler.do_POST) but overriding the
existing method through subclassing is the preferred means
of changing method dispatch behavior.'
|
def _marshaled_dispatch(self, data, dispatch_method=None, path=None):
|
try:
(params, method) = xmlrpclib.loads(data)
if (dispatch_method is not None):
response = dispatch_method(method, params)
else:
response = self._dispatch(method, params)
response = (response,)
response = xmlrpclib.dumps(response, methodresponse=1, allow_none=self.allow_none, encoding=self.encoding)
except Fault as fault:
response = xmlrpclib.dumps(fault, allow_none=self.allow_none, encoding=self.encoding)
except:
(exc_type, exc_value, exc_tb) = sys.exc_info()
response = xmlrpclib.dumps(xmlrpclib.Fault(1, ('%s:%s' % (exc_type, exc_value))), encoding=self.encoding, allow_none=self.allow_none)
return response
|
'system.listMethods() => [\'add\', \'subtract\', \'multiple\']
Returns a list of the methods supported by the server.'
|
def system_listMethods(self):
|
methods = self.funcs.keys()
if (self.instance is not None):
if hasattr(self.instance, '_listMethods'):
methods = remove_duplicates((methods + self.instance._listMethods()))
elif (not hasattr(self.instance, '_dispatch')):
methods = remove_duplicates((methods + list_public_methods(self.instance)))
methods.sort()
return methods
|
'system.methodSignature(\'add\') => [double, int, int]
Returns a list describing the signature of the method. In the
above example, the add method takes two integers as arguments
and returns a double result.
This server does NOT support system.methodSignature.'
|
def system_methodSignature(self, method_name):
|
return 'signatures not supported'
|
'system.methodHelp(\'add\') => "Adds two integers together"
Returns a string containing documentation for the specified method.'
|
def system_methodHelp(self, method_name):
|
method = None
if (method_name in self.funcs):
method = self.funcs[method_name]
elif (self.instance is not None):
if hasattr(self.instance, '_methodHelp'):
return self.instance._methodHelp(method_name)
elif (not hasattr(self.instance, '_dispatch')):
try:
method = resolve_dotted_attribute(self.instance, method_name, self.allow_dotted_names)
except AttributeError:
pass
if (method is None):
return ''
else:
import pydoc
return pydoc.getdoc(method)
|
'system.multicall([{\'methodName\': \'add\', \'params\': [2, 2]}, ...]) => [[4], ...]
Allows the caller to package multiple XML-RPC calls into a single
request.
See http://www.xmlrpc.com/discuss/msgReader$1208'
|
def system_multicall(self, call_list):
|
results = []
for call in call_list:
method_name = call['methodName']
params = call['params']
try:
results.append([self._dispatch(method_name, params)])
except Fault as fault:
results.append({'faultCode': fault.faultCode, 'faultString': fault.faultString})
except:
(exc_type, exc_value, exc_tb) = sys.exc_info()
results.append({'faultCode': 1, 'faultString': ('%s:%s' % (exc_type, exc_value))})
return results
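A hedged client-side sketch: xmlrpclib.MultiCall batches several calls into one system.multicall request handled by the method above. The URL assumes the server sketch earlier in this section.

import xmlrpclib

proxy = xmlrpclib.ServerProxy('http://localhost:8000')
multi = xmlrpclib.MultiCall(proxy)
multi.add(2, 3)
multi.add(4, 5)
for result in multi():        # one HTTP request via system.multicall
    print(result)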
|
'Dispatches the XML-RPC method.
XML-RPC calls are forwarded to a registered function that
matches the called XML-RPC method name. If no such function
exists then the call is forwarded to the registered instance,
if available.
If the registered instance has a _dispatch method then that
method will be called with the name of the XML-RPC method and
its parameters as a tuple
e.g. instance._dispatch(\'add\',(2,3))
If the registered instance does not have a _dispatch method
then the instance will be searched to find a matching method
and, if found, will be called.
Methods beginning with an \'_\' are considered private and will
not be called.'
|
def _dispatch(self, method, params):
|
func = None
try:
func = self.funcs[method]
except KeyError:
if (self.instance is not None):
if hasattr(self.instance, '_dispatch'):
return self.instance._dispatch(method, params)
else:
try:
func = resolve_dotted_attribute(self.instance, method, self.allow_dotted_names)
except AttributeError:
pass
if (func is not None):
return func(*params)
else:
raise Exception(('method "%s" is not supported' % method))
|
'Handles the HTTP POST request.
Attempts to interpret all HTTP POST requests as XML-RPC calls,
which are forwarded to the server\'s _dispatch method for handling.'
|
def do_POST(self):
|
if (not self.is_rpc_path_valid()):
self.report_404()
return
try:
max_chunk_size = ((10 * 1024) * 1024)
size_remaining = int(self.headers['content-length'])
L = []
while size_remaining:
chunk_size = min(size_remaining, max_chunk_size)
L.append(self.rfile.read(chunk_size))
size_remaining -= len(L[(-1)])
data = ''.join(L)
data = self.decode_request_content(data)
if (data is None):
return
response = self.server._marshaled_dispatch(data, getattr(self, '_dispatch', None), self.path)
except Exception as e:
self.send_response(500)
if (hasattr(self.server, '_send_traceback_header') and self.server._send_traceback_header):
self.send_header('X-exception', str(e))
self.send_header('X-traceback', traceback.format_exc())
self.send_header('Content-length', '0')
self.end_headers()
else:
self.send_response(200)
self.send_header('Content-type', 'text/xml')
if (self.encode_threshold is not None):
if (len(response) > self.encode_threshold):
q = self.accept_encodings().get('gzip', 0)
if q:
try:
response = xmlrpclib.gzip_encode(response)
self.send_header('Content-Encoding', 'gzip')
except NotImplementedError:
pass
self.send_header('Content-length', str(len(response)))
self.end_headers()
self.wfile.write(response)
|
'Selectively log an accepted request.'
|
def log_request(self, code='-', size='-'):
|
if self.server.logRequests:
BaseHTTPServer.BaseHTTPRequestHandler.log_request(self, code, size)
|
'Handle a single XML-RPC request'
|
def handle_xmlrpc(self, request_text):
|
response = self._marshaled_dispatch(request_text)
print 'Content-Type: text/xml'
print ('Content-Length: %d' % len(response))
print
sys.stdout.write(response)
|
'Handle a single HTTP GET request.
Default implementation indicates an error because
XML-RPC uses the POST method.'
|
def handle_get(self):
|
code = 400
(message, explain) = BaseHTTPServer.BaseHTTPRequestHandler.responses[code]
response = (BaseHTTPServer.DEFAULT_ERROR_MESSAGE % {'code': code, 'message': message, 'explain': explain})
print ('Status: %d %s' % (code, message))
print ('Content-Type: %s' % BaseHTTPServer.DEFAULT_ERROR_CONTENT_TYPE)
print ('Content-Length: %d' % len(response))
print
sys.stdout.write(response)
|
'Handle a single XML-RPC request passed through a CGI post method.
If no XML data is given then it is read from stdin. The resulting
XML-RPC response is printed to stdout along with the correct HTTP
headers.'
|
def handle_request(self, request_text=None):
|
if ((request_text is None) and (os.environ.get('REQUEST_METHOD', None) == 'GET')):
self.handle_get()
else:
try:
length = int(os.environ.get('CONTENT_LENGTH', None))
except (TypeError, ValueError):
length = (-1)
if (request_text is None):
request_text = sys.stdin.read(length)
self.handle_xmlrpc(request_text)
|
'Initialize a new instance, passing the time and delay
functions'
|
def __init__(self, timefunc, delayfunc):
|
self._queue = []
self.timefunc = timefunc
self.delayfunc = delayfunc
|
'Enter a new event in the queue at an absolute time.
Returns an ID for the event which can be used to remove it,
if necessary.'
|
def enterabs(self, time, priority, action, argument):
|
event = Event(time, priority, action, argument)
heapq.heappush(self._queue, event)
return event
|
'A variant that specifies the time as a relative time.
This is actually the more commonly used interface.'
|
def enter(self, delay, priority, action, argument):
|
time = (self.timefunc() + delay)
return self.enterabs(time, priority, action, argument)
|
'Remove an event from the queue.
This must be presented the ID as returned by enter().
If the event is not in the queue, this raises ValueError.'
|
def cancel(self, event):
|
self._queue.remove(event)
heapq.heapify(self._queue)
|
'Check whether the queue is empty.'
|
def empty(self):
|
return (not self._queue)
|
'Execute events until the queue is empty.
When there is a positive delay until the first event, the
delay function is called and the event is left in the queue;
otherwise, the event is removed from the queue and executed
(its action function is called, passing it the argument). If
the delay function returns prematurely, it is simply
restarted.
It is legal for both the delay function and the action
function to modify the queue or to raise an exception;
exceptions are not caught but the scheduler\'s state remains
well-defined so run() may be called again.
A questionable hack is added to allow other threads to run:
just after an event is executed, a delay of 0 is executed, to
avoid monopolizing the CPU when other threads are also
runnable.'
|
def run(self):
|
q = self._queue
delayfunc = self.delayfunc
timefunc = self.timefunc
pop = heapq.heappop
while q:
(time, priority, action, argument) = checked_event = q[0]
now = timefunc()
if (now < time):
delayfunc((time - now))
else:
event = pop(q)
if (event is checked_event):
action(*argument)
delayfunc(0)
else:
heapq.heappush(q, event)
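A minimal sketch of the scheduler API above, using the standard time functions; the hello() callback and delays are arbitrary.

import sched
import time

s = sched.scheduler(time.time, time.sleep)

def hello(name):
    print('hello, %s' % name)

s.enter(2, 1, hello, ('world',))                   # relative: 2 seconds from now
s.enterabs(time.time() + 5, 1, hello, ('again',))  # absolute deadline
s.run()                                            # blocks until the queue is empty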
|
'An ordered list of upcoming events.
Events are named tuples with fields for:
time, priority, action, arguments'
|
@property
def queue(self):
|
events = self._queue[:]
return map(heapq.heappop, ([events] * len(events)))
|
'Return the name (ID) of the current chunk.'
|
def getname(self):
|
return self.chunkname
|
'Return the size of the current chunk.'
|
def getsize(self):
|
return self.chunksize
|
'Seek to specified position into the chunk.
Default position is 0 (start of chunk).
If the file is not seekable, this will result in an error.'
|
def seek(self, pos, whence=0):
|
if self.closed:
raise ValueError, 'I/O operation on closed file'
if (not self.seekable):
raise IOError, 'cannot seek'
if (whence == 1):
pos = (pos + self.size_read)
elif (whence == 2):
pos = (pos + self.chunksize)
if ((pos < 0) or (pos > self.chunksize)):
raise RuntimeError
self.file.seek((self.offset + pos), 0)
self.size_read = pos
|
'Read at most size bytes from the chunk.
If size is omitted or negative, read until the end
of the chunk.'
|
def read(self, size=(-1)):
|
if self.closed:
raise ValueError, 'I/O operation on closed file'
if (self.size_read >= self.chunksize):
return ''
if (size < 0):
size = (self.chunksize - self.size_read)
if (size > (self.chunksize - self.size_read)):
size = (self.chunksize - self.size_read)
data = self.file.read(size)
self.size_read = (self.size_read + len(data))
if ((self.size_read == self.chunksize) and self.align and (self.chunksize & 1)):
dummy = self.file.read(1)
self.size_read = (self.size_read + len(dummy))
return data
|
'Skip the rest of the chunk.
If you are not interested in the contents of the chunk,
this method should be called so that the file points to
the start of the next chunk.'
|
def skip(self):
|
if self.closed:
raise ValueError, 'I/O operation on closed file'
if self.seekable:
try:
n = (self.chunksize - self.size_read)
if (self.align and (self.chunksize & 1)):
n = (n + 1)
self.file.seek(n, 1)
self.size_read = (self.size_read + n)
return
except IOError:
pass
while (self.size_read < self.chunksize):
n = min(8192, (self.chunksize - self.size_read))
dummy = self.read(n)
if (not dummy):
raise EOFError
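A hedged sketch of walking IFF-style chunks with Python's chunk module; 'sound.aiff' is a hypothetical file, and for an AIFF only the outer FORM chunk is seen at this level.

import chunk

fp = open('sound.aiff', 'rb')      # hypothetical IFF-style file
while True:
    try:
        c = chunk.Chunk(fp)        # reads the 4-byte ID and 32-bit size
    except EOFError:
        break
    print(c.getname(), c.getsize())
    c.skip()                       # position fp at the start of the next chunk
fp.close()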
|
'Return the list of values for an axis'
|
def axisValues(self, axis):
|
ax = self._interpretAxis(axis)
if ('values' in self._info[ax]):
return self._info[ax]['values']
else:
raise Exception(('Array axis %s (%d) has no associated values.' % (str(axis), ax)))
|
'Synonym for axisValues()'
|
def xvals(self, axis):
|
return self.axisValues(axis)
|
'Return the units for axis'
|
def axisUnits(self, axis):
|
ax = self._info[self._interpretAxis(axis)]
if ('units' in ax):
return ax['units']
|
'Return a list of column names for axis. If axis is not specified, then return a dict of {axisName: (column names), ...}.'
|
def listColumns(self, axis=None):
|
if (axis is None):
ret = {}
for i in range(self.ndim):
if ('cols' in self._info[i]):
cols = [c['name'] for c in self._info[i]['cols']]
else:
cols = []
ret[self.axisName(i)] = cols
return ret
else:
axis = self._interpretAxis(axis)
return [c['name'] for c in self._info[axis]['cols']]
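A hedged construction sketch for the axis/column accessors above, assuming this is pyqtgraph's MetaArray; the import path and the axis/column names are assumptions.

import numpy as np
from pyqtgraph.metaarray import MetaArray   # import path is an assumption

data = np.random.normal(size=(2, 100))
info = [
    {'name': 'Signal', 'cols': [{'name': 'voltage', 'units': 'V'},
                                {'name': 'current', 'units': 'A'}]},
    {'name': 'Time', 'units': 's', 'values': np.linspace(0.0, 1.0, 100)},
]
ma = MetaArray(data, info=info)
print(ma.listColumns('Signal'))             # ['voltage', 'current']
print(ma.columnUnits('Signal', 'voltage'))  # 'V'
print(ma.axisUnits('Time'))                 # 's'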
|
'Return the units for column in axis'
|
def columnUnits(self, axis, column):
|
ax = self._info[self._interpretAxis(axis)]
if ('cols' in ax):
for c in ax['cols']:
if (c['name'] == column):
return c['units']
raise Exception(('Axis %s has no column named %s' % (str(axis), str(column))))
else:
raise Exception(('Axis %s has no column definitions' % str(axis)))
|
'Return this object with all records sorted along axis using key as the index to the values to compare. Does not yet modify meta info.'
|
def rowsort(self, axis, key=0):
|
keyList = self[key]
order = keyList.argsort()
if (type(axis) == int):
ind = ([slice(None)] * axis)
ind.append(order)
elif isinstance(axis, basestring):
ind = (slice(axis, order),)
return self[tuple(ind)]
|
'Return this object with val appended along axis. Does not yet combine meta info.'
|
def append(self, val, axis):
|
s = list(self.shape)
axis = self._interpretAxis(axis)
s[axis] += 1
n = MetaArray(tuple(s), info=self._info, dtype=self.dtype)
ind = ([slice(None)] * self.ndim)
ind[axis] = slice(None, (-1))
n[tuple(ind)] = self
ind[axis] = (-1)
n[tuple(ind)] = val
return n
|
'Return the concatenation along axis of this object and val. Does not yet combine meta info.'
|
def extend(self, val, axis):
|
axis = self._interpretAxis(axis)
return MetaArray(np.concatenate((self, val), axis=axis), info=self._info)
|
'Return a deep copy of the axis meta info for this object'
|
def infoCopy(self, axis=None):
|
if (axis is None):
return copy.deepcopy(self._info)
else:
return copy.deepcopy(self._info[self._interpretAxis(axis)])
|
'Load the data and meta info stored in *filename*
Different arguments are allowed depending on the type of file.
For HDF5 files:
*writable* (bool) if True, then any modifications to data in the array will be stored to disk.
*readAllData* (bool) if True, then all data in the array is immediately read from disk
and the file is closed (this is the default for files < 500MB). Otherwise, the file will
be left open and data will be read only as requested (this is
the default for files >= 500MB).'
|
def readFile(self, filename, **kwargs):
|
with open(filename, 'rb') as fd:
magic = fd.read(8)
if (magic == '\x89HDF\r\n\x1a\n'):
fd.close()
self._readHDF5(filename, **kwargs)
self._isHDF = True
else:
fd.seek(0)
meta = MetaArray._readMeta(fd)
if (not kwargs.get('readAllData', True)):
self._data = np.empty(meta['shape'], dtype=meta['type'])
if ('version' in meta):
ver = meta['version']
else:
ver = 1
rFuncName = ('_readData%s' % str(ver))
if (not hasattr(MetaArray, rFuncName)):
raise Exception(("This MetaArray library does not support array version '%s'" % ver))
rFunc = getattr(self, rFuncName)
rFunc(fd, meta, **kwargs)
self._isHDF = False
|
'Read meta array from the top of a file. Read lines until a blank line is reached.
This function should ideally work for ALL versions of MetaArray.'
|
@staticmethod
def _readMeta(fd):
|
meta = ''
while True:
line = fd.readline().strip()
if (line == ''):
break
meta += line
ret = eval(meta)
return ret
|
'Write this object to a file. The object can be restored by calling MetaArray(file=fileName)
opts:
appendAxis: the name (or index) of the appendable axis. Allows the array to grow.
compression: None, \'gzip\' (good compression), \'lzf\' (fast compression), etc.
chunks: bool or tuple specifying chunk shape'
|
def write(self, fileName, **opts):
|
if (USE_HDF5 and HAVE_HDF5):
return self.writeHDF5(fileName, **opts)
else:
return self.writeMa(fileName, **opts)
|
'Used to re-write meta info to the given file.
This feature is only available for HDF5 files.'
|
def writeMeta(self, fileName):
|
f = h5py.File(fileName, 'r+')
if (f.attrs['MetaArray'] != MetaArray.version):
raise Exception(('The file %s was created with a different version of MetaArray. Will not modify.' % fileName))
del f['info']
self.writeHDF5Meta(f, 'info', self._info)
f.close()
|
'Write an old-style .ma file'
|
def writeMa(self, fileName, appendAxis=None, newFile=False):
|
meta = {'shape': self.shape, 'type': str(self.dtype), 'info': self.infoCopy(), 'version': MetaArray.version}
axstrs = []
if (appendAxis is not None):
if MetaArray.isNameType(appendAxis):
appendAxis = self._interpretAxis(appendAxis)
ax = meta['info'][appendAxis]
ax['values_len'] = 'dynamic'
if ('values' in ax):
ax['values_type'] = str(ax['values'].dtype)
dynXVals = ax['values']
del ax['values']
else:
dynXVals = None
for ax in meta['info']:
if ('values' in ax):
axstrs.append(ax['values'].tostring())
ax['values_len'] = len(axstrs[(-1)])
ax['values_type'] = str(ax['values'].dtype)
del ax['values']
if (not newFile):
newFile = ((not os.path.exists(fileName)) or (os.stat(fileName).st_size == 0))
if ((appendAxis is None) or newFile):
fd = open(fileName, 'wb')
fd.write((str(meta) + '\n\n'))
for ax in axstrs:
fd.write(ax)
else:
fd = open(fileName, 'ab')
if (self.dtype != object):
dataStr = self.view(np.ndarray).tostring()
else:
dataStr = pickle.dumps(self.view(np.ndarray))
if (appendAxis is not None):
frameInfo = {'len': len(dataStr), 'numFrames': self.shape[appendAxis]}
if (dynXVals is not None):
frameInfo['xVals'] = list(dynXVals)
fd.write((('\n' + str(frameInfo)) + '\n'))
fd.write(dataStr)
fd.close()
|
'Write 2D array to CSV file or return the string if no filename is given'
|
def writeCsv(self, fileName=None):
|
if (self.ndim > 2):
raise Exception('CSV Export is only for 2D arrays')
if (fileName is not None):
file = open(fileName, 'w')
ret = ''
if ('cols' in self._info[0]):
s = (','.join([x['name'] for x in self._info[0]['cols']]) + '\n')
if (fileName is not None):
file.write(s)
else:
ret += s
for row in range(0, self.shape[1]):
s = (','.join([('%g' % x) for x in self[:, row]]) + '\n')
if (fileName is not None):
file.write(s)
else:
ret += s
if (fileName is not None):
file.close()
else:
return ret
|
'**Arguments:**
vertexes (Nv, 3) array of vertex coordinates.
If faces is not specified, then this will instead be
interpreted as (Nf, 3, 3) array of coordinates.
faces (Nf, 3) array of indexes into the vertex array.
edges [not available yet]
vertexColors (Nv, 4) array of vertex colors.
If faces is not specified, then this will instead be
interpreted as (Nf, 3, 4) array of colors.
faceColors (Nf, 4) array of face colors.
All arguments are optional.'
|
def __init__(self, vertexes=None, faces=None, edges=None, vertexColors=None, faceColors=None):
|
self._vertexes = None
self._vertexesIndexedByFaces = None
self._vertexesIndexedByEdges = None
self._faces = None
self._edges = None
self._vertexFaces = None
self._vertexEdges = None
self._vertexNormals = None
self._vertexNormalsIndexedByFaces = None
self._vertexColors = None
self._vertexColorsIndexedByFaces = None
self._vertexColorsIndexedByEdges = None
self._faceNormals = None
self._faceNormalsIndexedByFaces = None
self._faceColors = None
self._faceColorsIndexedByFaces = None
self._faceColorsIndexedByEdges = None
self._edgeColors = None
self._edgeColorsIndexedByEdges = None
if (vertexes is not None):
if (faces is None):
self.setVertexes(vertexes, indexed='faces')
if (vertexColors is not None):
self.setVertexColors(vertexColors, indexed='faces')
if (faceColors is not None):
self.setFaceColors(faceColors, indexed='faces')
else:
self.setVertexes(vertexes)
self.setFaces(faces)
if (vertexColors is not None):
self.setVertexColors(vertexColors)
if (faceColors is not None):
self.setFaceColors(faceColors)
|
'Return an array (Nf, 3) of vertex indexes, three per triangular face in the mesh.
If faces have not been computed for this mesh, the function returns None.'
|
def faces(self):
|
return self._faces
|
'Return an array (Ne, 2) of vertex indexes, two per edge in the mesh.'
|
def edges(self):
|
if (self._edges is None):
self._computeEdges()
return self._edges
|
'Set the (Nf, 3) array of faces. Each row in the array contains
three indexes into the vertex array, specifying the three corners
of a triangular face.'
|
def setFaces(self, faces):
|
self._faces = faces
self._edges = None
self._vertexFaces = None
self._vertexesIndexedByFaces = None
self.resetNormals()
self._vertexColorsIndexedByFaces = None
self._faceColorsIndexedByFaces = None
|
'Return an array (N,3) of the positions of vertexes in the mesh.
By default, each unique vertex appears only once in the array.
If indexed is \'faces\', then the array will instead contain three vertexes
per face in the mesh (and a single vertex may appear more than once in the array).'
|
def vertexes(self, indexed=None):
|
if (indexed is None):
if ((self._vertexes is None) and (self._vertexesIndexedByFaces is not None)):
self._computeUnindexedVertexes()
return self._vertexes
elif (indexed == 'faces'):
if ((self._vertexesIndexedByFaces is None) and (self._vertexes is not None)):
self._vertexesIndexedByFaces = self._vertexes[self.faces()]
return self._vertexesIndexedByFaces
else:
raise Exception("Invalid indexing mode. Accepts: None, 'faces'")
|
'Set the array (Nv, 3) of vertex coordinates.
If indexed==\'faces\', then the data must have shape (Nf, 3, 3) and is
assumed to be already indexed as a list of faces.
This will cause any pre-existing normal vectors to be cleared
unless resetNormals=False.'
|
def setVertexes(self, verts=None, indexed=None, resetNormals=True):
|
if (indexed is None):
if (verts is not None):
self._vertexes = verts
self._vertexesIndexedByFaces = None
elif (indexed == 'faces'):
self._vertexes = None
if (verts is not None):
self._vertexesIndexedByFaces = verts
else:
raise Exception("Invalid indexing mode. Accepts: None, 'faces'")
if resetNormals:
self.resetNormals()
|
'Return True if this object already has vertex positions indexed by face'
|
def hasFaceIndexedData(self):
|
return (self._vertexesIndexedByFaces is not None)
|
'Return True if this data set has vertex color information'
|
def hasVertexColor(self):
|
for v in (self._vertexColors, self._vertexColorsIndexedByFaces, self._vertexColorsIndexedByEdges):
if (v is not None):
return True
return False
|
'Return True if this data set has face color information'
|
def hasFaceColor(self):
|
for v in (self._faceColors, self._faceColorsIndexedByFaces, self._faceColorsIndexedByEdges):
if (v is not None):
return True
return False
|
'Return an array (Nf, 3) of normal vectors for each face.
If indexed=\'faces\', then instead return an indexed array
(Nf, 3, 3) (this is just the same array with each vector
copied three times).'
|
def faceNormals(self, indexed=None):
|
if (self._faceNormals is None):
v = self.vertexes(indexed='faces')
self._faceNormals = np.cross((v[:, 1] - v[:, 0]), (v[:, 2] - v[:, 0]))
if (indexed is None):
return self._faceNormals
elif (indexed == 'faces'):
if (self._faceNormalsIndexedByFaces is None):
norms = np.empty((self._faceNormals.shape[0], 3, 3))
norms[:] = self._faceNormals[:, np.newaxis, :]
self._faceNormalsIndexedByFaces = norms
return self._faceNormalsIndexedByFaces
else:
raise Exception("Invalid indexing mode. Accepts: None, 'faces'")
|
'Return an array of normal vectors.
By default, the array will be (N, 3) with one entry per unique vertex in the mesh.
If indexed is \'faces\', then the array will contain three normal vectors per face
(and some vertexes may be repeated).'
|
def vertexNormals(self, indexed=None):
|
if (self._vertexNormals is None):
faceNorms = self.faceNormals()
vertFaces = self.vertexFaces()
self._vertexNormals = np.empty(self._vertexes.shape, dtype=float)
for vindex in xrange(self._vertexes.shape[0]):
faces = vertFaces[vindex]
if (len(faces) == 0):
self._vertexNormals[vindex] = (0, 0, 0)
continue
norms = faceNorms[faces]
norm = norms.sum(axis=0)
norm /= ((norm ** 2).sum() ** 0.5)
self._vertexNormals[vindex] = norm
if (indexed is None):
return self._vertexNormals
elif (indexed == 'faces'):
return self._vertexNormals[self.faces()]
else:
raise Exception("Invalid indexing mode. Accepts: None, 'faces'")
|
'Return an array (Nv, 4) of vertex colors.
If indexed==\'faces\', then instead return an indexed array
(Nf, 3, 4).'
|
def vertexColors(self, indexed=None):
|
if (indexed is None):
return self._vertexColors
elif (indexed == 'faces'):
if (self._vertexColorsIndexedByFaces is None):
self._vertexColorsIndexedByFaces = self._vertexColors[self.faces()]
return self._vertexColorsIndexedByFaces
else:
raise Exception("Invalid indexing mode. Accepts: None, 'faces'")
|
'Set the vertex color array (Nv, 4).
If indexed==\'faces\', then the array will be interpreted
as indexed and should have shape (Nf, 3, 4)'
|
def setVertexColors(self, colors, indexed=None):
|
if (indexed is None):
self._vertexColors = colors
self._vertexColorsIndexedByFaces = None
elif (indexed == 'faces'):
self._vertexColors = None
self._vertexColorsIndexedByFaces = colors
else:
raise Exception("Invalid indexing mode. Accepts: None, 'faces'")
|
'Return an array (Nf, 4) of face colors.
If indexed==\'faces\', then instead return an indexed array
(Nf, 3, 4) (note this is just the same array with each color
repeated three times).'
|
def faceColors(self, indexed=None):
|
if (indexed is None):
return self._faceColors
elif (indexed == 'faces'):
if ((self._faceColorsIndexedByFaces is None) and (self._faceColors is not None)):
Nf = self._faceColors.shape[0]
self._faceColorsIndexedByFaces = np.empty((Nf, 3, 4), dtype=self._faceColors.dtype)
self._faceColorsIndexedByFaces[:] = self._faceColors.reshape(Nf, 1, 4)
return self._faceColorsIndexedByFaces
else:
raise Exception("Invalid indexing mode. Accepts: None, 'faces'")
|
'Set the face color array (Nf, 4).
If indexed==\'faces\', then the array will be interpreted
as indexed and should have shape (Nf, 3, 4)'
|
def setFaceColors(self, colors, indexed=None):
|
if (indexed is None):
self._faceColors = colors
self._faceColorsIndexedByFaces = None
elif (indexed == 'faces'):
self._faceColors = None
self._faceColorsIndexedByFaces = colors
else:
raise Exception("Invalid indexing mode. Accepts: None, 'faces'")
|
'Return the number of faces in the mesh.'
|
def faceCount(self):
|
if (self._faces is not None):
return self._faces.shape[0]
elif (self._vertexesIndexedByFaces is not None):
return self._vertexesIndexedByFaces.shape[0]
|
'Return list mapping each vertex index to a list of face indexes that use the vertex.'
|
def vertexFaces(self):
|
if (self._vertexFaces is None):
self._vertexFaces = [[] for i in xrange(len(self.vertexes()))]
for i in xrange(self._faces.shape[0]):
face = self._faces[i]
for ind in face:
self._vertexFaces[ind].append(i)
return self._vertexFaces
|
'Serialize this mesh to a string appropriate for disk storage'
|
def save(self):
|
import pickle
if (self._faces is not None):
names = ['_vertexes', '_faces']
else:
names = ['_vertexesIndexedByFaces']
if (self._vertexColors is not None):
names.append('_vertexColors')
elif (self._vertexColorsIndexedByFaces is not None):
names.append('_vertexColorsIndexedByFaces')
if (self._faceColors is not None):
names.append('_faceColors')
elif (self._faceColorsIndexedByFaces is not None):
names.append('_faceColorsIndexedByFaces')
state = dict([(n, getattr(self, n)) for n in names])
return pickle.dumps(state)
|
'Restore the state of a mesh previously saved using save()'
|
def restore(self, state):
|
import pickle
state = pickle.loads(state)
for k in state:
if isinstance(state[k], list):
if isinstance(state[k][0], QtGui.QVector3D):
state[k] = [[v.x(), v.y(), v.z()] for v in state[k]]
state[k] = np.array(state[k])
setattr(self, k, state[k])
|
'Return a MeshData instance with vertexes and faces computed
for a spherical surface.'
|
@staticmethod
def sphere(rows, cols, radius=1.0, offset=True):
|
verts = np.empty(((rows + 1), cols, 3), dtype=float)
phi = ((np.arange((rows + 1)) * np.pi) / rows).reshape((rows + 1), 1)
s = (radius * np.sin(phi))
verts[..., 2] = (radius * np.cos(phi))
th = (((np.arange(cols) * 2) * np.pi) / cols).reshape(1, cols)
if offset:
th = (th + ((np.pi / cols) * np.arange((rows + 1)).reshape((rows + 1), 1)))
verts[..., 0] = (s * np.cos(th))
verts[..., 1] = (s * np.sin(th))
verts = verts.reshape(((rows + 1) * cols), 3)[(cols - 1):(- (cols - 1))]
faces = np.empty((((rows * cols) * 2), 3), dtype=np.uint)
rowtemplate1 = (((np.arange(cols).reshape(cols, 1) + np.array([[0, 1, 0]])) % cols) + np.array([[0, 0, cols]]))
rowtemplate2 = (((np.arange(cols).reshape(cols, 1) + np.array([[0, 1, 1]])) % cols) + np.array([[cols, 0, cols]]))
for row in range(rows):
start = ((row * cols) * 2)
faces[start:(start + cols)] = (rowtemplate1 + (row * cols))
faces[(start + cols):(start + (cols * 2))] = (rowtemplate2 + (row * cols))
faces = faces[cols:(- cols)]
vmin = (cols - 1)
faces[(faces < vmin)] = vmin
faces -= vmin
vmax = (verts.shape[0] - 1)
faces[(faces > vmax)] = vmax
return MeshData(vertexes=verts, faces=faces)
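A minimal usage sketch, assuming this is pyqtgraph.opengl.MeshData and that pyqtgraph's OpenGL support is installed: build a sphere and inspect the derived geometry documented above.

import pyqtgraph.opengl as gl

md = gl.MeshData.sphere(rows=10, cols=20, radius=2.0)
print(md.vertexes().shape)      # (Nv, 3) unique vertex positions
print(md.faces().shape)         # (Nf, 3) vertex indexes per triangle
print(md.faceNormals().shape)   # (Nf, 3) one normal per face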
|
'Return a MeshData instance with vertexes and faces computed
for a cylindrical surface.
The cylinder may be tapered with different radii at each end (truncated cone)'
|
@staticmethod
def cylinder(rows, cols, radius=[1.0, 1.0], length=1.0, offset=False):
|
verts = np.empty(((rows + 1), cols, 3), dtype=float)
if isinstance(radius, int):
radius = [radius, radius]
th = np.linspace((2 * np.pi), 0, cols).reshape(1, cols)
r = np.linspace(radius[0], radius[1], num=(rows + 1), endpoint=True).reshape((rows + 1), 1)
verts[..., 2] = np.linspace(0, length, num=(rows + 1), endpoint=True).reshape((rows + 1), 1)
if offset:
th = (th + ((np.pi / cols) * np.arange((rows + 1)).reshape((rows + 1), 1)))
verts[..., 0] = (r * np.cos(th))
verts[..., 1] = (r * np.sin(th))
verts = verts.reshape(((rows + 1) * cols), 3)
faces = np.empty((((rows * cols) * 2), 3), dtype=np.uint)
rowtemplate1 = (((np.arange(cols).reshape(cols, 1) + np.array([[0, 1, 0]])) % cols) + np.array([[0, 0, cols]]))
rowtemplate2 = (((np.arange(cols).reshape(cols, 1) + np.array([[0, 1, 1]])) % cols) + np.array([[cols, 0, cols]]))
for row in range(rows):
start = ((row * cols) * 2)
faces[start:(start + cols)] = (rowtemplate1 + (row * cols))
faces[(start + cols):(start + (cols * 2))] = (rowtemplate2 + (row * cols))
return MeshData(vertexes=verts, faces=faces)
|
'Set the background color of the widget. Accepts the same arguments as
pg.mkColor() and pg.glColor().'
|
def setBackgroundColor(self, *args, **kwds):
|
self.opts['bgcolor'] = fn.glColor(*args, **kwds)
self.update()
|
'Return a list of the items displayed in the region (x, y, w, h)
relative to the widget.'
|
def itemsAt(self, region=None):
|
region = (region[0], (self.height() - (region[1] + region[3])), region[2], region[3])
buf = glSelectBuffer(100000)
try:
glRenderMode(GL_SELECT)
glInitNames()
glPushName(0)
self._itemNames = {}
self.paintGL(region=region, useItemNames=True)
finally:
hits = glRenderMode(GL_RENDER)
items = [(h.near, h.names[0]) for h in hits]
items.sort(key=(lambda i: i[0]))
return [self._itemNames[i[1]] for i in items]
|
'viewport specifies the arguments to glViewport. If None, then we use self.opts[\'viewport\']
region specifies the sub-region of self.opts[\'viewport\'] that should be rendered.
Note that we may use viewport != self.opts[\'viewport\'] when exporting.'
|
def paintGL(self, region=None, viewport=None, useItemNames=False):
|
if (viewport is None):
glViewport(*self.getViewport())
else:
glViewport(*viewport)
self.setProjection(region=region)
self.setModelview()
bgcolor = self.opts['bgcolor']
glClearColor(*bgcolor)
glClear((GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT))
self.drawItemTree(useItemNames=useItemNames)
|
'Return current position of camera based on center, dist, elevation, and azimuth'
|
def cameraPosition(self):
|
center = self.opts['center']
dist = self.opts['distance']
elev = ((self.opts['elevation'] * np.pi) / 180.0)
azim = ((self.opts['azimuth'] * np.pi) / 180.0)
pos = Vector((center.x() + ((dist * np.cos(elev)) * np.cos(azim))), (center.y() + ((dist * np.cos(elev)) * np.sin(azim))), (center.z() + (dist * np.sin(elev))))
return pos
|
'Orbits the camera around the center position. *azim* and *elev* are given in degrees.'
|
def orbit(self, azim, elev):
|
self.opts['azimuth'] += azim
self.opts['elevation'] = np.clip((self.opts['elevation'] + elev), (-90), 90)
self.update()
|
'Moves the center (look-at) position while holding the camera in place.
If relative=True, then the coordinates are interpreted such that x
is in the global xy plane and points to the right side of the view, y is
in the global xy plane and orthogonal to x, and z points in the global z
direction. Distances are scaled roughly such that a value of 1.0 moves
by one pixel on screen.'
|
def pan(self, dx, dy, dz, relative=False):
|
if (not relative):
self.opts['center'] += QtGui.QVector3D(dx, dy, dz)
else:
cPos = self.cameraPosition()
cVec = (self.opts['center'] - cPos)
dist = cVec.length()
xDist = ((dist * 2.0) * np.tan((((0.5 * self.opts['fov']) * np.pi) / 180.0)))
xScale = (xDist / self.width())
zVec = QtGui.QVector3D(0, 0, 1)
xVec = QtGui.QVector3D.crossProduct(zVec, cVec).normalized()
yVec = QtGui.QVector3D.crossProduct(xVec, zVec).normalized()
self.opts['center'] = (((self.opts['center'] + ((xVec * xScale) * dx)) + ((yVec * xScale) * dy)) + ((zVec * xScale) * dz))
self.update()
|
'Return the approximate size of a screen pixel at the location pos
Pos may be a Vector or an (N,3) array of locations'
|
def pixelSize(self, pos):
|
cam = self.cameraPosition()
if isinstance(pos, np.ndarray):
cam = np.array(cam).reshape((((1,) * (pos.ndim - 1)) + (3,)))
dist = (((pos - cam) ** 2).sum(axis=(-1)) ** 0.5)
else:
dist = (pos - cam).length()
xDist = ((dist * 2.0) * np.tan((((0.5 * self.opts['fov']) * np.pi) / 180.0)))
return (xDist / self.width())
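A hedged numeric check of the formula above; the field of view, distance, and viewport width are made-up values.

import numpy as np

fov, dist, width = 60.0, 10.0, 800.0
xDist = 2.0 * dist * np.tan(0.5 * fov * np.pi / 180.0)   # ~11.55 world units span the view
print(xDist / width)                                      # ~0.0144 world units per screen pixel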
|
'Read the current buffer pixels out as a QImage.'
|
def readQImage(self):
|
w = self.width()
h = self.height()
self.repaint()
pixels = np.empty((h, w, 4), dtype=np.ubyte)
pixels[:] = 128
pixels[..., 0] = 50
pixels[..., 3] = 255
glReadPixels(0, 0, w, h, GL_RGBA, GL_UNSIGNED_BYTE, pixels)
tmp = pixels[..., 0].copy()
pixels[..., 0] = pixels[..., 2]
pixels[..., 2] = tmp
pixels = pixels[::(-1)]
img = fn.makeQImage(pixels, transpose=False)
return img
|
'Return the location integer for a uniform variable in this program'
|
def uniform(self, name):
|
return glGetUniformLocation(self.program(), name.encode('utf_8'))
|
'Set the size of the axes (in its local coordinate system; this does not affect the transform)
Arguments can be x,y,z or size=QVector3D().'
|
def setSize(self, x=None, y=None, z=None, size=None):
|
if (size is not None):
x = size.x()
y = size.y()
z = size.z()
self.__size = [x, y, z]
self.update()
|