desc | decl | bodies |
---|---|---|
'True if \'self.result\' is an instance of \'self.wsgi_file_wrapper\''
| def result_is_file(self):
| wrapper = self.wsgi_file_wrapper
return ((wrapper is not None) and isinstance(self.result, wrapper))
|
'True if client can accept status and headers'
| def client_is_modern(self):
| return (self.environ['SERVER_PROTOCOL'].upper() != 'HTTP/0.9')
|
'Log the \'exc_info\' tuple in the server log
Subclasses may override to retarget the output or change its format.'
| def log_exception(self, exc_info):
| try:
from traceback import print_exception
stderr = self.get_stderr()
print_exception(exc_info[0], exc_info[1], exc_info[2], self.traceback_limit, stderr)
stderr.flush()
finally:
exc_info = None
|
'Log current error, and send error output to client if possible'
| def handle_error(self):
| self.log_exception(sys.exc_info())
if (not self.headers_sent):
self.result = self.error_output(self.environ, self.start_response)
self.finish_response()
|
'WSGI mini-app to create error output
By default, this just uses the \'error_status\', \'error_headers\',
and \'error_body\' attributes to generate an output page. It can
be overridden in a subclass to dynamically generate diagnostics,
choose an appropriate message for the user\'s preferred language, etc.
Note, however, that it\'s not recommended from a security perspective to
spit out diagnostics to any old user; ideally, you should have to do
something special to enable diagnostic output, which is why we don\'t
include any here!'
| def error_output(self, environ, start_response):
| start_response(self.error_status, self.error_headers[:], sys.exc_info())
return [self.error_body]
|
'Override in subclass to buffer data for send to client
It\'s okay if this method actually transmits the data; BaseHandler
just separates write and flush operations for greater efficiency
when the underlying system actually has such a distinction.'
| def _write(self, data):
| raise NotImplementedError
|
'Override in subclass to force sending of recent \'_write()\' calls
It\'s okay if this method is a no-op (i.e., if \'_write()\' actually
sends the data).'
| def _flush(self):
| raise NotImplementedError
|
'Override in subclass to return suitable \'wsgi.input\''
| def get_stdin(self):
| raise NotImplementedError
|
'Override in subclass to return suitable \'wsgi.errors\''
| def get_stderr(self):
| raise NotImplementedError
|
'Override in subclass to insert CGI variables in \'self.environ\''
| def add_cgi_vars(self):
| raise NotImplementedError
|
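The five hooks above (`_write`, `_flush`, `get_stdin`, `get_stderr`, `add_cgi_vars`) are what a concrete handler supplies; `wsgiref.handlers.SimpleHandler` fills them in with plain file-like objects. A minimal sketch along those lines (the `FileBackedHandler` name and its constructor are hypothetical, not the stdlib class):

```python
from wsgiref.handlers import BaseHandler

class FileBackedHandler(BaseHandler):
    """Hypothetical BaseHandler subclass backed by file-like objects."""
    def __init__(self, stdin, stdout, stderr, environ):
        self.stdin, self.stdout, self.stderr = stdin, stdout, stderr
        self.base_env = environ

    def _write(self, data):
        self.stdout.write(data)        # buffer/transmit response bytes

    def _flush(self):
        self.stdout.flush()            # push out anything _write() buffered

    def get_stdin(self):
        return self.stdin              # becomes environ['wsgi.input']

    def get_stderr(self):
        return self.stderr             # becomes environ['wsgi.errors']

    def add_cgi_vars(self):
        self.environ.update(self.base_env)   # merge the CGI variables
```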
'Serve a POST request.
This is only implemented for CGI scripts.'
| def do_POST(self):
| if self.is_cgi():
self.run_cgi()
else:
self.send_error(501, 'Can only POST to CGI scripts')
|
'Version of send_head that supports CGI scripts'
| def send_head(self):
| if self.is_cgi():
return self.run_cgi()
else:
return SimpleHTTPServer.SimpleHTTPRequestHandler.send_head(self)
|
'Test whether self.path corresponds to a CGI script.
Returns True and updates the cgi_info attribute to the tuple
(dir, rest) if self.path requires running a CGI script.
Returns False otherwise.
If any exception is raised, the caller should assume that
self.path was rejected as invalid and act accordingly.
The default implementation tests whether the normalized url
path begins with one of the strings in self.cgi_directories
(and the next character is a \'/\' or the end of the string).'
| def is_cgi(self):
| collapsed_path = _url_collapse_path(self.path)
dir_sep = collapsed_path.find('/', 1)
(head, tail) = (collapsed_path[:dir_sep], collapsed_path[(dir_sep + 1):])
if (head in self.cgi_directories):
self.cgi_info = (head, tail)
return True
return False
|
'Test whether argument path is an executable file.'
| def is_executable(self, path):
| return executable(path)
|
'Test whether argument path is a Python script.'
| def is_python(self, path):
| (head, tail) = os.path.splitext(path)
return (tail.lower() in ('.py', '.pyw'))
|
'Execute a CGI script.'
| def run_cgi(self):
| (dir, rest) = self.cgi_info
path = ((dir + '/') + rest)
i = path.find('/', (len(dir) + 1))
while (i >= 0):
nextdir = path[:i]
nextrest = path[(i + 1):]
scriptdir = self.translate_path(nextdir)
if os.path.isdir(scriptdir):
(dir, rest) = (nextdir, nextrest)
i = path.find('/', (len(dir) + 1))
else:
break
(rest, _, query) = rest.partition('?')
i = rest.find('/')
if (i >= 0):
(script, rest) = (rest[:i], rest[i:])
else:
(script, rest) = (rest, '')
scriptname = ((dir + '/') + script)
scriptfile = self.translate_path(scriptname)
if (not os.path.exists(scriptfile)):
self.send_error(404, ('No such CGI script (%r)' % scriptname))
return
if (not os.path.isfile(scriptfile)):
self.send_error(403, ('CGI script is not a plain file (%r)' % scriptname))
return
ispy = self.is_python(scriptname)
if (not ispy):
if (not (self.have_fork or self.have_popen2 or self.have_popen3)):
self.send_error(403, ('CGI script is not a Python script (%r)' % scriptname))
return
if (not self.is_executable(scriptfile)):
self.send_error(403, ('CGI script is not executable (%r)' % scriptname))
return
env = copy.deepcopy(os.environ)
env['SERVER_SOFTWARE'] = self.version_string()
env['SERVER_NAME'] = self.server.server_name
env['GATEWAY_INTERFACE'] = 'CGI/1.1'
env['SERVER_PROTOCOL'] = self.protocol_version
env['SERVER_PORT'] = str(self.server.server_port)
env['REQUEST_METHOD'] = self.command
uqrest = urllib.unquote(rest)
env['PATH_INFO'] = uqrest
env['PATH_TRANSLATED'] = self.translate_path(uqrest)
env['SCRIPT_NAME'] = scriptname
if query:
env['QUERY_STRING'] = query
host = self.address_string()
if (host != self.client_address[0]):
env['REMOTE_HOST'] = host
env['REMOTE_ADDR'] = self.client_address[0]
authorization = self.headers.getheader('authorization')
if authorization:
authorization = authorization.split()
if (len(authorization) == 2):
import base64, binascii
env['AUTH_TYPE'] = authorization[0]
if (authorization[0].lower() == 'basic'):
try:
authorization = base64.decodestring(authorization[1])
except binascii.Error:
pass
else:
authorization = authorization.split(':')
if (len(authorization) == 2):
env['REMOTE_USER'] = authorization[0]
if (self.headers.typeheader is None):
env['CONTENT_TYPE'] = self.headers.type
else:
env['CONTENT_TYPE'] = self.headers.typeheader
length = self.headers.getheader('content-length')
if length:
env['CONTENT_LENGTH'] = length
referer = self.headers.getheader('referer')
if referer:
env['HTTP_REFERER'] = referer
accept = []
for line in self.headers.getallmatchingheaders('accept'):
if (line[:1] in '\t\n\r '):
accept.append(line.strip())
else:
accept = (accept + line[7:].split(','))
env['HTTP_ACCEPT'] = ','.join(accept)
ua = self.headers.getheader('user-agent')
if ua:
env['HTTP_USER_AGENT'] = ua
co = filter(None, self.headers.getheaders('cookie'))
if co:
env['HTTP_COOKIE'] = ', '.join(co)
for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH', 'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'):
env.setdefault(k, '')
self.send_response(200, 'Script output follows')
decoded_query = query.replace('+', ' ')
if self.have_fork:
args = [script]
if ('=' not in decoded_query):
args.append(decoded_query)
nobody = nobody_uid()
self.wfile.flush()
pid = os.fork()
if (pid != 0):
(pid, sts) = os.waitpid(pid, 0)
while select.select([self.rfile], [], [], 0)[0]:
if (not self.rfile.read(1)):
break
if sts:
self.log_error('CGI script exit status %#x', sts)
return
try:
try:
os.setuid(nobody)
except os.error:
pass
os.dup2(self.rfile.fileno(), 0)
os.dup2(self.wfile.fileno(), 1)
os.execve(scriptfile, args, env)
except:
self.server.handle_error(self.request, self.client_address)
os._exit(127)
else:
import subprocess
cmdline = [scriptfile]
if self.is_python(scriptfile):
interp = sys.executable
if interp.lower().endswith('w.exe'):
interp = (interp[:(-5)] + interp[(-4):])
cmdline = ([interp, '-u'] + cmdline)
if ('=' not in query):
cmdline.append(query)
self.log_message('command: %s', subprocess.list2cmdline(cmdline))
try:
nbytes = int(length)
except (TypeError, ValueError):
nbytes = 0
p = subprocess.Popen(cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
if ((self.command.lower() == 'post') and (nbytes > 0)):
data = self.rfile.read(nbytes)
else:
data = None
while select.select([self.rfile._sock], [], [], 0)[0]:
if (not self.rfile._sock.recv(1)):
break
(stdout, stderr) = p.communicate(data)
self.wfile.write(stdout)
if stderr:
self.log_error('%s', stderr)
p.stderr.close()
p.stdout.close()
status = p.returncode
if status:
self.log_error('CGI script exit status %#x', status)
else:
self.log_message('CGI script exited OK')
|
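For context, `run_cgi()` above is driven by the Python 2 `CGIHTTPServer` module. A hedged usage sketch (port and CGI directory are arbitrary choices):

```python
import BaseHTTPServer
import CGIHTTPServer

handler = CGIHTTPServer.CGIHTTPRequestHandler
handler.cgi_directories = ['/cgi-bin']           # the prefixes is_cgi() accepts
server = BaseHTTPServer.HTTPServer(('127.0.0.1', 8000), handler)
# server.serve_forever()   # left commented so the sketch does not block
```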
'Override this method to support alternative .mo formats.'
| def _parse(self, fp):
| unpack = struct.unpack
filename = getattr(fp, 'name', '')
self._catalog = catalog = {}
self.plural = (lambda n: int((n != 1)))
buf = fp.read()
buflen = len(buf)
magic = unpack('<I', buf[:4])[0]
if (magic == self.LE_MAGIC):
(version, msgcount, masteridx, transidx) = unpack('<4I', buf[4:20])
ii = '<II'
elif (magic == self.BE_MAGIC):
(version, msgcount, masteridx, transidx) = unpack('>4I', buf[4:20])
ii = '>II'
else:
raise IOError(0, 'Bad magic number', filename)
for i in xrange(0, msgcount):
(mlen, moff) = unpack(ii, buf[masteridx:(masteridx + 8)])
mend = (moff + mlen)
(tlen, toff) = unpack(ii, buf[transidx:(transidx + 8)])
tend = (toff + tlen)
if ((mend < buflen) and (tend < buflen)):
msg = buf[moff:mend]
tmsg = buf[toff:tend]
else:
raise IOError(0, 'File is corrupt', filename)
if (mlen == 0):
lastk = None
for item in tmsg.splitlines():
item = item.strip()
if (not item):
continue
k = v = None
if (':' in item):
(k, v) = item.split(':', 1)
k = k.strip().lower()
v = v.strip()
self._info[k] = v
lastk = k
elif lastk:
self._info[lastk] += ('\n' + item)
if (k == 'content-type'):
self._charset = v.split('charset=')[1]
elif (k == 'plural-forms'):
v = v.split(';')
plural = v[1].split('plural=')[1]
self.plural = c2py(plural)
if ('\x00' in msg):
(msgid1, msgid2) = msg.split('\x00')
tmsg = tmsg.split('\x00')
if self._charset:
msgid1 = unicode(msgid1, self._charset)
tmsg = [unicode(x, self._charset) for x in tmsg]
for i in range(len(tmsg)):
catalog[(msgid1, i)] = tmsg[i]
else:
if self._charset:
msg = unicode(msg, self._charset)
tmsg = unicode(tmsg, self._charset)
catalog[msg] = tmsg
masteridx += 8
transidx += 8
|
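`_parse()` above reads the GNU `.mo` binary layout: a 4-byte magic number, then the version, message count, and the offsets of the original and translation index tables. A standalone sketch of just that header read (`read_mo_header` is a hypothetical helper, assuming the standard layout):

```python
import struct

def read_mo_header(path):
    # Hypothetical helper: return (version, msgcount, masteridx, transidx).
    with open(path, 'rb') as fp:
        buf = fp.read(20)
    magic = struct.unpack('<I', buf[:4])[0]
    if magic == 0x950412de:          # little-endian file (LE_MAGIC)
        fmt = '<4I'
    elif magic == 0xde120495:        # big-endian file (BE_MAGIC)
        fmt = '>4I'
    else:
        raise IOError(0, 'Bad magic number', path)
    return struct.unpack(fmt, buf[4:20])
```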
'Return the next item from the iterator. When exhausted, raise StopIteration'
| @abstractmethod
def next(self):
| raise StopIteration
|
'Construct an instance of the class from any iterable input.
Must override this method if the class constructor signature
does not accept an iterable for an input.'
| @classmethod
def _from_iterable(cls, it):
| return cls(it)
|
'Return True if two sets have a null intersection.'
| def isdisjoint(self, other):
| for value in other:
if (value in self):
return False
return True
|
'Compute the hash value of a set.
Note that we don\'t define __hash__: not all sets are hashable.
But if you define a hashable set type, its __hash__ should
call this function.
This must be compatible with __eq__.
All sets ought to compare equal if they contain the same
elements, regardless of how they are implemented, and
regardless of the order of the elements; so there\'s not much
freedom for __eq__ or __hash__. We match the algorithm used
by the built-in frozenset type.'
| def _hash(self):
| MAX = sys.maxint
MASK = ((2 * MAX) + 1)
n = len(self)
h = (1927868237 * (n + 1))
h &= MASK
for x in self:
hx = hash(x)
h ^= (((hx ^ (hx << 16)) ^ 89869747) * 3644798167)
h &= MASK
h = ((h * 69069) + 907133923)
h &= MASK
if (h > MAX):
h -= (MASK + 1)
if (h == (-1)):
h = 590923713
return h
|
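As the docstring suggests, a hashable set type should route `__hash__` through `_hash()`. A minimal sketch using the Python 2 `collections` ABCs (`FrozenPair` is a hypothetical class):

```python
from collections import Set

class FrozenPair(Set):
    """Hypothetical immutable set that reuses Set._hash as its __hash__."""
    def __init__(self, iterable):
        self._items = frozenset(iterable)
    def __contains__(self, value):
        return value in self._items
    def __iter__(self):
        return iter(self._items)
    def __len__(self):
        return len(self._items)
    __hash__ = Set._hash             # the mixin hash defined above

# Order does not matter: the per-element terms are combined with XOR.
assert hash(FrozenPair([1, 2])) == hash(FrozenPair([2, 1]))
```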
'Add an element.'
| @abstractmethod
def add(self, value):
| raise NotImplementedError
|
'Remove an element. Do not raise an exception if absent.'
| @abstractmethod
def discard(self, value):
| raise NotImplementedError
|
'Remove an element. If not a member, raise a KeyError.'
| def remove(self, value):
| if (value not in self):
raise KeyError(value)
self.discard(value)
|
'Return the popped value. Raise KeyError if empty.'
| def pop(self):
| it = iter(self)
try:
value = next(it)
except StopIteration:
raise KeyError
self.discard(value)
return value
|
'This is slow (creates N new iterators!) but effective.'
| def clear(self):
| try:
while True:
self.pop()
except KeyError:
pass
|
'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.'
| def get(self, key, default=None):
| try:
return self[key]
except KeyError:
return default
|
'D.iterkeys() -> an iterator over the keys of D'
| def iterkeys(self):
| return iter(self)
|
'D.itervalues() -> an iterator over the values of D'
| def itervalues(self):
| for key in self:
(yield self[key])
|
'D.iteritems() -> an iterator over the (key, value) items of D'
| def iteritems(self):
| for key in self:
(yield (key, self[key]))
|
'D.keys() -> list of D\'s keys'
| def keys(self):
| return list(self)
|
'D.items() -> list of D\'s (key, value) pairs, as 2-tuples'
| def items(self):
| return [(key, self[key]) for key in self]
|
'D.values() -> list of D\'s values'
| def values(self):
| return [self[key] for key in self]
|
'D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.'
| def pop(self, key, default=__marker):
| try:
value = self[key]
except KeyError:
if (default is self.__marker):
raise
return default
else:
del self[key]
return value
|
'D.popitem() -> (k, v), remove and return some (key, value) pair
as a 2-tuple; but raise KeyError if D is empty.'
| def popitem(self):
| try:
key = next(iter(self))
except StopIteration:
raise KeyError
value = self[key]
del self[key]
return (key, value)
|
'D.clear() -> None. Remove all items from D.'
| def clear(self):
| try:
while True:
self.popitem()
except KeyError:
pass
|
'D.update([E, ]**F) -> None. Update D from mapping/iterable E and F.
If E present and has a .keys() method, does: for k in E: D[k] = E[k]
If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v
In either case, this is followed by: for k, v in F.items(): D[k] = v'
| def update(*args, **kwds):
| if (not args):
raise TypeError("descriptor 'update' of 'MutableMapping' object needs an argument")
self = args[0]
args = args[1:]
if (len(args) > 1):
raise TypeError(('update expected at most 1 arguments, got %d' % len(args)))
if args:
other = args[0]
if isinstance(other, Mapping):
for key in other:
self[key] = other[key]
elif hasattr(other, 'keys'):
for key in other.keys():
self[key] = other[key]
else:
for (key, value) in other:
self[key] = value
for (key, value) in kwds.items():
self[key] = value
|
'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D'
| def setdefault(self, key, default=None):
| try:
return self[key]
except KeyError:
self[key] = default
return default
|
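Taken together, these mixins mean a mapping type only has to supply `__getitem__`, `__setitem__`, `__delitem__`, `__iter__` and `__len__`; everything above (`get`, `keys`, `items`, `values`, `pop`, `popitem`, `clear`, `update`, `setdefault`) then comes for free. A sketch with a hypothetical dict-backed class:

```python
from collections import MutableMapping

class DictBacked(MutableMapping):
    """Hypothetical mapping storing its items in a plain dict."""
    def __init__(self, *args, **kwds):
        self._data = {}
        self.update(*args, **kwds)      # reuses the mixin update() above
    def __getitem__(self, key):
        return self._data[key]
    def __setitem__(self, key, value):
        self._data[key] = value
    def __delitem__(self, key):
        del self._data[key]
    def __iter__(self):
        return iter(self._data)
    def __len__(self):
        return len(self._data)

d = DictBacked(a=1)
d.setdefault('b', 2)                    # mixin setdefault()
assert d.pop('a') == 1 and d.keys() == ['b']
```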
'S.index(value) -> integer -- return first index of value.
Raises ValueError if the value is not present.'
| def index(self, value):
| for (i, v) in enumerate(self):
if (v == value):
return i
raise ValueError
|
'S.count(value) -> integer -- return number of occurrences of value'
| def count(self, value):
| return sum((1 for v in self if (v == value)))
|
'S.insert(index, object) -- insert object before index'
| @abstractmethod
def insert(self, index, value):
| raise IndexError
|
'S.append(object) -- append object to the end of the sequence'
| def append(self, value):
| self.insert(len(self), value)
|
'S.reverse() -- reverse *IN PLACE*'
| def reverse(self):
| n = len(self)
for i in range((n // 2)):
(self[i], self[((n - i) - 1)]) = (self[((n - i) - 1)], self[i])
|
'S.extend(iterable) -- extend sequence by appending elements from the iterable'
| def extend(self, values):
| for v in values:
self.append(v)
|
'S.pop([index]) -> item -- remove and return item at index (default last).
Raise IndexError if list is empty or index is out of range.'
| def pop(self, index=(-1)):
| v = self[index]
del self[index]
return v
|
'S.remove(value) -- remove first occurrence of value.
Raise ValueError if the value is not present.'
| def remove(self, value):
| del self[self.index(value)]
|
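Similarly for sequences: with `__getitem__`, `__setitem__`, `__delitem__`, `__len__` and `insert` supplied, the mixins above derive `index`, `count`, `append`, `reverse`, `extend`, `pop` and `remove`. A sketch with a hypothetical list-backed class:

```python
from collections import MutableSequence

class Track(MutableSequence):
    """Hypothetical sequence wrapping a plain list."""
    def __init__(self, items=()):
        self._items = list(items)
    def __getitem__(self, index):
        return self._items[index]
    def __setitem__(self, index, value):
        self._items[index] = value
    def __delitem__(self, index):
        del self._items[index]
    def __len__(self):
        return len(self._items)
    def insert(self, index, value):
        self._items.insert(index, value)

t = Track([1, 2])
t.append(3)            # mixin append() -> insert(len(self), value)
t.remove(1)            # mixin remove() -> del self[self.index(value)]
assert list(t) == [2, 3]
```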
'This tests the improved concurrency with pysqlite 2.3.4. You needed
to roll back con2 before you could commit con1.'
| def CheckLocking(self):
| if (sqlite.sqlite_version_info < (3, 2, 2)):
return
self.cur1.execute('create table test(i)')
self.cur1.execute('insert into test(i) values (5)')
try:
self.cur2.execute('insert into test(i) values (5)')
self.fail('should have raised an OperationalError')
except sqlite.OperationalError:
pass
except:
self.fail('should have raised an OperationalError')
self.con1.commit()
|
'Checks if cursors on the connection are set into a "reset" state
when a rollback is done on the connection.'
| def CheckRollbackCursorConsistency(self):
| con = sqlite.connect(':memory:')
cur = con.cursor()
cur.execute('create table test(x)')
cur.execute('insert into test(x) values (5)')
cur.execute('select 1 union select 2 union select 3')
con.rollback()
try:
cur.fetchall()
self.fail('InterfaceError should have been raised')
except sqlite.InterfaceError as e:
pass
except:
self.fail('InterfaceError should have been raised')
|
'Can the connection be used as a context manager at all?'
| def CheckContextManager(self):
| with self.con:
pass
|
'Is a commit called in the context manager?'
| def CheckContextManagerCommit(self):
| with self.con:
self.con.execute("insert into test(c) values ('foo')")
self.con.rollback()
count = self.con.execute('select count(*) from test').fetchone()[0]
self.assertEqual(count, 1)
|
'Is a rollback called in the context manager?'
| def CheckContextManagerRollback(self):
| global did_rollback
self.assertEqual(did_rollback, False)
try:
with self.con:
self.con.execute('insert into test(c) values (4)')
self.con.execute('insert into test(c) values (4)')
except sqlite.IntegrityError:
pass
self.assertEqual(did_rollback, True)
|
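What the three context-manager tests above rely on: `with con:` commits when the block finishes normally and rolls back when it raises. A hedged sketch with the stdlib `sqlite3` module:

```python
import sqlite3

con = sqlite3.connect(':memory:')
con.execute('create table test(c unique)')

with con:                                   # commits at the end of the block
    con.execute('insert into test(c) values (1)')

try:
    with con:                               # rolls back: second insert violates UNIQUE
        con.execute('insert into test(c) values (2)')
        con.execute('insert into test(c) values (2)')
except sqlite3.IntegrityError:
    pass

assert [r[0] for r in con.execute('select c from test')] == [1]
```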
'Checks if the row object is iterable'
| def CheckSqliteRowIter(self):
| self.con.row_factory = sqlite.Row
row = self.con.execute('select 1 as a, 2 as b').fetchone()
for col in row:
pass
|
'Checks if the row object can be converted to a tuple'
| def CheckSqliteRowAsTuple(self):
| self.con.row_factory = sqlite.Row
row = self.con.execute('select 1 as a, 2 as b').fetchone()
t = tuple(row)
self.assertEqual(t, (row['a'], row['b']))
|
'Checks if the row object can be correctly converted to a dictionary'
| def CheckSqliteRowAsDict(self):
| self.con.row_factory = sqlite.Row
row = self.con.execute('select 1 as a, 2 as b').fetchone()
d = dict(row)
self.assertEqual(d['a'], row['a'])
self.assertEqual(d['b'], row['b'])
|
'Checks if the row object compares and hashes correctly'
| def CheckSqliteRowHashCmp(self):
| self.con.row_factory = sqlite.Row
row_1 = self.con.execute('select 1 as a, 2 as b').fetchone()
row_2 = self.con.execute('select 1 as a, 2 as b').fetchone()
row_3 = self.con.execute('select 1 as a, 3 as b').fetchone()
self.assertEqual(row_1, row_1)
self.assertEqual(row_1, row_2)
self.assertTrue((row_2 != row_3))
self.assertFalse((row_1 != row_1))
self.assertFalse((row_1 != row_2))
self.assertFalse((row_2 == row_3))
self.assertEqual(row_1, row_2)
self.assertEqual(hash(row_1), hash(row_2))
self.assertNotEqual(row_1, row_3)
self.assertNotEqual(hash(row_1), hash(row_3))
|
'Checks if the row object can act like a sequence'
| def CheckSqliteRowAsSequence(self):
| self.con.row_factory = sqlite.Row
row = self.con.execute('select 1 as a, 2 as b').fetchone()
as_tuple = tuple(row)
self.assertEqual(list(reversed(row)), list(reversed(as_tuple)))
self.assertIsInstance(row, Sequence)
|
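A compact, hedged summary of the `sqlite3.Row` behaviour these tests cover:

```python
import sqlite3

con = sqlite3.connect(':memory:')
con.row_factory = sqlite3.Row
row = con.execute('select 1 as a, 2 as b').fetchone()

assert row[0] == row['a'] == 1        # index and name access
assert tuple(row) == (1, 2)           # sequence conversion
assert dict(row) == {'a': 1, 'b': 2}  # dict() uses Row.keys()
```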
'Register two different collation functions under the same name.
Verify that the last one is actually used.'
| def CheckCollationRegisterTwice(self):
| con = sqlite.connect(':memory:')
con.create_collation('mycoll', cmp)
con.create_collation('mycoll', (lambda x, y: (- cmp(x, y))))
result = con.execute("\n select x from (select 'a' as x union select 'b' as x) order by x collate mycoll\n ").fetchall()
if ((result[0][0] != 'b') or (result[1][0] != 'a')):
self.fail('wrong collation function is used')
|
'Register a collation, then deregister it. Make sure an error is raised if we try
to use it.'
| def CheckDeregisterCollation(self):
| con = sqlite.connect(':memory:')
con.create_collation('mycoll', cmp)
con.create_collation('mycoll', None)
try:
con.execute("select 'a' as x union select 'b' as x order by x collate mycoll")
self.fail('should have raised an OperationalError')
except sqlite.OperationalError as e:
if (not e.args[0].startswith('no such collation sequence')):
self.fail('wrong OperationalError raised')
|
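The collation API the two tests above exercise, sketched with `sqlite3` (the `reverse` name and callback are arbitrary):

```python
import sqlite3

def reverse_order(a, b):
    # Collation callbacks return negative/zero/positive, like cmp().
    return -cmp(a, b)

con = sqlite3.connect(':memory:')
con.create_collation('reverse', reverse_order)
rows = con.execute(
    "select x from (select 'a' as x union select 'b') order by x collate reverse"
).fetchall()
assert [r[0] for r in rows] == ['b', 'a']
con.create_collation('reverse', None)      # deregister the collation
```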
'Test that the progress handler is invoked once it is set.'
| def CheckProgressHandlerUsed(self):
| con = sqlite.connect(':memory:')
progress_calls = []
def progress():
progress_calls.append(None)
return 0
con.set_progress_handler(progress, 1)
con.execute('\n create table foo(a, b)\n ')
self.assertTrue(progress_calls)
|
'Test that the opcode argument is respected.'
| def CheckOpcodeCount(self):
| con = sqlite.connect(':memory:')
progress_calls = []
def progress():
progress_calls.append(None)
return 0
con.set_progress_handler(progress, 1)
curs = con.cursor()
curs.execute('\n create table foo (a, b)\n ')
first_count = len(progress_calls)
progress_calls = []
con.set_progress_handler(progress, 2)
curs.execute('\n create table bar (a, b)\n ')
second_count = len(progress_calls)
self.assertGreaterEqual(first_count, second_count)
|
'Test that returning a non-zero value stops the operation in progress.'
| def CheckCancelOperation(self):
| con = sqlite.connect(':memory:')
progress_calls = []
def progress():
progress_calls.append(None)
return 1
con.set_progress_handler(progress, 1)
curs = con.cursor()
self.assertRaises(sqlite.OperationalError, curs.execute, 'create table bar (a, b)')
|
'Test that setting the progress handler to None clears the previously set handler.'
| def CheckClearHandler(self):
| con = sqlite.connect(':memory:')
action = []
def progress():
action.append(1)
return 0
con.set_progress_handler(progress, 1)
con.set_progress_handler(None, 1)
con.execute('select 1 union select 2 union select 3').fetchall()
self.assertEqual(len(action), 0, 'progress handler was not cleared')
|
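The progress-handler API used by the four tests above, in one hedged sketch: the callback runs every N SQLite opcodes, a nonzero return aborts the statement, and passing None clears the handler.

```python
import sqlite3

con = sqlite3.connect(':memory:')
calls = []

def progress():
    calls.append(None)
    return 0                  # 0 = continue; nonzero raises OperationalError

con.set_progress_handler(progress, 1)   # invoke on every opcode
con.execute('create table t(a)')
assert calls                             # the handler ran at least once
con.set_progress_handler(None, 1)        # clear it again
```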
'A commit should also work when no changes were made to the database.'
| def CheckCommitAfterNoChanges(self):
| self.cx.commit()
self.cx.commit()
|
'A rollback should also work when no changes were made to the database.'
| def CheckRollbackAfterNoChanges(self):
| self.cx.rollback()
self.cx.rollback()
|
'pysqlite does not know the rowcount of SELECT statements, because we
don\'t fetch all rows after executing the select statement. The rowcount
thus has to be -1.'
| def CheckRowcountSelect(self):
| self.cu.execute('select 5 union select 6')
self.assertEqual(self.cu.rowcount, (-1))
|
'Checks if fetchmany works with keyword arguments'
| def CheckFetchmanyKwArg(self):
| self.cu.execute('select name from test')
res = self.cu.fetchmany(size=100)
self.assertEqual(len(res), 1)
|
'Checks whether converter names are cut off at \'(\' characters'
| def CheckNumber2(self):
| self.cur.execute('insert into test(n2) values (5)')
value = self.cur.execute('select n2 from test').fetchone()[0]
self.assertEqual(type(value), float)
|
'Assures that the declared type is not used when PARSE_DECLTYPES
is not set.'
| def CheckDeclTypeNotUsed(self):
| self.cur.execute('insert into test(x) values (?)', ('xxx',))
self.cur.execute('select x from test')
val = self.cur.fetchone()[0]
self.assertEqual(val, 'xxx')
|
'cursor.description should at least provide the column name(s), even if
no row returned.'
| def CheckCursorDescriptionNoRow(self):
| self.cur.execute('select * from test where 0 = 1')
self.assertEqual(self.cur.description[0][0], 'x')
|
'pysqlite would crash with older SQLite versions unless
a workaround is implemented.'
| def CheckWorkaroundForBuggySqliteTransferBindings(self):
| self.con.execute('create table foo(bar)')
self.con.execute('drop table foo')
self.con.execute('create table foo(bar)')
|
'pysqlite used to segfault with SQLite versions 3.5.x. These return NULL
for "no-operation" statements'
| def CheckEmptyStatement(self):
| self.con.execute('')
|
'With pysqlite 2.4.0 you needed to use a string or an APSW connection
object for opening database connections.
Formerly, both bytestrings and unicode strings used to work.
Let\'s make sure unicode strings work in the future.'
| def CheckUnicodeConnect(self):
| con = sqlite.connect(u':memory:')
con.close()
|
'pysqlite until 2.4.1 did not rebuild the row_cast_map when recompiling
a statement. This test exhibits the problem.'
| def CheckTypeMapUsage(self):
| SELECT = 'select * from foo'
con = sqlite.connect(':memory:', detect_types=sqlite.PARSE_DECLTYPES)
con.execute('create table foo(bar timestamp)')
con.execute('insert into foo(bar) values (?)', (datetime.datetime.now(),))
con.execute(SELECT)
con.execute('drop table foo')
con.execute('create table foo(bar integer)')
con.execute('insert into foo(bar) values (5)')
con.execute(SELECT)
|
'See issue 3312.'
| def CheckRegisterAdapter(self):
| self.assertRaises(TypeError, sqlite.register_adapter, {}, None)
|
'See issue 3312.'
| def CheckSetIsolationLevel(self):
| con = sqlite.connect(':memory:')
self.assertRaises(UnicodeEncodeError, setattr, con, 'isolation_level', u'\xe9')
|
'Verifies that cursor methods check whether base class __init__ was
called.'
| def CheckCursorConstructorCallCheck(self):
| class Cursor(sqlite.Cursor, ):
def __init__(self, con):
pass
con = sqlite.connect(':memory:')
cur = Cursor(con)
try:
cur.execute('select 4+5').fetchall()
self.fail('should have raised ProgrammingError')
except sqlite.ProgrammingError:
pass
except:
self.fail('should have raised ProgrammingError')
|
'Verifies that connection methods check whether base class __init__ was
called.'
| def CheckConnectionConstructorCallCheck(self):
| class Connection(sqlite.Connection, ):
def __init__(self, name):
pass
con = Connection(':memory:')
try:
cur = con.cursor()
self.fail('should have raised ProgrammingError')
except sqlite.ProgrammingError:
pass
except:
self.fail('should have raised ProgrammingError')
|
'Verifies that subclassed cursor classes are correctly registered with
the connection object, too. (fetch-across-rollback problem)'
| def CheckCursorRegistration(self):
| class Connection(sqlite.Connection, ):
def cursor(self):
return Cursor(self)
class Cursor(sqlite.Cursor, ):
def __init__(self, con):
sqlite.Cursor.__init__(self, con)
con = Connection(':memory:')
cur = con.cursor()
cur.execute('create table foo(x)')
cur.executemany('insert into foo(x) values (?)', [(3,), (4,), (5,)])
cur.execute('select x from foo')
con.rollback()
try:
cur.fetchall()
self.fail('should have raised InterfaceError')
except sqlite.InterfaceError:
pass
except:
self.fail('should have raised InterfaceError')
|
'Verifies that creating a connection in autocommit mode works.
2.5.3 introduced a regression so that these could no longer
be created.'
| def CheckAutoCommit(self):
| con = sqlite.connect(':memory:', isolation_level=None)
|
'Verifies that running a PRAGMA statement that triggers an autocommit
works. This did not work in 2.5.3/2.5.4.'
| def CheckPragmaAutocommit(self):
| cur = self.con.cursor()
cur.execute('create table foo(bar)')
cur.execute('insert into foo(bar) values (5)')
cur.execute('pragma page_size')
row = cur.fetchone()
|
'See http://bugs.python.org/issue7478
It was possible to successfully register callbacks that could not be
hashed. Return codes of PyDict_SetItem were not checked properly.'
| def CheckSetDict(self):
| class NotHashable:
def __call__(self, *args, **kw):
pass
def __hash__(self):
raise TypeError()
var = NotHashable()
self.assertRaises(TypeError, self.con.create_function, var)
self.assertRaises(TypeError, self.con.create_aggregate, var)
self.assertRaises(TypeError, self.con.set_authorizer, var)
self.assertRaises(TypeError, self.con.set_progress_handler, var)
|
'Call a connection with a non-string SQL request: check error handling
of the statement constructor.'
| def CheckConnectionCall(self):
| self.assertRaises(sqlite.Warning, self.con, 1)
|
'http://bugs.python.org/issue10811
Recursively using a cursor, such as when reusing it from a generator, led to segfaults.
Now we catch recursive cursor usage and raise a ProgrammingError.'
| def CheckRecursiveCursorUse(self):
| con = sqlite.connect(':memory:')
cur = con.cursor()
cur.execute('create table a (bar)')
cur.execute('create table b (baz)')
def foo():
cur.execute('insert into a (bar) values (?)', (1,))
(yield 1)
with self.assertRaises(sqlite.ProgrammingError):
cur.executemany('insert into b (baz) values (?)', ((i,) for i in foo()))
|
'http://bugs.python.org/issue14720
The microsecond parsing of convert_timestamp() should pad with zeros,
since the microsecond string "456" actually represents "456000".'
| def CheckConvertTimestampMicrosecondPadding(self):
| con = sqlite.connect(':memory:', detect_types=sqlite.PARSE_DECLTYPES)
cur = con.cursor()
cur.execute('CREATE TABLE t (x TIMESTAMP)')
cur.execute("INSERT INTO t (x) VALUES ('2012-04-04 15:06:00.456')")
cur.execute("INSERT INTO t (x) VALUES ('2012-04-04 15:06:00.123456789')")
cur.execute('SELECT * FROM t')
values = [x[0] for x in cur.fetchall()]
self.assertEqual(values, [datetime.datetime(2012, 4, 4, 15, 6, 0, 456000), datetime.datetime(2012, 4, 4, 15, 6, 0, 123456)])
|
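The converter behaviour this test pins down, as a hedged sketch (assumes a Python with the issue 14720 padding fix): with `PARSE_DECLTYPES`, a column declared `timestamp` comes back as `datetime.datetime`, and a three-digit fraction such as `.456` is padded to 456000 microseconds.

```python
import sqlite3, datetime

con = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_DECLTYPES)
cur = con.cursor()
cur.execute('create table t (x timestamp)')
cur.execute("insert into t (x) values ('2012-04-04 15:06:00.456')")
value = cur.execute('select x from t').fetchone()[0]
assert value == datetime.datetime(2012, 4, 4, 15, 6, 0, 456000)
```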
'Return true if (and only if) cookie should be accepted from server.
Currently, pre-expired cookies never get this far -- the CookieJar
class deletes such cookies itself.'
| def set_ok(self, cookie, request):
| raise NotImplementedError()
|
'Return true if (and only if) cookie should be returned to server.'
| def return_ok(self, cookie, request):
| raise NotImplementedError()
|
'Return false if cookies should not be returned, given cookie domain.'
| def domain_return_ok(self, domain, request):
| return True
|
'Return false if cookies should not be returned, given cookie path.'
| def path_return_ok(self, path, request):
| return True
|
'Constructor arguments should be passed as keyword arguments only.'
| def __init__(self, blocked_domains=None, allowed_domains=None, netscape=True, rfc2965=False, rfc2109_as_netscape=None, hide_cookie2=False, strict_domain=False, strict_rfc2965_unverifiable=True, strict_ns_unverifiable=False, strict_ns_domain=DomainLiberal, strict_ns_set_initial_dollar=False, strict_ns_set_path=False):
| self.netscape = netscape
self.rfc2965 = rfc2965
self.rfc2109_as_netscape = rfc2109_as_netscape
self.hide_cookie2 = hide_cookie2
self.strict_domain = strict_domain
self.strict_rfc2965_unverifiable = strict_rfc2965_unverifiable
self.strict_ns_unverifiable = strict_ns_unverifiable
self.strict_ns_domain = strict_ns_domain
self.strict_ns_set_initial_dollar = strict_ns_set_initial_dollar
self.strict_ns_set_path = strict_ns_set_path
if (blocked_domains is not None):
self._blocked_domains = tuple(blocked_domains)
else:
self._blocked_domains = ()
if (allowed_domains is not None):
allowed_domains = tuple(allowed_domains)
self._allowed_domains = allowed_domains
|
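A hedged usage sketch for the constructor above, passing keyword arguments only and attaching the policy to a `CookieJar` (the blocked domain is a hypothetical example):

```python
import cookielib

policy = cookielib.DefaultCookiePolicy(
    blocked_domains=['ads.example.com'],   # hypothetical domain, for illustration
    strict_ns_domain=cookielib.DefaultCookiePolicy.DomainStrict,
)
jar = cookielib.CookieJar(policy=policy)
assert policy.blocked_domains() == ('ads.example.com',)
```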
'Return the sequence of blocked domains (as a tuple).'
| def blocked_domains(self):
| return self._blocked_domains
|
'Set the sequence of blocked domains.'
| def set_blocked_domains(self, blocked_domains):
| self._blocked_domains = tuple(blocked_domains)
|
'Return None, or the sequence of allowed domains (as a tuple).'
| def allowed_domains(self):
| return self._allowed_domains
|
'Set the sequence of allowed domains, or None.'
| def set_allowed_domains(self, allowed_domains):
| if (allowed_domains is not None):
allowed_domains = tuple(allowed_domains)
self._allowed_domains = allowed_domains
|
'If you override .set_ok(), be sure to call this method. If it returns
false, so should your subclass (assuming your subclass wants to be more
strict about which cookies to accept).'
| def set_ok(self, cookie, request):
| _debug(' - checking cookie %s=%s', cookie.name, cookie.value)
assert (cookie.name is not None)
for n in ('version', 'verifiability', 'name', 'path', 'domain', 'port'):
fn_name = ('set_ok_' + n)
fn = getattr(self, fn_name)
if (not fn(cookie, request)):
return False
return True
|
'If you override .return_ok(), be sure to call this method. If it
returns false, so should your subclass (assuming your subclass wants to
be more strict about which cookies to return).'
| def return_ok(self, cookie, request):
| _debug(' - checking cookie %s=%s', cookie.name, cookie.value)
for n in ('version', 'verifiability', 'secure', 'expires', 'port', 'domain'):
fn_name = ('return_ok_' + n)
fn = getattr(self, fn_name)
if (not fn(cookie, request)):
return False
return True
|
'Return a list of cookies to be returned to server.'
| def _cookies_for_request(self, request):
| cookies = []
for domain in self._cookies.keys():
cookies.extend(self._cookies_for_domain(domain, request))
return cookies
|
'Return a list of cookie-attributes to be returned to the server,
like [\'foo="bar"; $Path="/"\', ...]
The $Version attribute is also added when appropriate (currently only
once per request).'
| def _cookie_attrs(self, cookies):
| cookies.sort(key=(lambda arg: len(arg.path)), reverse=True)
version_set = False
attrs = []
for cookie in cookies:
version = cookie.version
if (not version_set):
version_set = True
if (version > 0):
attrs.append(('$Version=%s' % version))
if ((cookie.value is not None) and self.non_word_re.search(cookie.value) and (version > 0)):
value = self.quote_re.sub('\\\\\\1', cookie.value)
else:
value = cookie.value
if (cookie.value is None):
attrs.append(cookie.name)
else:
attrs.append(('%s=%s' % (cookie.name, value)))
if (version > 0):
if cookie.path_specified:
attrs.append(('$Path="%s"' % cookie.path))
if cookie.domain.startswith('.'):
domain = cookie.domain
if ((not cookie.domain_initial_dot) and domain.startswith('.')):
domain = domain[1:]
attrs.append(('$Domain="%s"' % domain))
if (cookie.port is not None):
p = '$Port'
if cookie.port_specified:
p = (p + ('="%s"' % cookie.port))
attrs.append(p)
return attrs
|
'Add correct Cookie: header to request (urllib2.Request object).
The Cookie2 header is also added unless policy.hide_cookie2 is true.'
| def add_cookie_header(self, request):
| _debug('add_cookie_header')
self._cookies_lock.acquire()
try:
self._policy._now = self._now = int(time.time())
cookies = self._cookies_for_request(request)
attrs = self._cookie_attrs(cookies)
if attrs:
if (not request.has_header('Cookie')):
request.add_unredirected_header('Cookie', '; '.join(attrs))
if (self._policy.rfc2965 and (not self._policy.hide_cookie2) and (not request.has_header('Cookie2'))):
for cookie in cookies:
if (cookie.version != 1):
request.add_unredirected_header('Cookie2', '$Version="1"')
break
finally:
self._cookies_lock.release()
self.clear_expired_cookies()
|
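In practice `add_cookie_header()` and `make_cookies()` are rarely called directly; they are driven by `urllib2.HTTPCookieProcessor`. A hedged sketch (the URL is a placeholder and the request is left commented out):

```python
import cookielib, urllib2

jar = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
# response = opener.open('http://example.com/')  # make_cookies() stores Set-Cookie headers
# a later opener.open() sends them back via add_cookie_header()
```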
'Return list of tuples containing normalised cookie information.
attrs_set is the list of lists of key,value pairs extracted from
the Set-Cookie or Set-Cookie2 headers.
Tuples are name, value, standard, rest, where name and value are the
cookie name and value, standard is a dictionary containing the standard
cookie-attributes (discard, secure, version, expires or max-age,
domain, path and port) and rest is a dictionary containing the rest of
the cookie-attributes.'
| def _normalized_cookie_tuples(self, attrs_set):
| cookie_tuples = []
boolean_attrs = ('discard', 'secure')
value_attrs = ('version', 'expires', 'max-age', 'domain', 'path', 'port', 'comment', 'commenturl')
for cookie_attrs in attrs_set:
(name, value) = cookie_attrs[0]
max_age_set = False
bad_cookie = False
standard = {}
rest = {}
for (k, v) in cookie_attrs[1:]:
lc = k.lower()
if ((lc in value_attrs) or (lc in boolean_attrs)):
k = lc
if ((k in boolean_attrs) and (v is None)):
v = True
if (k in standard):
continue
if (k == 'domain'):
if (v is None):
_debug(' missing value for domain attribute')
bad_cookie = True
break
v = v.lower()
if (k == 'expires'):
if max_age_set:
continue
if (v is None):
_debug(' missing or invalid value for expires attribute: treating as session cookie')
continue
if (k == 'max-age'):
max_age_set = True
try:
v = int(v)
except ValueError:
_debug(' missing or invalid (non-numeric) value for max-age attribute')
bad_cookie = True
break
k = 'expires'
v = (self._now + v)
if ((k in value_attrs) or (k in boolean_attrs)):
if ((v is None) and (k not in ('port', 'comment', 'commenturl'))):
_debug((' missing value for %s attribute' % k))
bad_cookie = True
break
standard[k] = v
else:
rest[k] = v
if bad_cookie:
continue
cookie_tuples.append((name, value, standard, rest))
return cookie_tuples
|
'Return sequence of Cookie objects extracted from response object.'
| def make_cookies(self, response, request):
| headers = response.info()
rfc2965_hdrs = headers.getheaders('Set-Cookie2')
ns_hdrs = headers.getheaders('Set-Cookie')
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if (((not rfc2965_hdrs) and (not ns_hdrs)) or ((not ns_hdrs) and (not rfc2965)) or ((not rfc2965_hdrs) and (not netscape)) or ((not netscape) and (not rfc2965))):
return []
try:
cookies = self._cookies_from_attrs_set(split_header_words(rfc2965_hdrs), request)
except Exception:
_warn_unhandled_exception()
cookies = []
if (ns_hdrs and netscape):
try:
ns_cookies = self._cookies_from_attrs_set(parse_ns_headers(ns_hdrs), request)
except Exception:
_warn_unhandled_exception()
ns_cookies = []
self._process_rfc2109_cookies(ns_cookies)
if rfc2965:
lookup = {}
for cookie in cookies:
lookup[(cookie.domain, cookie.path, cookie.name)] = None
def no_matching_rfc2965(ns_cookie, lookup=lookup):
key = (ns_cookie.domain, ns_cookie.path, ns_cookie.name)
return (key not in lookup)
ns_cookies = filter(no_matching_rfc2965, ns_cookies)
if ns_cookies:
cookies.extend(ns_cookies)
return cookies
|