desc
stringlengths 3
26.7k
| decl
stringlengths 11
7.89k
| bodies
stringlengths 8
553k
|
---|---|---|
def send(self, s):
    """Write *s* to the child process and return the number of bytes written.

    The data is mirrored to ``logfile`` and ``logfile_send`` when those
    are set.
    """
    time.sleep(self.delaybeforesend)
    # mirror to the general log first, then the send-only log
    for log in (self.logfile, self.logfile_send):
        if log is not None:
            log.write(s)
            log.flush()
    return os.write(self.child_fd, s)
|
def sendline(self, s=''):
    """Send *s* followed by os.linesep; return the total bytes written."""
    written = self.send(s)
    written += self.send(os.linesep)
    return written
|
def sendcontrol(self, char):
    """Send the control character for *char*, e.g. 'g' -> Ctrl-G (ASCII 7).

    Returns the number of bytes written (0 for an unmapped character).
    See also sendintr() and sendeof().
    """
    char = char.lower()
    code = ord(char)
    if 97 <= code <= 122:
        # letters a-z map onto control codes 1..26
        return self.send(chr(code - ord('a') + 1))
    punct = {'@': 0, '`': 0, '[': 27, '{': 27, '\\': 28, '|': 28, ']': 29, '}': 29, '^': 30, '~': 30, '_': 31, '?': 127}
    if char not in punct:
        return 0
    return self.send(chr(punct[char]))
|
def sendeof(self):
    """Send the tty EOF character (VEOF, usually Ctrl-D) to the child.

    Flushes pending parent output to the child; at the start of a line
    the child's read() returns 0 (end-of-file). No newline is sent, and
    the caller must ensure the EOF lands at the beginning of a line.
    """
    # fall back to Ctrl-D when the platform termios lacks VEOF
    char = chr(4)
    if hasattr(termios, 'VEOF'):
        char = termios.tcgetattr(self.child_fd)[6][termios.VEOF]
    self.send(char)
|
def sendintr(self):
    """Send the tty interrupt character (VINTR, usually Ctrl-C).

    Unlike sendeof(), this need not be the first character on a line.
    """
    # fall back to Ctrl-C when the platform termios lacks VINTR
    char = chr(3)
    if hasattr(termios, 'VINTR'):
        char = termios.tcgetattr(self.child_fd)[6][termios.VINTR]
    self.send(char)
|
def eof(self):
    """Return True if the EOF exception was ever raised."""
    seen_eof = self.flag_eof
    return seen_eof
|
def terminate(self, force=False):
    """Force the child to terminate, escalating SIGHUP -> SIGCONT -> SIGINT.

    With ``force=True`` a final SIGKILL is tried. Returns True when the
    child was terminated, False otherwise.
    """
    if not self.isalive():
        return True
    try:
        for sig in (signal.SIGHUP, signal.SIGCONT, signal.SIGINT):
            self.kill(sig)
            time.sleep(self.delayafterterminate)
            if not self.isalive():
                return True
        if force:
            self.kill(signal.SIGKILL)
            time.sleep(self.delayafterterminate)
            return not self.isalive()
        return False
    except OSError:
        # the child may have died between isalive() and kill()
        time.sleep(self.delayafterterminate)
        return not self.isalive()
|
def wait(self):
    """Block until the child exits; return its exit status.

    Does not read child output, so this can block forever if the child
    fills its output pipe before exiting. Raises ExceptionPexpect for a
    child that is already dead or reported as stopped.
    """
    if self.isalive():
        pid, status = os.waitpid(self.pid, 0)
    else:
        raise ExceptionPexpect('Cannot wait for dead child process.')
    # BUG FIX: exitstatus was previously assigned unconditionally here,
    # before the WIFEXITED check, briefly recording a bogus value for a
    # signal-terminated child (and permanently if WIFSTOPPED raised).
    if os.WIFEXITED(status):
        self.status = status
        self.exitstatus = os.WEXITSTATUS(status)
        self.signalstatus = None
        self.terminated = True
    elif os.WIFSIGNALED(status):
        self.status = status
        self.exitstatus = None
        self.signalstatus = os.WTERMSIG(status)
        self.terminated = True
    elif os.WIFSTOPPED(status):
        raise ExceptionPexpect('Wait was called for a child process that is stopped. This is not supported. Is some other process attempting job control with our child pid?')
    return self.exitstatus
|
def isalive(self):
    """Non-blockingly report whether the child appears to be running.

    When the child has exited this reaps it, recording status,
    exitstatus/signalstatus and setting ``terminated``. Returns True if
    the child still appears to be running, False otherwise.
    """
    if self.terminated:
        return False
    if self.flag_eof:
        # after EOF the child must be exiting; a blocking wait is safe
        waitpid_options = 0
    else:
        waitpid_options = os.WNOHANG
    try:
        pid, status = os.waitpid(self.pid, waitpid_options)
    except OSError as e:
        # BUG FIX: use e.errno -- indexing the exception (e[0]) only
        # worked on Python 2.
        if e.errno == errno.ECHILD:
            raise ExceptionPexpect('isalive() encountered condition where "terminated" is 0, but there was no child process. Did someone else call waitpid() on our process?')
        raise
    if pid == 0:
        # Non-blocking check says "still running"; retry once to catch a
        # child that exited between the flag_eof test and the waitpid.
        try:
            pid, status = os.waitpid(self.pid, waitpid_options)
        except OSError as e:
            if e.errno == errno.ECHILD:
                raise ExceptionPexpect('isalive() encountered condition that should never happen. There was no child process. Did someone else call waitpid() on our process?')
            raise
        # BUG FIX: the original repeated this "pid == 0" check twice in
        # a row; a single check is sufficient.
        if pid == 0:
            return True
    if os.WIFEXITED(status):
        self.status = status
        self.exitstatus = os.WEXITSTATUS(status)
        self.signalstatus = None
        self.terminated = True
    elif os.WIFSIGNALED(status):
        self.status = status
        self.exitstatus = None
        self.signalstatus = os.WTERMSIG(status)
        self.terminated = True
    elif os.WIFSTOPPED(status):
        raise ExceptionPexpect('isalive() encountered condition where child process is stopped. This is not supported. Is some other process attempting job control with our child pid?')
    return False
|
def kill(self, sig):
    """Send signal *sig* to the child if it is still alive.

    Per UNIX tradition the name is misleading: it only kills the child
    if *sig* is a fatal signal.
    """
    if not self.isalive():
        return
    os.kill(self.pid, sig)
|
def compile_pattern_list(self, patterns):
    """Compile a pattern (or list of patterns) for expect_list().

    Accepts strings, EOF, TIMEOUT, compiled regexes, or a list of
    those; None yields an empty list. Strings are compiled with
    re.DOTALL (plus re.IGNORECASE when self.ignorecase). Pre-compiling
    lets a caller loop on expect_list() without recompiling each time.
    """
    if patterns is None:
        return []
    if type(patterns) is not types.ListType:
        patterns = [patterns]
    flags = re.DOTALL
    if self.ignorecase:
        flags |= re.IGNORECASE
    compiled = []
    for pat in patterns:
        if type(pat) in types.StringTypes:
            compiled.append(re.compile(pat, flags))
        elif pat is EOF:
            compiled.append(EOF)
        elif pat is TIMEOUT:
            compiled.append(TIMEOUT)
        elif type(pat) is type(re.compile('')):
            compiled.append(pat)
        else:
            raise TypeError(('Argument must be one of StringTypes, EOF, TIMEOUT, SRE_Pattern, or a list of those type. %s' % str(type(pat))))
    return compiled
|
def expect(self, pattern, timeout=(-1), searchwindowsize=None):
    """Read child output until *pattern* matches; return the match index.

    *pattern* may be a string, EOF, TIMEOUT, a compiled regex, or a list
    of those. On success the attributes 'before', 'after' and 'match'
    are set; EOF/TIMEOUT raise unless listed as patterns, in which case
    their index is returned. timeout=-1 means use self.timeout. This is
    just compile_pattern_list() + expect_list().
    """
    compiled = self.compile_pattern_list(pattern)
    return self.expect_list(compiled, timeout, searchwindowsize)
|
def expect_list(self, pattern_list, timeout=(-1), searchwindowsize=(-1)):
    """Like expect(), but takes pre-compiled regexes (plus EOF/TIMEOUT).

    Avoids recompiling the pattern list on every call; -1 arguments fall
    back to self.timeout / self.searchwindowsize inside expect_loop().
    """
    searcher = searcher_re(pattern_list)
    return self.expect_loop(searcher, timeout, searchwindowsize)
|
def expect_exact(self, pattern_list, timeout=(-1), searchwindowsize=(-1)):
    """Like expect(), but with plain-string matching.

    Faster than regex matching and needs no escaping; *pattern_list*
    may be a string, a sequence of strings, or TIMEOUT / EOF.
    """
    single = (type(pattern_list) in types.StringTypes) or (pattern_list in (TIMEOUT, EOF))
    if single:
        pattern_list = [pattern_list]
    return self.expect_loop(searcher_string(pattern_list), timeout, searchwindowsize)
|
def expect_loop(self, searcher, timeout=(-1), searchwindowsize=(-1)):
    """Core matching loop used by expect()/expect_list()/expect_exact().

    Repeatedly searches the accumulated buffer with *searcher* (a
    searcher_re or searcher_string), reading more data as needed. On a
    match, sets buffer/before/after/match/match_index and returns the
    index. EOF and TIMEOUT either return their configured index or are
    re-raised annotated with str(self).
    """
    self.searcher = searcher
    if (timeout == (-1)):
        timeout = self.timeout
    if (timeout is not None):
        end_time = (time.time() + timeout)
    if (searchwindowsize == (-1)):
        searchwindowsize = self.searchwindowsize
    try:
        incoming = self.buffer
        freshlen = len(incoming)
        while True:
            index = searcher.search(incoming, freshlen, searchwindowsize)
            if (index >= 0):
                self.buffer = incoming[searcher.end:]
                self.before = incoming[:searcher.start]
                self.after = incoming[searcher.start:searcher.end]
                self.match = searcher.match
                self.match_index = index
                return self.match_index
            # BUG FIX: the None check must come first; the original
            # evaluated `timeout < 0` before `timeout is not None`,
            # which is a TypeError on Python 3 when timeout is None.
            if ((timeout is not None) and (timeout < 0)):
                raise TIMEOUT('Timeout exceeded in expect_any().')
            c = self.read_nonblocking(self.maxread, timeout)
            freshlen = len(c)
            time.sleep(0.0001)
            incoming = (incoming + c)
            if (timeout is not None):
                timeout = (end_time - time.time())
    except EOF as e:
        self.buffer = ''
        self.before = incoming
        self.after = EOF
        index = searcher.eof_index
        if (index >= 0):
            self.match = EOF
            self.match_index = index
            return self.match_index
        else:
            self.match = None
            self.match_index = None
            raise EOF(((str(e) + '\n') + str(self)))
    except TIMEOUT as e:
        self.buffer = incoming
        self.before = incoming
        self.after = TIMEOUT
        index = searcher.timeout_index
        if (index >= 0):
            self.match = TIMEOUT
            self.match_index = index
            return self.match_index
        else:
            self.match = None
            self.match_index = None
            raise TIMEOUT(((str(e) + '\n') + str(self)))
    except:
        # any other failure: record what was read, then propagate
        self.before = incoming
        self.after = None
        self.match = None
        self.match_index = None
        raise
|
def getwinsize(self):
    """Return the child tty window size as a (rows, cols) tuple."""
    # BUG FIX: the fallback was written as the long literal 1074295912L,
    # which is a syntax error on Python 3; the plain int is identical on
    # Python 2. The value (0x40087468) is the common TIOCGWINSZ ioctl
    # number, used when termios lacks the constant.
    TIOCGWINSZ = getattr(termios, 'TIOCGWINSZ', 1074295912)
    s = struct.pack('HHHH', 0, 0, 0, 0)
    x = fcntl.ioctl(self.fileno(), TIOCGWINSZ, s)
    return struct.unpack('HHHH', x)[0:2]
|
def setwinsize(self, r, c):
    """Set the child tty window size to *r* rows by *c* cols.

    Triggers a SIGWINCH in the child; this changes only the size
    reported to TTY-aware applications, not any physical window.
    """
    # BUG FIX: the comparison used the long literal 2148037735L, a
    # syntax error on Python 3; the plain int is identical on Python 2.
    # Some platforms report TIOCSWINSZ as the unsigned value 2148037735
    # (0x80087467) where the ioctl expects the signed -2146929561.
    TIOCSWINSZ = getattr(termios, 'TIOCSWINSZ', (-2146929561))
    if (TIOCSWINSZ == 2148037735):
        TIOCSWINSZ = (-2146929561)
    s = struct.pack('HHHH', r, c, 0, 0)
    fcntl.ioctl(self.fileno(), TIOCSWINSZ, s)
|
def interact(self, escape_character=chr(29), input_filter=None, output_filter=None):
    """Hand control of the child to the interactive user.

    Keystrokes go to the child; child stdout/stderr are echoed to the
    real stdout. Typing *escape_character* (default Ctrl-], ASCII 29,
    telnet's escape char) stops the session without sending it to the
    child. *input_filter*/*output_filter*, when given, are str->str
    functions applied to keyboard input (before the escape check) and
    child output respectively. SIGWINCH is not forwarded to the child.
    """
    # flush anything already buffered before going raw
    self.stdout.write(self.buffer)
    self.stdout.flush()
    self.buffer = ''
    saved_mode = tty.tcgetattr(self.STDIN_FILENO)
    tty.setraw(self.STDIN_FILENO)
    try:
        self.__interact_copy(escape_character, input_filter, output_filter)
    finally:
        # always restore the terminal, even if the copy loop blows up
        tty.tcsetattr(self.STDIN_FILENO, tty.TCSAFLUSH, saved_mode)
|
def __interact_writen(self, fd, data):
    """Write all of *data* to *fd*, stopping early if the child dies."""
    remaining = data
    while remaining != '' and self.isalive():
        written = os.write(fd, remaining)
        remaining = remaining[written:]
|
def __interact_read(self, fd):
    """Read and return up to 1000 bytes from *fd*."""
    chunk = os.read(fd, 1000)
    return chunk
|
def __interact_copy(self, escape_character=None, input_filter=None, output_filter=None):
    """Shuttle data between the user's tty and the child for interact().

    Loops until the child dies or *escape_character* appears in the
    user's input; everything typed before the escape is still sent.
    """
    while self.isalive():
        r, w, e = self.__select([self.child_fd, self.STDIN_FILENO], [], [])
        if self.child_fd in r:
            chunk = self.__interact_read(self.child_fd)
            if output_filter:
                chunk = output_filter(chunk)
            if self.logfile is not None:
                self.logfile.write(chunk)
                self.logfile.flush()
            os.write(self.STDOUT_FILENO, chunk)
        if self.STDIN_FILENO in r:
            chunk = self.__interact_read(self.STDIN_FILENO)
            if input_filter:
                chunk = input_filter(chunk)
            pos = chunk.rfind(escape_character)
            if pos != -1:
                # forward what precedes the escape char, then stop
                self.__interact_writen(self.child_fd, chunk[:pos])
                break
            self.__interact_writen(self.child_fd, chunk)
|
def __select(self, iwtd, owtd, ewtd, timeout=None):
    """select.select() wrapper that retries on EINTR (e.g. SIGWINCH).

    Recomputes the remaining timeout after each interruption; returns
    empty lists if the timeout expires during a retry.
    """
    if timeout is not None:
        end_time = time.time() + timeout
    while True:
        try:
            return select.select(iwtd, owtd, ewtd, timeout)
        except select.error as e:
            if e[0] != errno.EINTR:
                raise
            if timeout is not None:
                timeout = end_time - time.time()
                if timeout < 0:
                    return ([], [], [])
|
def setmaxread(self, maxread):
    """Removed setter: always raises ExceptionPexpect.

    Kept only so old callers fail with a clear message; assign to the
    ``maxread`` attribute directly instead.
    """
    raise ExceptionPexpect('This method is no longer supported or allowed. Just assign a value to the maxread member variable.')
|
def setlog(self, fileobject):
    """Removed setter: always raises ExceptionPexpect.

    Assign to the ``logfile`` attribute directly instead.
    """
    raise ExceptionPexpect('This method is no longer supported or allowed. Just assign a value to the logfile member variable.')
|
def __init__(self, strings):
    """Create a searcher_string over *strings*.

    *strings* is a sequence of plain strings, optionally containing the
    EOF or TIMEOUT types; those are recorded by index (eof_index /
    timeout_index) rather than searched for.
    """
    self.eof_index = (-1)
    self.timeout_index = (-1)
    self._strings = []
    # IDIOM FIX: enumerate() replaces the hand-rolled
    # zip(range(len(strings)), strings)
    for n, s in enumerate(strings):
        if (s is EOF):
            self.eof_index = n
            continue
        if (s is TIMEOUT):
            self.timeout_index = n
            continue
        self._strings.append((n, s))
|
def __str__(self):
    """Return a human-readable dump of the configured search strings."""
    ss = [(ns[0], (' %d: "%s"' % ns)) for ns in self._strings]
    ss.append(((-1), 'searcher_string:'))
    if (self.eof_index >= 0):
        ss.append((self.eof_index, (' %d: EOF' % self.eof_index)))
    if (self.timeout_index >= 0):
        ss.append((self.timeout_index, (' %d: TIMEOUT' % self.timeout_index)))
    ss.sort()
    # BUG FIX: the original did zip(*ss)[1], which subscripts a zip
    # object and fails on Python 3; extract the strings directly.
    return '\n'.join([pair[1] for pair in ss])
|
def search(self, buffer, freshlen, searchwindowsize=None):
    """Find the earliest occurrence of any search string in *buffer*.

    *freshlen* is how many trailing bytes are new since the last call;
    without a searchwindowsize only that fresh tail (plus needle-length
    slack) is scanned, avoiding repeated scans of a growing buffer. On
    a hit this sets start/end/match and returns the string's index;
    otherwise returns -1.
    """
    no_match = len(buffer)
    best_pos = no_match
    for index, needle in self._strings:
        if searchwindowsize is None:
            # a new hit must overlap the fresh tail
            offset = -(freshlen + len(needle))
        else:
            offset = -searchwindowsize
        pos = buffer.find(needle, offset)
        if 0 <= pos < best_pos:
            best_pos = pos
            best_index, best_needle = index, needle
    if best_pos == no_match:
        return -1
    self.match = best_needle
    self.start = best_pos
    self.end = best_pos + len(best_needle)
    return best_index
|
def __init__(self, patterns):
    """Create a searcher_re over *patterns*.

    *patterns* is a sequence of compiled regular expressions, optionally
    containing the EOF or TIMEOUT types; those are recorded by index
    (eof_index / timeout_index) rather than searched for.
    """
    self.eof_index = (-1)
    self.timeout_index = (-1)
    self._searches = []
    # IDIOM FIX: enumerate() replaces the hand-rolled
    # zip(range(len(patterns)), patterns)
    for n, s in enumerate(patterns):
        if (s is EOF):
            self.eof_index = n
            continue
        if (s is TIMEOUT):
            self.timeout_index = n
            continue
        self._searches.append((n, s))
|
def __str__(self):
    """Return a human-readable dump of the configured regex patterns."""
    ss = [(n, (' %d: re.compile("%s")' % (n, str(s.pattern)))) for (n, s) in self._searches]
    ss.append(((-1), 'searcher_re:'))
    if (self.eof_index >= 0):
        ss.append((self.eof_index, (' %d: EOF' % self.eof_index)))
    if (self.timeout_index >= 0):
        ss.append((self.timeout_index, (' %d: TIMEOUT' % self.timeout_index)))
    ss.sort()
    # BUG FIX: the original did zip(*ss)[1], which subscripts a zip
    # object and fails on Python 3; extract the strings directly.
    return '\n'.join([pair[1] for pair in ss])
|
def search(self, buffer, freshlen, searchwindowsize=None):
    """Find the earliest regex match in *buffer*.

    Scans at most the trailing *searchwindowsize* bytes when given.
    On a hit this sets start/end/match and returns the pattern's index;
    otherwise returns -1. (*freshlen* is accepted for interface parity
    with searcher_string but not used to narrow the scan here.)
    """
    no_match = len(buffer)
    best_pos = no_match
    if searchwindowsize is None:
        scan_from = 0
    else:
        scan_from = max(0, len(buffer) - searchwindowsize)
    for index, pattern in self._searches:
        m = pattern.search(buffer, scan_from)
        if m is None:
            continue
        pos = m.start()
        if pos < best_pos:
            best_pos = pos
            best_m = m
            best_index = index
    if best_pos == no_match:
        return -1
    self.start = best_pos
    self.match = best_m
    self.end = best_m.end()
    return best_index
|
def value_decode(self, val):
    """Identity decode: return (real_value, coded_value), both *val*."""
    return val, val
|
def value_encode(self, val):
    """Encode via str(): return (real_value, coded_value) as strings."""
    coded = str(val)
    return coded, coded
|
def __set(self, key, real_value, coded_value):
    """Store (real_value, coded_value) for *key* in a Morsel.

    Reuses the existing Morsel for *key* when present, otherwise makes a
    fresh one, then writes it straight into the underlying dict.
    """
    M = self.get(key, Morsel())
    M.set(key, real_value, coded_value)
    dict.__setitem__(self, key, M)
|
def __setitem__(self, key, value):
    """Encode *value* and store it under *key* as a Morsel."""
    real, coded = self.value_encode(value)
    self.__set(key, real, coded)
|
def output(self, attrs=None, header='Set-Cookie:', sep='\r\n'):
    """Render all morsels as HTTP headers, sorted by key, joined by *sep*."""
    items = self.items()
    items.sort()
    lines = [morsel.output(attrs, header) for _, morsel in items]
    return sep.join(lines)
|
def js_output(self, attrs=None):
    """Render all morsels as JavaScript snippets, sorted by key."""
    items = self.items()
    items.sort()
    chunks = [morsel.js_output(attrs) for _, morsel in items]
    return _nulljoin(chunks)
|
def load(self, rawdata):
    """Load cookie data: parse a raw string, or copy items from a mapping."""
    if type(rawdata) == type(''):
        self.__ParseString(rawdata)
        return
    for name, morsel in rawdata.items():
        self[name] = morsel
    return
|
def get(self, key, default=None, type=None):
    """Return self[key], coerced by callable *type* when given.

    Falls back to *default* when the key is missing or coercion raises
    ValueError.
    """
    try:
        value = self[key]
        if type is not None:
            value = type(value)
    except (KeyError, ValueError):
        return default
    return value
|
def __getitem__(self, key):
    """Return the first stored value for *key*; raise KeyError if absent."""
    if key not in self:
        raise KeyError(key)
    return dict.__getitem__(self, key)[0]
|
def __setitem__(self, key, value):
    """Replace all values for *key* with the single *value*."""
    dict.__setitem__(self, key, [value])
|
def add(self, key, value):
    """Append *value* to the value list for *key*, creating it if needed."""
    bucket = dict.setdefault(self, key, [])
    bucket.append(value)
|
def getlist(self, key, type=None):
    """Return all values for *key* (a fresh list; [] when absent).

    When callable *type* is given, each value is coerced and values that
    raise ValueError are silently dropped.
    """
    try:
        stored = dict.__getitem__(self, key)
    except KeyError:
        return []
    if type is None:
        return list(stored)
    coerced = []
    for item in stored:
        try:
            coerced.append(type(item))
        except ValueError:
            # skip values that do not convert
            pass
    return coerced
|
def setlist(self, key, new_list):
    """Replace the values for *key* with a copy of *new_list*."""
    values = list(new_list)
    dict.__setitem__(self, key, values)
|
def setdefault(self, key, default=None):
    """Like dict.setdefault, but routed through this class's item access."""
    if key in self:
        return self[key]
    self[key] = default
    return default
|
def setlistdefault(self, key, default_list=None):
    """Return the value list for *key*, installing *default_list* if absent.

    The returned list is the stored one, so mutations are visible.
    """
    if key in self:
        return dict.__getitem__(self, key)
    stored = list(default_list or ())
    dict.__setitem__(self, key, stored)
    return stored
|
def items(self, multi=False):
    """Materialize iteritems(multi) into a list of (key, value) pairs."""
    return [pair for pair in self.iteritems(multi)]
|
def lists(self):
    """Materialize iterlists() into a list of (key, value_list) pairs."""
    return [pair for pair in self.iterlists()]
|
def values(self):
    """Return the first stored value for each key, in key order."""
    result = []
    for k in self.iterkeys():
        result.append(self[k])
    return result
|
def listvalues(self):
    """Materialize iterlistvalues() into a list of value lists."""
    return [vals for vals in self.iterlistvalues()]
|
def iteritems(self, multi=False):
    """Yield (key, value) pairs.

    With ``multi=True`` every stored value is yielded; otherwise only
    the first value per key.
    """
    for k, vals in dict.iteritems(self):
        if not multi:
            yield (k, vals[0])
            continue
        for v in vals:
            yield (k, v)
|
def iterlists(self):
    """Yield (key, value_list) pairs; each list is a fresh copy."""
    for k, vals in dict.iteritems(self):
        yield (k, list(vals))
|
def itervalues(self):
    """Yield the first stored value for each key."""
    for vals in dict.itervalues(self):
        yield vals[0]
|
def iterlistvalues(self):
    """Return an iterator over the raw stored value lists (not copies)."""
    return dict.itervalues(self)
|
def copy(self):
    """Return a shallow copy built via this object's own class."""
    cls = self.__class__
    return cls(self)
|
def to_dict(self, flat=True):
    """Return a plain dict: first values when *flat*, else full lists."""
    if not flat:
        return dict(self.lists())
    return dict(self.iteritems())
|
def update(self, other_dict):
    """add() every (key, value) pair yielded for *other_dict*."""
    for k, v in iter_multi_items(other_dict):
        MultiDict.add(self, k, v)
|
def pop(self, key, default=_missing):
    """Remove *key* and return its first value.

    Returns *default* when given and the key is absent; otherwise
    raises KeyError.
    """
    try:
        return dict.pop(self, key)[0]
    except KeyError as e:
        if default is _missing:
            raise KeyError(str(e))
        return default
|
def popitem(self):
    """Remove and return an item as (key, first_value)."""
    try:
        key, values = dict.popitem(self)
    except KeyError as exc:
        raise KeyError(str(exc))
    return (key, values[0])
|
def poplist(self, key):
    """Remove *key* and return its full value list ([] when absent)."""
    values = dict.pop(self, key, [])
    return values
|
def popitemlist(self):
    """Remove and return an item as (key, value_list)."""
    try:
        return dict.popitem(self)
    except KeyError as exc:
        raise KeyError(str(exc))
|
def get_redirect_location(self):
    """Return the Location header for redirect statuses, else False."""
    if self.status not in (301, 302, 303, 307):
        return False
    return self.headers.get('location')
|
def read(self, amt=None, decode_content=None, cache_content=False):
    """Read the response body from the underlying file-like object.

    amt: when given, read at most this many bytes.
    decode_content: decompress per the Content-Encoding header;
        defaults to self._decode_content.
    cache_content: stash the (decoded) body on self._body.
    Returns None when the body stream is already gone.
    """
    content_encoding = self.headers.get('content-encoding')
    decoder = self.CONTENT_DECODERS.get(content_encoding)
    if (decode_content is None):
        decode_content = self._decode_content
    if (self._fp is None):
        return
    try:
        if (amt is None):
            data = self._fp.read()
        else:
            # NOTE(review): the amt path returns the raw bytes here, so
            # partial reads are never decoded or cached -- presumably
            # intentional (decoders need the full stream); confirm.
            return self._fp.read(amt)
        try:
            if (decode_content and decoder):
                data = decoder(data)
        except IOError:
            raise HTTPError(('Received response with content-encoding: %s, but failed to decode it.' % content_encoding))
        if cache_content:
            self._body = data
        return data
    finally:
        # hand the connection back to the pool once the wrapped httplib
        # response reports itself exhausted
        if (self._original_response and self._original_response.isclosed()):
            self.release_conn()
|
@classmethod
def from_httplib(ResponseCls, r, **response_kw):
    """Build a response object from an httplib response *r*.

    Duplicate headers are folded into one comma-separated value, keys
    are lowercased, and the original response is retained.
    """
    headers = {}
    for name, value in r.getheaders():
        name = name.lower()
        previous = headers.get(name)
        if previous:
            value = ', '.join([previous, value])
        headers[name] = value
    strict = getattr(r, 'strict', 0)
    return ResponseCls(body=r, headers=headers, status=r.status, version=r.version, reason=r.reason, strict=strict, original_response=r, **response_kw)
|
def connection_from_host(self, host, port=80, scheme='http'):
    """Return the pool for (scheme, host, port), creating it if needed."""
    pool_key = (scheme, host, port)
    existing = self.pools.get(pool_key)
    if existing:
        return existing
    pool_cls = pool_classes_by_scheme[scheme]
    new_pool = pool_cls(host, port, **self.connection_pool_kw)
    self.pools[pool_key] = new_pool
    return new_pool
|
def connection_from_url(self, url):
    """Return the pool for *url*'s host, defaulting the port by scheme."""
    scheme, host, port = get_host(url)
    if not port:
        port = port_by_scheme.get(scheme, 80)
    return self.connection_from_host(host, port=port, scheme=scheme)
|
def urlopen(self, method, url, **kw):
    """Open *url* via its pool; follow a HostChangedError to the new URL."""
    conn = self.connection_from_url(url)
    try:
        return conn.urlopen(method, url, **kw)
    except HostChangedError as e:
        # retry budget carries over to the new host
        kw['retries'] = e.retries
        return self.urlopen(method, e.url, **kw)
|
def urlopen(self, method, url, **kw):
    """Send the request through the proxy pool with proxy headers set."""
    kw['assert_same_host'] = False
    kw['headers'] = self._set_proxy_headers(kw.get('headers'))
    return self.proxy_pool.urlopen(method, url, **kw)
|
def _invalidate_entry(self, key):
    """Mark the access-log entry for *key* stale; return it (or None)."""
    entry = self.access_lookup.get(key)
    if entry:
        entry.is_valid = False
    return entry
|
def _push_entry(self, key):
    """Record a fresh access for *key*, invalidating any older entry."""
    self._invalidate_entry(key)
    entry = AccessEntry(key)
    self.access_lookup[key] = entry
    # only the deque itself is guarded by the lock
    self.access_log_lock.acquire()
    self.access_log.appendleft(entry)
    self.access_log_lock.release()
|
def _prune_entries(self, num):
    """Evict the *num* least-recently-used valid entries.

    Stale (invalidated) log entries are discarded without counting
    toward *num*.
    """
    while num > 0:
        self.access_log_lock.acquire()
        oldest = self.access_log.pop()
        self.access_log_lock.release()
        if not oldest.is_valid:
            # superseded by a newer access; skip without counting
            continue
        dict.pop(self, oldest.key, None)
        self.access_lookup.pop(oldest.key, None)
        num -= 1
|
def _prune_invalidated_entries(self):
    """Rebuild the access log keeping only still-valid entries."""
    self.access_log_lock.acquire()
    self.access_log = deque((entry for entry in self.access_log if entry.is_valid))
    self.access_log_lock.release()
|
def _get_ordered_access_keys(self):
    """Return the keys of valid entries, most recent first."""
    self.access_log_lock.acquire()
    keys = [entry.key for entry in self.access_log if entry.is_valid]
    self.access_log_lock.release()
    return keys
|
def _new_conn(self):
    """Create and return a fresh HTTPConnection, counting it."""
    self.num_connections += 1
    log.debug(('Starting new HTTP connection (%d): %s' % (self.num_connections, self.host)))
    return HTTPConnection(host=self.host, port=self.port)
|
def _get_conn(self, timeout=None):
    """Take a connection from the pool, or create a new one.

    A pooled connection whose socket was dropped is closed and replaced.
    With a blocking pool, an empty pool raises EmptyPoolError instead of
    creating an extra connection.
    """
    conn = None
    try:
        conn = self.pool.get(block=self.block, timeout=timeout)
        if conn and is_connection_dropped(conn):
            log.info(('Resetting dropped connection: %s' % self.host))
            conn.close()
    except Empty:
        if self.block:
            raise EmptyPoolError(self, 'Pool reached maximum size and no more connections are allowed.')
        # non-blocking pool: fall through and build a fresh connection
    return conn or self._new_conn()
|
def _put_conn(self, conn):
    """Return *conn* to the pool; discard it (with a warning) when full."""
    try:
        self.pool.put(conn, block=False)
    except Full:
        log.warning(('HttpConnectionPool is full, discarding connection: %s' % self.host))
|
def _make_request(self, conn, method, url, timeout=_Default, **httplib_request_kw):
    """Issue one request on *conn* and return the raw httplib response.

    _Default timeout resolves to self.timeout; the socket timeout is
    applied after the request is sent (the socket may not exist before).
    """
    self.num_requests += 1
    if timeout is _Default:
        timeout = self.timeout
    conn.timeout = timeout
    conn.request(method, url, **httplib_request_kw)
    sock = getattr(conn, 'sock', False)
    if sock:
        sock.settimeout(timeout)
    response = conn.getresponse()
    log.debug(('"%s %s %s" %s %s' % (method, url, conn._http_vsn_str, response.status, response.length)))
    return response
|
def is_same_host(self, url):
    """True when *url* is relative or targets this pool's scheme/host/port."""
    if url.startswith('/'):
        return True
    scheme, host, port = get_host(url)
    if self.port and not port:
        # compare against the scheme's default port
        port = port_by_scheme.get(scheme)
    return (scheme, host, port) == (self.scheme, self.host, self.port)
|
def urlopen(self, method, url, body=None, headers=None, retries=3, redirect=True, assert_same_host=True, timeout=_Default, pool_timeout=None, release_conn=None, **response_kw):
    """Send a request on this pool, with retries and redirect handling.

    retries: remaining attempts; <0 raises MaxRetryError.
    release_conn: return the connection to the pool after the response
        is built (defaults to the preload_content response kwarg).
    assert_same_host: raise HostChangedError (carrying retries-1) when
        *url* points elsewhere.
    """
    if (headers is None):
        headers = self.headers
    if (retries < 0):
        raise MaxRetryError(self, url)
    if (timeout is _Default):
        timeout = self.timeout
    if (release_conn is None):
        release_conn = response_kw.get('preload_content', True)
    if (assert_same_host and (not self.is_same_host(url))):
        host = ('%s://%s' % (self.scheme, self.host))
        if self.port:
            host = ('%s:%d' % (host, self.port))
        raise HostChangedError(self, url, (retries - 1))
    conn = None
    try:
        conn = self._get_conn(timeout=pool_timeout)
        httplib_response = self._make_request(conn, method, url, timeout=timeout, body=body, headers=headers)
        # keep the connection attached to the response unless we are
        # releasing it back to the pool below
        response_conn = ((not release_conn) and conn)
        response = HTTPResponse.from_httplib(httplib_response, pool=self, connection=response_conn, **response_kw)
    except Empty as e:
        raise TimeoutError(self, ('Request timed out. (pool_timeout=%s)' % pool_timeout))
    except SocketTimeout as e:
        raise TimeoutError(self, ('Request timed out. (timeout=%s)' % timeout))
    except BaseSSLError as e:
        raise SSLError(e)
    except CertificateError as e:
        raise SSLError(e)
    except (HTTPException, SocketError) as e:
        # connection-level failure: drop the connection and retry below
        conn = None
        err = e
    finally:
        if (conn and release_conn):
            self._put_conn(conn)
    if (not conn):
        # NOTE(review): the retry and redirect re-entries below forward
        # only the positional subset of arguments; timeout/pool_timeout/
        # release_conn/response_kw are reset to defaults -- confirm.
        log.warn(("Retrying (%d attempts remain) after connection broken by '%r': %s" % (retries, err, url)))
        return self.urlopen(method, url, body, headers, (retries - 1), redirect, assert_same_host)
    redirect_location = (redirect and response.get_redirect_location())
    if redirect_location:
        log.info(('Redirecting %s -> %s' % (url, redirect_location)))
        return self.urlopen(method, redirect_location, body, headers, (retries - 1), redirect, assert_same_host)
    return response
|
def _new_conn(self):
    """Create and return a fresh HTTPS connection, counting it.

    Without the ssl module, falls back to a plain (unverified)
    HTTPSConnection or raises SSLError when even that is unavailable.
    """
    self.num_connections += 1
    log.debug(('Starting new HTTPS connection (%d): %s' % (self.num_connections, self.host)))
    if not ssl:
        if (not HTTPSConnection) or (HTTPSConnection is object):
            raise SSLError("Can't connect to HTTPS URL because the SSL module is not available.")
        return HTTPSConnection(host=self.host, port=self.port)
    conn = VerifiedHTTPSConnection(host=self.host, port=self.port)
    conn.set_cert(key_file=self.key_file, cert_file=self.cert_file, cert_reqs=self.cert_reqs, ca_certs=self.ca_certs)
    return conn
|
def request(self, method, url, fields=None, headers=None, **urlopen_kw):
    """Dispatch by HTTP verb: query-string encoding vs body encoding."""
    method = method.upper()
    if method in self._encode_url_methods:
        return self.request_encode_url(method, url, fields=fields, headers=headers, **urlopen_kw)
    return self.request_encode_body(method, url, fields=fields, headers=headers, **urlopen_kw)
|
def request_encode_url(self, method, url, fields=None, **urlopen_kw):
    """Append *fields* as a urlencoded query string, then urlopen."""
    if fields:
        url = url + '?' + urlencode(fields)
    return self.urlopen(method, url, **urlopen_kw)
|
def request_encode_body(self, method, url, fields=None, headers=None, encode_multipart=True, multipart_boundary=None, **urlopen_kw):
    """Encode *fields* into the request body, then urlopen.

    Uses multipart/form-data by default, otherwise
    application/x-www-form-urlencoded; the Content-Type header is set
    accordingly (overriding any caller-supplied value).
    """
    if encode_multipart:
        body, content_type = encode_multipart_formdata((fields or {}), boundary=multipart_boundary)
    else:
        body = urlencode((fields or {}))
        content_type = 'application/x-www-form-urlencoded'
    headers = dict(headers or {})
    headers.update({'Content-Type': content_type})
    return self.urlopen(method, url, body=body, headers=headers, **urlopen_kw)
|
def _build_response(self, resp):
    """Build self.response from the raw urllib3 response *resp*.

    Wraps *resp* in a Response, merges cookies, then (unless this
    request is itself a redirect hop) follows Location headers up to
    max_redirects, re-sending via fresh Request objects and recording
    each hop in response.history.
    """
    def build(resp):
        # Wrap one raw response in a Response object.
        response = Response()
        response.config = self.config
        if resp:
            response.status_code = getattr(resp, 'status', None)
            response.headers = CaseInsensitiveDict(getattr(resp, 'headers', None))
            response.encoding = get_encoding_from_headers(response.headers)
            cookies = (self.cookies or dict())
            if ('set-cookie' in response.headers):
                cookie_header = response.headers['set-cookie']
                cookies = dict_from_string(cookie_header)
            response.cookies = cookies
            response.error = getattr(resp, 'error', None)
            response.raw = resp
            if isinstance(self.full_url, bytes):
                response.url = self.full_url.decode('utf-8')
            else:
                response.url = self.full_url
        return response
    history = []
    r = build(resp)
    self.cookies.update(r.cookies)
    # NOTE(review): redirects are only followed when self.redirect is
    # False -- i.e. this request is not itself a redirect hop; confirm.
    if ((r.status_code in REDIRECT_STATI) and (not self.redirect)):
        while (('location' in r.headers) and ((r.status_code is codes.see_other) or self.allow_redirects)):
            # consume the body so the connection can be released
            r.content
            if (not (len(history) < self.config.get('max_redirects'))):
                raise TooManyRedirects()
            r.raw.release_conn()
            history.append(r)
            url = r.headers['location']
            data = self.data
            # scheme-relative redirect: reuse the previous scheme
            if url.startswith('//'):
                parsed_rurl = urlparse(r.url)
                url = ('%s:%s' % (parsed_rurl.scheme, url))
            # relative redirect: resolve against the previous URL
            if (not urlparse(url).netloc):
                url = urljoin(r.url, requote_uri(url))
            # 303 always switches to GET and drops the body
            if (r.status_code is codes.see_other):
                method = 'GET'
                data = None
            else:
                method = self.method
            if (not self.config.get('strict_mode')):
                # browser-compatible behavior: 301/302 after POST -> GET
                if ((r.status_code in (codes.moved, codes.found)) and (self.method == 'POST')):
                    method = 'GET'
                    data = None
                if ((r.status_code == 303) and (self.method != 'HEAD')):
                    method = 'GET'
                    data = None
            headers = self.headers
            # cookies are re-sent via the cookies argument, not the header
            try:
                del headers['Cookie']
            except KeyError:
                pass
            request = Request(url=url, headers=headers, files=self.files, method=method, params=self.session.params, auth=self.auth, cookies=self.cookies, redirect=True, data=data, config=self.config, timeout=self.timeout, _poolmanager=self._poolmanager, proxies=self.proxies, verify=self.verify, session=self.session, cert=self.cert)
            request.send()
            r = request.response
            self.cookies.update(r.cookies)
    r.history = history
    self.response = r
    self.response.request = self
    self.response.cookies.update(self.cookies)
|
''
| @staticmethod
def _encode_params(data):
| if isinstance(data, bytes):
return (data, data)
if (hasattr(data, '__iter__') and (not isinstance(data, str))):
data = dict(data)
if hasattr(data, 'items'):
result = []
for (k, vs) in list(data.items()):
for v in ((isinstance(vs, list) and vs) or [vs]):
result.append(((k.encode('utf-8') if isinstance(k, str) else k), (v.encode('utf-8') if isinstance(v, str) else v)))
return (result, urlencode(result, doseq=True))
else:
return (data, data)
|
''
@property
def full_url(self):
    """Build the fully qualified, encoded URL for this request.

    Validates the scheme, IDNA-encodes the host, appends the
    pre-encoded parameters from ``self._enc_params``, and (unless
    disabled via the ``encode_uri`` config flag) requotes the result.

    Raises:
        URLRequired: if ``self.url`` is falsy.
        MissingSchema: if the URL carries no scheme.
        InvalidSchema: if the scheme is not listed in ``SCHEMAS``.
    """
    if (not self.url):
        raise URLRequired()
    url = self.url
    (scheme, netloc, path, params, query, fragment) = urlparse(url)
    if (not scheme):
        raise MissingSchema(('Invalid URL %r: No schema supplied' % url))
    if (not (scheme in SCHEMAS)):
        raise InvalidSchema(('Invalid scheme %r' % scheme))
    # IDNA-encode the host so non-ASCII domain names are sent correctly.
    netloc = netloc.encode('idna').decode('utf-8')
    if (not path):
        path = '/'
    if is_py2:
        # On Python 2 every component must be a byte string before
        # urlunparse, otherwise mixed str/unicode joins can fail.
        if isinstance(scheme, str):
            scheme = scheme.encode('utf-8')
        if isinstance(netloc, str):
            netloc = netloc.encode('utf-8')
        if isinstance(path, str):
            path = path.encode('utf-8')
        if isinstance(params, str):
            params = params.encode('utf-8')
        if isinstance(query, str):
            query = query.encode('utf-8')
        if isinstance(fragment, str):
            fragment = fragment.encode('utf-8')
    url = urlunparse([scheme, netloc, path, params, query, fragment])
    # Append the already-encoded parameters, with '&' when the URL
    # already has a query string and '?' otherwise.
    if self._enc_params:
        if urlparse(url).query:
            url = ('%s&%s' % (url, self._enc_params))
        else:
            url = ('%s?%s' % (url, self._enc_params))
    if self.config.get('encode_uri', True):
        url = requote_uri(url)
    return url
|
''
@property
def path_url(self):
    """Path plus query string of the request URL.

    When the URL's scheme has a configured proxy the absolute URL is
    returned instead, since proxies expect the full request form.
    """
    parts = urlsplit(self.full_url)
    if parts.scheme in self.proxies:
        return self.full_url
    result = parts.path or '/'
    if parts.query:
        result = '%s?%s' % (result, parts.query)
    return result
|
''
def register_hook(self, event, hook):
    """Attach *hook* to the list registered for *event*.

    Returns None (the return value of ``list.append``).
    """
    hooks_for_event = self.hooks[event]
    return hooks_for_event.append(hook)
|
''
def send(self, anyway=False, prefetch=False):
    """Send the request over the wire.

    Builds the body (multipart or form-encoded), resolves auth and
    proxy/connection-pool settings, dispatches pre/post hooks, and
    stores the result in ``self.response``.

    Args:
        anyway: re-send even if ``self.sent`` is already True.
        prefetch: consume the response body immediately after sending.

    Returns:
        ``self.sent`` — True once the request actually went out.
    """
    url = self.full_url
    if self.config.get('verbose'):
        # 'verbose' config is used as a writable stream for a send log.
        self.config.get('verbose').write(('%s %s %s\n' % (datetime.now().isoformat(), self.method, url)))
    body = None
    content_type = None
    if self.files:
        # Multipart upload: merge data fields with file fields.
        if (not isinstance(self.data, str)):
            try:
                fields = self.data.copy()
            except AttributeError:
                fields = dict(self.data)
            for (k, v) in list(self.files.items()):
                # File values may be (filename, fileobj) tuples or bare
                # file objects whose name is guessed.
                if isinstance(v, (tuple, list)):
                    (fn, fp) = v
                else:
                    fn = (guess_filename(v) or k)
                    fp = v
                fields.update({k: (fn, fp.read())})
            (body, content_type) = encode_multipart_formdata(fields)
        else:
            # NOTE(review): string data combined with files is silently
            # ignored here — preserved as-is.
            pass
    elif self.data:
        body = self._enc_data
        if isinstance(self.data, str):
            # Caller supplied a pre-formatted body; don't guess its type.
            content_type = None
        else:
            content_type = 'application/x-www-form-urlencoded'
    # Only set Content-Type if the caller hasn't already.
    if (content_type and (not ('content-type' in self.headers))):
        self.headers['Content-Type'] = content_type
    # Fall back to netrc credentials when trust_env allows it.
    if ((not self.auth) and self.config.get('trust_env')):
        self.auth = get_netrc_auth(url)
    if self.auth:
        if (isinstance(self.auth, tuple) and (len(self.auth) == 2)):
            # A (user, pass) tuple is shorthand for basic auth.
            self.auth = HTTPBasicAuth(*self.auth)
        # Auth callables mutate a copy of the request; fold it back in.
        r = self.auth(self)
        self.__dict__.update(r.__dict__)
    _p = urlparse(url)
    proxy = self.proxies.get(_p.scheme)
    if proxy:
        conn = poolmanager.proxy_from_url(proxy)
        _proxy = urlparse(proxy)
        if ('@' in _proxy.netloc):
            # Proxy URL embeds credentials: user:pass@host.
            (auth, url) = _proxy.netloc.split('@', 1)
            self.proxy_auth = HTTPProxyAuth(*auth.split(':', 1))
            r = self.proxy_auth(self)
            self.__dict__.update(r.__dict__)
    elif self.config.get('keep_alive'):
        conn = self._poolmanager.connection_from_url(url)
    else:
        conn = connectionpool.connection_from_url(url)
    if (url.startswith('https') and self.verify):
        cert_loc = None
        # verify may be a truthy path to a CA bundle.
        if (self.verify is not True):
            cert_loc = self.verify
        # Environment-provided CA bundles, honored only with trust_env.
        if ((not cert_loc) and self.config.get('trust_env')):
            cert_loc = os.environ.get('REQUESTS_CA_BUNDLE')
        if ((not cert_loc) and self.config.get('trust_env')):
            cert_loc = os.environ.get('CURL_CA_BUNDLE')
        conn.cert_reqs = 'CERT_REQUIRED'
        conn.ca_certs = cert_loc
    else:
        conn.cert_reqs = 'CERT_NONE'
        conn.ca_certs = None
    # Client certificate: (cert, key) pair or a combined file.
    if (self.cert and self.verify):
        if (len(self.cert) == 2):
            conn.cert_file = self.cert[0]
            conn.key_file = self.cert[1]
        else:
            conn.cert_file = self.cert
    if ((not self.sent) or anyway):
        if self.cookies:
            # Serialize cookies into a Cookie header unless one exists.
            if ('cookie' not in self.headers):
                c = SimpleCookie()
                for (k, v) in list(self.cookies.items()):
                    c[k] = v
                cookie_header = c.output(header='', sep='; ').strip()
                self.headers['Cookie'] = cookie_header
        r = dispatch_hook('pre_request', self.hooks, self)
        self.__dict__.update(r.__dict__)
        try:
            # Inner try maps transport errors to requests' exception
            # types; outer try implements safe_mode suppression.
            try:
                r = conn.urlopen(method=self.method, url=self.path_url, body=body, headers=self.headers, redirect=False, assert_same_host=False, preload_content=False, decode_content=False, retries=self.config.get('max_retries', 0), timeout=self.timeout)
                self.sent = True
            except MaxRetryError as e:
                raise ConnectionError(e)
            except (_SSLError, _HTTPError) as e:
                if (self.verify and isinstance(e, _SSLError)):
                    raise SSLError(e)
                raise Timeout('Request timed out.')
        except RequestException as e:
            if self.config.get('safe_mode', False):
                # Safe mode: attach the error to a blank response
                # instead of raising.
                r = HTTPResponse()
                r.error = e
            else:
                raise
        self._build_response(r)
        self.response = dispatch_hook('response', self.hooks, self.response)
        r = dispatch_hook('post_request', self.hooks, self)
        self.__dict__.update(r.__dict__)
    if prefetch:
        # Accessing .content drains and caches the body.
        self.response.content
    if self.config.get('danger_mode'):
        self.response.raise_for_status()
    return self.sent
|
''
def __bool__(self):
    """Truth value of the response: delegates to ``self.ok``."""
    return self.ok
|
''
def __nonzero__(self):
    """Python 2 truth-value hook; mirrors ``__bool__`` via ``self.ok``."""
    return self.ok
|
''
def iter_content(self, chunk_size=(10 * 1024), decode_unicode=False):
    """Iterate over the raw response body in chunks of *chunk_size* bytes.

    Raises RuntimeError if the content was already consumed.  The inner
    generator marks ``self._content_consumed`` once the raw stream is
    exhausted.  When *decode_unicode* is true the stream is additionally
    wrapped by ``stream_decode_response_unicode`` (presumably yielding
    decoded text — confirm against that helper).
    """
    if self._content_consumed:
        raise RuntimeError('The content for this response was already consumed')
    def generate():
        # Pull from the raw stream until it returns an empty read.
        while 1:
            chunk = self.raw.read(chunk_size)
            if (not chunk):
                break
            (yield chunk)
        # Only flagged after full exhaustion of the stream.
        self._content_consumed = True
    gen = stream_untransfer(generate(), self)
    if decode_unicode:
        gen = stream_decode_response_unicode(gen, self)
    return gen
|
''
def iter_lines(self, chunk_size=(10 * 1024), decode_unicode=None):
    """Iterate over the response body one line at a time.

    Chunks from ``iter_content`` are re-split on line boundaries; a
    trailing partial line is buffered in *pending* and stitched onto the
    next chunk, so lines that cross chunk boundaries come out whole.
    """
    pending = None
    for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
        if pending is not None:
            chunk = pending + chunk
        lines = chunk.splitlines()
        # The last element is an unterminated partial line exactly when
        # the chunk does not end with a line separator, i.e. when the
        # last line's final character is still the chunk's final
        # character.  FIX: guard against empty chunks/lines — the
        # original `lines[-1][-1]` raised IndexError for a chunk like
        # "\n", where splitlines() yields [''] which cannot be indexed.
        if lines and lines[-1] and chunk and (lines[-1][-1] == chunk[-1]):
            pending = lines.pop()
        else:
            pending = None
        for line in lines:
            yield line
    # Flush any leftover partial line at end of stream.
    if pending is not None:
        yield pending
|
''
@property
def content(self):
    """Response body as bytes, read once and cached in ``_content``.

    On first access drains ``iter_content`` and joins the chunks; a
    status code of 0 (the blank response used by safe mode) yields
    None.  ``_content_consumed`` is then set so the stream is never
    read twice; later accesses return the cached value.
    """
    if (self._content is None):
        try:
            if self._content_consumed:
                raise RuntimeError('The content for this response was already consumed')
            # FIX: was `self.status_code is 0` — identity comparison with
            # an int literal only works via CPython's small-int caching;
            # equality is the correct, portable check.
            if (self.status_code == 0):
                self._content = None
            else:
                self._content = (bytes().join(self.iter_content()) or bytes())
        except AttributeError:
            # Presumably raised when no raw stream is attached (blank
            # safe-mode response) — TODO confirm against _build_response.
            self._content = None
        self._content_consumed = True
    return self._content
|
''
@property
def text(self):
    """Body decoded to text.

    Uses ``self.encoding`` when set, otherwise the detected encoding;
    undecodable bytes are replaced.  Returns None when the content
    cannot be stringified at all.
    """
    enc = self.encoding
    if enc is None:
        enc = self._detected_encoding()
    decoded = None
    try:
        decoded = str(self.content, enc, errors='replace')
    except LookupError:
        # Unknown codec name: fall back to the default encoding.
        decoded = str(self.content, errors='replace')
    except (UnicodeError, TypeError):
        pass
    return decoded
|
''
def raise_for_status(self, allow_redirects=True):
    """Raise the stored error, or an HTTPError for 4xx/5xx codes.

    Redirect codes (3xx) raise only when *allow_redirects* is False.
    Returns None for successful status codes.
    """
    if self.error:
        raise self.error
    code = self.status_code
    label = None
    if (300 <= code < 400) and not allow_redirects:
        label = 'Redirection'
    elif 400 <= code < 500:
        label = 'Client Error'
    elif 500 <= code < 600:
        label = 'Server Error'
    if label is not None:
        http_error = HTTPError('%s %s' % (code, label))
        http_error.response = self
        raise http_error
|
''
def handle_401(self, r):
    """Retry a request that got a 401 using HTTP Digest authentication.

    Args:
        r: the 401 response whose ``www-authenticate`` header carries
           the Digest challenge.

    Returns:
        The response of the re-sent request (with *r* appended to its
        history); *r* unchanged when the challenge is not Digest; or
        None when the challenge cannot be answered (unsupported
        algorithm or qop).
    """
    s_auth = r.headers.get('www-authenticate', '')
    if ('digest' in s_auth.lower()):
        # NOTE(review): reset on every call, so nonce counting never
        # survives across challenges — preserved as-is.
        last_nonce = ''
        nonce_count = 0
        chal = parse_dict_header(s_auth.replace('Digest ', ''))
        realm = chal['realm']
        nonce = chal['nonce']
        qop = chal.get('qop')
        algorithm = chal.get('algorithm', 'MD5')
        opaque = chal.get('opaque', None)
        algorithm = algorithm.upper()
        # FIX: initialize H so an unsupported algorithm falls through to
        # the `H is None` check below; previously H was left unbound and
        # that check raised NameError instead of returning None.
        H = None
        if (algorithm == 'MD5'):
            def h(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.md5(x).hexdigest()
            H = h
        elif (algorithm == 'SHA'):
            def h(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.sha1(x).hexdigest()
            H = h
        # KD(secret, data) digest helper per RFC 2617.
        KD = (lambda s, d: H(('%s:%s' % (s, d))))
        if (H is None):
            return None
        entdig = None
        p_parsed = urlparse(r.request.url)
        path = p_parsed.path
        if p_parsed.query:
            path += ('?' + p_parsed.query)
        A1 = ('%s:%s:%s' % (self.username, realm, self.password))
        A2 = ('%s:%s' % (r.request.method, path))
        if (qop == 'auth'):
            if (nonce == last_nonce):
                nonce_count += 1
            else:
                nonce_count = 1
            last_nonce = nonce
            ncvalue = ('%08x' % nonce_count)
            # Client nonce built from count, server nonce, time and
            # random bytes, then truncated.
            s = str(nonce_count).encode('utf-8')
            s += nonce.encode('utf-8')
            s += time.ctime().encode('utf-8')
            s += randombytes(8)
            cnonce = hashlib.sha1(s).hexdigest()[:16]
            noncebit = ('%s:%s:%s:%s:%s' % (nonce, ncvalue, cnonce, qop, H(A2)))
            respdig = KD(H(A1), noncebit)
        elif (qop is None):
            respdig = KD(H(A1), ('%s:%s' % (nonce, H(A2))))
        else:
            # Unsupported qop (e.g. auth-int): give up.
            return None
        base = ('username="%s", realm="%s", nonce="%s", uri="%s", response="%s"' % (self.username, realm, nonce, path, respdig))
        if opaque:
            base += (', opaque="%s"' % opaque)
        if entdig:
            base += (', digest="%s"' % entdig)
        base += (', algorithm="%s"' % algorithm)
        if qop:
            base += (', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce))
        r.request.headers['Authorization'] = ('Digest %s' % base)
        # Re-send with the Authorization header attached and chain the
        # original 401 into the new response's history.
        r.request.send(anyway=True)
        _r = r.request.response
        _r.history.append(r)
        return _r
    return r
|
''
def request(self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None, timeout=None, allow_redirects=True, proxies=None, hooks=None, return_response=True, config=None, prefetch=False, verify=None, cert=None):
    """Build a Request with session defaults merged in and dispatch it.

    Returns:
        The Response after sending, or the unsent Request object when
        *return_response* is False.
    """
    method = str(method).upper()
    # Replace None collection arguments with fresh empty containers.
    cookies = ({} if (cookies is None) else cookies)
    data = ({} if (data is None) else data)
    files = ({} if (files is None) else files)
    headers = ({} if (headers is None) else headers)
    params = ({} if (params is None) else params)
    hooks = ({} if (hooks is None) else hooks)
    prefetch = (self.prefetch or prefetch)
    # Session-level hooks fill in any events the caller didn't supply.
    for (key, cb) in list(self.hooks.items()):
        hooks.setdefault(key, cb)
    # Normalize header values via header_expand (presumably expanding
    # non-string values into header strings — confirm against helper).
    if headers:
        for (k, v) in (list(headers.items()) or {}):
            headers[k] = header_expand(v)
    args = dict(method=method, url=url, data=data, params=params, headers=headers, cookies=cookies, files=files, auth=auth, hooks=hooks, timeout=timeout, allow_redirects=allow_redirects, proxies=proxies, config=config, verify=verify, cert=cert, _poolmanager=self.poolmanager)
    # Merge per-call arguments with session-level defaults.
    for attr in self.__attrs__:
        session_val = getattr(self, attr, None)
        local_val = args.get(attr)
        args[attr] = merge_kwargs(local_val, session_val)
    # The 'args' hook may rewrite the whole argument dict.
    args = dispatch_hook('args', args['hooks'], args)
    r = Request(**args)
    r.session = self
    if (not return_response):
        return r
    r.send(prefetch=prefetch)
    # Persist cookies set by the response back onto the session.
    self.cookies.update(r.response.cookies)
    return r.response
|
''
def get(self, url, **kwargs):
    """Send a GET request to *url*; redirects are followed by default."""
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return self.request('get', url, **kwargs)
|
''
def options(self, url, **kwargs):
    """Send an OPTIONS request to *url*; redirects are followed by default."""
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = True
    return self.request('options', url, **kwargs)
|
''
def head(self, url, **kwargs):
    """Send a HEAD request to *url*; redirects are NOT followed by default."""
    if 'allow_redirects' not in kwargs:
        kwargs['allow_redirects'] = False
    return self.request('head', url, **kwargs)
|
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.