desc (string, lengths 3-26.7k) | decl (string, lengths 11-7.89k) | bodies (string, lengths 8-553k)
---|---|---|
''
| def post(self, url, data=None, **kwargs):
| return self.request('post', url, data=data, **kwargs)
|
''
| def put(self, url, data=None, **kwargs):
| return self.request('put', url, data=data, **kwargs)
|
''
| def patch(self, url, data=None, **kwargs):
| return self.request('patch', url, data=data, **kwargs)
|
''
| def delete(self, url, **kwargs):
| return self.request('delete', url, **kwargs)
|
''
| def __init__(self, ns, parsers, multi=False, **kwargs):
| self.prompt = ('(%s) > ' % self.name)
self.ns = ns
self.parsers = parsers
self.payload = None
self.action_help = {}
self.action_types = {}
for parser in self.parsers:
for a in parser._actions:
self.action_help[a.dest] = a.help
self.action_types[a.dest] = a.type
self.multi = multi
self.payload = None
if (not ns.host):
ns.__dict__['host'] = ' '
if ((not hasattr(ns, 'mode')) or (not ns.mode)):
ns.__dict__['mode'] = self.modes[0]
for key in ns.__dict__:
if ((not ns.__dict__[key]) and hasattr(self, key)):
ns.__dict__[key] = getattr(self, key)
self.log = logging.getLogger(self.name)
if (not os.path.isdir('/current/down')):
self.logdir = 'logs'
if (not os.path.isdir(self.logdir)):
os.makedirs(self.logdir)
self.logbase = strftime('%Y-%m-%d-%H%M%S')
self.logbase = os.path.join(self.logdir, self.logbase)
hndl = logging.FileHandler((self.logbase + '.log'))
hndl.setLevel(logging.DEBUG)
self.log.addHandler(hndl)
logging.getLogger('fosho.requests.packages.urllib3.connectionpool').addHandler(hndl)
self.httplog = open((self.logbase + '_http.log'), 'w')
Session.__init__(self, **kwargs)
self._apply_settings()
Cmd.__init__(self)
|
''
| def cmdloop(self, **kwargs):
| try:
Cmd.cmdloop(self, **kwargs)
except ExitException as e:
return
except Exception as e:
self.log.error(('cmdloop caught error: %s' % e))
self.continue_prompt('A critical error occurred. Would you like to keep this shell open?')
self.cmdloop(**kwargs)
|
''
| @excmd
def do_touch(self, arg=None):
| (res, resp) = ([], None)
if (hasattr(self, 'target_url') and self.target_url):
resp = self.head(self.target_url)
res.append(('HEAD %s - %s' % (self.target_url, resp.status_code)))
for (key, val) in resp.headers.items():
res.append(('Header: %s -- %s' % (key, val)))
tres = self.touch(resp)
if (len(res) > 0):
for i in res:
self.log.info(('Touch result: %s' % i))
if (type(tres) == type([])):
for i in tres:
self.log.info(('Touch result: %s' % i))
elif tres:
self.log.error('Touch returned an error')
self.log.error(('Reason given: %s' % tres))
|
''
| @excmd
def do_probe(self, arg=None):
| res = self.probe()
if (not res):
self.log.info('Target is vulnerable. Safe to proceed.')
else:
self.log.error('Target appears not to be vulnerable.')
self.log.error(('Reason given: %s' % res))
|
''
| @excmd
def do_survey(self, arg=None):
| res = self.survey()
if (not res):
self.log.info('Survey complete.')
else:
self.log.error('Survey failed.')
self.log.error(('Reason given: %s' % res))
|
''
| @excmd
def do_exploit(self, arg=None):
| res = self.exploit()
if (not res):
self.log.info('Exploit complete. Got root?')
else:
self.log.error('Exploit failed')
self.log.error(('Reason given: %s' % res))
|
''
| @excmd
def do_clean(self, arg=None):
| res = self.clean()
if (not res):
self.log.info('Cleanup completed successfully.')
else:
self.log.error(('Cleanup failed: %s' % res))
|
''
| def do_show(self, args=None):
| if args:
args = args.strip()
try:
print ('%s = %s :: %s' % (args, self.ns.__dict__[args], self.action_help[args]))
except KeyError:
self.log.warning('Variable does not exist.')
else:
print 'Exploit variables'
print '========================='
for (key, val) in self.ns.__dict__.items():
if (key not in self.action_help):
self.action_help[key] = 'No help available'
if (key not in ['func', 'config', 'load']):
print (' %s = %s :: %s' % (key, val, self.action_help[key]))
|
''
| def do_set(self, args):
| if args:
args = split(args)
if (len(args) >= 2):
if (args[0] in self.ns.__dict__):
try:
self._update_var(args[0], args[1])
except ArgumentTypeError as e:
self.log.warning(('Setting %s failed: %s' % (args[0], e)))
else:
self.log.warning(('Variable %s does not exist' % args[0]))
else:
self.log.warning('Invalid command syntax. Please see help for usage.')
self._apply_settings()
|
''
| def do_guided(self, args=None):
| for cmd in self.excmds:
if (cmd in dir(self)):
res = raw_input(('About to execute %s. Continue, skip, interact, or quit? (C/s/i/q) ' % cmd))
if (res.lower() == 's'):
continue
elif (res.lower() == 'q'):
return
elif (res.lower() == 'i'):
self.cmdloop()
res = raw_input(('Done interacting, about to execute %s. Continue, skip, or quit? (C/s/q) ' % cmd))
if (res.lower() == 's'):
continue
if (res.lower() == 'q'):
return
elif getattr(self, ('do_' + cmd))():
return
elif getattr(self, ('do_' + cmd))():
return
res = raw_input('Finished executing commands. Quit or enter interactive mode? (Q/i) ')
if (res.lower() == 'i'):
self.cmdloop()
|
''
| def request(self, mthd, url, **kwargs):
| quiet = False
if ('quiet' in kwargs):
quiet = kwargs['quiet']
del kwargs['quiet']
if quiet:
l = logging.getLogger('fosho.requests.packages.urllib3.connectionpool')
old = l.getEffectiveLevel()
l.setLevel(logging.ERROR)
if (not self.multi):
url = (self.target + url)
self.log.debug(('Requesting %s %s with following provided settings: %s' % (mthd, url, kwargs)))
if self.ns.host:
if ('headers' not in kwargs):
kwargs['headers'] = {}
if ('host' not in kwargs['headers']):
kwargs['headers']['Host'] = self.host
kwargs['config'] = {}
httplog = StringIO()
stdout = sys.stdout
t = TeeStdout(httplog, self.debug)
try:
resp = Session.request(self, mthd, url, **kwargs)
except Exception as e:
self._restore_stdout(httplog, stdout)
if (('ignore_errors' in kwargs) and kwargs['ignore_errors']):
return
if (not quiet):
self.log.error(('Exception occurred during request: %s' % e))
else:
l.setLevel(old)
raise e
if quiet:
l.setLevel(old)
print ('body: %s' % repr(resp.content))
self._restore_stdout(httplog, stdout)
return resp
|
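The `quiet` branch above saves the urllib3 connection-pool logger's level, raises it to ERROR for the duration of the request, and restores it afterwards. A minimal standalone sketch of that save/restore pattern (`quiet_call` is a hypothetical helper, not part of the tool):

```python
import logging

# The connection-pool logger the tool silences when quiet=True.
pool_log = logging.getLogger('fosho.requests.packages.urllib3.connectionpool')

def quiet_call(fn, *args, **kwargs):
    """Run fn with connection-pool logging suppressed, then restore the level."""
    old = pool_log.getEffectiveLevel()
    pool_log.setLevel(logging.ERROR)
    try:
        return fn(*args, **kwargs)
    finally:
        pool_log.setLevel(old)
```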
''
| def prompt_for_settings(self, names):
| for name in names:
try:
res = getattr(self, ('_get_' + name))()
except AttributeError as e:
res = None
if (not hasattr(self.ns, name)):
self.ns.__dict__[name] = None
if ((not self.ask) and (not res)):
res = self.ns.__dict__[name]
if ((not res) and self.ask):
res = raw_input(('%s [%s]: ' % (name, self.ns.__dict__[name])))
if (not res):
continue
elif self.ask:
res2 = raw_input(('%s [%s]: ' % (name, res)))
if (res2.strip() != ''):
res = res2
self._update_var(name, res)
self._apply_settings()
|
''
| def continue_prompt(self, msg, default='n'):
| other = ('y' if (default == 'n') else 'n')
opts = '(y/n)'.replace(default, default.upper())
res = raw_input(('%s %s ' % (msg, opts)))
if (res.lower() != 'y'):
try:
self._do_finish()
except:
pass
sys.exit('Stopping exploit...')
|
''
| def get_etag(self, path):
| res = self.head(path)
return self._parse_etag(res.headers['etag'])
|
''
| def _parse_etag(self, etag):
| return (etag, 'Could not parse etag')
|
''
| def _apply_settings(self):
| for (key, val) in self.ns.__dict__.items():
if (key in ['quiet', 'debug']):
level = logging.DEBUG
if self.ns.quiet:
level = logging.INFO
self.log.setLevel(level)
logging.getLogger('fosho.requests.packages.urllib3.connectionpool').setLevel(level)
if (key == 'debug'):
self.debug = (val and (not self.ns.quiet))
elif (key not in ['config', 'func']):
setattr(self, key, val)
|
''
| def _update_var(self, key, val):
| if (val == 'False'):
val = False
cur = self.ns.__dict__[key]
if (cur == None):
cur = ''
t = type(self.ns.__dict__[key])
if ((key in self.action_types) and self.action_types[key]):
t = self.action_types[key]
try:
self.ns.__dict__[key] = t(val)
except TypeError:
self.ns.__dict__[key] = None
|
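`_update_var` casts the string typed at the `set` prompt with the argparse action type (falling back to the current value's type), special-casing the literal 'False'. A standalone sketch of that coercion with illustrative names and the same TypeError fallback:

```python
def coerce(value, current, action_type=None):
    # Mirror _update_var: 'False' is special-cased, everything else is cast.
    if value == 'False':
        return False
    t = action_type or type(current)
    try:
        return t(value)
    except TypeError:
        return None

print(coerce('8080', 80))      # -> 8080 (int)
print(coerce('False', True))   # -> False
```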
''
| @classmethod
def add_args(cur, cls):
| parser = ArgumentParser(prog=sys.argv[0], description=('%s %s - %s' % (cls.name, cls.version, cls.desc)))
subparsers = parser.add_subparsers(help='Exploit Commands')
if cls.interact:
inparse = subparsers.add_parser('interact', help='Run tool in interactive mode')
inparse.set_defaults(func=cls.cmdloop)
if cls.guided:
gdparse = subparsers.add_parser('guided', help='Run tool in guided mode')
gdparse.set_defaults(func=cls.do_guided)
if hasattr(cls, 'touch'):
tparse = subparsers.add_parser('touch', help='Touch target and return targeting information.')
tparse.set_defaults(func=cls.do_touch)
if hasattr(cls, 'probe'):
vparse = subparsers.add_parser('probe', help='Check if target is vulnerable.')
vparse.set_defaults(func=cls.do_probe)
if hasattr(cls, 'survey'):
iparse = subparsers.add_parser('survey', help='Gather useful information from target.')
iparse.add_argument('-w', '--outfile', type=str, help='File to save target information to. (default: out.tar)')
iparse.add_argument('-s', '--script', type=str, help='Survey script to run on server.')
iparse.set_defaults(func=cls.do_survey)
if hasattr(cls, 'exploit'):
eparse = subparsers.add_parser('exploit', help='Exploit target.')
eparse.add_argument('--mode', choices=cls.modes, help='Mode to use against target')
eparse.add_argument('-p', '--binpath', default=None, help='Path to tool being used.')
eparse.add_argument('-c', '--callback', type=is_target, help='Callback IP:Port for tool (Example: 127.0.0.1:12345)')
eparse.set_defaults(func=cls.do_exploit)
if hasattr(cls, 'clean'):
cparse = subparsers.add_parser('clean', help='Clean up after exploit.')
cparse.set_defaults(func=cls.do_clean)
ggroup = parser.add_argument_group('Generic Exploit Options')
ggroup.add_argument('--quiet', action='store_true', help='Disable verbose logging')
ggroup.add_argument('--debug', action='store_true', help='Enable debug output. (Warning: prepare for spam)')
ggroup.add_argument('-a', '--ask', action='store_true', help='Enable confirmation prompting before running commands.')
ggroup.add_argument('--color', action='store_true', help='Enable log output colors.')
ggroup.add_argument('-l', '--loadlast', action='store_true', help='Load last session used.')
ggroup.add_argument('-s', '--session', type=str, help='Use specified session file.')
ggroup.add_argument('-t', '--target', type=is_url, help='Target to exploit. (Ex: https://127.0.0.1:1234)')
hgroup = parser.add_argument_group('HTTP Options')
hgroup.add_argument('--timeout', type=int, help='Socket timeout')
hgroup.add_argument('--host', type=str, help='Host header to use (default: empty)')
return (parser, subparsers)
|
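`add_args` binds each subcommand to a handler via `set_defaults(func=...)`, so the caller only has to invoke `ns.func` after parsing. A self-contained sketch of that dispatch pattern (all names below are illustrative, not part of the tool):

```python
from argparse import ArgumentParser

def do_probe(ns):
    print('probe %s' % ns.target)

def do_exploit(ns):
    print('exploit %s in mode %s' % (ns.target, ns.mode))

parser = ArgumentParser(prog='tool')
parser.add_argument('-t', '--target', default='https://127.0.0.1:1234')
sub = parser.add_subparsers(help='Exploit Commands')

probe = sub.add_parser('probe', help='Check if target is vulnerable.')
probe.set_defaults(func=do_probe)
exploit = sub.add_parser('exploit', help='Exploit target.')
exploit.add_argument('--mode', choices=['default'], default='default')
exploit.set_defaults(func=do_exploit)

ns = parser.parse_args(['exploit'])
ns.func(ns)   # -> exploit https://127.0.0.1:1234 in mode default
```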
''
| def clean(self):
| self.log.info('Cleaning up /tmp/ ...')
self._run_cmd('rm -f /t*/cgi*')
self._run_cmd('rm -f /t*/.a')
self._run_cmd('rm -f /t*/.te*')
|
''
| def _get_cid(self):
| if self.cid:
self.log.info(('Already know cookie id: %s' % self.cid))
return self.cid
try:
cid = self.get(self.exploit_url).cookies.keys()[0]
self.log.info(('Detected cookie id: %s' % cid))
return cid
except IndexError:
self.log.warning("Could not reliably detect cookie. Using 'session_id'...")
return 'session_id'
|
''
| def value_decode(self, val):
| return (val, val)
|
''
| def value_encode(self, val):
| strval = str(val)
return (strval, strval)
|
''
| def __set(self, key, real_value, coded_value):
| M = self.get(key, Morsel())
M.set(key, real_value, coded_value)
dict.__setitem__(self, key, M)
|
''
| def __setitem__(self, key, value):
| (rval, cval) = self.value_encode(value)
self.__set(key, rval, cval)
|
''
| def output(self, attrs=None, header='Set-Cookie:', sep='\r\n'):
| result = []
items = self.items()
items.sort()
for (K, V) in items:
result.append(V.output(attrs, header))
return sep.join(result)
|
''
| def js_output(self, attrs=None):
| result = []
items = self.items()
items.sort()
for (K, V) in items:
result.append(V.js_output(attrs))
return _nulljoin(result)
|
''
| def load(self, rawdata):
| if (type(rawdata) == type('')):
self.__ParseString(rawdata)
else:
for (k, v) in rawdata.items():
self[k] = v
return
|
''
| def get(self, key, default=None, type=None):
| try:
rv = self[key]
if (type is not None):
rv = type(rv)
except (KeyError, ValueError):
rv = default
return rv
|
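The `get` above swallows both a missing key (KeyError) and a failed conversion (ValueError) and returns the default. A self-contained copy with usage assertions (`ConvDict` is an illustrative stand-in for the dict subclass these bodies belong to):

```python
class ConvDict(dict):
    def get(self, key, default=None, type=None):
        try:
            rv = self[key]
            if type is not None:
                rv = type(rv)
        except (KeyError, ValueError):
            rv = default
        return rv

d = ConvDict(page='2', q='shell')
assert d.get('page', type=int) == 2
assert d.get('q', type=int) is None      # int('shell') raises ValueError
assert d.get('missing', default=0) == 0
```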
''
| def __getitem__(self, key):
| if (key in self):
return dict.__getitem__(self, key)[0]
raise KeyError(key)
|
''
| def __setitem__(self, key, value):
| dict.__setitem__(self, key, [value])
|
''
| def add(self, key, value):
| dict.setdefault(self, key, []).append(value)
|
''
| def getlist(self, key, type=None):
| try:
rv = dict.__getitem__(self, key)
except KeyError:
return []
if (type is None):
return list(rv)
result = []
for item in rv:
try:
result.append(type(item))
except ValueError:
pass
return result
|
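These `add`, `__getitem__`, and `getlist` bodies store every key as a list of values and appear to mirror werkzeug's MultiDict. A short usage sketch against werkzeug itself (assumption: same semantics; requires the werkzeug package):

```python
from werkzeug.datastructures import MultiDict

md = MultiDict()
md.add('id', '1')
md.add('id', '2')
assert md['id'] == '1'                   # __getitem__ returns the first value
assert md.getlist('id') == ['1', '2']    # getlist returns every value
assert md.getlist('id', type=int) == [1, 2]
assert md.getlist('missing') == []       # unknown key -> empty list, not KeyError
```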
''
| def setlist(self, key, new_list):
| dict.__setitem__(self, key, list(new_list))
|
''
| def setdefault(self, key, default=None):
| if (key not in self):
self[key] = default
else:
default = self[key]
return default
|
''
| def setlistdefault(self, key, default_list=None):
| if (key not in self):
default_list = list((default_list or ()))
dict.__setitem__(self, key, default_list)
else:
default_list = dict.__getitem__(self, key)
return default_list
|
''
| def items(self, multi=False):
| return list(self.iteritems(multi))
|
''
| def lists(self):
| return list(self.iterlists())
|
''
| def values(self):
| return [self[key] for key in self.iterkeys()]
|
''
| def listvalues(self):
| return list(self.iterlistvalues())
|
''
| def iteritems(self, multi=False):
| for (key, values) in dict.iteritems(self):
if multi:
for value in values:
(yield (key, value))
else:
(yield (key, values[0]))
|
''
| def iterlists(self):
| for (key, values) in dict.iteritems(self):
(yield (key, list(values)))
|
''
| def itervalues(self):
| for values in dict.itervalues(self):
(yield values[0])
|
''
| def iterlistvalues(self):
| return dict.itervalues(self)
|
''
| def copy(self):
| return self.__class__(self)
|
''
| def to_dict(self, flat=True):
| if flat:
return dict(self.iteritems())
return dict(self.lists())
|
''
| def update(self, other_dict):
| for (key, value) in iter_multi_items(other_dict):
MultiDict.add(self, key, value)
|
''
| def pop(self, key, default=_missing):
| try:
return dict.pop(self, key)[0]
except KeyError as e:
if (default is not _missing):
return default
raise KeyError(str(e))
|
''
| def popitem(self):
| try:
item = dict.popitem(self)
return (item[0], item[1][0])
except KeyError as e:
raise KeyError(str(e))
|
''
| def poplist(self, key):
| return dict.pop(self, key, [])
|
''
| def popitemlist(self):
| try:
return dict.popitem(self)
except KeyError as e:
raise KeyError(str(e))
|
''
| def get_redirect_location(self):
| if (self.status in [301, 302, 303, 307]):
return self.headers.get('location')
return False
|
''
| def read(self, amt=None, decode_content=None, cache_content=False):
| content_encoding = self.headers.get('content-encoding')
decoder = self.CONTENT_DECODERS.get(content_encoding)
if (decode_content is None):
decode_content = self._decode_content
if (self._fp is None):
return
try:
if (amt is None):
data = self._fp.read()
else:
return self._fp.read(amt)
try:
if (decode_content and decoder):
data = decoder(data)
except IOError:
raise HTTPError(('Received response with content-encoding: %s, but failed to decode it.' % content_encoding))
if cache_content:
self._body = data
return data
finally:
if (self._original_response and self._original_response.isclosed()):
self.release_conn()
|
''
| @classmethod
def from_httplib(ResponseCls, r, **response_kw):
| headers = {}
for (k, v) in r.getheaders():
k = k.lower()
has_value = headers.get(k)
if has_value:
v = ', '.join([has_value, v])
headers[k] = v
strict = getattr(r, 'strict', 0)
return ResponseCls(body=r, headers=headers, status=r.status, version=r.version, reason=r.reason, strict=strict, original_response=r, **response_kw)
|
''
| def connection_from_host(self, host, port=80, scheme='http'):
| pool_key = (scheme, host, port)
pool = self.pools.get(pool_key)
if pool:
return pool
pool_cls = pool_classes_by_scheme[scheme]
pool = pool_cls(host, port, **self.connection_pool_kw)
self.pools[pool_key] = pool
return pool
|
''
| def connection_from_url(self, url):
| (scheme, host, port) = get_host(url)
port = (port or port_by_scheme.get(scheme, 80))
return self.connection_from_host(host, port=port, scheme=scheme)
|
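`connection_from_host` caches one pool per `(scheme, host, port)` key, and `connection_from_url` just normalizes the URL into that key. A standalone sketch of the caching behaviour with a dummy pool class (names are illustrative):

```python
class DummyPool(object):
    def __init__(self, host, port):
        self.host, self.port = host, port

pools = {}

def connection_from_host(host, port=80, scheme='http'):
    key = (scheme, host, port)
    if key not in pools:
        pools[key] = DummyPool(host, port)
    return pools[key]

# Same key -> same pool object; a different port gets its own pool.
assert connection_from_host('example.com') is connection_from_host('example.com')
assert connection_from_host('example.com', 8080) is not connection_from_host('example.com')
```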
''
| def urlopen(self, method, url, **kw):
| conn = self.connection_from_url(url)
try:
return conn.urlopen(method, url, **kw)
except HostChangedError as e:
kw['retries'] = e.retries
return self.urlopen(method, e.url, **kw)
|
''
| def urlopen(self, method, url, **kw):
| kw['assert_same_host'] = False
kw['headers'] = self._set_proxy_headers(kw.get('headers'))
return self.proxy_pool.urlopen(method, url, **kw)
|
''
| def _invalidate_entry(self, key):
| old_entry = self.access_lookup.get(key)
if old_entry:
old_entry.is_valid = False
return old_entry
|
''
| def _push_entry(self, key):
| self._invalidate_entry(key)
new_entry = AccessEntry(key)
self.access_lookup[key] = new_entry
self.access_log_lock.acquire()
self.access_log.appendleft(new_entry)
self.access_log_lock.release()
|
''
| def _prune_entries(self, num):
| while (num > 0):
self.access_log_lock.acquire()
p = self.access_log.pop()
self.access_log_lock.release()
if (not p.is_valid):
continue
dict.pop(self, p.key, None)
self.access_lookup.pop(p.key, None)
num -= 1
|
''
| def _prune_invalidated_entries(self):
| self.access_log_lock.acquire()
self.access_log = deque((e for e in self.access_log if e.is_valid))
self.access_log_lock.release()
|
''
| def _get_ordered_access_keys(self):
| self.access_log_lock.acquire()
r = [e.key for e in self.access_log if e.is_valid]
self.access_log_lock.release()
return r
|
''
| def _new_conn(self):
| self.num_connections += 1
log.debug(('Starting new HTTP connection (%d): %s' % (self.num_connections, self.host)))
return HTTPConnection(host=self.host, port=self.port)
|
''
| def _get_conn(self, timeout=None):
| conn = None
try:
conn = self.pool.get(block=self.block, timeout=timeout)
if (conn and is_connection_dropped(conn)):
log.info(('Resetting dropped connection: %s' % self.host))
conn.close()
except Empty:
if self.block:
raise EmptyPoolError(self, 'Pool reached maximum size and no more connections are allowed.')
pass
return (conn or self._new_conn())
|
''
| def _put_conn(self, conn):
| try:
self.pool.put(conn, block=False)
except Full:
log.warning(('HttpConnectionPool is full, discarding connection: %s' % self.host))
|
''
| def _make_request(self, conn, method, url, timeout=_Default, **httplib_request_kw):
| self.num_requests += 1
if (timeout is _Default):
timeout = self.timeout
conn.timeout = timeout
conn.request(method, url, **httplib_request_kw)
sock = getattr(conn, 'sock', False)
if sock:
sock.settimeout(timeout)
httplib_response = conn.getresponse()
log.debug(('"%s %s %s" %s %s' % (method, url, conn._http_vsn_str, httplib_response.status, httplib_response.length)))
return httplib_response
|
''
| def is_same_host(self, url):
| (scheme, host, port) = get_host(url)
if (self.port and (not port)):
port = port_by_scheme.get(scheme)
return (url.startswith('/') or ((scheme, host, port) == (self.scheme, self.host, self.port)))
|
''
| def urlopen(self, method, url, body=None, headers=None, retries=3, redirect=True, assert_same_host=True, timeout=_Default, pool_timeout=None, release_conn=None, **response_kw):
| if (headers is None):
headers = self.headers
if (retries < 0):
raise MaxRetryError(self, url)
if (timeout is _Default):
timeout = self.timeout
if (release_conn is None):
release_conn = response_kw.get('preload_content', True)
if (assert_same_host and (not self.is_same_host(url))):
host = ('%s://%s' % (self.scheme, self.host))
if self.port:
host = ('%s:%d' % (host, self.port))
raise HostChangedError(self, url, (retries - 1))
conn = None
try:
conn = self._get_conn(timeout=pool_timeout)
httplib_response = self._make_request(conn, method, url, timeout=timeout, body=body, headers=headers)
response_conn = ((not release_conn) and conn)
response = HTTPResponse.from_httplib(httplib_response, pool=self, connection=response_conn, **response_kw)
except Empty as e:
raise TimeoutError(self, ('Request timed out. (pool_timeout=%s)' % pool_timeout))
except SocketTimeout as e:
raise TimeoutError(self, ('Request timed out. (timeout=%s)' % timeout))
except BaseSSLError as e:
raise SSLError(e)
except CertificateError as e:
raise SSLError(e)
except (HTTPException, SocketError) as e:
conn = None
err = e
finally:
if (conn and release_conn):
self._put_conn(conn)
if (not conn):
log.warn(("Retrying (%d attempts remain) after connection broken by '%r': %s" % (retries, err, url)))
return self.urlopen(method, url, body, headers, (retries - 1), redirect, assert_same_host)
redirect_location = (redirect and response.get_redirect_location())
if redirect_location:
log.info(('Redirecting %s -> %s' % (url, redirect_location)))
return self.urlopen(method, redirect_location, body, headers, (retries - 1), redirect, assert_same_host)
return response
|
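`urlopen` above calls itself with `retries - 1` whenever the connection breaks or a redirect is followed, and raises `MaxRetryError` once the budget is exhausted. A control-flow sketch with the network replaced by a scripted list of outcomes (illustrative names only):

```python
class MaxRetryError(Exception):
    pass

def urlopen(outcomes, retries=3, redirect=True):
    if retries < 0:
        raise MaxRetryError('too many retries')
    result = outcomes.pop(0)
    if result == 'broken':                               # connection broke: retry
        return urlopen(outcomes, retries - 1, redirect)
    if result.startswith('redirect:') and redirect:      # follow the Location
        return urlopen(outcomes, retries - 1, redirect)
    return result

print(urlopen(['broken', 'redirect:/new', '200 OK']))    # -> 200 OK
```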
''
| def _new_conn(self):
| self.num_connections += 1
log.debug(('Starting new HTTPS connection (%d): %s' % (self.num_connections, self.host)))
if (not ssl):
if ((not HTTPSConnection) or (HTTPSConnection is object)):
raise SSLError("Can't connect to HTTPS URL because the SSL module is not available.")
return HTTPSConnection(host=self.host, port=self.port)
connection = VerifiedHTTPSConnection(host=self.host, port=self.port)
connection.set_cert(key_file=self.key_file, cert_file=self.cert_file, cert_reqs=self.cert_reqs, ca_certs=self.ca_certs)
return connection
|
''
| def request(self, method, url, fields=None, headers=None, **urlopen_kw):
| method = method.upper()
if (method in self._encode_url_methods):
return self.request_encode_url(method, url, fields=fields, headers=headers, **urlopen_kw)
else:
return self.request_encode_body(method, url, fields=fields, headers=headers, **urlopen_kw)
|
''
| def request_encode_url(self, method, url, fields=None, **urlopen_kw):
| if fields:
url += ('?' + urlencode(fields))
return self.urlopen(method, url, **urlopen_kw)
|
''
| def request_encode_body(self, method, url, fields=None, headers=None, encode_multipart=True, multipart_boundary=None, **urlopen_kw):
| if encode_multipart:
(body, content_type) = encode_multipart_formdata((fields or {}), boundary=multipart_boundary)
else:
(body, content_type) = (urlencode((fields or {})), 'application/x-www-form-urlencoded')
headers = (headers or {})
headers.update({'Content-Type': content_type})
return self.urlopen(method, url, body=body, headers=headers, **urlopen_kw)
|
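`request_encode_body` picks between two encodings: plain `application/x-www-form-urlencoded`, or (by default) multipart/form-data built with urllib3's `encode_multipart_formdata` helper. A hedged sketch of both branches (field values are placeholders; the multipart import assumes an installed urllib3):

```python
from urllib3.filepost import encode_multipart_formdata
try:
    from urllib.parse import urlencode   # Python 3
except ImportError:
    from urllib import urlencode         # Python 2

fields = {'user': 'admin', 'note': 'hello'}

# encode_multipart=False branch
body, content_type = urlencode(fields), 'application/x-www-form-urlencoded'

# encode_multipart=True branch (the default)
m_body, m_content_type = encode_multipart_formdata(fields)
print(content_type)
print(m_content_type)   # multipart/form-data; boundary=...
```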
''
| def _build_response(self, resp):
| def build(resp):
response = Response()
response.config = self.config
if resp:
response.status_code = getattr(resp, 'status', None)
response.headers = CaseInsensitiveDict(getattr(resp, 'headers', None))
response.encoding = get_encoding_from_headers(response.headers)
cookies = (self.cookies or dict())
if ('set-cookie' in response.headers):
cookie_header = response.headers['set-cookie']
cookies = dict_from_string(cookie_header)
response.cookies = cookies
response.error = getattr(resp, 'error', None)
response.raw = resp
if isinstance(self.full_url, bytes):
response.url = self.full_url.decode('utf-8')
else:
response.url = self.full_url
return response
history = []
r = build(resp)
self.cookies.update(r.cookies)
if ((r.status_code in REDIRECT_STATI) and (not self.redirect)):
while (('location' in r.headers) and ((r.status_code is codes.see_other) or self.allow_redirects)):
r.content
if (not (len(history) < self.config.get('max_redirects'))):
raise TooManyRedirects()
r.raw.release_conn()
history.append(r)
url = r.headers['location']
data = self.data
if url.startswith('//'):
parsed_rurl = urlparse(r.url)
url = ('%s:%s' % (parsed_rurl.scheme, url))
if (not urlparse(url).netloc):
url = urljoin(r.url, requote_uri(url))
if (r.status_code is codes.see_other):
method = 'GET'
data = None
else:
method = self.method
if (not self.config.get('strict_mode')):
if ((r.status_code in (codes.moved, codes.found)) and (self.method == 'POST')):
method = 'GET'
data = None
if ((r.status_code == 303) and (self.method != 'HEAD')):
method = 'GET'
data = None
headers = self.headers
try:
del headers['Cookie']
except KeyError:
pass
request = Request(url=url, headers=headers, files=self.files, method=method, params=self.session.params, auth=self.auth, cookies=self.cookies, redirect=True, data=data, config=self.config, timeout=self.timeout, _poolmanager=self._poolmanager, proxies=self.proxies, verify=self.verify, session=self.session, cert=self.cert)
request.send()
r = request.response
self.cookies.update(r.cookies)
r.history = history
self.response = r
self.response.request = self
self.response.cookies.update(self.cookies)
|
''
| @staticmethod
def _encode_params(data):
| if isinstance(data, bytes):
return (data, data)
if (hasattr(data, '__iter__') and (not isinstance(data, str))):
data = dict(data)
if hasattr(data, 'items'):
result = []
for (k, vs) in list(data.items()):
for v in ((isinstance(vs, list) and vs) or [vs]):
result.append(((k.encode('utf-8') if isinstance(k, str) else k), (v.encode('utf-8') if isinstance(v, str) else v)))
return (result, urlencode(result, doseq=True))
else:
return (data, data)
|
''
| @property
def full_url(self):
| if (not self.url):
raise URLRequired()
url = self.url
(scheme, netloc, path, params, query, fragment) = urlparse(url)
if (not scheme):
raise MissingSchema(('Invalid URL %r: No schema supplied' % url))
if (not (scheme in SCHEMAS)):
raise InvalidSchema(('Invalid scheme %r' % scheme))
netloc = netloc.encode('idna').decode('utf-8')
if (not path):
path = '/'
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode('utf-8')
if isinstance(netloc, str):
netloc = netloc.encode('utf-8')
if isinstance(path, str):
path = path.encode('utf-8')
if isinstance(params, str):
params = params.encode('utf-8')
if isinstance(query, str):
query = query.encode('utf-8')
if isinstance(fragment, str):
fragment = fragment.encode('utf-8')
url = urlunparse([scheme, netloc, path, params, query, fragment])
if self._enc_params:
if urlparse(url).query:
url = ('%s&%s' % (url, self._enc_params))
else:
url = ('%s?%s' % (url, self._enc_params))
if self.config.get('encode_uri', True):
url = requote_uri(url)
return url
|
''
| @property
def path_url(self):
| url = []
p = urlsplit(self.full_url)
if (p.scheme in self.proxies):
return self.full_url
path = p.path
if (not path):
path = '/'
url.append(path)
query = p.query
if query:
url.append('?')
url.append(query)
return ''.join(url)
|
''
| def register_hook(self, event, hook):
| return self.hooks[event].append(hook)
|
''
| def send(self, anyway=False, prefetch=False):
| url = self.full_url
if self.config.get('verbose'):
self.config.get('verbose').write(('%s %s %s\n' % (datetime.now().isoformat(), self.method, url)))
body = None
content_type = None
if self.files:
if (not isinstance(self.data, str)):
try:
fields = self.data.copy()
except AttributeError:
fields = dict(self.data)
for (k, v) in list(self.files.items()):
if isinstance(v, (tuple, list)):
(fn, fp) = v
else:
fn = (guess_filename(v) or k)
fp = v
fields.update({k: (fn, fp.read())})
(body, content_type) = encode_multipart_formdata(fields)
else:
pass
elif self.data:
body = self._enc_data
if isinstance(self.data, str):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
if (content_type and (not ('content-type' in self.headers))):
self.headers['Content-Type'] = content_type
if ((not self.auth) and self.config.get('trust_env')):
self.auth = get_netrc_auth(url)
if self.auth:
if (isinstance(self.auth, tuple) and (len(self.auth) == 2)):
self.auth = HTTPBasicAuth(*self.auth)
r = self.auth(self)
self.__dict__.update(r.__dict__)
_p = urlparse(url)
proxy = self.proxies.get(_p.scheme)
if proxy:
conn = poolmanager.proxy_from_url(proxy)
_proxy = urlparse(proxy)
if ('@' in _proxy.netloc):
(auth, url) = _proxy.netloc.split('@', 1)
self.proxy_auth = HTTPProxyAuth(*auth.split(':', 1))
r = self.proxy_auth(self)
self.__dict__.update(r.__dict__)
elif self.config.get('keep_alive'):
conn = self._poolmanager.connection_from_url(url)
else:
conn = connectionpool.connection_from_url(url)
if (url.startswith('https') and self.verify):
cert_loc = None
if (self.verify is not True):
cert_loc = self.verify
if ((not cert_loc) and self.config.get('trust_env')):
cert_loc = os.environ.get('REQUESTS_CA_BUNDLE')
if ((not cert_loc) and self.config.get('trust_env')):
cert_loc = os.environ.get('CURL_CA_BUNDLE')
conn.cert_reqs = 'CERT_REQUIRED'
conn.ca_certs = cert_loc
else:
conn.cert_reqs = 'CERT_NONE'
conn.ca_certs = None
if (self.cert and self.verify):
if (len(self.cert) == 2):
conn.cert_file = self.cert[0]
conn.key_file = self.cert[1]
else:
conn.cert_file = self.cert
if ((not self.sent) or anyway):
if self.cookies:
if ('cookie' not in self.headers):
c = SimpleCookie()
for (k, v) in list(self.cookies.items()):
c[k] = v
cookie_header = c.output(header='', sep='; ').strip()
self.headers['Cookie'] = cookie_header
r = dispatch_hook('pre_request', self.hooks, self)
self.__dict__.update(r.__dict__)
try:
try:
r = conn.urlopen(method=self.method, url=self.path_url, body=body, headers=self.headers, redirect=False, assert_same_host=False, preload_content=False, decode_content=False, retries=self.config.get('max_retries', 0), timeout=self.timeout)
self.sent = True
except MaxRetryError as e:
raise ConnectionError(e)
except (_SSLError, _HTTPError) as e:
if (self.verify and isinstance(e, _SSLError)):
raise SSLError(e)
raise Timeout('Request timed out.')
except RequestException as e:
if self.config.get('safe_mode', False):
r = HTTPResponse()
r.error = e
else:
raise
self._build_response(r)
self.response = dispatch_hook('response', self.hooks, self.response)
r = dispatch_hook('post_request', self.hooks, self)
self.__dict__.update(r.__dict__)
if prefetch:
self.response.content
if self.config.get('danger_mode'):
self.response.raise_for_status()
return self.sent
|
''
| def __bool__(self):
| return self.ok
|
''
| def __nonzero__(self):
| return self.ok
|
''
| def iter_content(self, chunk_size=(10 * 1024), decode_unicode=False):
| if self._content_consumed:
raise RuntimeError('The content for this response was already consumed')
def generate():
while 1:
chunk = self.raw.read(chunk_size)
if (not chunk):
break
(yield chunk)
self._content_consumed = True
gen = stream_untransfer(generate(), self)
if decode_unicode:
gen = stream_decode_response_unicode(gen, self)
return gen
|
''
| def iter_lines(self, chunk_size=(10 * 1024), decode_unicode=None):
| pending = None
for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
if (pending is not None):
chunk = (pending + chunk)
lines = chunk.splitlines()
if (lines[(-1)][(-1)] == chunk[(-1)]):
pending = lines.pop()
else:
pending = None
for line in lines:
(yield line)
if (pending is not None):
(yield pending)
|
''
| @property
def content(self):
| if (self._content is None):
try:
if self._content_consumed:
raise RuntimeError('The content for this response was already consumed')
if (self.status_code is 0):
self._content = None
else:
self._content = (bytes().join(self.iter_content()) or bytes())
except AttributeError:
self._content = None
self._content_consumed = True
return self._content
|
''
| @property
def text(self):
| content = None
encoding = self.encoding
if (self.encoding is None):
encoding = self._detected_encoding()
try:
content = str(self.content, encoding, errors='replace')
except LookupError:
content = str(self.content, errors='replace')
except (UnicodeError, TypeError):
pass
return content
|
''
| def raise_for_status(self, allow_redirects=True):
| if self.error:
raise self.error
if ((self.status_code >= 300) and (self.status_code < 400) and (not allow_redirects)):
http_error = HTTPError(('%s Redirection' % self.status_code))
http_error.response = self
raise http_error
elif ((self.status_code >= 400) and (self.status_code < 500)):
http_error = HTTPError(('%s Client Error' % self.status_code))
http_error.response = self
raise http_error
elif ((self.status_code >= 500) and (self.status_code < 600)):
http_error = HTTPError(('%s Server Error' % self.status_code))
http_error.response = self
raise http_error
|
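`raise_for_status` converts 4xx/5xx codes into an `HTTPError` with the response attached, and only treats 3xx as an error when redirects are disallowed. A usage sketch against the modern requests API, which dropped the `allow_redirects` argument but keeps the same idea (the response here is hand-built, so no network is needed):

```python
import requests

resp = requests.Response()
resp.status_code = 404
resp.reason = 'Not Found'
resp.url = 'https://example.com/missing'
try:
    resp.raise_for_status()
except requests.HTTPError as e:
    print('request failed: %s' % e)   # 404 Client Error: Not Found for url: ...
```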
''
| def handle_401(self, r):
| s_auth = r.headers.get('www-authenticate', '')
if ('digest' in s_auth.lower()):
last_nonce = ''
nonce_count = 0
chal = parse_dict_header(s_auth.replace('Digest ', ''))
realm = chal['realm']
nonce = chal['nonce']
qop = chal.get('qop')
algorithm = chal.get('algorithm', 'MD5')
opaque = chal.get('opaque', None)
algorithm = algorithm.upper()
if (algorithm == 'MD5'):
def h(x):
if isinstance(x, str):
x = x.encode('utf-8')
return hashlib.md5(x).hexdigest()
H = h
elif (algorithm == 'SHA'):
def h(x):
if isinstance(x, str):
x = x.encode('utf-8')
return hashlib.sha1(x).hexdigest()
H = h
KD = (lambda s, d: H(('%s:%s' % (s, d))))
if (H is None):
return None
entdig = None
p_parsed = urlparse(r.request.url)
path = p_parsed.path
if p_parsed.query:
path += ('?' + p_parsed.query)
A1 = ('%s:%s:%s' % (self.username, realm, self.password))
A2 = ('%s:%s' % (r.request.method, path))
if (qop == 'auth'):
if (nonce == last_nonce):
nonce_count += 1
else:
nonce_count = 1
last_nonce = nonce
ncvalue = ('%08x' % nonce_count)
s = str(nonce_count).encode('utf-8')
s += nonce.encode('utf-8')
s += time.ctime().encode('utf-8')
s += randombytes(8)
cnonce = hashlib.sha1(s).hexdigest()[:16]
noncebit = ('%s:%s:%s:%s:%s' % (nonce, ncvalue, cnonce, qop, H(A2)))
respdig = KD(H(A1), noncebit)
elif (qop is None):
respdig = KD(H(A1), ('%s:%s' % (nonce, H(A2))))
else:
return None
base = ('username="%s", realm="%s", nonce="%s", uri="%s", response="%s"' % (self.username, realm, nonce, path, respdig))
if opaque:
base += (', opaque="%s"' % opaque)
if entdig:
base += (', digest="%s"' % entdig)
base += (', algorithm="%s"' % algorithm)
if qop:
base += (', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce))
r.request.headers['Authorization'] = ('Digest %s' % base)
r.request.send(anyway=True)
_r = r.request.response
_r.history.append(r)
return _r
return r
|
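Worked sketch of the RFC 2617 digest computation `handle_401` performs for `qop="auth"` and `algorithm=MD5`; the challenge values below are made up for illustration:

```python
import hashlib

def H(x):
    return hashlib.md5(x.encode('utf-8')).hexdigest()

def KD(secret, data):
    return H('%s:%s' % (secret, data))

username, password = 'user', 'secret'
realm, nonce, ncvalue, cnonce = 'testrealm', 'abc123', '00000001', 'deadbeefcafe0123'
method, path = 'GET', '/protected?x=1'

A1 = '%s:%s:%s' % (username, realm, password)
A2 = '%s:%s' % (method, path)
response = KD(H(A1), '%s:%s:%s:%s:%s' % (nonce, ncvalue, cnonce, 'auth', H(A2)))
print('Digest response=%s' % response)
```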
''
| def request(self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None, timeout=None, allow_redirects=True, proxies=None, hooks=None, return_response=True, config=None, prefetch=False, verify=None, cert=None):
| method = str(method).upper()
cookies = ({} if (cookies is None) else cookies)
data = ({} if (data is None) else data)
files = ({} if (files is None) else files)
headers = ({} if (headers is None) else headers)
params = ({} if (params is None) else params)
hooks = ({} if (hooks is None) else hooks)
prefetch = (self.prefetch or prefetch)
for (key, cb) in list(self.hooks.items()):
hooks.setdefault(key, cb)
if headers:
for (k, v) in (list(headers.items()) or {}):
headers[k] = header_expand(v)
args = dict(method=method, url=url, data=data, params=params, headers=headers, cookies=cookies, files=files, auth=auth, hooks=hooks, timeout=timeout, allow_redirects=allow_redirects, proxies=proxies, config=config, verify=verify, cert=cert, _poolmanager=self.poolmanager)
for attr in self.__attrs__:
session_val = getattr(self, attr, None)
local_val = args.get(attr)
args[attr] = merge_kwargs(local_val, session_val)
args = dispatch_hook('args', args['hooks'], args)
r = Request(**args)
r.session = self
if (not return_response):
return r
r.send(prefetch=prefetch)
self.cookies.update(r.response.cookies)
return r.response
|
''
| def get(self, url, **kwargs):
| kwargs.setdefault('allow_redirects', True)
return self.request('get', url, **kwargs)
|
''
| def options(self, url, **kwargs):
| kwargs.setdefault('allow_redirects', True)
return self.request('options', url, **kwargs)
|
''
| def head(self, url, **kwargs):
| kwargs.setdefault('allow_redirects', False)
return self.request('head', url, **kwargs)
|