desc
stringlengths 3
26.7k
| decl
stringlengths 11
7.89k
| bodies
stringlengths 8
553k
|
---|---|---|
'Test Management Association Request Ssid getter/setter methods'
def test_12(self):
    """Association Request SSID getter/setter round-trip and header growth."""
    frame = self.management_association_request
    self.assertEqual(frame.get_ssid(), 'ddwrt')
    frame.set_ssid('holala')
    self.assertEqual(frame.get_ssid(), 'holala')
    # 'holala' is one byte longer than 'ddwrt', so the header grows by 1
    self.assertEqual(frame.get_header_size(), (68 + 1))
|
'Test Management Association Request Supported_rates getter/setter methods'
def test_13(self):
    """Supported-rates getter/setter, both raw bytes and human-readable Mbps."""
    frame = self.management_association_request
    self.assertEqual(frame.get_supported_rates(), (130, 132, 139, 150, 36, 48, 72, 108))
    self.assertEqual(frame.get_supported_rates(human_readable=True),
                     (1.0, 2.0, 5.5, 11.0, 18.0, 24.0, 36.0, 54.0))
    new_rates = (18, 152, 36, 176, 72, 96)
    frame.set_supported_rates(new_rates)
    self.assertEqual(frame.get_supported_rates(), new_rates)
    self.assertEqual(frame.get_supported_rates(human_readable=True),
                     (9.0, 12.0, 18.0, 24.0, 36.0, 48.0))
    # two fewer rate bytes than the original eight -> header shrinks by 2
    self.assertEqual(frame.get_header_size(), (68 - 2))
|
'Test Management Association Request RSN getter/setter methods'
def test_14(self):
    """RSN information element getter/setter round-trip (same length -> same size)."""
    frame = self.management_association_request
    old_rsn = '\x01\x00\x00\x0f\xac\x04\x01\x00\x00\x0f\xac\x04\x01\x00\x00\x0f\xac\x02\x08\x00'
    new_rsn = '\xff\x00\x00\x0f\xac\x04\x01\x00\x00\x0f\xac\x04\x01\x00\x00\x0f\xac\x02\x08\xff'
    self.assertEqual(frame.get_rsn(), old_rsn)
    frame.set_rsn(new_rsn)
    self.assertEqual(frame.get_rsn(), new_rsn)
    # replacement RSN has the same length, so the header size is unchanged
    self.assertEqual(frame.get_header_size(), 68)
|
'Test Management Vendor Specific getter/setter methods'
def test_15(self):
    """Vendor-specific IE list getter and add_vendor_specific growth."""
    frame = self.management_association_request
    self.assertEqual(frame.get_vendor_specific(),
                     [('\x00\x10\x18', '\x02\x00\x10\x00\x00\x00')])
    frame.add_vendor_specific('\x00\x00@', '\x04\x04\x04\x04\x04\x04')
    self.assertEqual(frame.get_vendor_specific(),
                     [('\x00\x10\x18', '\x02\x00\x10\x00\x00\x00'),
                      ('\x00\x00@', '\x04\x04\x04\x04\x04\x04')])
    # new IE adds 2 (tag+len) + 3 (OUI) + 6 (payload) = 11 bytes
    self.assertEqual(frame.get_header_size(), (68 + 11))
|
'Dumps the list of users and shares registered present at
remoteName. remoteName is a valid host name or IP address.'
def dump(self, remoteName, remoteHost):
    """Dump the list of users registered at remoteName via MSRPC/SAMR.

    remoteName -- host name or IP used to build the string binding
    remoteHost -- address actually connected to
    Prints one record per user (CSV when self.__csvOutput, otherwise
    key/value lines) and logs a summary count.
    """
    entries = []
    logging.info(('Retrieving endpoint list from %s' % remoteName))
    # SAMR is reached over the \pipe\samr named pipe (SMB transport)
    stringbinding = ('ncacn_np:%s[\\pipe\\samr]' % remoteName)
    logging.debug(('StringBinding %s' % stringbinding))
    rpctransport = transport.DCERPCTransportFactory(stringbinding)
    rpctransport.set_dport(self.__port)
    rpctransport.setRemoteHost(remoteHost)
    if hasattr(rpctransport, 'preferred_dialect'):
        # force a specific SMB dialect when the transport supports it
        rpctransport.preferred_dialect(SMB_DIALECT)
    if hasattr(rpctransport, 'set_credentials'):
        # SMB-based transports take full credentials (incl. hashes / Kerberos)
        rpctransport.set_credentials(self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash, self.__aesKey)
        rpctransport.set_kerberos(self.__doKerberos, self.__kdcHost)
    try:
        entries = self.__fetchList(rpctransport)
    except Exception as e:
        # best-effort: log and fall through with whatever we got
        logging.critical(str(e))
    if (self.__csvOutput is True):
        print '#Name,RID,FullName,PrimaryGroupId,BadPasswordCount,LogonCount,PasswordLastSet,PasswordDoesNotExpire,AccountIsDisabled,UserComment,ScriptPath'
    for entry in entries:
        (username, uid, user) = entry
        # PasswordLastSet is a 64-bit FILETIME split into high/low 32-bit parts
        pwdLastSet = ((user['PasswordLastSet']['HighPart'] << 32) + user['PasswordLastSet']['LowPart'])
        if (pwdLastSet == 0):
            pwdLastSet = '<never>'
        else:
            pwdLastSet = str(datetime.fromtimestamp(self.getUnixTime(pwdLastSet)))
        # decode the relevant UserAccountControl flag bits
        if (user['UserAccountControl'] & samr.USER_DONT_EXPIRE_PASSWORD):
            dontExpire = 'True'
        else:
            dontExpire = 'False'
        if (user['UserAccountControl'] & samr.USER_ACCOUNT_DISABLED):
            accountDisabled = 'True'
        else:
            accountDisabled = 'False'
        if (self.__csvOutput is True):
            # commas inside UserComment would break the CSV row
            print ('%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s' % (username, uid, user['FullName'], user['PrimaryGroupId'], user['BadPasswordCount'], user['LogonCount'], pwdLastSet, dontExpire, accountDisabled, user['UserComment'].replace(',', '.'), user['ScriptPath']))
        else:
            base = ('%s (%d)' % (username, uid))
            print (base + '/FullName:'), user['FullName']
            print (base + '/UserComment:'), user['UserComment']
            print (base + '/PrimaryGroupId:'), user['PrimaryGroupId']
            print (base + '/BadPasswordCount:'), user['BadPasswordCount']
            print (base + '/LogonCount:'), user['LogonCount']
            print (base + '/PasswordLastSet:'), pwdLastSet
            print (base + '/PasswordDoesNotExpire:'), dontExpire
            print (base + '/AccountIsDisabled:'), accountDisabled
            print (base + '/ScriptPath:'), user['ScriptPath']
    if entries:
        num = len(entries)
        if (1 == num):
            logging.info('Received one entry.')
        else:
            logging.info(('Received %d entries.' % num))
    else:
        logging.info('No entries received.')
|
'Dumps the list of endpoints registered with the mapper
listening at addr. remoteName is a valid host name or IP
address in string format.'
def dump(self, remoteName, remoteHost):
    """Dump the endpoints registered with the DCERPC endpoint mapper.

    remoteName -- host name or IP used to build the string binding
    remoteHost -- address actually connected to
    Groups the retrieved bindings by interface UUID and prints one
    Protocol/Provider/UUID/Bindings record per interface.
    """
    logging.info(('Retrieving endpoint list from %s' % remoteName))
    entries = []
    # binding string template depends on the transport chosen for this port
    stringbinding = (self.KNOWN_PROTOCOLS[self.__port]['bindstr'] % remoteName)
    logging.debug(('StringBinding %s' % stringbinding))
    rpctransport = transport.DCERPCTransportFactory(stringbinding)
    rpctransport.set_dport(self.__port)
    if self.KNOWN_PROTOCOLS[self.__port]['set_host']:
        rpctransport.setRemoteHost(remoteHost)
    if hasattr(rpctransport, 'set_credentials'):
        # SMB-based transports accept credentials (incl. pass-the-hash)
        rpctransport.set_credentials(self.__username, self.__password, self.__domain, self.__lmhash, self.__nthash)
    try:
        entries = self.__fetchList(rpctransport)
    except Exception as e:
        logging.critical(('Protocol failed: %s' % e))
    # aggregate bindings per interface UUID
    endpoints = {}
    for entry in entries:
        binding = epm.PrintStringBinding(entry['tower']['Floors'], rpctransport.getRemoteHost())
        tmpUUID = str(entry['tower']['Floors'][0])
        if (endpoints.has_key(tmpUUID) is not True):
            endpoints[tmpUUID] = {}
            endpoints[tmpUUID]['Bindings'] = list()
            # first 18 bytes of the binary UUID identify the hosting executable
            if epm.KNOWN_UUIDS.has_key(uuid.uuidtup_to_bin(uuid.string_to_uuidtup(tmpUUID))[:18]):
                endpoints[tmpUUID]['EXE'] = epm.KNOWN_UUIDS[uuid.uuidtup_to_bin(uuid.string_to_uuidtup(tmpUUID))[:18]]
            else:
                endpoints[tmpUUID]['EXE'] = 'N/A'
            # drop the trailing NUL from the annotation string
            endpoints[tmpUUID]['annotation'] = entry['annotation'][:(-1)]
        endpoints[tmpUUID]['Bindings'].append(binding)
        # first 36 chars are the textual UUID without the version suffix
        if epm.KNOWN_PROTOCOLS.has_key(tmpUUID[:36]):
            endpoints[tmpUUID]['Protocol'] = epm.KNOWN_PROTOCOLS[tmpUUID[:36]]
        else:
            endpoints[tmpUUID]['Protocol'] = 'N/A'
    for endpoint in endpoints.keys():
        print ('Protocol: %s ' % endpoints[endpoint]['Protocol'])
        print ('Provider: %s ' % endpoints[endpoint]['EXE'])
        print ('UUID : %s %s' % (endpoint, endpoints[endpoint]['annotation']))
        print 'Bindings: '
        for binding in endpoints[endpoint]['Bindings']:
            print (' %s' % binding)
        print ''
    if entries:
        num = len(entries)
        if (1 == num):
            logging.info('Received one endpoint.')
        else:
            logging.info(('Received %d endpoints.' % num))
    else:
        logging.info('No endpoints found.')
|
'Returns a string representation of the final result of this test or None if no response was received'
def get_final_result(self):
    """Return the final result of this test, or None if no response was received.

    Base implementation: no result is available.
    """
    return None
|
'Returns a string representation of the final result of this test or None if no response was received'
| def get_final_result(self):
| return {self.test_id(): self.get_result_dict()}
|
'This constructor takes two tuples, one for each peer. The first
element in each tuple is the IP address as a string, and the
second is the port as an integer.'
def __init__(self, p1, p2):
    """Record the two peers.

    Each peer is a tuple: (ip_address_string, port_int).
    """
    self.p1, self.p2 = p1, p2
|
'Utility function that returns a filename composed by the IP
addresses and ports of both peers.'
def getFilename(self):
    """Return a pcap filename built from both peers' addresses and ports."""
    src_ip, src_port = self.p1
    dst_ip, dst_port = self.p2
    return '%s.%d-%s.%d.pcap' % (src_ip, src_port, dst_ip, dst_port)
|
'Handles an incoming pcap packet. This method only knows how
to recognize TCP/IP connections.
Be sure that only TCP packets are passed onto this handler (or
fix the code to ignore the others).
Setting r"ip proto cp" as part of the pcap filter expression
suffices, and there shouldn\'t be any problem combining that with
other expressions.'
def packetHandler(self, hdr, data):
    """Handle one captured packet: route it to a per-connection pcap dumper.

    Only understands TCP/IP; the pcap filter must already exclude
    non-TCP traffic. A new dumper file is opened the first time a
    connection (src/dst pair) is seen.
    """
    p = self.decoder.decode(data)
    ip = p.child()
    tcp = ip.child()
    src = (ip.get_ip_src(), tcp.get_th_sport())
    dst = (ip.get_ip_dst(), tcp.get_th_dport())
    # Connection presumably hashes/compares so that both directions map
    # to the same key -- TODO confirm in the Connection class
    con = Connection(src, dst)
    if (not self.connections.has_key(con)):
        fn = con.getFilename()
        print 'Found a new connection, storing into:', fn
        try:
            dumper = self.pcap.dump_open(fn)
        except pcapy.PcapError as e:
            # cannot create the dump file: drop this packet silently
            print "Can't write packet to:", fn
            return
        self.connections[con] = dumper
    self.connections[con].dump(hdr, data)
|
'Helper function to fetch values from owning section.
Returns a 2-tuple: the value, and the section where it was found.'
def _fetch(self, key):
    """Look *key* up in the owning section, its DEFAULT block, then parents.

    Interpolation is switched off for the duration of the search so the
    raw stored value is returned.  Returns (value, section_found_in) or
    raises MissingInterpolationOption when nothing matches.
    """
    main = self.section.main
    saved_interpolation = main.interpolation
    main.interpolation = False
    section = self.section
    while True:
        val = section.get(key)
        if val is not None:
            break
        # fall back to the section's DEFAULT block, if any
        val = section.get('DEFAULT', {}).get(key)
        if val is not None:
            break
        # the root section is its own parent: stop there
        if section.parent is section:
            break
        section = section.parent
    main.interpolation = saved_interpolation
    if val is None:
        raise MissingInterpolationOption(key)
    return (val, section)
|
'Implementation-dependent helper function.
Will be passed a match object corresponding to the interpolation
key we just found (e.g., "%(foo)s" or "$foo"). Should look up that
key in the appropriate config file section (using the ``_fetch()``
helper function) and return a 3-tuple: (key, value, section)
``key`` is the name of the key we\'re looking for
``value`` is the value found for that key
``section`` is a reference to the section where it was found
``key`` and ``section`` should be None if no further
interpolation should be performed on the resulting value
(e.g., if we interpolated "$$" and returned "$").'
def _parse_match(self, match):
    """Resolve one interpolation match (abstract).

    Given a regex match for an interpolation key (e.g. "%(foo)s" or
    "$foo"), subclasses must look the key up (via ``_fetch()``) and
    return (key, value, section); key/section are None when no further
    interpolation should be applied.  This base method must be overridden.
    """
    raise NotImplementedError()
|
'* parent is the section above
* depth is the depth level of this section
* main is the main ConfigObj
* indict is a dictionary to initialise the section with'
def __init__(self, parent, depth, main, indict=None, name=None):
    """Create a Section.

    parent -- the section above this one
    depth  -- nesting level of this section
    main   -- the owning ConfigObj
    indict -- optional mapping of initial entries
    name   -- optional section name
    """
    if indict is None:
        indict = {}
    dict.__init__(self)
    self.parent, self.main = parent, main
    self.depth, self.name = depth, name
    self._initialise()
    # route every initial entry through our __setitem__ (py2 iteritems)
    for (entry, value) in indict.iteritems():
        self[entry] = value
|
'Fetch the item and do string interpolation.'
def __getitem__(self, key):
    """Fetch the stored value, applying string interpolation when enabled."""
    raw = dict.__getitem__(self, key)
    wants_interpolation = self.main.interpolation and isinstance(raw, basestring)
    if wants_interpolation:
        return self._interpolate(key, raw)
    return raw
|
'Correctly set a value.
Making dictionary values Section instances.
(We have to special case \'Section\' instances - which are also dicts)
Keys must be strings.
Values need only be strings (or lists of strings) if
``main.stringify`` is set.
``unrepr`` must be set when setting a value to a dictionary, without
creating a new sub-section.'
def __setitem__(self, key, value, unrepr=False):
    """Correctly set a value, turning plain dict values into Sections.

    Keys must be strings.  Unless ``main.stringify`` is set, scalar
    values must be strings (or lists/tuples of strings).  ``unrepr``
    suppresses the dict-to-Section conversion.
    """
    if (not isinstance(key, basestring)):
        raise ValueError(('The key "%s" is not a string.' % key))
    # ensure comment slots exist for this key
    if (not self.comments.has_key(key)):
        self.comments[key] = []
        self.inline_comments[key] = ''
    # an explicit assignment overrides a configspec default
    if (key in self.defaults):
        self.defaults.remove(key)
    if isinstance(value, Section):
        if (not self.has_key(key)):
            self.sections.append(key)
        dict.__setitem__(self, key, value)
    elif (isinstance(value, dict) and (not unrepr)):
        # wrap a plain dict in a new child Section one level deeper
        if (not self.has_key(key)):
            self.sections.append(key)
        new_depth = (self.depth + 1)
        dict.__setitem__(self, key, Section(self, new_depth, self.main, indict=value, name=key))
    else:
        if (not self.has_key(key)):
            self.scalars.append(key)
        if (not self.main.stringify):
            # strict mode: only strings (or sequences of strings) allowed
            if isinstance(value, basestring):
                pass
            elif isinstance(value, (list, tuple)):
                for entry in value:
                    if (not isinstance(entry, basestring)):
                        raise TypeError(('Value is not a string "%s".' % entry))
            else:
                raise TypeError(('Value is not a string "%s".' % value))
        dict.__setitem__(self, key, value)
|
'Remove items from the sequence when deleting.'
def __delitem__(self, key):
    """Delete an entry, keeping the scalars/sections orderings in sync."""
    # choose which ordering list the key lives in before mutating anything
    order = self.scalars if key in self.scalars else self.sections
    dict.__delitem__(self, key)
    order.remove(key)
    del self.comments[key]
    del self.inline_comments[key]
|
'A version of ``get`` that doesn\'t bypass string interpolation.'
def get(self, key, default=None):
    """Like ``dict.get``, but goes through ``__getitem__`` so string
    interpolation still applies."""
    try:
        value = self[key]
    except KeyError:
        value = default
    return value
|
'A version of update that uses our ``__setitem__``.'
def update(self, indict):
    """A version of ``update`` that routes through our ``__setitem__``."""
    for entry in indict:
        value = indict[entry]
        self[entry] = value
|
'\'D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised\''
def pop(self, key, *args):
    """Remove *key* and return its value (or the supplied default).

    Ordering lists and comments are cleaned up; the returned value is
    interpolated when interpolation is enabled.
    """
    val = dict.pop(self, key, *args)
    # drop bookkeeping for the key, whichever list it lived in
    for order in (self.scalars, self.sections):
        if key in order:
            del self.comments[key]
            del self.inline_comments[key]
            order.remove(key)
            break
    if self.main.interpolation and isinstance(val, basestring):
        return self._interpolate(key, val)
    return val
|
'Pops the first (key,val)'
def popitem(self):
    """Pop and return the first (key, value) pair in section order."""
    ordered = self.scalars + self.sections
    if not ordered:
        raise KeyError(": 'popitem(): dictionary is empty'")
    first = ordered[0]
    value = self[first]
    del self[first]
    return (first, value)
|
'A version of clear that also affects scalars/sections
Also clears comments and configspec.
Leaves other attributes alone :
depth/main/parent are not affected'
def clear(self):
    """Empty the section: entries, orderings, comments and configspec.

    depth/main/parent are deliberately left untouched.
    """
    dict.clear(self)
    self.scalars, self.sections = [], []
    self.comments, self.inline_comments = {}, {}
    self.configspec = None
|
'A version of setdefault that sets sequence if appropriate.'
def setdefault(self, key, default=None):
    """A ``setdefault`` that goes through our ``__setitem__`` so the
    ordering lists stay correct."""
    try:
        return self[key]
    except KeyError:
        # not present yet: store the default, then re-read through
        # __getitem__ so interpolation applies
        self[key] = default
    return self[key]
|
'D.items() -> list of D\'s (key, value) pairs, as 2-tuples'
def items(self):
    """Return (key, value) pairs in scalars-then-sections order."""
    ordered_keys = self.scalars + self.sections
    return zip(ordered_keys, self.values())
|
'D.keys() -> list of D\'s keys'
def keys(self):
    """Return all keys: scalars first, then sections."""
    ordered = self.scalars + self.sections
    return ordered
|
'D.values() -> list of D\'s values'
def values(self):
    """Return the values, ordered scalars first then sections."""
    ordered = self.scalars + self.sections
    return [self[k] for k in ordered]
|
'D.iteritems() -> an iterator over the (key, value) items of D'
def iteritems(self):
    """Return an iterator over the (key, value) pairs of this section."""
    pairs = self.items()
    return iter(pairs)
|
'D.iterkeys() -> an iterator over the keys of D'
def iterkeys(self):
    """Return an iterator over the keys, scalars first then sections."""
    ordered = self.scalars + self.sections
    return iter(ordered)
|
'D.itervalues() -> an iterator over the values of D'
def itervalues(self):
    """Return an iterator over the values of this section."""
    vals = self.values()
    return iter(vals)
|
'x.__repr__() <==> repr(x)'
def __repr__(self):
    """Render as a dict-style literal, scalars first then sections."""
    parts = []
    for key in (self.scalars + self.sections):
        parts.append('%s: %s' % (repr(key), repr(self[key])))
    return '{%s}' % ', '.join(parts)
|
'Return a deepcopy of self as a dictionary.
All members that are ``Section`` instances are recursively turned to
ordinary dictionaries - by calling their ``dict`` method.
>>> n = a.dict()
>>> n == a
1
>>> n is a
0'
def dict(self):
    """Return a deep copy of this section as a plain dictionary.

    Nested ``Section`` members are recursively converted; lists and
    tuples are shallow-copied so the caller cannot mutate ours.
    """
    result = {}
    for entry in self:
        value = self[entry]
        if isinstance(value, Section):
            value = value.dict()
        elif isinstance(value, list):
            value = list(value)
        elif isinstance(value, tuple):
            value = tuple(value)
        result[entry] = value
    return result
|
'A recursive update - useful for merging config files.
>>> a = \'\'\'[section1]
... option1 = True
... [[subsection]]
... more_options = False
... # end of file\'\'\'.splitlines()
>>> b = \'\'\'# File is user.ini
... [section1]
... option1 = False
... # end of file\'\'\'.splitlines()
>>> c1 = ConfigObj(b)
>>> c2 = ConfigObj(a)
>>> c2.merge(c1)
>>> c2
ConfigObj({\'section1\': {\'option1\': \'False\', \'subsection\': {\'more_options\': \'False\'}}})'
def merge(self, indict):
    """Recursively merge *indict* into self (useful for layered configs).

    Matching dict-valued keys are merged in place; everything else is
    overwritten.
    """
    for key, val in indict.items():
        both_dicts = (key in self) and isinstance(self[key], dict) and isinstance(val, dict)
        if both_dicts:
            self[key].merge(val)
        else:
            self[key] = val
|
'Change a keyname to another, without changing position in sequence.
Implemented so that transformations can be made on keys,
as well as on values. (used by encode and decode)
Also renames comments.'
def rename(self, oldkey, newkey):
    """Rename *oldkey* to *newkey* without changing its position.

    Comments attached to the key are carried across.  Raises KeyError
    when the key is unknown.
    """
    if oldkey in self.scalars:
        order = self.scalars
    elif oldkey in self.sections:
        order = self.sections
    else:
        raise KeyError(('Key "%s" not found.' % oldkey))
    pos = order.index(oldkey)
    val = self[oldkey]
    # move the mapping entry directly on the dict base class
    dict.__delitem__(self, oldkey)
    dict.__setitem__(self, newkey, val)
    # preserve the original position in the ordering list
    order.remove(oldkey)
    order.insert(pos, newkey)
    # carry the comments across to the new name
    self.comments[newkey] = self.comments.pop(oldkey)
    self.inline_comments[newkey] = self.inline_comments.pop(oldkey)
|
'Walk every member and call a function on the keyword and value.
Return a dictionary of the return values
If the function raises an exception, raise the errror
unless ``raise_errors=False``, in which case set the return value to
``False``.
Any unrecognised keyword arguments you pass to walk, will be pased on
to the function you pass in.
Note: if ``call_on_sections`` is ``True`` then - on encountering a
subsection, *first* the function is called for the *whole* subsection,
and then recurses into it\'s members. This means your function must be
able to handle strings, dictionaries and lists. This allows you
to change the key of subsections as well as for ordinary members. The
return value when called on the whole subsection has to be discarded.
See the encode and decode methods for examples, including functions.
.. admonition:: caution
You can use ``walk`` to transform the names of members of a section
but you mustn\'t add or delete members.
>>> config = \'\'\'[XXXXsection]
... XXXXkey = XXXXvalue\'\'\'.splitlines()
>>> cfg = ConfigObj(config)
>>> cfg
ConfigObj({\'XXXXsection\': {\'XXXXkey\': \'XXXXvalue\'}})
>>> def transform(section, key):
... val = section[key]
... newkey = key.replace(\'XXXX\', \'CLIENT1\')
... section.rename(key, newkey)
... if isinstance(val, (tuple, list, dict)):
... pass
... else:
... val = val.replace(\'XXXX\', \'CLIENT1\')
... section[newkey] = val
>>> cfg.walk(transform, call_on_sections=True)
{\'CLIENT1section\': {\'CLIENT1key\': None}}
>>> cfg
ConfigObj({\'CLIENT1section\': {\'CLIENT1key\': \'CLIENT1value\'}})'
def walk(self, function, raise_errors=True, call_on_sections=False, **keywargs):
    """Call *function* on every member (key) and collect the return values.

    Exceptions propagate unless ``raise_errors`` is False, in which case
    the entry's result is set to ``False``.  With ``call_on_sections``
    the function is also called for each whole subsection (its return
    value is discarded) before recursing into it.  The function may
    rename the current key, which is why the entry name is re-read from
    the ordering list after each call.
    """
    out = {}
    # scalars first
    for i in range(len(self.scalars)):
        entry = self.scalars[i]
        try:
            val = function(self, entry, **keywargs)
            # re-read: the function may have renamed the key
            entry = self.scalars[i]
            out[entry] = val
        except Exception:
            if raise_errors:
                raise
            else:
                entry = self.scalars[i]
                out[entry] = False
    # then the subsections
    for i in range(len(self.sections)):
        entry = self.sections[i]
        if call_on_sections:
            try:
                function(self, entry, **keywargs)
            except Exception:
                if raise_errors:
                    raise
                else:
                    entry = self.sections[i]
                    out[entry] = False
        # recurse into the (possibly renamed) subsection
        entry = self.sections[i]
        out[entry] = self[entry].walk(function, raise_errors=raise_errors, call_on_sections=call_on_sections, **keywargs)
    return out
|
'Accepts a key as input. The corresponding value must be a string or
the objects (``True`` or 1) or (``False`` or 0). We allow 0 and 1 to
retain compatibility with Python 2.2.
If the string is one of ``True``, ``On``, ``Yes``, or ``1`` it returns
``True``.
If the string is one of ``False``, ``Off``, ``No``, or ``0`` it returns
``False``.
``as_bool`` is not case sensitive.
Any other input will raise a ``ValueError``.
>>> a = ConfigObj()
>>> a[\'a\'] = \'fish\'
>>> a.as_bool(\'a\')
Traceback (most recent call last):
ValueError: Value "fish" is neither True nor False
>>> a[\'b\'] = \'True\'
>>> a.as_bool(\'b\')
1
>>> a[\'b\'] = \'off\'
>>> a.as_bool(\'b\')
0'
def as_bool(self, key):
    """Coerce the value at *key* to a bool.

    Accepts the objects True/False (and 0/1), or the case-insensitive
    strings True/On/Yes/1 and False/Off/No/0; anything else raises
    ValueError.
    """
    val = self[key]
    if val == True:
        return True
    if val == False:
        return False
    try:
        if not isinstance(val, basestring):
            # non-string, non-boolean: route to the error path below
            raise KeyError()
        return self.main._bools[val.lower()]
    except KeyError:
        raise ValueError(('Value "%s" is neither True nor False' % val))
|
'A convenience method which coerces the specified value to an integer.
If the value is an invalid literal for ``int``, a ``ValueError`` will
be raised.
>>> a = ConfigObj()
>>> a[\'a\'] = \'fish\'
>>> a.as_int(\'a\')
Traceback (most recent call last):
ValueError: invalid literal for int() with base 10: \'fish\'
>>> a[\'b\'] = \'1\'
>>> a.as_int(\'b\')
1
>>> a[\'b\'] = \'3.2\'
>>> a.as_int(\'b\')
Traceback (most recent call last):
ValueError: invalid literal for int() with base 10: \'3.2\''
def as_int(self, key):
    """Coerce the value at *key* to an int (ValueError on bad literals)."""
    val = self[key]
    return int(val)
|
'A convenience method which coerces the specified value to a float.
If the value is an invalid literal for ``float``, a ``ValueError`` will
be raised.
>>> a = ConfigObj()
>>> a[\'a\'] = \'fish\'
>>> a.as_float(\'a\')
Traceback (most recent call last):
ValueError: invalid literal for float(): fish
>>> a[\'b\'] = \'1\'
>>> a.as_float(\'b\')
1.0
>>> a[\'b\'] = \'3.2\'
>>> a.as_float(\'b\')
3.2000000000000002'
def as_float(self, key):
    """Coerce the value at *key* to a float (ValueError on bad literals)."""
    val = self[key]
    return float(val)
|
'A convenience method which fetches the specified value, guaranteeing
that it is a list.
>>> a = ConfigObj()
>>> a[\'a\'] = 1
>>> a.as_list(\'a\')
[1]
>>> a[\'a\'] = (1,)
>>> a.as_list(\'a\')
[1]
>>> a[\'a\'] = [1]
>>> a.as_list(\'a\')
[1]'
def as_list(self, key):
    """Fetch the value at *key*, always returning it as a list."""
    value = self[key]
    if not isinstance(value, (tuple, list)):
        return [value]
    return list(value)
|
'Restore (and return) default value for the specified key.
This method will only work for a ConfigObj that was created
with a configspec and has been validated.
If there is no default value for this key, ``KeyError`` is raised.'
def restore_default(self, key):
    """Restore and return the configspec default value for *key*.

    Raises KeyError when the key has no default.  Only meaningful on a
    validated ConfigObj.
    """
    default = self.default_values[key]
    # bypass __setitem__ so the key is not stripped from self.defaults
    dict.__setitem__(self, key, default)
    if key not in self.defaults:
        self.defaults.append(key)
    return default
|
'Recursively restore default values to all members
that have them.
This method will only work for a ConfigObj that was created
with a configspec and has been validated.
It doesn\'t delete or modify entries without default values.'
def restore_defaults(self):
    """Recursively restore every default value in this section tree.

    Entries without defaults are left untouched.  Only meaningful on a
    validated ConfigObj.
    """
    for key in self.default_values:
        self.restore_default(key)
    for section_name in self.sections:
        self[section_name].restore_defaults()
|
'Parse a config file or create a config file object.
``ConfigObj(infile=None, options=None, **kwargs)``'
def __init__(self, infile=None, options=None, _inspec=False, **kwargs):
    """Parse a config file or create an empty config object.

    infile  -- filename, list of lines, or similar source (optional)
    options -- dict of parsing options; keyword args are merged in
    _inspec -- internal flag: parsing a configspec (disables list values)
    Raises TypeError for unrecognised option names.
    """
    self._inspec = _inspec
    # a ConfigObj is the root Section: it is its own parent and main
    Section.__init__(self, self, 0, self)
    infile = (infile or [])
    options = dict((options or {}))
    # keyword arguments take precedence over the options dict
    options.update(kwargs)
    if _inspec:
        options['list_values'] = False
    defaults = OPTION_DEFAULTS.copy()
    for entry in options:
        if (entry not in defaults):
            raise TypeError(('Unrecognised option "%s".' % entry))
    defaults.update(options)
    self._initialise(defaults)
    configspec = defaults['configspec']
    self._original_configspec = configspec
    self._load(infile, configspec)
|
'Handle any BOM, and decode if necessary.
If an encoding is specified, that *must* be used - but the BOM should
still be removed (and the BOM attribute set).
(If the encoding is wrongly specified, then a BOM for an alternative
encoding won\'t be discovered or removed.)
If an encoding is not specified, UTF8 or UTF16 BOM will be detected and
removed. The BOM attribute will be set. UTF16 will be decoded to
unicode.
NOTE: This method must not be called with an empty ``infile``.
Specifying the *wrong* encoding is likely to cause a
``UnicodeDecodeError``.
``infile`` must always be returned as a list of lines, but may be
passed in as a single string.'
def _handle_bom(self, infile):
    """Strip any BOM from *infile* and decode it as needed.

    With an explicit encoding, the matching BOM (if present) is removed
    and the text decoded with that encoding.  Without one, UTF-8/UTF-16
    BOMs are auto-detected: UTF-16 input is decoded to unicode, a UTF-8
    BOM is just stripped.  Always returns a list of lines.  Must not be
    called with an empty infile.
    """
    # explicit encoding with no BOM variants known for it: just decode
    if ((self.encoding is not None) and (self.encoding.lower() not in BOM_LIST)):
        return self._decode(infile, self.encoding)
    if isinstance(infile, (list, tuple)):
        line = infile[0]
    else:
        line = infile
    if (self.encoding is not None):
        # encoding explicitly specified and it may carry a BOM
        enc = BOM_LIST[self.encoding.lower()]
        if (enc == 'utf_16'):
            # UTF-16: find which byte-order BOM is present, if any
            for (BOM, (encoding, final_encoding)) in BOMS.items():
                if (not final_encoding):
                    continue
                if infile.startswith(BOM):
                    return self._decode(infile, encoding)
            # no BOM found: trust the declared encoding
            return self._decode(infile, self.encoding)
        # single known BOM for this encoding (e.g. utf_8)
        BOM = BOM_SET[enc]
        if (not line.startswith(BOM)):
            return self._decode(infile, self.encoding)
        newline = line[len(BOM):]
        if isinstance(infile, (list, tuple)):
            infile[0] = newline
        else:
            infile = newline
        self.BOM = True
        return self._decode(infile, self.encoding)
    # no encoding specified: auto-detect from any leading BOM
    for (BOM, (encoding, final_encoding)) in BOMS.items():
        if (not line.startswith(BOM)):
            continue
        else:
            self.encoding = final_encoding
            if (not final_encoding):
                # UTF-8 BOM: strip it but keep byte strings undecoded
                self.BOM = True
                newline = line[len(BOM):]
                if isinstance(infile, (list, tuple)):
                    infile[0] = newline
                else:
                    infile = newline
                if isinstance(infile, basestring):
                    return infile.splitlines(True)
                else:
                    return infile
            # UTF-16 variant: must decode to unicode
            return self._decode(infile, encoding)
    # no BOM at all: return the input as a list of lines
    if isinstance(infile, basestring):
        return infile.splitlines(True)
    return infile
|
'Decode ASCII strings to unicode if a self.encoding is specified.'
def _a_to_u(self, aString):
    """Decode an ASCII byte string to unicode when an encoding is set."""
    if not self.encoding:
        return aString
    return aString.decode('ascii')
|
'Decode infile to unicode. Using the specified encoding.
if is a string, it also needs converting to a list.'
def _decode(self, infile, encoding):
    """Decode *infile* to unicode with *encoding*.

    A single string is split into a list of lines; a list has each
    still-encoded line decoded in place.
    """
    if isinstance(infile, basestring):
        decoded = infile.decode(encoding)
        return decoded.splitlines(True)
    for i, line in enumerate(infile):
        # lines already decoded are left alone (py2 unicode check)
        if not isinstance(line, unicode):
            infile[i] = line.decode(encoding)
    return infile
|
'Decode element to unicode if necessary.'
def _decode_element(self, line):
    """Decode a single element to unicode when necessary."""
    if not self.encoding:
        return line
    needs_decoding = isinstance(line, str) and self.default_encoding
    if needs_decoding:
        return line.decode(self.default_encoding)
    return line
|
'Used by ``stringify`` within validate, to turn non-string values
into strings.'
def _str(self, value):
    """Turn a non-string value into a string (used when stringifying)."""
    if isinstance(value, basestring):
        return value
    return str(value)
|
'Actually parse the config file.'
def _parse(self, infile):
    """Actually parse the config file (a list of lines).

    Builds the section tree on self, recording comments, handling
    section markers, multiline values and (optionally) unrepr values.
    Errors are routed through ``_handle_error`` which either raises or
    collects them.
    """
    temp_list_values = self.list_values
    if self.unrepr:
        # unrepr mode parses values itself; disable list splitting
        self.list_values = False
    comment_list = []
    done_start = False
    this_section = self
    maxline = (len(infile) - 1)
    cur_index = (-1)
    reset_comment = False
    while (cur_index < maxline):
        if reset_comment:
            comment_list = []
        cur_index += 1
        line = infile[cur_index]
        sline = line.strip()
        # blank lines and comment lines accumulate for the next entry
        if ((not sline) or sline.startswith('#')):
            reset_comment = False
            comment_list.append(line)
            continue
        if (not done_start):
            # first real line: everything before it is the initial comment
            self.initial_comment = comment_list
            comment_list = []
            done_start = True
        reset_comment = True
        # is it a section marker like [name] / [[name]] ?
        mat = self._sectionmarker.match(line)
        if (mat is not None):
            (indent, sect_open, sect_name, sect_close, comment) = mat.groups()
            if (indent and (self.indent_type is None)):
                self.indent_type = indent
            # nesting depth is the number of opening brackets
            cur_depth = sect_open.count('[')
            if (cur_depth != sect_close.count(']')):
                self._handle_error('Cannot compute the section depth at line %s.', NestingError, infile, cur_index)
                continue
            if (cur_depth < this_section.depth):
                # the new section is dropping a level (or more)
                try:
                    parent = self._match_depth(this_section, cur_depth).parent
                except SyntaxError:
                    self._handle_error('Cannot compute nesting level at line %s.', NestingError, infile, cur_index)
                    continue
            elif (cur_depth == this_section.depth):
                # sibling of the current section
                parent = this_section.parent
            elif (cur_depth == (this_section.depth + 1)):
                # child of the current section
                parent = this_section
            else:
                self._handle_error('Section too nested at line %s.', NestingError, infile, cur_index)
            sect_name = self._unquote(sect_name)
            if parent.has_key(sect_name):
                self._handle_error('Duplicate section name at line %s.', DuplicateError, infile, cur_index)
                continue
            this_section = Section(parent, cur_depth, self, name=sect_name)
            parent[sect_name] = this_section
            parent.inline_comments[sect_name] = comment
            parent.comments[sect_name] = comment_list
            continue
        # otherwise it must be a key = value line
        mat = self._keyword.match(line)
        if (mat is None):
            self._handle_error('Invalid line at line "%s".', ParseError, infile, cur_index)
        else:
            (indent, key, value) = mat.groups()
            if (indent and (self.indent_type is None)):
                self.indent_type = indent
            if (value[:3] in ['"""', "'''"]):
                # triple-quoted value: may span several lines
                try:
                    (value, comment, cur_index) = self._multiline(value, infile, cur_index, maxline)
                except SyntaxError:
                    self._handle_error('Parse error in value at line %s.', ParseError, infile, cur_index)
                    continue
                else:
                    if self.unrepr:
                        comment = ''
                        try:
                            value = unrepr(value)
                        except Exception as e:
                            if (type(e) == UnknownType):
                                msg = 'Unknown name or type in value at line %s.'
                            else:
                                msg = 'Parse error in value at line %s.'
                            self._handle_error(msg, UnreprError, infile, cur_index)
                            continue
            elif self.unrepr:
                comment = ''
                try:
                    value = unrepr(value)
                except Exception as e:
                    if isinstance(e, UnknownType):
                        msg = 'Unknown name or type in value at line %s.'
                    else:
                        msg = 'Parse error in value at line %s.'
                    self._handle_error(msg, UnreprError, infile, cur_index)
                    continue
            else:
                # normal single-line value: unquote / split lists / strip comment
                try:
                    (value, comment) = self._handle_value(value)
                except SyntaxError:
                    self._handle_error('Parse error in value at line %s.', ParseError, infile, cur_index)
                    continue
            key = self._unquote(key)
            if this_section.has_key(key):
                self._handle_error('Duplicate keyword name at line %s.', DuplicateError, infile, cur_index)
                continue
            # explicit __setitem__ call so the unrepr flag can be passed
            this_section.__setitem__(key, value, unrepr=True)
            this_section.inline_comments[key] = comment
            this_section.comments[key] = comment_list
            continue
    if (self.indent_type is None):
        # no indentation observed anywhere in the file
        self.indent_type = ''
    if ((not self) and (not self.initial_comment)):
        # file contained only comments
        self.initial_comment = comment_list
    elif (not reset_comment):
        # trailing comments at the end of the file
        self.final_comment = comment_list
    self.list_values = temp_list_values
|
'Given a section and a depth level, walk back through the sections
parents to see if the depth level matches a previous section.
Return a reference to the right section,
or raise a SyntaxError.'
def _match_depth(self, sect, depth):
    """Walk up from *sect* to the ancestor whose depth equals *depth*.

    Raises SyntaxError when no ancestor matches (including hitting the
    root, which is its own parent).
    """
    current = sect
    while depth < current.depth:
        if current is current.parent:
            # reached the root without finding the depth
            raise SyntaxError()
        current = current.parent
    if current.depth == depth:
        return current
    raise SyntaxError()
|
'Handle an error according to the error settings.
Either raise the error or store it.
The error will have occured at ``cur_index``'
def _handle_error(self, text, ErrorClass, infile, cur_index):
    """Raise or record a parse error that occurred at *cur_index*."""
    line = infile[cur_index]
    # report line numbers 1-based
    cur_index += 1
    message = text % cur_index
    error = ErrorClass(message, cur_index, line)
    if self.raise_errors:
        raise error
    # collect mode: accumulate and keep parsing
    self._errors.append(error)
|
'Return an unquoted version of a value'
def _unquote(self, value):
    """Return an unquoted version of a value.

    Strips one matching pair of surrounding single or double quotes.
    An empty value is returned unchanged (the original indexed
    ``value[0]`` unconditionally and raised IndexError on '').
    """
    if not value:
        # nothing to unquote; avoids IndexError on the empty string
        return value
    if ((value[0] == value[(-1)]) and (value[0] in ('"', "'"))):
        value = value[1:(-1)]
    return value
|
'Return a safely quoted version of a value.
Raise a ConfigObjError if the value cannot be safely quoted.
If multiline is ``True`` (default) then use triple quotes
if necessary.
* Don\'t quote values that don\'t need it.
* Recursively quote members of a list and return a comma joined list.
* Multiline is ``False`` for lists.
* Obey list syntax for empty and single member lists.
If ``list_values=False`` then the value is only quoted if it contains
a ``\n`` (is multiline) or \'#\'.
If ``write_empty_values`` is set, and the value is an empty string, it
won\'t be quoted.'
def _quote(self, value, multiline=True):
    """Return a safely quoted version of *value*.

    Lists are quoted member-wise and comma-joined (empty list -> ',',
    single member -> trailing comma).  Triple quotes are used when
    ``multiline`` and the value needs them; values that cannot be quoted
    safely raise ConfigObjError.  Non-strings are stringified only when
    ``self.stringify`` allows it.
    """
    if (multiline and self.write_empty_values and (value == '')):
        return ''
    if (multiline and isinstance(value, (list, tuple))):
        if (not value):
            # empty list is written as a bare comma
            return ','
        elif (len(value) == 1):
            # single-member list needs a trailing comma
            return (self._quote(value[0], multiline=False) + ',')
        return ', '.join([self._quote(val, multiline=False) for val in value])
    if (not isinstance(value, basestring)):
        if self.stringify:
            value = str(value)
        else:
            raise TypeError(('Value "%s" is not a string.' % value))
    if (not value):
        return '""'
    # decide which quoting strategy the content requires
    no_lists_no_quotes = ((not self.list_values) and ('\n' not in value) and ('#' not in value))
    need_triple = (multiline and ((("'" in value) and ('"' in value)) or ('\n' in value)))
    hash_triple_quote = (multiline and (not need_triple) and ("'" in value) and ('"' in value) and ('#' in value))
    check_for_single = ((no_lists_no_quotes or (not need_triple)) and (not hash_triple_quote))
    if check_for_single:
        if (not self.list_values):
            # we don't quote if ``list_values=False``
            quot = noquot
        elif ('\n' in value):
            # a newline can't be quoted with single quotes
            raise ConfigObjError(('Value "%s" cannot be safely quoted.' % value))
        elif ((value[0] not in wspace_plus) and (value[(-1)] not in wspace_plus) and (',' not in value)):
            quot = noquot
        else:
            quot = self._get_single_quote(value)
    else:
        # value requires triple quoting
        quot = self._get_triple_quote(value)
    if ((quot == noquot) and ('#' in value) and self.list_values):
        quot = self._get_single_quote(value)
    return (quot % value)
|
'Given a value string, unquote, remove comment,
handle lists. (including empty and single member lists)'
| def _handle_value(self, value):
# Split a raw value string into (parsed value, comment), handling list
# syntax including empty and single-member lists.
| if self._inspec:
# In configspec mode values are kept verbatim, with no comment split.
return (value, '')
if (not self.list_values):
mat = self._nolistvalue.match(value)
if (mat is None):
raise SyntaxError()
return mat.groups()
mat = self._valueexp.match(value)
if (mat is None):
# Badly constructed value, probably badly quoted.
raise SyntaxError()
(list_values, single, empty_list, comment) = mat.groups()
if ((list_values == '') and (single is None)):
raise SyntaxError()
if (empty_list is not None):
# A lone comma marks an empty list.
return ([], comment)
if (single is not None):
if (list_values and (not single)):
# Trailing comma: the final "member" is absent.
single = None
else:
single = (single or '""')
single = self._unquote(single)
if (list_values == ''):
# Not a list after all: a plain single value.
return (single, comment)
the_list = self._listvalueexp.findall(list_values)
the_list = [self._unquote(val) for val in the_list]
if (single is not None):
the_list += [single]
return (the_list, comment)
|
'Extract the value, where we are in a multiline situation.'
| def _multiline(self, value, infile, cur_index, maxline):
# Consume a triple-quoted value that may span several input lines.
# Returns (value, comment, index-of-last-consumed-line).
| quot = value[:3]
newvalue = value[3:]
single_line = self._triple_quote[quot][0]
multi_line = self._triple_quote[quot][1]
mat = single_line.match(value)
if (mat is not None):
# Opening and closing triple quotes on the same line.
retval = list(mat.groups())
retval.append(cur_index)
return retval
elif (newvalue.find(quot) != (-1)):
# Closing quote present but the single-line pattern failed: malformed.
raise SyntaxError()
while (cur_index < maxline):
cur_index += 1
newvalue += '\n'
line = infile[cur_index]
if (line.find(quot) == (-1)):
newvalue += line
else:
# Reached the line that holds the closing triple quote.
break
else:
# Ran off the end of the file without finding a closing quote.
raise SyntaxError()
mat = multi_line.match(line)
if (mat is None):
raise SyntaxError()
(value, comment) = mat.groups()
return ((newvalue + value), comment, cur_index)
|
'Parse the configspec.'
| def _handle_configspec(self, configspec):
# Parse `configspec` (a filename, list of lines or ConfigObj) and store
# it on the instance for later use by validate().
| if (not isinstance(configspec, ConfigObj)):
try:
configspec = ConfigObj(configspec, raise_errors=True, file_error=True, _inspec=True)
except ConfigObjError as e:
# Re-raise with a clearer, configspec-specific error type/message.
raise ConfigspecError(('Parsing configspec failed: %s' % e))
except IOError as e:
raise IOError(('Reading configspec failed: %s' % e))
self.configspec = configspec
|
'Called by validate. Handles setting the configspec on subsections
including sections to be validated by __many__'
| def _set_configspec(self, section, copy):
# Called by validate(). Propagates the configspec onto subsections,
# including sections matched by the '__many__' wildcard spec.
| configspec = section.configspec
many = configspec.get('__many__')
if isinstance(many, dict):
# Sections without an explicit spec inherit the __many__ spec.
for entry in section.sections:
if (entry not in configspec):
section[entry].configspec = many
for entry in configspec.sections:
if (entry == '__many__'):
continue
if (entry not in section):
# Create sections named in the spec but missing from the config.
section[entry] = {}
if copy:
# In copy mode comments from the spec are carried across too.
section.comments[entry] = configspec.comments.get(entry, [])
section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
if isinstance(section[entry], Section):
section[entry].configspec = configspec[entry]
|
'Write an individual line, for the write method'
| def _write_line(self, indent_string, entry, this_entry, comment):
# Render one "key = value  # comment" line for write().
| if (not self.unrepr):
val = self._decode_element(self._quote(this_entry))
else:
# unrepr mode stores the Python repr of the value instead of quoting.
val = repr(this_entry)
return ('%s%s%s%s%s' % (indent_string, self._decode_element(self._quote(entry, multiline=False)), self._a_to_u(' = '), val, self._decode_element(comment)))
|
'Write a section marker line'
| def _write_marker(self, indent_string, depth, entry, comment):
# Render a "[section]" marker line; the bracket count encodes nesting depth.
| return ('%s%s%s%s%s' % (indent_string, self._a_to_u(('[' * depth)), self._quote(self._decode_element(entry), multiline=False), self._a_to_u((']' * depth)), self._decode_element(comment)))
|
'Deal with a comment.'
| def _handle_comment(self, comment):
| if (not comment):
return ''
start = self.indent_type
if (not comment.startswith('#')):
start += self._a_to_u(' # ')
return (start + comment)
|
'Write the current ConfigObj as a file
tekNico: FIXME: use StringIO instead of real files
>>> filename = a.filename
>>> a.filename = \'test.ini\'
>>> a.write()
>>> a.filename = filename
>>> a == ConfigObj(\'test.ini\', raise_errors=True)
1'
def write(self, outfile=None, section=None):
    """Write the current ConfigObj.

    With no arguments: write to ``self.filename`` if set, otherwise return
    the list of output lines. ``outfile`` may be a file-like object;
    ``section`` is used internally for recursive subsection output (the
    list of that section's lines is returned).

    Fix over original: the file is now opened with a ``with`` block so the
    handle is closed even if the write raises (the original open/close
    pair could leak the handle on error).
    """
    if self.indent_type is None:
        # Nothing parsed/set yet: fall back to the default indentation.
        self.indent_type = DEFAULT_INDENT_TYPE
    out = []
    cs = self._a_to_u('#')
    csp = self._a_to_u('# ')
    if section is None:
        # Top-level call: disable interpolation so raw values are written,
        # and emit the file's initial comment block.
        int_val = self.interpolation
        self.interpolation = False
        section = self
        for line in self.initial_comment:
            line = self._decode_element(line)
            stripped_line = line.strip()
            if stripped_line and not stripped_line.startswith(cs):
                line = csp + line
            out.append(line)
    indent_string = self.indent_type * section.depth
    for entry in (section.scalars + section.sections):
        if entry in section.defaults:
            # Values that came from configspec defaults are not written out.
            continue
        for comment_line in section.comments[entry]:
            comment_line = self._decode_element(comment_line.lstrip())
            if comment_line and not comment_line.startswith(cs):
                comment_line = csp + comment_line
            out.append(indent_string + comment_line)
        this_entry = section[entry]
        comment = self._handle_comment(section.inline_comments[entry])
        if isinstance(this_entry, dict):
            # A (sub)section: write its marker line, then recurse.
            out.append(self._write_marker(indent_string, this_entry.depth, entry, comment))
            out.extend(self.write(section=this_entry))
        else:
            out.append(self._write_line(indent_string, entry, this_entry, comment))
    if section is self:
        for line in self.final_comment:
            line = self._decode_element(line)
            stripped_line = line.strip()
            if stripped_line and not stripped_line.startswith(cs):
                line = csp + line
            out.append(line)
        self.interpolation = int_val
    if section is not self:
        return out
    if (self.filename is None) and (outfile is None):
        # No destination: return the list of lines (encoded, BOM-prefixed).
        if self.encoding:
            out = [l.encode(self.encoding) for l in out]
        if self.BOM and ((self.encoding is None) or (BOM_LIST.get(self.encoding.lower()) == 'utf_8')):
            if not out:
                out.append('')
            out[0] = BOM_UTF8 + out[0]
        return out
    newline = self.newlines or os.linesep
    output = self._a_to_u(newline).join(out)
    if self.encoding:
        output = output.encode(self.encoding)
    if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)):
        output = BOM_UTF8 + output
    if not output.endswith(newline):
        output += newline
    if outfile is not None:
        outfile.write(output)
    else:
        # Context manager guarantees the handle is closed on error.
        with open(self.filename, 'wb') as h:
            h.write(output)
|
'Test the ConfigObj against a configspec.
It uses the ``validator`` object from *validate.py*.
To run ``validate`` on the current ConfigObj, call: ::
test = config.validate(validator)
(Normally having previously passed in the configspec when the ConfigObj
was created - you can dynamically assign a dictionary of checks to the
``configspec`` attribute of a section though).
It returns ``True`` if everything passes, or a dictionary of
pass/fails (True/False). If every member of a subsection passes, it
will just have the value ``True``. (It also returns ``False`` if all
members fail).
In addition, it converts the values from strings to their native
types if their checks pass (and ``stringify`` is set).
If ``preserve_errors`` is ``True`` (``False`` is default) then instead
of a marking a fail with a ``False``, it will preserve the actual
exception object. This can contain info about the reason for failure.
For example the ``VdtValueTooSmallError`` indicates that the value
supplied was too small. If a value (or section) is missing it will
still be marked as ``False``.
You must have the validate module to use ``preserve_errors=True``.
You can then use the ``flatten_errors`` function to turn your nested
results dictionary into a flattened list of failures - useful for
displaying meaningful error messages.'
| def validate(self, validator, preserve_errors=False, copy=False, section=None):
# Validate this ConfigObj (or one `section`, on recursive calls) against
# its configspec using `validator`. Returns True, False, or a nested
# dict of per-entry pass/fail results (exception objects when
# preserve_errors is set). See the docstring row above for full details.
| if (section is None):
if (self.configspec is None):
raise ValueError('No configspec supplied.')
if preserve_errors:
# Imported lazily so validate.py is only required when needed.
from validate import VdtMissingValue
self._vdtMissingValue = VdtMissingValue
section = self
if copy:
# copy mode carries file-level metadata over from the configspec.
section.initial_comment = section.configspec.initial_comment
section.final_comment = section.configspec.final_comment
section.encoding = section.configspec.encoding
section.BOM = section.configspec.BOM
section.newlines = section.configspec.newlines
section.indent_type = section.configspec.indent_type
configspec = section.configspec
self._set_configspec(section, copy)
# Inner helper: check one entry, recording the result in `out` and
# folding the outcome into the running ret_true/ret_false flags.
def validate_entry(entry, spec, val, missing, ret_true, ret_false):
try:
check = validator.check(spec, val, missing=missing)
except validator.baseErrorClass as e:
if ((not preserve_errors) or isinstance(e, self._vdtMissingValue)):
out[entry] = False
else:
out[entry] = e
ret_false = False
ret_true = False
else:
# Refresh the cached default value for this entry.
try:
section.default_values.pop(entry, None)
except AttributeError:
try:
del section.default_values[entry]
except KeyError:
pass
try:
section.default_values[entry] = validator.get_default_value(configspec[entry])
except (KeyError, AttributeError):
pass
ret_false = False
out[entry] = True
if (self.stringify or missing):
# Convert the checked value to its native/string form as needed.
if (not self.stringify):
if isinstance(check, (list, tuple)):
check = [self._str(item) for item in check]
elif (missing and (check is None)):
check = ''
else:
check = self._str(check)
if ((check != val) or missing):
section[entry] = check
if ((not copy) and missing and (entry not in section.defaults)):
section.defaults.append(entry)
return (ret_true, ret_false)
out = {}
ret_true = True
ret_false = True
unvalidated = [k for k in section.scalars if (k not in configspec)]
incorrect_sections = [k for k in configspec.sections if (k in section.scalars)]
incorrect_scalars = [k for k in configspec.scalars if (k in section.sections)]
for entry in configspec.scalars:
if (entry in ('__many__', '___many___')):
# Wildcard specs are applied to the unvalidated entries below.
continue
if ((not (entry in section.scalars)) or (entry in section.defaults)):
missing = True
val = None
if (copy and (not (entry in section.scalars))):
section.comments[entry] = configspec.comments.get(entry, [])
section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
else:
missing = False
val = section[entry]
(ret_true, ret_false) = validate_entry(entry, configspec[entry], val, missing, ret_true, ret_false)
many = None
if ('__many__' in configspec.scalars):
many = configspec['__many__']
elif ('___many___' in configspec.scalars):
many = configspec['___many___']
if (many is not None):
for entry in unvalidated:
val = section[entry]
(ret_true, ret_false) = validate_entry(entry, many, val, False, ret_true, ret_false)
# Entries present with the wrong kind (section vs scalar) always fail.
for entry in incorrect_scalars:
ret_true = False
if (not preserve_errors):
out[entry] = False
else:
ret_false = False
msg = ('Value %r was provided as a section' % entry)
out[entry] = validator.baseErrorClass(msg)
for entry in incorrect_sections:
ret_true = False
if (not preserve_errors):
out[entry] = False
else:
ret_false = False
msg = ('Section %r was provided as a single value' % entry)
out[entry] = validator.baseErrorClass(msg)
# Recurse into subsections that carry a configspec.
for entry in section.sections:
if ((section is self) and (entry == 'DEFAULT')):
continue
if (section[entry].configspec is None):
continue
if copy:
section.comments[entry] = configspec.comments.get(entry, [])
section.inline_comments[entry] = configspec.inline_comments.get(entry, '')
check = self.validate(validator, preserve_errors=preserve_errors, copy=copy, section=section[entry])
out[entry] = check
if (check == False):
ret_true = False
elif (check == True):
ret_false = False
else:
ret_true = False
ret_false = False
# All passed -> True; all failed -> False; otherwise the detail dict.
if ret_true:
return True
elif ret_false:
return False
return out
|
'Clear ConfigObj instance and restore to \'freshly created\' state.'
def reset(self):
    """Restore this ConfigObj to its freshly-created state."""
    self.clear()
    self._initialise()
    # A reset object carries no validation schema.
    for attr in ('configspec', '_original_configspec'):
        setattr(self, attr, None)
|
'Reload a ConfigObj from file.
This method raises a ``ReloadError`` if the ConfigObj doesn\'t have
a filename attribute pointing to a file.'
| def reload(self):
# Re-read this ConfigObj from its backing file, preserving the current
# options and the *original* configspec. Raises ReloadError when the
# instance was not created from a filename.
| if (not isinstance(self.filename, basestring)):
raise ReloadError()
filename = self.filename
current_options = {}
for entry in OPTION_DEFAULTS:
# 'configspec' is handled separately: restore the original spec
# object, not the possibly-mutated current one.
if (entry == 'configspec'):
continue
current_options[entry] = getattr(self, entry)
configspec = self._original_configspec
current_options['configspec'] = configspec
self.clear()
self._initialise(current_options)
self._load(filename, configspec)
|
'A dummy check method, always returns the value unchanged.'
def check(self, check, member, missing=False):
    """Dummy check: pass ``member`` through unchanged.

    Raises ``self.baseErrorClass`` when the value is flagged as missing.
    """
    if not missing:
        return member
    raise self.baseErrorClass()
|
'A hack to get around the deprecation errors in 2.6.'
| @property
def message(self):
# Read-only accessor that works around the BaseException.message
# deprecation warnings introduced in Python 2.6.
| return self._message
|
'Returns this token as a plain string, suitable for storage.
The resulting string includes the token\'s secret, so you should never
send or store this string where a third party can read it.'
| def to_string(self):
# Serialize key/secret (and callback confirmation, when set) as a
# URL-encoded query string. Contains the secret -- keep it private.
| data = {'oauth_token': self.key, 'oauth_token_secret': self.secret}
if (self.callback_confirmed is not None):
data['oauth_callback_confirmed'] = self.callback_confirmed
return urllib.urlencode(data)
|
'Deserializes a token from a string like one returned by
`to_string()`.'
@staticmethod
def from_string(s):
    """Deserialize a Token from a string produced by ``to_string()``.

    Raises ValueError when the string is empty or lacks the required
    oauth_token / oauth_token_secret parameters.
    """
    if not len(s):
        raise ValueError('Invalid parameter string.')
    params = parse_qs(s, keep_blank_values=False)
    if not len(params):
        raise ValueError('Invalid parameter string.')
    try:
        key = params['oauth_token'][0]
    except (KeyError, IndexError):
        # Narrowed from a blanket `except Exception` so unrelated bugs
        # are no longer masked as "not found".
        raise ValueError("'oauth_token' not found in OAuth request.")
    try:
        secret = params['oauth_token_secret'][0]
    except (KeyError, IndexError):
        raise ValueError("'oauth_token_secret' not found in OAuth request.")
    token = Token(key, secret)
    try:
        token.callback_confirmed = params['oauth_callback_confirmed'][0]
    except KeyError:
        # Optional parameter; absence is fine.
        pass
    return token
|
'Get any non-OAuth parameters.'
| def get_nonoauth_parameters(self):
# Return a dict of all parameters whose names lack the 'oauth_' prefix.
# NOTE: iteritems() is Python 2 only.
| return dict([(k, v) for (k, v) in self.iteritems() if (not k.startswith('oauth_'))])
|
'Serialize as a header for an HTTPAuth request.'
def to_header(self, realm=''):
    """Serialize the oauth_* parameters as an HTTP Authorization header.

    Returns a one-entry dict suitable for merging into request headers.
    """
    pieces = ['%s="%s"' % (k, escape(str(v)))
              for (k, v) in self.items() if k.startswith('oauth_')]
    header = 'OAuth realm="%s"' % realm
    if pieces:
        header = '%s, %s' % (header, ', '.join(pieces))
    return {'Authorization': header}
|
'Serialize as post data for a POST request.'
| def to_postdata(self):
# POST body serialization shares the same encoding as the query string.
| return self.encode_postdata(self)
|
'Serialize as a URL for a GET request.'
def to_url(self):
    """Serialize as a GET request URL: base url plus encoded parameters."""
    return '?'.join([self.url, self.to_postdata()])
|
'Return a string that contains the parameters that must be signed.'
| def get_normalized_parameters(self):
# Build the sorted, URL-encoded parameter string that gets signed.
# oauth_signature itself is excluded, and '+' is rewritten to '%20'
# as the OAuth signing rules require.
| items = [(k, v) for (k, v) in self.items() if (k != 'oauth_signature')]
encoded_str = urllib.urlencode(sorted(items), True)
return encoded_str.replace('+', '%20')
|
'Set the signature parameter to the result of sign.'
def sign_request(self, signature_method, consumer, token):
    """Compute and store the OAuth signature for this request.

    Also fills in the consumer key, token and method name when absent.
    """
    # Ensure the identifying keys are present before signing.
    if 'oauth_consumer_key' not in self:
        self['oauth_consumer_key'] = consumer.key
    if token and 'oauth_token' not in self:
        self['oauth_token'] = token.key
    self['oauth_signature_method'] = signature_method.name
    self['oauth_signature'] = signature_method.sign(self, consumer, token)
|
'Get seconds since epoch (UTC).'
| @classmethod
def make_timestamp(cls):
# Whole seconds since the Unix epoch (UTC), as a decimal string.
| return str(int(time.time()))
|
'Generate pseudorandom number.'
| @classmethod
def make_nonce(cls):
# Pseudorandom numeric nonce. NOTE(review): uses the non-cryptographic
# `random` module -- acceptable for request disambiguation, but confirm
# nothing security-sensitive relies on its unpredictability.
| return str(random.randint(0, 100000000))
|
'Combines multiple parameter sources.'
@classmethod
def from_request(cls, http_method, http_url, headers=None, parameters=None, query_string=None):
    """Combine OAuth parameters from multiple sources into a request.

    Sources, in order: the Authorization header ("OAuth " scheme only),
    the explicit ``query_string``, then the query component of
    ``http_url``. Returns a new request object, or None when no
    parameters were found at all.
    """
    if parameters is None:
        parameters = {}
    if headers and 'Authorization' in headers:
        auth_header = headers['Authorization']
        if auth_header[:6] == 'OAuth ':
            auth_header = auth_header[6:]
            try:
                header_params = cls._split_header(auth_header)
                parameters.update(header_params)
            except Exception:
                # Narrowed from a bare `except:`, which also swallowed
                # KeyboardInterrupt and SystemExit.
                raise Error('Unable to parse OAuth parameters from Authorization header.')
    if query_string:
        query_params = cls._split_url_string(query_string)
        parameters.update(query_params)
    # Query component of the URL itself (index 4 of the urlparse tuple).
    param_str = urlparse.urlparse(http_url)[4]
    url_params = cls._split_url_string(param_str)
    parameters.update(url_params)
    if parameters:
        return cls(http_method, http_url, parameters)
    return None
|
'Turn Authorization: header into parameters.'
| @staticmethod
def _split_header(header):
# Parse the body of an 'Authorization: OAuth ...' header into a dict.
| params = {}
parts = header.split(',')
for param in parts:
# Skip the realm parameter -- it is not an OAuth protocol parameter.
if (param.find('realm') > (-1)):
continue
param = param.strip()
param_parts = param.split('=', 1)
# Strip the surrounding quotes and percent-unescape the value.
params[param_parts[0]] = urllib.unquote(param_parts[1].strip('"'))
return params
|
'Turn URL string into parameters.'
| @staticmethod
def _split_url_string(param_str):
# Parse a query string into a dict, keeping only the first value seen
# for each key. NOTE: iteritems() is Python 2 only.
| parameters = parse_qs(param_str, keep_blank_values=False)
for (k, v) in parameters.iteritems():
parameters[k] = urllib.unquote(v[0])
return parameters
|
'Verifies an api call and checks all the parameters.'
| def verify_request(self, request, consumer, token):
# Verify version and signature, then hand back the non-OAuth
# parameters for the application to act on.
| version = self._get_version(request)
self._check_signature(request, consumer, token)
parameters = request.get_nonoauth_parameters()
return parameters
|
'Optional support for the authenticate header.'
def build_authenticate_header(self, realm=''):
    """Build a WWW-Authenticate header advertising OAuth for ``realm``."""
    value = 'OAuth realm="%s"' % realm
    return {'WWW-Authenticate': value}
|
'Verify the correct version request for this server.'
| def _get_version(self, request):
| try:
version = request.get_parameter('oauth_version')
except:
version = VERSION
if (version and (version != self.version)):
raise Error(('OAuth version %s not supported.' % str(version)))
return version
|
'Figure out the signature with some defaults.'
| def _get_signature_method(self, request):
| try:
signature_method = request.get_parameter('oauth_signature_method')
except:
signature_method = SIGNATURE_METHOD
try:
signature_method = self.signature_methods[signature_method]
except:
signature_method_names = ', '.join(self.signature_methods.keys())
raise Error(('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names)))
return signature_method
|
'Verify that timestamp is recentish.'
| def _check_timestamp(self, timestamp):
| timestamp = int(timestamp)
now = int(time.time())
lapsed = (now - timestamp)
if (lapsed > self.timestamp_threshold):
raise Error(('Expired timestamp: given %d and now %s has a greater difference than threshold %d' % (timestamp, now, self.timestamp_threshold)))
|
'Calculates the string that needs to be signed.
This method returns a 2-tuple containing the starting key for the
signing and the message to be signed. The latter may be used in error
messages to help clients debug their software.'
| def signing_base(self, request, consumer, token):
# Abstract: subclasses return (signing key, message to sign).
| raise NotImplementedError
|
'Returns the signature for the given request, based on the consumer
and token also provided.
You should use your implementation of `signing_base()` to build the
message to sign. Otherwise it may be less useful for debugging.'
| def sign(self, request, consumer, token):
# Abstract: subclasses return the signature string for `request`.
| raise NotImplementedError
|
'Returns whether the given signature is the correct signature for
the given consumer and token signing the given request.'
def check(self, request, consumer, token, signature):
    """Return True when ``signature`` matches what we would generate."""
    expected = self.sign(request, consumer, token)
    return expected == signature
|
'Builds the base signature string.'
| def sign(self, request, consumer, token):
# HMAC-SHA1 signature of the signing base, base64 encoded (the
# trailing newline added by b2a_base64 is sliced off).
| (key, raw) = self.signing_base(request, consumer, token)
try:
import hashlib
hashed = hmac.new(key, raw, hashlib.sha1)
except ImportError:
# hashlib is new in Python 2.5; fall back to the legacy sha module.
import sha
hashed = hmac.new(key, raw, sha)
return binascii.b2a_base64(hashed.digest())[:(-1)]
|
'Concatenates the consumer key and secret with the token\'s
secret.'
def signing_base(self, request, consumer, token):
    """Build the PLAINTEXT key: escaped consumer secret '&' token secret."""
    base = '%s&' % escape(consumer.secret)
    if token:
        base += escape(token.secret)
    # PLAINTEXT uses the key itself as the "message" to sign.
    return (base, base)
|
'Modify the request headers to add the appropriate
Authorization header. Over-rise this in sub-classes.'
| def request(self, method, request_uri, headers, content):
# Base-class hook: add an Authorization header. Subclasses override.
| pass
|
'Gives us a chance to update with new nonces
or such returned from the last authorized response.
Over-rise this in sub-classes if necessary.
Return TRUE is the request is to be retried, for
example Digest may return stale=true.'
| def response(self, response, content):
# Hook for updating auth state from a response (e.g. Digest stale=true).
# Return True to signal that the request should be retried.
| return False
|
'Modify the request headers to add the appropriate
Authorization header.'
| def request(self, method, request_uri, headers, content):
# HTTP Basic auth: 'user:password' base64-encoded (Python 2 str API).
| headers['authorization'] = ('Basic ' + base64.b64encode(('%s:%s' % self.credentials)).strip())
|
'Modify the request headers'
| def request(self, method, request_uri, headers, content, cnonce=None):
# RFC 2617 Digest auth (qop=auth). `cnonce` is injectable for testing.
| H = (lambda x: _md5(x).hexdigest())
KD = (lambda s, d: H(('%s:%s' % (s, d))))
A2 = ''.join([method, ':', request_uri])
self.challenge['cnonce'] = (cnonce or _cnonce())
request_digest = ('"%s"' % KD(H(self.A1), ('%s:%s:%s:%s:%s' % (self.challenge['nonce'], ('%08x' % self.challenge['nc']), self.challenge['cnonce'], self.challenge['qop'], H(A2)))))
headers['Authorization'] = ('Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (self.credentials[0], self.challenge['realm'], self.challenge['nonce'], request_uri, self.challenge['algorithm'], request_digest, self.challenge['qop'], self.challenge['nc'], self.challenge['cnonce']))
# The nonce count must increase with every request using this challenge.
self.challenge['nc'] += 1
|
'Modify the request headers'
| def request(self, method, request_uri, headers, content):
# HMACDigest auth: HMAC over method, URI, nonces and the end-to-end
# headers listed in `keylist`.
| keys = _get_end2end_headers(headers)
keylist = ''.join([('%s ' % k) for k in keys])
headers_val = ''.join([headers[k] for k in keys])
created = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())
cnonce = _cnonce()
request_digest = ('%s:%s:%s:%s:%s' % (method, request_uri, cnonce, self.challenge['snonce'], headers_val))
request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower()
headers['Authorization'] = ('HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % (self.credentials[0], self.challenge['realm'], self.challenge['snonce'], cnonce, request_uri, created, request_digest, keylist))
|
'Modify the request headers to add the appropriate
Authorization header.'
| def request(self, method, request_uri, headers, content):
# WSSE UsernameToken auth: password digest over cnonce + ISO timestamp.
| headers['Authorization'] = 'WSSE profile="UsernameToken"'
iso_now = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime())
cnonce = _cnonce()
password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
headers['X-WSSE'] = ('UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (self.credentials[0], password_digest, cnonce, iso_now))
|
'Modify the request headers to add the appropriate
Authorization header.'
def request(self, method, request_uri, headers, content):
    """Attach the GoogleLogin Authorization header to ``headers``."""
    headers['authorization'] = 'GoogleLogin Auth=%s' % self.Auth
|
'The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX
constants. For example:
p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host=\'localhost\', proxy_port=8000)'
| def __init__(self, proxy_type, proxy_host, proxy_port, proxy_rdns=None, proxy_user=None, proxy_pass=None):
| (self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass) = (proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)
|
'Connect to the host and port specified in __init__.'
| def connect(self):
# Open a TCP connection to self.host:self.port, trying each address
# returned by getaddrinfo in turn; honours proxy settings and timeout.
| msg = 'getaddrinfo returns an empty list'
for res in socket.getaddrinfo(self.host, self.port, 0, socket.SOCK_STREAM):
(af, socktype, proto, canonname, sa) = res
try:
if (self.proxy_info and self.proxy_info.isgood()):
# Route through the configured SOCKS/HTTP proxy.
self.sock = socks.socksocket(af, socktype, proto)
self.sock.setproxy(*self.proxy_info.astuple())
else:
self.sock = socket.socket(af, socktype, proto)
if has_timeout(self.timeout):
self.sock.settimeout(self.timeout)
if (self.debuglevel > 0):
print ('connect: (%s, %s)' % (self.host, self.port))
self.sock.connect(sa)
except socket.error as msg:
if (self.debuglevel > 0):
print 'connect fail:', (self.host, self.port)
if self.sock:
self.sock.close()
self.sock = None
# Try the next resolved address.
continue
break
if (not self.sock):
# Every address failed: re-raise the last socket error
# (Python 2 raise syntax).
raise socket.error, msg
|
'Connect to a host on a given (SSL) port.'
| def connect(self):
# Open a TCP connection (optionally through a proxy), then wrap the
# socket in SSL using the configured key/cert files.
| if (self.proxy_info and self.proxy_info.isgood()):
sock = socks.socksocket(socket.AF_INET, socket.SOCK_STREAM)
sock.setproxy(*self.proxy_info.astuple())
else:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if has_timeout(self.timeout):
sock.settimeout(self.timeout)
sock.connect((self.host, self.port))
self.sock = _ssl_wrap_socket(sock, self.key_file, self.cert_file)
|
'The value of proxy_info is a ProxyInfo instance.
If \'cache\' is a string then it is used as a directory name
for a disk cache. Otherwise it must be an object that supports
the same interface as FileCache.'
| def __init__(self, cache=None, timeout=None, proxy_info=None):
# `cache` may be a directory name (wrapped in a FileCache) or any
# object exposing the FileCache interface; `proxy_info` is a
# ProxyInfo instance; `timeout` applies to each connection.
| self.proxy_info = proxy_info
self.connections = {}
if (cache and isinstance(cache, str)):
self.cache = FileCache(cache)
else:
self.cache = cache
self.credentials = Credentials()
self.certificates = KeyCerts()
self.authorizations = []
self.follow_redirects = True
# HTTP methods for which etag-based optimistic concurrency is used.
self.optimistic_concurrency_methods = ['PUT']
self.follow_all_redirects = False
self.ignore_etag = False
self.force_exception_to_status_code = False
self.timeout = timeout
|
'A generator that creates Authorization objects
that can be applied to requests.'
def _auth_from_challenge(self, host, request_uri, headers, response, content):
    """Yield Authorization objects that can be applied to requests.

    For every credential known for ``host``, schemes are tried in
    AUTH_SCHEME_ORDER against the challenges parsed from the response's
    WWW-Authenticate header.
    """
    challenges = _parse_www_authenticate(response, 'www-authenticate')
    for cred in self.credentials.iter(host):
        for scheme in AUTH_SCHEME_ORDER:
            # `in` replaces dict.has_key(), which is deprecated in
            # Python 2 and removed in Python 3.
            if scheme in challenges:
                yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self)
|
'Add a name and password that will be used
any time a request requires authentication.'
| def add_credentials(self, name, password, domain=''):
# Register name/password for use whenever a request to `domain`
# (default: any host) requires authentication.
| self.credentials.add(name, password, domain)
|
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.