rem (stringlengths 0–322k) | add (stringlengths 0–2.05M) | context (stringlengths 8–228k) |
---|---|---|
s = SeqDict() s[10] = 'ten' s[20] = 'twenty' s[30] = 'thirty' del s[20] verify(s[10] == 'ten') verify(s.keys() == [10, 30]) verify(s.has_key(10)) verify(not s.has_key(20)) verify(10 in s) verify(20 not in s) verify([k for k in s] == [10, 30]) verify(len(s) == 2) verify(list(s.iteritems()) == [(10,'ten'), (30, 'thirty')]) verify(list(s.iterkeys()) == [10, 30]) verify(list(s.itervalues()) == ['ten', 'thirty']) verify(s.values() == ['ten', 'thirty']) verify(s.items() == [(10,'ten'), (30, 'thirty')]) verify(s.get(10) == 'ten') verify(s.get(15,'fifteen') == 'fifteen') verify(s.get(15) == None) verify(s.setdefault(40, 'forty') == 'forty') verify(s.setdefault(10, 'null') == 'ten') del s[40] verify(s.pop(10) == 'ten') verify(10 not in s) s[10] = 'ten' k, v = s.popitem() verify(k not in s) s[k] = v s.clear() verify(len(s) == 0) try: s.popitem() except KeyError: pass else: verify(0, "popitem from an empty list should raise KeyError") s.update({10: 'ten', 20:'twenty'}) verify(s[10]=='ten' and s[20]=='twenty') verify(s == {10: 'ten', 20:'twenty'}) t = SeqDict() t[20] = 'twenty' t[10] = 'ten' verify(s == t) | class UserDictMixinTest(unittest.TestCase): def test_all(self): s = SeqDict() s[10] = 'ten' s[20] = 'twenty' s[30] = 'thirty' del s[20] self.assertEqual(s[10], 'ten') self.assertEqual(s.keys(), [10, 30]) self.assert_(s.has_key(10)) self.assert_(not s.has_key(20)) self.assert_(10 in s) self.assert_(20 not in s) self.assertEqual([k for k in s], [10, 30]) self.assertEqual(len(s), 2) self.assertEqual(list(s.iteritems()), [(10,'ten'), (30, 'thirty')]) self.assertEqual(list(s.iterkeys()), [10, 30]) self.assertEqual(list(s.itervalues()), ['ten', 'thirty']) self.assertEqual(s.values(), ['ten', 'thirty']) self.assertEqual(s.items(), [(10,'ten'), (30, 'thirty')]) self.assertEqual(s.get(10), 'ten') self.assertEqual(s.get(15,'fifteen'), 'fifteen') self.assertEqual(s.get(15), None) self.assertEqual(s.setdefault(40, 'forty'), 'forty') self.assertEqual(s.setdefault(10, 'null'), 'ten') del s[40] self.assertEqual(s.pop(10), 'ten') self.assert_(10 not in s) s[10] = 'ten' k, v = s.popitem() self.assert_(k not in s) s[k] = v s.clear() self.assertEqual(len(s), 0) self.assertRaises(KeyError, s.popitem) s.update({10: 'ten', 20:'twenty'}) self.assertEqual(s[10], 'ten') self.assertEqual(s[20], 'twenty') self.assertEqual(s, {10: 'ten', 20:'twenty'}) t = SeqDict() t[20] = 'twenty' t[10] = 'ten' self.assertEqual(s, t) def test_main(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(UserDictTest)) suite.addTest(unittest.makeSuite(UserDictMixinTest)) test.test_support.run_suite(suite) if __name__ == "__main__": test_main() | def keys(self): return list(self.keylist) |
for name in ('lib', 'purelib', 'platlib', 'scripts', 'data'): | for name in ('lib', 'purelib', 'platlib', 'scripts', 'data', 'headers'): | def finalize_options (self): |
if hasattr(package, "__loader__"): | if (hasattr(package, "__loader__") and hasattr(package.__loader__, '_files')): | def find_package_modules(package, mask): import fnmatch if hasattr(package, "__loader__"): path = package.__name__.replace(".", os.path.sep) mask = os.path.join(path, mask) for fnm in package.__loader__._files.iterkeys(): if fnmatch.fnmatchcase(fnm, mask): yield os.path.splitext(fnm)[0].replace(os.path.sep, ".") else: path = package.__path__[0] for fnm in os.listdir(path): if fnmatch.fnmatchcase(fnm, mask): yield "%s.%s" % (package.__name__, os.path.splitext(fnm)[0]) |
if typ is dict: | if issubclass(typ, dict): | def _format(self, object, stream, indent, allowance, context, level): level = level + 1 objid = _id(object) if objid in context: stream.write(_recursion(object)) self._recursive = True self._readable = False return rep = self._repr(object, context, level - 1) typ = _type(object) sepLines = _len(rep) > (self._width - 1 - indent - allowance) write = stream.write |
if typ is list or typ is tuple: if typ is list: | if issubclass(typ, list) or issubclass(typ, tuple): if issubclass(typ, list): | def _format(self, object, stream, indent, allowance, context, level): level = level + 1 objid = _id(object) if objid in context: stream.write(_recursion(object)) self._recursive = True self._readable = False return rep = self._repr(object, context, level - 1) typ = _type(object) sepLines = _len(rep) > (self._width - 1 - indent - allowance) write = stream.write |
if typ is tuple and length == 1: | if issubclass(typ, tuple) and length == 1: | def _format(self, object, stream, indent, allowance, context, level): level = level + 1 objid = _id(object) if objid in context: stream.write(_recursion(object)) self._recursive = True self._readable = False return rep = self._repr(object, context, level - 1) typ = _type(object) sepLines = _len(rep) > (self._width - 1 - indent - allowance) write = stream.write |
if typ is str: | if issubclass(typ, basestring): | def _safe_repr(object, context, maxlevels, level): typ = _type(object) if typ is str: if 'locale' not in _sys.modules: return `object`, True, False if "'" in object and '"' not in object: closure = '"' quotes = {'"': '\\"'} else: closure = "'" quotes = {"'": "\\'"} qget = quotes.get sio = _StringIO() write = sio.write for char in object: if char.isalpha(): write(char) else: write(qget(char, `char`[1:-1])) return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False if typ is dict: if not object: return "{}", True, False objid = _id(object) if maxlevels and level > maxlevels: return "{...}", False, objid in context if objid in context: return _recursion(object), False, True context[objid] = 1 readable = True recursive = False components = [] append = components.append level += 1 saferepr = _safe_repr for k, v in object.iteritems(): krepr, kreadable, krecur = saferepr(k, context, maxlevels, level) vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level) append("%s: %s" % (krepr, vrepr)) readable = readable and kreadable and vreadable if krecur or vrecur: recursive = True del context[objid] return "{%s}" % _commajoin(components), readable, recursive if typ is list or typ is tuple: if typ is list: if not object: return "[]", True, False format = "[%s]" elif _len(object) == 1: format = "(%s,)" else: if not object: return "()", True, False format = "(%s)" objid = _id(object) if maxlevels and level > maxlevels: return format % "...", False, objid in context if objid in context: return _recursion(object), False, True context[objid] = 1 readable = True recursive = False components = [] append = components.append level += 1 for o in object: orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level) append(orepr) if not oreadable: readable = False if orecur: recursive = True del context[objid] return format % _commajoin(components), readable, recursive rep = `object` return rep, (rep and not rep.startswith('<')), False |
if typ is dict: | if issubclass(typ, dict): | def _safe_repr(object, context, maxlevels, level): typ = _type(object) if typ is str: if 'locale' not in _sys.modules: return `object`, True, False if "'" in object and '"' not in object: closure = '"' quotes = {'"': '\\"'} else: closure = "'" quotes = {"'": "\\'"} qget = quotes.get sio = _StringIO() write = sio.write for char in object: if char.isalpha(): write(char) else: write(qget(char, `char`[1:-1])) return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False if typ is dict: if not object: return "{}", True, False objid = _id(object) if maxlevels and level > maxlevels: return "{...}", False, objid in context if objid in context: return _recursion(object), False, True context[objid] = 1 readable = True recursive = False components = [] append = components.append level += 1 saferepr = _safe_repr for k, v in object.iteritems(): krepr, kreadable, krecur = saferepr(k, context, maxlevels, level) vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level) append("%s: %s" % (krepr, vrepr)) readable = readable and kreadable and vreadable if krecur or vrecur: recursive = True del context[objid] return "{%s}" % _commajoin(components), readable, recursive if typ is list or typ is tuple: if typ is list: if not object: return "[]", True, False format = "[%s]" elif _len(object) == 1: format = "(%s,)" else: if not object: return "()", True, False format = "(%s)" objid = _id(object) if maxlevels and level > maxlevels: return format % "...", False, objid in context if objid in context: return _recursion(object), False, True context[objid] = 1 readable = True recursive = False components = [] append = components.append level += 1 for o in object: orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level) append(orepr) if not oreadable: readable = False if orecur: recursive = True del context[objid] return format % _commajoin(components), readable, recursive rep = `object` return rep, (rep and not rep.startswith('<')), False |
if typ is list or typ is tuple: if typ is list: | if issubclass(typ, list) or issubclass(typ, tuple): if issubclass(typ, list): | def _safe_repr(object, context, maxlevels, level): typ = _type(object) if typ is str: if 'locale' not in _sys.modules: return `object`, True, False if "'" in object and '"' not in object: closure = '"' quotes = {'"': '\\"'} else: closure = "'" quotes = {"'": "\\'"} qget = quotes.get sio = _StringIO() write = sio.write for char in object: if char.isalpha(): write(char) else: write(qget(char, `char`[1:-1])) return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False if typ is dict: if not object: return "{}", True, False objid = _id(object) if maxlevels and level > maxlevels: return "{...}", False, objid in context if objid in context: return _recursion(object), False, True context[objid] = 1 readable = True recursive = False components = [] append = components.append level += 1 saferepr = _safe_repr for k, v in object.iteritems(): krepr, kreadable, krecur = saferepr(k, context, maxlevels, level) vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level) append("%s: %s" % (krepr, vrepr)) readable = readable and kreadable and vreadable if krecur or vrecur: recursive = True del context[objid] return "{%s}" % _commajoin(components), readable, recursive if typ is list or typ is tuple: if typ is list: if not object: return "[]", True, False format = "[%s]" elif _len(object) == 1: format = "(%s,)" else: if not object: return "()", True, False format = "(%s)" objid = _id(object) if maxlevels and level > maxlevels: return format % "...", False, objid in context if objid in context: return _recursion(object), False, True context[objid] = 1 readable = True recursive = False components = [] append = components.append level += 1 for o in object: orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level) append(orepr) if not oreadable: readable = False if orecur: recursive = True del context[objid] return format % _commajoin(components), readable, recursive rep = `object` return rep, (rep and not rep.startswith('<')), False |
while nl < 0: | while (nl < 0 and size > 0): | def readline(self, size=-1): """Read a line with approx. size. If size is negative, read a whole line. readline() and read() must not be mixed up (!). """ if size < 0: size = sys.maxint |
if size <= 0: break | def readline(self, size=-1): """Read a line with approx. size. If size is negative, read a whole line. readline() and read() must not be mixed up (!). """ if size < 0: size = sys.maxint |
|
return self.parser.names | if self.parser: return self.parser.names else: return [] | def getnames(self): return self.parser.names |
exts.append( Extension('_CF', ['cf/_CFmodule.c'], | exts.append( Extension('_CF', ['cf/_CFmodule.c', 'cf/pycfbridge.c'], | def detect_modules(self): # Ensure that /usr/local is always used add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') |
waste_incs = find_file("WASTE.h", [], ["../waste/C_C++ Headers"]) | waste_incs = find_file("WASTE.h", [], ['../'*n + 'waste/C_C++ Headers' for n in (0,1,2,3,4)]) | def detect_modules(self): # Ensure that /usr/local is always used add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') |
["../waste/Static Libraries"]) | ["../"*n + "waste/Static Libraries" for n in (0,1,2,3,4)]) | def detect_modules(self): # Ensure that /usr/local is always used add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') |
['waste/wastemodule.c', | ['waste/wastemodule.c'] + [ os.path.join(srcdir, d) for d in | def detect_modules(self): # Ensure that /usr/local is always used add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') |
include_dirs = waste_incs + ['Mac/Wastemods'], | include_dirs = waste_incs + [os.path.join(srcdir, 'Mac/Wastemods')], | def detect_modules(self): # Ensure that /usr/local is always used add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') |
def detect_tkinter_darwin(self, inc_dirs, lib_dirs): from os.path import join, exists framework_dirs = [ '/System/Library/Frameworks/', '/Library/Frameworks', join(os.getenv('HOME'), '/Library/Frameworks') ] for F in framework_dirs: for fw in 'Tcl', 'Tk': if not exists(join(F, fw + '.framework')): break else: break else: return 0 include_dirs = [ join(F, fw + '.framework', H) for fw in 'Tcl', 'Tk' for H in 'Headers', 'Versions/Current/PrivateHeaders' ] include_dirs.append('/usr/X11R6/include') frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], define_macros=[('WITH_APPINIT', 1)], include_dirs = include_dirs, libraries = [], extra_compile_args = frameworks, extra_link_args = frameworks, ) self.extensions.append(ext) return 1 | def detect_modules(self): # Ensure that /usr/local is always used add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') |
|
platform = self.get_platform() | def detect_tkinter(self, inc_dirs, lib_dirs): # The _tkinter module. |
|
self.assertRaises(TypeError, list, chain(N(s))) | self.assertRaises(TypeError, chain, N(s)) | def test_chain(self): for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(chain(g(s))), list(g(s))) self.assertEqual(list(chain(g(s), g(s))), list(g(s))+list(g(s))) self.assertRaises(TypeError, chain, X(s)) self.assertRaises(TypeError, list, chain(N(s))) self.assertRaises(ZeroDivisionError, list, chain(E(s))) |
self.assertRaises(TypeError, list, cycle(N(s))) | self.assertRaises(TypeError, cycle, N(s)) | def test_cycle(self): for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)): for g in (G, I, Ig, S, L, R): tgtlen = len(s) * 3 expected = list(g(s))*3 actual = list(islice(cycle(g(s)), tgtlen)) self.assertEqual(actual, expected) self.assertRaises(TypeError, cycle, X(s)) self.assertRaises(TypeError, list, cycle(N(s))) self.assertRaises(ZeroDivisionError, list, cycle(E(s))) |
self.assertRaises(TypeError, list, groupby(N(s))) | self.assertRaises(TypeError, groupby, N(s)) | def test_groupby(self): for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual([k for k, sb in groupby(g(s))], list(g(s))) self.assertRaises(TypeError, groupby, X(s)) self.assertRaises(TypeError, list, groupby(N(s))) self.assertRaises(ZeroDivisionError, list, groupby(E(s))) |
self.assertRaises(TypeError, list, ifilter(isEven, N(s))) | self.assertRaises(TypeError, ifilter, isEven, N(s)) | def test_ifilter(self): for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(ifilter(isEven, g(s))), filter(isEven, g(s))) self.assertRaises(TypeError, ifilter, isEven, X(s)) self.assertRaises(TypeError, list, ifilter(isEven, N(s))) self.assertRaises(ZeroDivisionError, list, ifilter(isEven, E(s))) |
self.assertRaises(TypeError, list, ifilterfalse(isEven, N(s))) | self.assertRaises(TypeError, ifilterfalse, isEven, N(s)) | def test_ifilterfalse(self): for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(ifilterfalse(isEven, g(s))), filter(isOdd, g(s))) self.assertRaises(TypeError, ifilterfalse, isEven, X(s)) self.assertRaises(TypeError, list, ifilterfalse(isEven, N(s))) self.assertRaises(ZeroDivisionError, list, ifilterfalse(isEven, E(s))) |
self.assertRaises(TypeError, list, izip(N(s))) | self.assertRaises(TypeError, izip, N(s)) | def test_izip(self): for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(izip(g(s))), zip(g(s))) self.assertEqual(list(izip(g(s), g(s))), zip(g(s), g(s))) self.assertRaises(TypeError, izip, X(s)) self.assertRaises(TypeError, list, izip(N(s))) self.assertRaises(ZeroDivisionError, list, izip(E(s))) |
self.assertRaises(TypeError, list, imap(onearg, N(s))) | self.assertRaises(TypeError, imap, onearg, N(s)) | def test_imap(self): for s in (range(10), range(0), range(100), (7,11), xrange(20,50,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(imap(onearg, g(s))), map(onearg, g(s))) self.assertEqual(list(imap(operator.pow, g(s), g(s))), map(operator.pow, g(s), g(s))) self.assertRaises(TypeError, imap, onearg, X(s)) self.assertRaises(TypeError, list, imap(onearg, N(s))) self.assertRaises(ZeroDivisionError, list, imap(onearg, E(s))) |
self.assertRaises(TypeError, list, islice(N(s), 10)) | self.assertRaises(TypeError, islice, N(s), 10) | def test_islice(self): for s in ("12345", "", range(1000), ('do', 1.2), xrange(2000,2200,5)): for g in (G, I, Ig, S, L, R): self.assertEqual(list(islice(g(s),1,None,2)), list(g(s))[1::2]) self.assertRaises(TypeError, islice, X(s), 10) self.assertRaises(TypeError, list, islice(N(s), 10)) self.assertRaises(ZeroDivisionError, list, islice(E(s), 10)) |
self.assertRaises(TypeError, list, starmap(operator.pow, N(ss))) | self.assertRaises(TypeError, starmap, operator.pow, N(ss)) | def test_starmap(self): for s in (range(10), range(0), range(100), (7,11), xrange(20,50,5)): for g in (G, I, Ig, S, L, R): ss = zip(s, s) self.assertEqual(list(starmap(operator.pow, g(ss))), map(operator.pow, g(s), g(s))) self.assertRaises(TypeError, starmap, operator.pow, X(ss)) self.assertRaises(TypeError, list, starmap(operator.pow, N(ss))) self.assertRaises(ZeroDivisionError, list, starmap(operator.pow, E(ss))) |
self.assertRaises(TypeError, list, takewhile(isEven, N(s))) | self.assertRaises(TypeError, takewhile, isEven, N(s)) | def test_takewhile(self): for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)): for g in (G, I, Ig, S, L, R): tgt = [] for elem in g(s): if not isEven(elem): break tgt.append(elem) self.assertEqual(list(takewhile(isEven, g(s))), tgt) self.assertRaises(TypeError, takewhile, isEven, X(s)) self.assertRaises(TypeError, list, takewhile(isEven, N(s))) self.assertRaises(ZeroDivisionError, list, takewhile(isEven, E(s))) |
self.assertRaises(TypeError, list, dropwhile(isOdd, N(s))) | self.assertRaises(TypeError, dropwhile, isOdd, N(s)) | def test_dropwhile(self): for s in (range(10), range(0), range(1000), (7,11), xrange(2000,2200,5)): for g in (G, I, Ig, S, L, R): tgt = [] for elem in g(s): if not tgt and isOdd(elem): continue tgt.append(elem) self.assertEqual(list(dropwhile(isOdd, g(s))), tgt) self.assertRaises(TypeError, dropwhile, isOdd, X(s)) self.assertRaises(TypeError, list, dropwhile(isOdd, N(s))) self.assertRaises(ZeroDivisionError, list, dropwhile(isOdd, E(s))) |
self.assertRaises(TypeError, list, tee(N(s))[0]) | self.assertRaises(TypeError, tee, N(s)) | def test_tee(self): for s in ("123", "", range(1000), ('do', 1.2), xrange(2000,2200,5)): for g in (G, I, Ig, S, L, R): it1, it2 = tee(g(s)) self.assertEqual(list(it1), list(g(s))) self.assertEqual(list(it2), list(g(s))) self.assertRaises(TypeError, tee, X(s)) self.assertRaises(TypeError, list, tee(N(s))[0]) self.assertRaises(ZeroDivisionError, list, tee(E(s))[0]) |
if proxy_auth: h.putheader('Proxy-Authorization: Basic %s' % proxy_auth) if auth: h.putheader('Authorization: Basic %s' % auth) | if proxy_auth: h.putheader('Proxy-Authorization', 'Basic %s' % proxy_auth) if auth: h.putheader('Authorization', 'Basic %s' % auth) | def open_https(self, url, data=None): """Use HTTPS protocol.""" import httplib user_passwd = None proxy_passwd = None if isinstance(url, str): host, selector = splithost(url) if host: user_passwd, host = splituser(host) host = unquote(host) realhost = host else: host, selector = url # here, we determine, whether the proxy contains authorization information proxy_passwd, host = splituser(host) urltype, rest = splittype(selector) url = rest user_passwd = None if urltype.lower() != 'https': realhost = None else: realhost, rest = splithost(rest) if realhost: user_passwd, realhost = splituser(realhost) if user_passwd: selector = "%s://%s%s" % (urltype, realhost, rest) #print "proxy via https:", host, selector if not host: raise IOError, ('https error', 'no host given') if proxy_passwd: import base64 proxy_auth = base64.b64encode(proxy_passwd).strip() else: proxy_auth = None if user_passwd: import base64 auth = base64.b64encode(user_passwd).strip() else: auth = None h = httplib.HTTPS(host, 0, key_file=self.key_file, cert_file=self.cert_file) if data is not None: h.putrequest('POST', selector) h.putheader('Content-Type', 'application/x-www-form-urlencoded') h.putheader('Content-Length', '%d' % len(data)) else: h.putrequest('GET', selector) if proxy_auth: h.putheader('Proxy-Authorization: Basic %s' % proxy_auth) if auth: h.putheader('Authorization: Basic %s' % auth) if realhost: h.putheader('Host', realhost) for args in self.addheaders: h.putheader(*args) h.endheaders() if data is not None: h.send(data) errcode, errmsg, headers = h.getreply() fp = h.getfile() if errcode == 200: return addinfourl(fp, headers, "https:" + url) else: if data is None: return self.http_error(url, fp, errcode, errmsg, headers) else: return self.http_error(url, fp, errcode, errmsg, headers, data) |
s.bind(hostname, PORT) | s.bind((hostname, PORT)) | def missing_ok(str): try: getattr(socket, str) except AttributeError: pass |
s.connect(hostname, PORT) | s.connect((hostname, PORT)) | def missing_ok(str): try: getattr(socket, str) except AttributeError: pass |
if version[:11] == '$Revision$': version = version[11:-1] | if version[:11] == '$' + 'Revision: ' and version[-1:] == '$': version = strip(version[11:-1]) | def docmodule(self, object): """Produce HTML documentation for a module object.""" name = object.__name__ result = '' head = '<br><big><big><strong> %s</strong></big></big>' % name try: file = inspect.getsourcefile(object) filelink = '<a href="file:%s">%s</a>' % (file, file) except TypeError: filelink = '(built-in)' info = [] if hasattr(object, '__version__'): version = str(object.__version__) if version[:11] == '$Revision$': version = version[11:-1] info.append('version: %s' % self.escape(version)) if hasattr(object, '__date__'): info.append(self.escape(str(object.__date__))) if info: head = head + ' (%s)' % join(info, ', ') result = result + self.heading( head, '#ffffff', '#7799ee', '<a href=".">index</a><br>' + filelink) |
self._name = 'PY_VAR' + `_varnum` | self._name = 'PY_VAR' + repr(_varnum) | def __init__(self, master=None, value=None, name=None): """Construct a variable |
def _interact(): """Make sure the application is in the foreground""" AE.AEInteractWithUser(50000000) | def _initialize(): global _initialized if _initialized: return macresource.need("DLOG", 260, "dialogs.rsrc", __name__) |
|
return self | return iter(self.next, None) | def __iter__(self): return self |
self.checksyntax(filename) | def check_module_event(self, event): filename = self.getfilename() if not filename: return if not self.tabnanny(filename): return self.checksyntax(filename) |
|
except IndentationError: pass | def tabnanny(self, filename): f = open(filename, 'r') try: tabnanny.process_tokens(tokenize.generate_tokens(f.readline)) except tokenize.TokenError, msg: msgtxt, (lineno, start) = msg self.editwin.gotoline(lineno) self.errorbox("Tabnanny Tokenizing Error", "Token Error: %s" % msgtxt) return False except tabnanny.NannyNag, nag: # The error messages from tabnanny are too confusing... self.editwin.gotoline(nag.get_lineno()) self.errorbox("Tab/space error", indent_message) return False except IndentationError: # From tokenize(), let compile() in checksyntax find it again. pass return True |
|
if not self.tabnanny(filename): return | def run_module_event(self, event): """Run the module after setting up the environment. |
|
self.packageRootFolder = root | self.sourceFolder = root | def build(self, root, resources=None, **options): """Create a package for some given root folder. |
self.packageResourceFolder = root | self.resourceFolder = root else: self.resourceFolder = resources | def build(self, root, resources=None, **options): """Create a package for some given root folder. |
elif not k in ["OutputDir"]: raise Error, "Unknown package option: %s" % k outputdir = options.get("OutputDir", os.getcwd()) packageName = self.packageInfo["Title"] self.PackageRootFolder = os.path.join(outputdir, packageName + ".pkg") | def build(self, root, resources=None, **options): """Create a package for some given root folder. |
|
packageName = self.packageInfo["Title"] rootFolder = packageName + ".pkg" contFolder = join(rootFolder, "Contents") resourceFolder = join(contFolder, "Resources") os.mkdir(rootFolder) | contFolder = join(self.PackageRootFolder, "Contents") self.packageResourceFolder = join(contFolder, "Resources") os.mkdir(self.PackageRootFolder) | def _makeFolders(self): "Create package folder structure." |
os.mkdir(resourceFolder) self.resourceFolder = resourceFolder | os.mkdir(self.packageResourceFolder) | def _makeFolders(self): "Create package folder structure." |
base = basename(self.packageRootFolder) + ".info" path = join(self.resourceFolder, base) | base = self.packageInfo["Title"] + ".info" path = join(self.packageResourceFolder, base) | def _addInfo(self): "Write .info file containing installing options." |
base = basename(self.packageRootFolder) + ".bom" bomPath = join(self.resourceFolder, base) cmd = "mkbom %s %s" % (self.packageRootFolder, bomPath) | base = self.packageInfo["Title"] + ".bom" bomPath = join(self.packageResourceFolder, base) cmd = "mkbom %s %s" % (self.sourceFolder, bomPath) | def _addBom(self): "Write .bom file containing 'Bill of Materials'." |
packageRootFolder = self.packageRootFolder try: d = dirname(packageRootFolder) os.chdir(packageRootFolder) base = basename(packageRootFolder) + ".pax" archPath = join(d, self.resourceFolder, base) cmd = "pax -w -f %s %s" % (archPath, ".") res = os.system(cmd) cmd = "gzip %s" % archPath res = os.system(cmd) except: pass | os.chdir(self.sourceFolder) base = basename(self.packageInfo["Title"]) + ".pax" self.archPath = join(self.packageResourceFolder, base) cmd = "pax -w -f %s %s" % (self.archPath, ".") res = os.system(cmd) cmd = "gzip %s" % self.archPath res = os.system(cmd) | def _addArchive(self): "Write .pax.gz file, a compressed archive using pax/gzip." |
if not self.packageResourceFolder: | if not self.resourceFolder: | def _addResources(self): "Add Welcome/ReadMe/License files, .lproj folders and scripts." |
pattern = join(self.packageResourceFolder, pat) | pattern = join(self.resourceFolder, pat) | def _addResources(self): "Add Welcome/ReadMe/License files, .lproj folders and scripts." |
pattern = join(self.packageResourceFolder, packageName + pat) | pattern = join(self.resourceFolder, packageName + pat) | def _addResources(self): "Add Welcome/ReadMe/License files, .lproj folders and scripts." |
files.append(f) | files.append((f, f)) | def _addResources(self): "Add Welcome/ReadMe/License files, .lproj folders and scripts." |
files.append(f) | files.append((f, f)) elif f in ["pre-upgrade", "pre-install", "post-upgrade", "post-install"]: files.append((f, self.packageInfo["Title"]+"."+f)) | def _addResources(self): "Add Welcome/ReadMe/License files, .lproj folders and scripts." |
for g in files: f = join(self.packageResourceFolder, g) | for src, dst in files: f = join(self.resourceFolder, src) | def _addResources(self): "Add Welcome/ReadMe/License files, .lproj folders and scripts." |
shutil.copy(f, self.resourceFolder) | shutil.copy(f, os.path.join(self.packageResourceFolder, dst)) | def _addResources(self): "Add Welcome/ReadMe/License files, .lproj folders and scripts." |
d = join(self.resourceFolder, basename(f)) | d = join(self.packageResourceFolder, dst) | def _addResources(self): "Add Welcome/ReadMe/License files, .lproj folders and scripts." |
packageRootFolder = self.packageRootFolder files = GlobDirectoryWalker(packageRootFolder) | files = GlobDirectoryWalker(self.sourceFolder) | def _addSizes(self): "Write .sizes file with info about number and size of files." |
installedSize = installedSize + os.stat(f)[6] d = dirname(packageRootFolder) base = basename(packageRootFolder) + ".pax.gz" archPath = join(d, self.resourceFolder, base) | installedSize = installedSize + os.lstat(f)[6] | def _addSizes(self): "Write .sizes file with info about number and size of files." |
zippedSize = os.stat(archPath)[6] | zippedSize = os.stat(self.archPath+ ".gz")[6] | def _addSizes(self): "Write .sizes file with info about number and size of files." |
base = basename(packageRootFolder) + ".sizes" f = open(join(self.resourceFolder, base), "w") format = "NumFiles %d\nInstalledSize %d\nCompressedSize %d" | base = self.packageInfo["Title"] + ".sizes" f = open(join(self.packageResourceFolder, base), "w") format = "NumFiles %d\nInstalledSize %d\nCompressedSize %d\n" | def _addSizes(self): "Write .sizes file with info about number and size of files." |
self._read(length, self.fp.read) | return self._read(length, self.fp.read) | def read(self, length = None): self._read(length, self.fp.read) |
self._read(length, self.fp.readline) | return self._read(length, self.fp.readline) | def readline(self, length = None): self._read(length, self.fp.readline) |
def __init__(self, allow_none): | def __init__(self, allow_none, encoding): | def __init__(self, allow_none): self.funcs = {} self.instance = None self.allow_none = allow_none |
allow_none = self.allow_none) | allow_none=self.allow_none, encoding=self.encoding) | def _marshaled_dispatch(self, data, dispatch_method = None): """Dispatches an XML-RPC method from marshalled (XML) data. |
response = xmlrpclib.dumps(fault) | response = xmlrpclib.dumps(fault, allow_none=self.allow_none, encoding=self.encoding) | def _marshaled_dispatch(self, data, dispatch_method = None): """Dispatches an XML-RPC method from marshalled (XML) data. |
xmlrpclib.Fault(1, "%s:%s" % (sys.exc_type, sys.exc_value)) | xmlrpclib.Fault(1, "%s:%s" % (sys.exc_type, sys.exc_value)), encoding=self.encoding, allow_none=self.allow_none, | def _marshaled_dispatch(self, data, dispatch_method = None): """Dispatches an XML-RPC method from marshalled (XML) data. |
logRequests=True, allow_none=False): | logRequests=True, allow_none=False, encoding=None): | def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler, logRequests=True, allow_none=False): self.logRequests = logRequests |
SimpleXMLRPCDispatcher.__init__(self, allow_none) | SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding) | def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler, logRequests=True, allow_none=False): self.logRequests = logRequests |
def __init__(self, allow_none=False): SimpleXMLRPCDispatcher.__init__(self, allow_none) | def __init__(self, allow_none=False, encoding=None): SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding) | def __init__(self, allow_none=False): SimpleXMLRPCDispatcher.__init__(self, allow_none) |
n = int(s.rstrip(NUL) or "0", 8) | n = int(s.rstrip(NUL + " ") or "0", 8) | def nti(s): """Convert a number field to a python number. """ # There are two possible encodings for a number field, see # itn() below. if s[0] != chr(0200): n = int(s.rstrip(NUL) or "0", 8) else: n = 0L for i in xrange(len(s) - 1): n <<= 8 n += ord(s[i + 1]) return n |
def read(self, n = 0): if n <= 0: | def read(self, n = -1): if n < 0: | def read(self, n = 0): if n <= 0: newpos = len(self.buf) else: newpos = min(self.pos+n, len(self.buf)) r = self.buf[self.pos:newpos] self.pos = newpos return r |
"timeout (%g) is %g seconds more than expected (%g)" | "timeout (%g) is more than %g seconds more than expected (%g)" | def testConnectTimeout(self): # Test connect() timeout _timeout = 0.001 self.sock.settimeout(_timeout) |
print "<H3>Shell environment:</H3>" | print "<H3>Shell Environment:</H3>" | def print_environ(): """Dump the shell environment as HTML.""" keys = environ.keys() keys.sort() print print "<H3>Shell environment:</H3>" print "<DL>" for key in keys: print "<DT>", escape(key), "<DD>", escape(environ[key]) print "</DL>" print |
print "<H3>Form contents:</H3>" | print "<H3>Form Contents:</H3>" | def print_form(form): """Dump the contents of a form as HTML.""" keys = form.keys() keys.sort() print print "<H3>Form contents:</H3>" print "<DL>" for key in keys: print "<DT>" + escape(key) + ":", value = form[key] print "<i>" + escape(`type(value)`) + "</i>" print "<DD>" + escape(`value`) print "</DL>" print |
print "<H3>Command line Arguments:</H3>" | print "<H3>Command Line Arguments:</H3>" | def print_arguments(): print print "<H3>Command line Arguments:</H3>" print print sys.argv print |
sqstring = r"(\b[rR])?'([^'\\\n]|\\.)*'?" dqstring = r'(\b[rR])?"([^"\\\n]|\\.)*"?' sq3string = r"(\b[rR])?'''([^'\\]|\\.|'(?!''))*(''')?" dq3string = r'(\b[rR])?"""([^"\\]|\\.|"(?!""))*(""")?' | sqstring = r"(\b[rR])?'[^'\\\n]*(\\.[^'\\\n]*)*'?" dqstring = r'(\b[rR])?"[^"\\\n]*(\\.[^"\\\n]*)*"?' sq3string = r"(\b[rR])?'''[^'\\]*((\\.|'(?!''))[^'\\]*)*(''')?" dq3string = r'(\b[rR])?"""[^"\\]*((\\.|"(?!""))[^"\\]*)*(""")?' | def make_pat(): kw = r"\b" + any("KEYWORD", keyword.kwlist) + r"\b" comment = any("COMMENT", [r"#[^\n]*"]) sqstring = r"(\b[rR])?'([^'\\\n]|\\.)*'?" dqstring = r'(\b[rR])?"([^"\\\n]|\\.)*"?' sq3string = r"(\b[rR])?'''([^'\\]|\\.|'(?!''))*(''')?" dq3string = r'(\b[rR])?"""([^"\\]|\\.|"(?!""))*(""")?' string = any("STRING", [sq3string, dq3string, sqstring, dqstring]) return kw + "|" + comment + "|" + string + "|" + any("SYNC", [r"\n"]) |
headers, 'file:'+pathname2url(file)) | headers, 'file:'+file) | def open_local_file(self, url): import mimetypes, mimetools, StringIO mtype = mimetypes.guess_type(url)[0] headers = mimetools.Message(StringIO.StringIO( 'Content-Type: %s\n' % (mtype or 'text/plain'))) host, file = splithost(url) if not host: return addinfourl(open(url2pathname(file), 'rb'), headers, 'file:'+pathname2url(file)) host, port = splitport(host) if not port \ and socket.gethostbyname(host) in (localhost(), thishost()): return addinfourl(open(url2pathname(file), 'rb'), headers, 'file:'+pathname2url(file)) raise IOError, ('local file error', 'not on local host') |
file = mod.get_file() self.assertEquals(file, os.path.join(TEMP_ZIP, os.sep.join(modules) + expected_ext)) | if expected_ext: file = mod.get_file() self.assertEquals(file, os.path.join(TEMP_ZIP, os.sep.join(modules) + expected_ext)) | def doTest(self, expected_ext, files, *modules): z = ZipFile(TEMP_ZIP, "w") try: for name, (mtime, data) in files.items(): zinfo = ZipInfo(name, time.localtime(mtime)) zinfo.compress_type = self.compression z.writestr(zinfo, data) z.close() sys.path.insert(0, TEMP_ZIP) |
>>> print a.__dict__ {'default': -1000, 'x2': 200, 'x1': 100} | >>> print sortdict(a.__dict__) {'default': -1000, 'x1': 100, 'x2': 200} | >>> def sorted(seq): |
for file in filenames: fp = open(file) self.__read(fp) | for filename in filenames: try: fp = open(filename) except IOError: continue self.__read(fp, filename) | def read(self, filenames): """Read and parse a list of filenames.""" if type(filenames) is type(''): filenames = [filenames] for file in filenames: fp = open(file) self.__read(fp) fp.close() |
def __read(self, fp): | def __read(self, fp, fpname): | def __read(self, fp): """Parse a sectioned setup file. |
raise MissingSectionHeaderError(fp.name, lineno, `line`) | raise MissingSectionHeaderError(fpname, lineno, `line`) | def __read(self, fp): """Parse a sectioned setup file. |
e = ParsingError(fp.name) | e = ParsingError(fpname) | def __read(self, fp): """Parse a sectioned setup file. |
genpluginproject("carbon", "_CF", sources=[":cf:_CFmodule.c", ":cf:pycfbridge.c"], outputdir="::Lib:Carbon") | genpluginproject("carbon", "_CF", sources=["_CFmodule.c", "pycfbridge.c"], outputdir="::Lib:Carbon") | def genpluginproject(architecture, module, project=None, projectdir=None, sources=[], sourcedirs=[], libraries=[], extradirs=[], extraexportsymbols=[], outputdir=":::Lib:lib-dynload", libraryflags=None, stdlibraryflags=None, prefixname=None, initialize=None): if CARBON_ONLY and architecture == "ppc": return if architecture == "all": # For the time being we generate two project files. Not as nice as # a single multitarget project, but easier to implement for now. genpluginproject("ppc", module, project, projectdir, sources, sourcedirs, libraries, extradirs, extraexportsymbols, outputdir, libraryflags, stdlibraryflags, prefixname, initialize) genpluginproject("carbon", module, project, projectdir, sources, sourcedirs, libraries, extradirs, extraexportsymbols, outputdir, libraryflags, stdlibraryflags, prefixname, initialize) return templatename = "template-%s" % architecture targetname = "%s.%s" % (module, architecture) dllname = "%s.%s.slb" % (module, architecture) if not project: if architecture != "ppc": project = "%s.%s.mcp"%(module, architecture) else: project = "%s.mcp"%module if not projectdir: projectdir = PROJECTDIR if not sources: sources = [module + 'module.c'] if not sourcedirs: for moduledir in MODULEDIRS: if '%' in moduledir: # For historical reasons an initial _ in the modulename # is not reflected in the folder name if module[0] == '_': modulewithout_ = module[1:] else: modulewithout_ = module moduledir = moduledir % modulewithout_ fn = os.path.join(projectdir, os.path.join(moduledir, sources[0])) if os.path.exists(fn): moduledir, sourcefile = os.path.split(fn) sourcedirs = [relpath(projectdir, moduledir)] sources[0] = sourcefile break else: print "Warning: %s: sourcefile not found: %s"%(module, sources[0]) sourcedirs = [] if prefixname: pass elif architecture == "carbon": prefixname = "mwerks_shcarbon_pch" else: prefixname = "mwerks_plugin_config.h" dict = { "sysprefix" : relpath(projectdir, sys.prefix), "sources" : sources, "extrasearchdirs" : sourcedirs + extradirs, "libraries": libraries, "mac_outputdir" : outputdir, "extraexportsymbols" : extraexportsymbols, "mac_targetname" : targetname, "mac_dllname" : dllname, "prefixname" : prefixname, } if libraryflags: dict['libraryflags'] = libraryflags if stdlibraryflags: dict['stdlibraryflags'] = stdlibraryflags if initialize: dict['initialize'] = initialize mkcwproject.mkproject(os.path.join(projectdir, project), module, dict, force=FORCEREBUILD, templatename=templatename) |
def item_cget(self, col, opt): return self.tk.call(self._w, 'item', 'cget', col, opt) | def item_cget(self, entry, col, opt): return self.tk.call(self._w, 'item', 'cget', entry, col, opt) | def item_cget(self, col, opt): return self.tk.call(self._w, 'item', 'cget', col, opt) |
import linecache | def formatwarning(message, category, filename, lineno): """Function to format a warning the standard way.""" import linecache s = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message) line = linecache.getline(filename, lineno).strip() if line: s = s + " " + line + "\n" return s |
|
if sys.byteorder == 'little': fmt = ossaudiodev.AFMT_S16_LE else: fmt = ossaudiodev.AFMT_S16_BE | def play_sound_file(data, rate, ssize, nchannels): try: dsp = ossaudiodev.open('w') except IOError, msg: if msg[0] in (errno.EACCES, errno.ENODEV, errno.EBUSY): raise TestSkipped, msg raise TestFailed, msg # set the data format if sys.byteorder == 'little': fmt = ossaudiodev.AFMT_S16_LE else: fmt = ossaudiodev.AFMT_S16_BE # at least check that these methods can be invoked dsp.bufsize() dsp.obufcount() dsp.obuffree() dsp.getptr() dsp.fileno() # set parameters based on .au file headers dsp.setparameters(fmt, nchannels, rate) t1 = time.time() print "playing test sound file..." dsp.write(data) dsp.close() t2 = time.time() print "elapsed time: %.1f sec" % (t2-t1) |
|
dsp.setparameters(fmt, nchannels, rate) | dsp.setparameters(AFMT_S16_NE, nchannels, rate) | def play_sound_file(data, rate, ssize, nchannels): try: dsp = ossaudiodev.open('w') except IOError, msg: if msg[0] in (errno.EACCES, errno.ENODEV, errno.EBUSY): raise TestSkipped, msg raise TestFailed, msg # set the data format if sys.byteorder == 'little': fmt = ossaudiodev.AFMT_S16_LE else: fmt = ossaudiodev.AFMT_S16_BE # at least check that these methods can be invoked dsp.bufsize() dsp.obufcount() dsp.obuffree() dsp.getptr() dsp.fileno() # set parameters based on .au file headers dsp.setparameters(fmt, nchannels, rate) t1 = time.time() print "playing test sound file..." dsp.write(data) dsp.close() t2 = time.time() print "elapsed time: %.1f sec" % (t2-t1) |
def_build = "%s setup.py build" % self.python | def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0])) def_build = "%s build" % def_setup_call | def _make_spec_file(self): """Generate the text of an RPM spec file and return it as a list of strings (one per line). """ # definitions and headers spec_file = [ '%define name ' + self.distribution.get_name(), '%define version ' + self.distribution.get_version().replace('-','_'), '%define release ' + self.release.replace('-','_'), '', 'Summary: ' + self.distribution.get_description(), ] |
("%s setup.py install " | ("%s install " | def _make_spec_file(self): """Generate the text of an RPM spec file and return it as a list of strings (one per line). """ # definitions and headers spec_file = [ '%define name ' + self.distribution.get_name(), '%define version ' + self.distribution.get_version().replace('-','_'), '%define release ' + self.release.replace('-','_'), '', 'Summary: ' + self.distribution.get_description(), ] |
"--record=INSTALLED_FILES") % self.python), | "--record=INSTALLED_FILES") % def_setup_call), | def _make_spec_file(self): """Generate the text of an RPM spec file and return it as a list of strings (one per line). """ # definitions and headers spec_file = [ '%define name ' + self.distribution.get_name(), '%define version ' + self.distribution.get_version().replace('-','_'), '%define release ' + self.release.replace('-','_'), '', 'Summary: ' + self.distribution.get_description(), ] |
del names["__builtins__"] | if names.has_key("__builtins__"): del names["__builtins__"] | def check_all(modname): names = {} try: exec "import %s" % modname in names except ImportError: # silent fail here seems the best route since some modules # may not be available in all environments return verify(hasattr(sys.modules[modname], "__all__"), "%s has no __all__ attribute" % modname) names = {} exec "from %s import *" % modname in names del names["__builtins__"] keys = names.keys() keys.sort() all = list(sys.modules[modname].__all__) # in case it's a tuple all.sort() verify(keys==all, "%s != %s" % (keys, all)) |
([("Point", "indent", "OutMode")], [("Point_ptr", "indent", "InMode")]), | def makerepairinstructions(self): return [ ([('ListBounds_ptr', '*', 'InMode')], [('Rect_ptr', '*', 'InMode')]), |
|
def sample(self, population, k, int=int): | def sample(self, population, k): | def sample(self, population, k, int=int): """Chooses k unique random elements from a population sequence. |
j = int(random() * (n-i)) | j = _int(random() * (n-i)) | def sample(self, population, k, int=int): """Chooses k unique random elements from a population sequence. |
j = int(random() * n) | j = _int(random() * n) | def sample(self, population, k, int=int): """Chooses k unique random elements from a population sequence. |
transform = element._as_temporarily_immutable except AttributeError: pass else: element = transform() return element in self._data | return element in self._data except TypeError: transform = getattr(element, "_as_temporary_immutable", None) if transform is None: raise return transform() in self._data | def __contains__(self, element): """Report whether an element is a member of a set. |
for elt in iterable: | for element in iterable: | def update(self, iterable): """Add all values from an iterable (such as a list or file).""" data = self._data value = True for elt in iterable: try: transform = elt._as_immutable except AttributeError: pass else: elt = transform() data[elt] = value |
transform = elt._as_immutable except AttributeError: pass else: elt = transform() data[elt] = value | data[element] = value except TypeError: transform = getattr(element, "_as_temporary_immutable", None) if transform is None: raise data[transform()] = value | def update(self, iterable): """Add all values from an iterable (such as a list or file).""" data = self._data value = True for elt in iterable: try: transform = elt._as_immutable except AttributeError: pass else: elt = transform() data[elt] = value |