rem (string, lengths 0–322k) | add (string, lengths 0–2.05M) | context (string, lengths 8–228k) |
---|---|---|
setupname = os.path.join(self._buildDirname, "setup.py") if not os.path.exists(setupname) and not NO_EXECUTE: return "no setup.py found after unpack of archive" | def installPackageOnly(self, output=None): """Default install method, to be overridden by subclasses""" return "Cannot automatically install package %s" % self.fullname() | def unpackSinglePackage(self, output=None): """Unpack a downloaded package archive.""" filename = os.path.split(self.archiveFilename)[1] for ext, cmd in ARCHIVE_FORMATS: if filename[-len(ext):] == ext: break else: return "unknown extension for archive file: %s" % filename basename = filename[:-len(ext)] cmd = cmd % self.archiveFilename self._buildDirname = os.path.join(self._db.preferences.buildDir, basename) if self._cmd(output, self._db.preferences.buildDir, cmd): return "unpack command failed" setupname = os.path.join(self._buildDirname, "setup.py") if not os.path.exists(setupname) and not NO_EXECUTE: return "no setup.py found after unpack of archive" |
msg = self.downloadSinglePackage(output) | msg = self.downloadPackageOnly(output) | def installSinglePackage(self, output=None): """Download, unpack and install a single package. If output is given it should be a file-like object and it will receive a log of what happened.""" if not self._dict['Download-URL']: return "%s: This package needs to be installed manually" % _fmtpackagename(self) msg = self.downloadSinglePackage(output) if msg: return "download %s: %s" % (self.fullname(), msg) msg = self.unpackSinglePackage(output) if msg: return "unpack %s: %s" % (self.fullname(), msg) if self._dict.has_key('Pre-install-command'): if self._cmd(output, self._buildDirname, self._dict['Pre-install-command']): return "pre-install %s: running \"%s\" failed" % \ (self.fullname(), self._dict['Pre-install-command']) old_contents = os.listdir(self._db.preferences.installDir) installcmd = self._dict.get('Install-command') if not installcmd: installcmd = '"%s" setup.py install' % sys.executable if self._cmd(output, self._buildDirname, installcmd): return "install %s: running \"%s\" failed" % self.fullname() new_contents = os.listdir(self._db.preferences.installDir) self._interpretPthFiles(old_contents, new_contents) if self._dict.has_key('Post-install-command'): if self._cmd(output, self._buildDirname, self._dict['Post-install-command']): return "post-install %s: running \"%s\" failed" % \ (self.fullname(), self._dict['Post-install-command']) return None |
msg = self.unpackSinglePackage(output) | msg = self.unpackPackageOnly(output) | def installSinglePackage(self, output=None): """Download, unpack and install a single package. If output is given it should be a file-like object and it will receive a log of what happened.""" if not self._dict['Download-URL']: return "%s: This package needs to be installed manually" % _fmtpackagename(self) msg = self.downloadSinglePackage(output) if msg: return "download %s: %s" % (self.fullname(), msg) msg = self.unpackSinglePackage(output) if msg: return "unpack %s: %s" % (self.fullname(), msg) if self._dict.has_key('Pre-install-command'): if self._cmd(output, self._buildDirname, self._dict['Pre-install-command']): return "pre-install %s: running \"%s\" failed" % \ (self.fullname(), self._dict['Pre-install-command']) old_contents = os.listdir(self._db.preferences.installDir) installcmd = self._dict.get('Install-command') if not installcmd: installcmd = '"%s" setup.py install' % sys.executable if self._cmd(output, self._buildDirname, installcmd): return "install %s: running \"%s\" failed" % self.fullname() new_contents = os.listdir(self._db.preferences.installDir) self._interpretPthFiles(old_contents, new_contents) if self._dict.has_key('Post-install-command'): if self._cmd(output, self._buildDirname, self._dict['Post-install-command']): return "post-install %s: running \"%s\" failed" % \ (self.fullname(), self._dict['Post-install-command']) return None |
if self._dict.has_key('Pre-install-command'): if self._cmd(output, self._buildDirname, self._dict['Pre-install-command']): return "pre-install %s: running \"%s\" failed" % \ (self.fullname(), self._dict['Pre-install-command']) old_contents = os.listdir(self._db.preferences.installDir) installcmd = self._dict.get('Install-command') if not installcmd: installcmd = '"%s" setup.py install' % sys.executable if self._cmd(output, self._buildDirname, installcmd): return "install %s: running \"%s\" failed" % self.fullname() | return self.installPackageOnly(output) def beforeInstall(self): """Bookkeeping before installation: remember what we have in site-packages""" self._old_contents = os.listdir(self._db.preferences.installDir) def afterInstall(self): """Bookkeeping after installation: interpret any new .pth files that have appeared""" | def installSinglePackage(self, output=None): """Download, unpack and install a single package. If output is given it should be a file-like object and it will receive a log of what happened.""" if not self._dict['Download-URL']: return "%s: This package needs to be installed manually" % _fmtpackagename(self) msg = self.downloadSinglePackage(output) if msg: return "download %s: %s" % (self.fullname(), msg) msg = self.unpackSinglePackage(output) if msg: return "unpack %s: %s" % (self.fullname(), msg) if self._dict.has_key('Pre-install-command'): if self._cmd(output, self._buildDirname, self._dict['Pre-install-command']): return "pre-install %s: running \"%s\" failed" % \ (self.fullname(), self._dict['Pre-install-command']) old_contents = os.listdir(self._db.preferences.installDir) installcmd = self._dict.get('Install-command') if not installcmd: installcmd = '"%s" setup.py install' % sys.executable if self._cmd(output, self._buildDirname, installcmd): return "install %s: running \"%s\" failed" % self.fullname() new_contents = os.listdir(self._db.preferences.installDir) self._interpretPthFiles(old_contents, new_contents) if self._dict.has_key('Post-install-command'): if self._cmd(output, self._buildDirname, self._dict['Post-install-command']): return "post-install %s: running \"%s\" failed" % \ (self.fullname(), self._dict['Post-install-command']) return None |
self._interpretPthFiles(old_contents, new_contents) if self._dict.has_key('Post-install-command'): if self._cmd(output, self._buildDirname, self._dict['Post-install-command']): return "post-install %s: running \"%s\" failed" % \ (self.fullname(), self._dict['Post-install-command']) return None def _interpretPthFiles(self, old_contents, new_contents): """Evaluate any new .pth files that have appeared after installing""" | def installSinglePackage(self, output=None): """Download, unpack and install a single package. If output is given it should be a file-like object and it will receive a log of what happened.""" if not self._dict['Download-URL']: return "%s: This package needs to be installed manually" % _fmtpackagename(self) msg = self.downloadSinglePackage(output) if msg: return "download %s: %s" % (self.fullname(), msg) msg = self.unpackSinglePackage(output) if msg: return "unpack %s: %s" % (self.fullname(), msg) if self._dict.has_key('Pre-install-command'): if self._cmd(output, self._buildDirname, self._dict['Pre-install-command']): return "pre-install %s: running \"%s\" failed" % \ (self.fullname(), self._dict['Pre-install-command']) old_contents = os.listdir(self._db.preferences.installDir) installcmd = self._dict.get('Install-command') if not installcmd: installcmd = '"%s" setup.py install' % sys.executable if self._cmd(output, self._buildDirname, installcmd): return "install %s: running \"%s\" failed" % self.fullname() new_contents = os.listdir(self._db.preferences.installDir) self._interpretPthFiles(old_contents, new_contents) if self._dict.has_key('Post-install-command'): if self._cmd(output, self._buildDirname, self._dict['Post-install-command']): return "post-install %s: running \"%s\" failed" % \ (self.fullname(), self._dict['Post-install-command']) return None |
|
if fn in old_contents: | if fn in self._old_contents: | def _interpretPthFiles(self, old_contents, new_contents): """Evaluate any new .pth files that have appeared after installing""" for fn in new_contents: if fn in old_contents: continue if fn[-4:] != '.pth': continue fullname = os.path.join(self._db.preferences.installDir, fn) f = open(fullname) for line in f.readlines(): if not line: continue if line[0] == '#': continue if line[:6] == 'import': exec line continue if line[-1] == '\n': line = line[:-1] if not os.path.isabs(line): line = os.path.join(self._db.preferences.installDir, line) line = os.path.realpath(line) if not line in sys.path: sys.path.append(line) |
sys.path.append(line) | sys.path.append(line) class PimpPackage_binary(PimpPackage): def unpackPackageOnly(self, output=None): """We don't unpack binary packages until installing""" pass def installPackageOnly(self, output=None): """Install a single source package. If output is given it should be a file-like object and it will receive a log of what happened.""" msgs = [] if self._dict.has_key('Pre-install-command'): msg.append("%s: Pre-install-command ignored" % self.fullname()) if self._dict.has_key('Install-command'): msgs.append("%s: Install-command ignored" % self.fullname()) if self._dict.has_key('Post-install-command'): msgs.append("%s: Post-install-command ignored" % self.fullname()) self.beforeInstall() filename = os.path.split(self.archiveFilename)[1] for ext, cmd in ARCHIVE_FORMATS: if filename[-len(ext):] == ext: break else: return "unknown extension for archive file: %s" % filename prefixmod = '-C /' cmd = cmd % self.archiveFilename if self._cmd(output, self._db.preferences.buildDir, cmd, prefixmod): return "unpack command failed" self.afterInstall() if self._dict.has_key('Post-install-command'): if self._cmd(output, self._buildDirname, self._dict['Post-install-command']): return "post-install %s: running \"%s\" failed" % \ (self.fullname(), self._dict['Post-install-command']) return None class PimpPackage_source(PimpPackage): def unpackPackageOnly(self, output=None): """Unpack a source package and check that setup.py exists""" PimpPackage.unpackPackageOnly(self, output) self._buildDirname = os.path.join(self._db.preferences.buildDir, basename) setupname = os.path.join(self._buildDirname, "setup.py") if not os.path.exists(setupname) and not NO_EXECUTE: return "no setup.py found after unpack of archive" def installPackageOnly(self, output=None): """Install a single source package. If output is given it should be a file-like object and it will receive a log of what happened.""" if self._dict.has_key('Pre-install-command'): if self._cmd(output, self._buildDirname, self._dict['Pre-install-command']): return "pre-install %s: running \"%s\" failed" % \ (self.fullname(), self._dict['Pre-install-command']) self.beforeInstall() installcmd = self._dict.get('Install-command') if not installcmd: installcmd = '"%s" setup.py install' % sys.executable if self._cmd(output, self._buildDirname, installcmd): return "install %s: running \"%s\" failed" % self.fullname() self.afterInstall() if self._dict.has_key('Post-install-command'): if self._cmd(output, self._buildDirname, self._dict['Post-install-command']): return "post-install %s: running \"%s\" failed" % \ (self.fullname(), self._dict['Post-install-command']) return None | def _interpretPthFiles(self, old_contents, new_contents): """Evaluate any new .pth files that have appeared after installing""" for fn in new_contents: if fn in old_contents: continue if fn[-4:] != '.pth': continue fullname = os.path.join(self._db.preferences.installDir, fn) f = open(fullname) for line in f.readlines(): if not line: continue if line[0] == '#': continue if line[:6] == 'import': exec line continue if line[-1] == '\n': line = line[:-1] if not os.path.isabs(line): line = os.path.join(self._db.preferences.installDir, line) line = os.path.realpath(line) if not line in sys.path: sys.path.append(line) |
output = struct.pack("iiiiiii", 0x950412de, | output = struct.pack("Iiiiiii", 0x950412deL, | def generate(): "Return the generated output." global MESSAGES keys = MESSAGES.keys() # the keys are sorted in the .mo file keys.sort() offsets = [] ids = strs = '' for id in keys: # For each string, we need size and file offset. Each string is NUL # terminated; the NUL does not count into the size. offsets.append((len(ids), len(id), len(strs), len(MESSAGES[id]))) ids += id + '\0' strs += MESSAGES[id] + '\0' output = '' # The header is 7 32-bit unsigned integers. We don't use hash tables, so # the keys start right after the index tables. # translated string. keystart = 7*4+16*len(keys) # and the values start after the keys valuestart = keystart + len(ids) koffsets = [] voffsets = [] # The string table first has the list of keys, then the list of values. # Each entry has first the size of the string, then the file offset. for o1, l1, o2, l2 in offsets: koffsets += [l1, o1+keystart] voffsets += [l2, o2+valuestart] offsets = koffsets + voffsets output = struct.pack("iiiiiii", 0x950412de, # Magic 0, # Version len(keys), # # of entries 7*4, # start of key index 7*4+len(keys)*8, # start of value index 0, 0) # size and offset of hash table output += array.array("i", offsets).tostring() output += ids output += strs return output |
f2 = self.tar.extractfile("/S-SPARSE-WITH-NULLS") | f2 = self.tar.extractfile("S-SPARSE-WITH-NULLS") | def test_sparse(self): """Test sparse member extraction. """ if self.sep != "|": f1 = self.tar.extractfile("S-SPARSE") f2 = self.tar.extractfile("/S-SPARSE-WITH-NULLS") self.assert_(f1.read() == f2.read(), "_FileObject failed on sparse file member") |
filename = "/0-REGTYPE-TEXT" | filename = "0-REGTYPE-TEXT" | def test_readlines(self): """Test readlines() method of _FileObject. """ if self.sep != "|": filename = "/0-REGTYPE-TEXT" self.tar.extract(filename, dirname()) lines1 = file(os.path.join(dirname(), filename), "rU").readlines() lines2 = self.tar.extractfile(filename).readlines() self.assert_(lines1 == lines2, "_FileObject.readline() does not work correctly") |
filename = "/0-REGTYPE" | filename = "0-REGTYPE" | def test_seek(self): """Test seek() method of _FileObject, incl. random reading. """ if self.sep != "|": filename = "/0-REGTYPE" self.tar.extract(filename, dirname()) data = file(os.path.join(dirname(), filename), "rb").read() |
ssl = socket.ssl(sock, self.key_file, self.cert_file) | realsock = sock if hasattr(sock, "_sock"): realsock = sock._sock ssl = socket.ssl(realsock, self.key_file, self.cert_file) | def connect(self): "Connect to a host on a given (SSL) port." |
if sys.platform[:4] != 'java': value = u"%(x)s, %()s" % {'x':u"abc", u''.encode('utf-8'):"def"} else: value = u"%(x)s, %()s" % {'x':u"abc", u'':"def"} | value = u"%(x)s, %()s" % {'x':u"abc", u'':"def"} | def test_fixup(s): s2 = u'\ud800\udc01' test_lecmp(s, s2) s2 = u'\ud900\udc01' test_lecmp(s, s2) s2 = u'\uda00\udc01' test_lecmp(s, s2) s2 = u'\udb00\udc01' test_lecmp(s, s2) s2 = u'\ud800\udd01' test_lecmp(s, s2) s2 = u'\ud900\udd01' test_lecmp(s, s2) s2 = u'\uda00\udd01' test_lecmp(s, s2) s2 = u'\udb00\udd01' test_lecmp(s, s2) s2 = u'\ud800\ude01' test_lecmp(s, s2) s2 = u'\ud900\ude01' test_lecmp(s, s2) s2 = u'\uda00\ude01' test_lecmp(s, s2) s2 = u'\udb00\ude01' test_lecmp(s, s2) s2 = u'\ud800\udfff' test_lecmp(s, s2) s2 = u'\ud900\udfff' test_lecmp(s, s2) s2 = u'\uda00\udfff' test_lecmp(s, s2) s2 = u'\udb00\udfff' test_lecmp(s, s2) |
print 'd=', d | | def message(str = "Hello, world!", id = MESSAGE_ID): """Show a simple alert with a text message""" d = GetNewDialog(id, -1) d.SetDialogDefaultItem(1) print 'd=', d tp, h, rect = d.GetDialogItem(2) SetDialogItemText(h, str) while 1: n = ModalDialog(None) if n == 1: break |
return [0]*7, None return map(lambda x: ord(x), opr.data), opr | return [0]*9, None options = map(lambda x: ord(x), opr.data) while len(options) < 9: options = options + [0] return options, opr | def getoptions(id): try: opr = GetResource('Popt', id) except (MacOS.Error, Res.Error): return [0]*7, None return map(lambda x: ord(x), opr.data), opr |
print 'Created new GUSI option' ngusi_opr = Resource(gusi_opr.data) | ngusi_opr = Resource(newdata) | def edit_preferences(): preff_handle = openpreffile(WRITE) l, sr = getprefpath(PATH_STRINGS_ID) if l == None: message('Cannot find any sys.path resource! (Old python?)') sys.exit(0) fss, dr, fss_changed = getprefdir(DIRECTORY_ID) if fss == None: fss = macfs.FSSpec(os.getcwd()) fss_changed = 1 options, opr = getoptions(OPTIONS_ID) saved_options = options[:] creator, type, delaycons, gusi_opr = getgusioptions(GUSI_ID) saved_gusi_options = creator, type, delaycons # Let the user play away result = interact(l, fss, (options, creator, type, delaycons), 'System-wide preferences') # See what we have to update, and how if result == None: sys.exit(0) pathlist, nfss, (options, creator, type, delaycons) = result if nfss != fss: fss_changed = 1 if fss_changed: alias = nfss.NewAlias() if dr: dr.data = alias.data dr.ChangedResource() else: dr = Resource(alias.data) dr.AddResource('alis', DIRECTORY_ID, '') if pathlist != l: if pathlist == []: if sr.HomeResFile() == preff_handle: sr.RemoveResource() elif sr.HomeResFile() == preff_handle: sr.data = listtores(pathlist) sr.ChangedResource() else: sr = Resource(listtores(pathlist)) sr.AddResource('STR#', PATH_STRINGS_ID, '') if options != saved_options: newdata = reduce(lambda x, y: x+chr(y), options, '') if opr and opr.HomeResFile() == preff_handle: opr.data = newdata opr.ChangedResource() else: opr = Resource(newdata) opr.AddResource('Popt', OPTIONS_ID, '') if (creator, type, delaycons) != saved_gusi_options: newdata = setgusioptions(gusi_opr, creator, type, delaycons) if gusi_opr.HomeResFile() == preff_handle: gusi_opr.data = newdata gusi_opr.ChangedResource() else: print 'Created new GUSI option' ngusi_opr = Resource(gusi_opr.data) ngusi_opr.AddResource('GU\267I', GUSI_ID, '') CloseResFile(preff_handle) |
if not opr: | if not gusi_opr: | def edit_applet(name): pref_handle = openpreffile(READ) app_handle = openapplet(name) notfound = '' l, sr = getprefpath(OVERRIDE_PATH_STRINGS_ID) if l == None: notfound = 'path' l, dummy = getprefpath(PATH_STRINGS_ID) if l == None: message('Cannot find any sys.path resource! (Old python?)') sys.exit(0) fss, dr, fss_changed = getprefdir(OVERRIDE_DIRECTORY_ID) if fss == None: if notfound: notfound = notfound + ', directory' else: notfound = 'directory' fss, dummy, dummy2 = getprefdir(DIRECTORY_ID) if fss == None: fss = macfs.FSSpec(os.getcwd()) fss_changed = 1 options, opr = getoptions(OVERRIDE_OPTIONS_ID) if not opr: if notfound: notfound = notfound + ', options' else: notfound = 'options' options, dummy = getoptions(OPTIONS_ID) saved_options = options[:] creator, type, delaycons, gusi_opr = getgusioptions(OVERRIDE_GUSI_ID) if not opr: if notfound: notfound = notfound + ', GUSI options' else: notfound = 'GUSI options' creator, type, delaycons, dummy = getgusioptions(GUSI_ID) saved_gusi_options = creator, type, delaycons dummy = dummy2 = None # Discard them. if notfound: message('Warning: initial %s taken from system-wide defaults'%notfound) # Let the user play away result = interact(l, fss, (options, creator, type, delaycons), name) # See what we have to update, and how if result == None: sys.exit(0) pathlist, nfss, options = result if nfss != fss: fss_changed = 1 if fss_changed: alias = nfss.NewAlias() if dr: dr.data = alias.data dr.ChangedResource() else: dr = Resource(alias.data) dr.AddResource('alis', OVERRIDE_DIRECTORY_ID, '') if pathlist != l: if pathlist == []: if sr.HomeResFile() == app_handle: sr.RemoveResource() elif sr and sr.HomeResFile() == app_handle: sr.data = listtores(pathlist) sr.ChangedResource() else: sr = Resource(listtores(pathlist)) sr.AddResource('STR#', OVERRIDE_PATH_STRINGS_ID, '') if options != saved_options: newdata = reduce(lambda x, y: x+chr(y), options, '') if opr and opr.HomeResFile() == app_handle: opr.data = newdata opr.ChangedResource() else: opr = Resource(newdata) opr.AddResource('Popt', OVERRIDE_OPTIONS_ID, '') if (creator, type, delaycons) != saved_gusi_options: newdata = setgusioptions(gusi_opr, creator, type, delaycons) if gusi_opr.HomeResFile == app_handle: gusi_opr.data = newdata gusi_opr.ChangedResource() else: gusi_opr = Resource(gusi_opr.data) gusi_opr.AddResource('GU\267I', OVERRIDE_GUSI_ID, '') CloseResFile(app_handle) |
creator, type, delaycons, dummy = getgusioptions(GUSI_ID) | creator, type, delaycons, gusi_opr = getgusioptions(GUSI_ID) | def edit_applet(name): pref_handle = openpreffile(READ) app_handle = openapplet(name) notfound = '' l, sr = getprefpath(OVERRIDE_PATH_STRINGS_ID) if l == None: notfound = 'path' l, dummy = getprefpath(PATH_STRINGS_ID) if l == None: message('Cannot find any sys.path resource! (Old python?)') sys.exit(0) fss, dr, fss_changed = getprefdir(OVERRIDE_DIRECTORY_ID) if fss == None: if notfound: notfound = notfound + ', directory' else: notfound = 'directory' fss, dummy, dummy2 = getprefdir(DIRECTORY_ID) if fss == None: fss = macfs.FSSpec(os.getcwd()) fss_changed = 1 options, opr = getoptions(OVERRIDE_OPTIONS_ID) if not opr: if notfound: notfound = notfound + ', options' else: notfound = 'options' options, dummy = getoptions(OPTIONS_ID) saved_options = options[:] creator, type, delaycons, gusi_opr = getgusioptions(OVERRIDE_GUSI_ID) if not opr: if notfound: notfound = notfound + ', GUSI options' else: notfound = 'GUSI options' creator, type, delaycons, dummy = getgusioptions(GUSI_ID) saved_gusi_options = creator, type, delaycons dummy = dummy2 = None # Discard them. if notfound: message('Warning: initial %s taken from system-wide defaults'%notfound) # Let the user play away result = interact(l, fss, (options, creator, type, delaycons), name) # See what we have to update, and how if result == None: sys.exit(0) pathlist, nfss, options = result if nfss != fss: fss_changed = 1 if fss_changed: alias = nfss.NewAlias() if dr: dr.data = alias.data dr.ChangedResource() else: dr = Resource(alias.data) dr.AddResource('alis', OVERRIDE_DIRECTORY_ID, '') if pathlist != l: if pathlist == []: if sr.HomeResFile() == app_handle: sr.RemoveResource() elif sr and sr.HomeResFile() == app_handle: sr.data = listtores(pathlist) sr.ChangedResource() else: sr = Resource(listtores(pathlist)) sr.AddResource('STR#', OVERRIDE_PATH_STRINGS_ID, '') if options != saved_options: newdata = reduce(lambda x, y: x+chr(y), options, '') if opr and opr.HomeResFile() == app_handle: opr.data = newdata opr.ChangedResource() else: opr = Resource(newdata) opr.AddResource('Popt', OVERRIDE_OPTIONS_ID, '') if (creator, type, delaycons) != saved_gusi_options: newdata = setgusioptions(gusi_opr, creator, type, delaycons) if gusi_opr.HomeResFile == app_handle: gusi_opr.data = newdata gusi_opr.ChangedResource() else: gusi_opr = Resource(gusi_opr.data) gusi_opr.AddResource('GU\267I', OVERRIDE_GUSI_ID, '') CloseResFile(app_handle) |
pathlist, nfss, options = result | pathlist, nfss, (options, creator, type, delaycons) = result | def edit_applet(name): pref_handle = openpreffile(READ) app_handle = openapplet(name) notfound = '' l, sr = getprefpath(OVERRIDE_PATH_STRINGS_ID) if l == None: notfound = 'path' l, dummy = getprefpath(PATH_STRINGS_ID) if l == None: message('Cannot find any sys.path resource! (Old python?)') sys.exit(0) fss, dr, fss_changed = getprefdir(OVERRIDE_DIRECTORY_ID) if fss == None: if notfound: notfound = notfound + ', directory' else: notfound = 'directory' fss, dummy, dummy2 = getprefdir(DIRECTORY_ID) if fss == None: fss = macfs.FSSpec(os.getcwd()) fss_changed = 1 options, opr = getoptions(OVERRIDE_OPTIONS_ID) if not opr: if notfound: notfound = notfound + ', options' else: notfound = 'options' options, dummy = getoptions(OPTIONS_ID) saved_options = options[:] creator, type, delaycons, gusi_opr = getgusioptions(OVERRIDE_GUSI_ID) if not opr: if notfound: notfound = notfound + ', GUSI options' else: notfound = 'GUSI options' creator, type, delaycons, dummy = getgusioptions(GUSI_ID) saved_gusi_options = creator, type, delaycons dummy = dummy2 = None # Discard them. if notfound: message('Warning: initial %s taken from system-wide defaults'%notfound) # Let the user play away result = interact(l, fss, (options, creator, type, delaycons), name) # See what we have to update, and how if result == None: sys.exit(0) pathlist, nfss, options = result if nfss != fss: fss_changed = 1 if fss_changed: alias = nfss.NewAlias() if dr: dr.data = alias.data dr.ChangedResource() else: dr = Resource(alias.data) dr.AddResource('alis', OVERRIDE_DIRECTORY_ID, '') if pathlist != l: if pathlist == []: if sr.HomeResFile() == app_handle: sr.RemoveResource() elif sr and sr.HomeResFile() == app_handle: sr.data = listtores(pathlist) sr.ChangedResource() else: sr = Resource(listtores(pathlist)) sr.AddResource('STR#', OVERRIDE_PATH_STRINGS_ID, '') if options != saved_options: newdata = reduce(lambda x, y: x+chr(y), options, '') if opr and opr.HomeResFile() == app_handle: opr.data = newdata opr.ChangedResource() else: opr = Resource(newdata) opr.AddResource('Popt', OVERRIDE_OPTIONS_ID, '') if (creator, type, delaycons) != saved_gusi_options: newdata = setgusioptions(gusi_opr, creator, type, delaycons) if gusi_opr.HomeResFile == app_handle: gusi_opr.data = newdata gusi_opr.ChangedResource() else: gusi_opr = Resource(gusi_opr.data) gusi_opr.AddResource('GU\267I', OVERRIDE_GUSI_ID, '') CloseResFile(app_handle) |
if gusi_opr.HomeResFile == app_handle: | id, type, name = gusi_opr.GetResInfo() if gusi_opr.HomeResFile() == app_handle and id == OVERRIDE_GUSI_ID: | def edit_applet(name): pref_handle = openpreffile(READ) app_handle = openapplet(name) notfound = '' l, sr = getprefpath(OVERRIDE_PATH_STRINGS_ID) if l == None: notfound = 'path' l, dummy = getprefpath(PATH_STRINGS_ID) if l == None: message('Cannot find any sys.path resource! (Old python?)') sys.exit(0) fss, dr, fss_changed = getprefdir(OVERRIDE_DIRECTORY_ID) if fss == None: if notfound: notfound = notfound + ', directory' else: notfound = 'directory' fss, dummy, dummy2 = getprefdir(DIRECTORY_ID) if fss == None: fss = macfs.FSSpec(os.getcwd()) fss_changed = 1 options, opr = getoptions(OVERRIDE_OPTIONS_ID) if not opr: if notfound: notfound = notfound + ', options' else: notfound = 'options' options, dummy = getoptions(OPTIONS_ID) saved_options = options[:] creator, type, delaycons, gusi_opr = getgusioptions(OVERRIDE_GUSI_ID) if not opr: if notfound: notfound = notfound + ', GUSI options' else: notfound = 'GUSI options' creator, type, delaycons, dummy = getgusioptions(GUSI_ID) saved_gusi_options = creator, type, delaycons dummy = dummy2 = None # Discard them. if notfound: message('Warning: initial %s taken from system-wide defaults'%notfound) # Let the user play away result = interact(l, fss, (options, creator, type, delaycons), name) # See what we have to update, and how if result == None: sys.exit(0) pathlist, nfss, options = result if nfss != fss: fss_changed = 1 if fss_changed: alias = nfss.NewAlias() if dr: dr.data = alias.data dr.ChangedResource() else: dr = Resource(alias.data) dr.AddResource('alis', OVERRIDE_DIRECTORY_ID, '') if pathlist != l: if pathlist == []: if sr.HomeResFile() == app_handle: sr.RemoveResource() elif sr and sr.HomeResFile() == app_handle: sr.data = listtores(pathlist) sr.ChangedResource() else: sr = Resource(listtores(pathlist)) sr.AddResource('STR#', OVERRIDE_PATH_STRINGS_ID, '') if options != saved_options: newdata = reduce(lambda x, y: x+chr(y), options, '') if opr and opr.HomeResFile() == app_handle: opr.data = newdata opr.ChangedResource() else: opr = Resource(newdata) opr.AddResource('Popt', OVERRIDE_OPTIONS_ID, '') if (creator, type, delaycons) != saved_gusi_options: newdata = setgusioptions(gusi_opr, creator, type, delaycons) if gusi_opr.HomeResFile == app_handle: gusi_opr.data = newdata gusi_opr.ChangedResource() else: gusi_opr = Resource(gusi_opr.data) gusi_opr.AddResource('GU\267I', OVERRIDE_GUSI_ID, '') CloseResFile(app_handle) |
gusi_opr = Resource(gusi_opr.data) gusi_opr.AddResource('GU\267I', OVERRIDE_GUSI_ID, '') | ngusi_opr = Resource(newdata) ngusi_opr.AddResource('GU\267I', OVERRIDE_GUSI_ID, '') | def edit_applet(name): pref_handle = openpreffile(READ) app_handle = openapplet(name) notfound = '' l, sr = getprefpath(OVERRIDE_PATH_STRINGS_ID) if l == None: notfound = 'path' l, dummy = getprefpath(PATH_STRINGS_ID) if l == None: message('Cannot find any sys.path resource! (Old python?)') sys.exit(0) fss, dr, fss_changed = getprefdir(OVERRIDE_DIRECTORY_ID) if fss == None: if notfound: notfound = notfound + ', directory' else: notfound = 'directory' fss, dummy, dummy2 = getprefdir(DIRECTORY_ID) if fss == None: fss = macfs.FSSpec(os.getcwd()) fss_changed = 1 options, opr = getoptions(OVERRIDE_OPTIONS_ID) if not opr: if notfound: notfound = notfound + ', options' else: notfound = 'options' options, dummy = getoptions(OPTIONS_ID) saved_options = options[:] creator, type, delaycons, gusi_opr = getgusioptions(OVERRIDE_GUSI_ID) if not opr: if notfound: notfound = notfound + ', GUSI options' else: notfound = 'GUSI options' creator, type, delaycons, dummy = getgusioptions(GUSI_ID) saved_gusi_options = creator, type, delaycons dummy = dummy2 = None # Discard them. if notfound: message('Warning: initial %s taken from system-wide defaults'%notfound) # Let the user play away result = interact(l, fss, (options, creator, type, delaycons), name) # See what we have to update, and how if result == None: sys.exit(0) pathlist, nfss, options = result if nfss != fss: fss_changed = 1 if fss_changed: alias = nfss.NewAlias() if dr: dr.data = alias.data dr.ChangedResource() else: dr = Resource(alias.data) dr.AddResource('alis', OVERRIDE_DIRECTORY_ID, '') if pathlist != l: if pathlist == []: if sr.HomeResFile() == app_handle: sr.RemoveResource() elif sr and sr.HomeResFile() == app_handle: sr.data = listtores(pathlist) sr.ChangedResource() else: sr = Resource(listtores(pathlist)) sr.AddResource('STR#', OVERRIDE_PATH_STRINGS_ID, '') if options != saved_options: newdata = reduce(lambda x, y: x+chr(y), options, '') if opr and opr.HomeResFile() == app_handle: opr.data = newdata opr.ChangedResource() else: opr = Resource(newdata) opr.AddResource('Popt', OVERRIDE_OPTIONS_ID, '') if (creator, type, delaycons) != saved_gusi_options: newdata = setgusioptions(gusi_opr, creator, type, delaycons) if gusi_opr.HomeResFile == app_handle: gusi_opr.data = newdata gusi_opr.ChangedResource() else: gusi_opr = Resource(gusi_opr.data) gusi_opr.AddResource('GU\267I', OVERRIDE_GUSI_ID, '') CloseResFile(app_handle) |
self.build_scripts = os.path.join(self.build_base, 'scripts') | self.build_scripts = os.path.join(self.build_base, 'scripts-' + sys.version[0:3]) | def finalize_options (self): |
if not self.have_fork: for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH', 'HTTP_USER_AGENT', 'HTTP_COOKIE'): env.setdefault(k, "") | for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH', 'HTTP_USER_AGENT', 'HTTP_COOKIE'): env.setdefault(k, "") | def run_cgi(self): """Execute a CGI script.""" dir, rest = self.cgi_info i = rest.rfind('?') if i >= 0: rest, query = rest[:i], rest[i+1:] else: query = '' i = rest.find('/') if i >= 0: script, rest = rest[:i], rest[i:] else: script, rest = rest, '' scriptname = dir + '/' + script scriptfile = self.translate_path(scriptname) if not os.path.exists(scriptfile): self.send_error(404, "No such CGI script (%r)" % scriptname) return if not os.path.isfile(scriptfile): self.send_error(403, "CGI script is not a plain file (%r)" % scriptname) return ispy = self.is_python(scriptname) if not ispy: if not (self.have_fork or self.have_popen2 or self.have_popen3): self.send_error(403, "CGI script is not a Python script (%r)" % scriptname) return if not self.is_executable(scriptfile): self.send_error(403, "CGI script is not executable (%r)" % scriptname) return |
underline=0,indicatoron=FALSE,highlightthickness=0, | indicatoron=FALSE,highlightthickness=0, | def __init__(self,parent): Frame.__init__(self, parent,borderwidth=2,relief=RIDGE) self.button=Radiobutton(self,padx=5,pady=5,takefocus=FALSE, underline=0,indicatoron=FALSE,highlightthickness=0, borderwidth=0,selectcolor=self.cget('bg')) self.button.pack() |
def __init__(self,parent,pageNames,**kw): | def __init__(self,parent,pageNames=[],**kw): | def __init__(self,parent,pageNames,**kw): """ pageNames - a list of strings, each string will be the dictionary key to a page's data, and the name displayed on the page's tab. Should be specified in desired page order. The first page will be the default and first active page. """ Frame.__init__(self, parent, kw) self.grid_location(0,0) self.columnconfigure(0,weight=1) self.rowconfigure(1,weight=1) self.tabBar=Frame(self) self.tabBar.grid(row=0,column=0,sticky=EW) self.activePage=StringVar(self) self.defaultPage='' self.pages={} for name in pageNames: self.AddPage(name) |
def testmod(m, name=None, globs=None, verbose=None, isprivate=None, | def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None, | def testmod(m, name=None, globs=None, verbose=None, isprivate=None, report=1): """m, name=None, globs=None, verbose=None, isprivate=None, report=1 Test examples in docstrings in functions and classes reachable from module m, starting with m.__doc__. Private names are skipped. Also test examples reachable from dict m.__test__ if it exists and is not None. m.__dict__ maps names to functions, classes and strings; function and class docstrings are tested even if the name is private; strings are tested directly, as if they were docstrings. Return (#failures, #tests). See doctest.__doc__ for an overview. Optional keyword arg "name" gives the name of the module; by default use m.__name__. Optional keyword arg "globs" gives a dict to be used as the globals when executing examples; by default, use m.__dict__. A copy of this dict is actually used for each docstring, so that each docstring's examples start with a clean slate. Optional keyword arg "verbose" prints lots of stuff if true, prints only failures if false; by default, it's true iff "-v" is in sys.argv. Optional keyword arg "isprivate" specifies a function used to determine whether a name is private. The default function is doctest.is_private; see its docs for details. Optional keyword arg "report" prints a summary at the end when true, else prints nothing at the end. In verbose mode, the summary is detailed, else very brief (in fact, empty if all tests passed). Advanced tomfoolery: testmod runs methods of a local instance of class doctest.Tester, then merges the results into (or creates) global Tester instance doctest.master. Methods of doctest.master can be called directly too, if you want to do something unusual. Passing report=0 to testmod is especially useful then, to delay displaying a summary. Invoke doctest.master.summarize(verbose) when you're done fiddling. """ global master if not _ismodule(m): raise TypeError("testmod: module required; " + `m`) if name is None: name = m.__name__ tester = Tester(m, globs=globs, verbose=verbose, isprivate=isprivate) failures, tries = tester.rundoc(m, name) f, t = tester.rundict(m.__dict__, name, m) failures = failures + f tries = tries + t if hasattr(m, "__test__"): testdict = m.__test__ if testdict: if not hasattr(testdict, "items"): raise TypeError("testmod: module.__test__ must support " ".items(); " + `testdict`) f, t = tester.run__test__(testdict, name + ".__test__") failures = failures + f tries = tries + t if report: tester.summarize() if master is None: master = tester else: master.merge(tester) return failures, tries |
"""m, name=None, globs=None, verbose=None, isprivate=None, report=1 Test examples in docstrings in functions and classes reachable from module m, starting with m.__doc__. Private names are skipped. | """m=None, name=None, globs=None, verbose=None, isprivate=None, report=1 Test examples in docstrings in functions and classes reachable from module m (or the current module if m is not supplied), starting with m.__doc__. Private names are skipped. | def testmod(m, name=None, globs=None, verbose=None, isprivate=None, report=1): """m, name=None, globs=None, verbose=None, isprivate=None, report=1 Test examples in docstrings in functions and classes reachable from module m, starting with m.__doc__. Private names are skipped. Also test examples reachable from dict m.__test__ if it exists and is not None. m.__dict__ maps names to functions, classes and strings; function and class docstrings are tested even if the name is private; strings are tested directly, as if they were docstrings. Return (#failures, #tests). See doctest.__doc__ for an overview. Optional keyword arg "name" gives the name of the module; by default use m.__name__. Optional keyword arg "globs" gives a dict to be used as the globals when executing examples; by default, use m.__dict__. A copy of this dict is actually used for each docstring, so that each docstring's examples start with a clean slate. Optional keyword arg "verbose" prints lots of stuff if true, prints only failures if false; by default, it's true iff "-v" is in sys.argv. Optional keyword arg "isprivate" specifies a function used to determine whether a name is private. The default function is doctest.is_private; see its docs for details. Optional keyword arg "report" prints a summary at the end when true, else prints nothing at the end. In verbose mode, the summary is detailed, else very brief (in fact, empty if all tests passed). Advanced tomfoolery: testmod runs methods of a local instance of class doctest.Tester, then merges the results into (or creates) global Tester instance doctest.master. Methods of doctest.master can be called directly too, if you want to do something unusual. Passing report=0 to testmod is especially useful then, to delay displaying a summary. Invoke doctest.master.summarize(verbose) when you're done fiddling. """ global master if not _ismodule(m): raise TypeError("testmod: module required; " + `m`) if name is None: name = m.__name__ tester = Tester(m, globs=globs, verbose=verbose, isprivate=isprivate) failures, tries = tester.rundoc(m, name) f, t = tester.rundict(m.__dict__, name, m) failures = failures + f tries = tries + t if hasattr(m, "__test__"): testdict = m.__test__ if testdict: if not hasattr(testdict, "items"): raise TypeError("testmod: module.__test__ must support " ".items(); " + `testdict`) f, t = tester.run__test__(testdict, name + ".__test__") failures = failures + f tries = tries + t if report: tester.summarize() if master is None: master = tester else: master.merge(tester) return failures, tries |
ignores.append(regex.compile(a)) | ignores.append(re.compile(a)) | def main(): global filedict opts, args = getopt.getopt(sys.argv[1:], 'i:') for o, a in opts: if o == '-i': ignores.append(regex.compile(a)) if not args: args = ['-'] for filename in args: if filename == '-': sys.stdout.write('# Generated by h2py from stdin\n') process(sys.stdin, sys.stdout) else: fp = open(filename, 'r') outfile = os.path.basename(filename) i = string.rfind(outfile, '.') if i > 0: outfile = outfile[:i] outfile = string.upper(outfile) outfile = outfile + '.py' outfp = open(outfile, 'w') outfp.write('# Generated by h2py from %s\n' % filename) filedict = {} for dir in searchdirs: if filename[:len(dir)] == dir: filedict[filename[len(dir)+1:]] = None # no '/' trailing break process(fp, outfp) outfp.close() fp.close() |
i = string.rfind(outfile, '.') | i = outfile.rfind('.') | def main(): global filedict opts, args = getopt.getopt(sys.argv[1:], 'i:') for o, a in opts: if o == '-i': ignores.append(regex.compile(a)) if not args: args = ['-'] for filename in args: if filename == '-': sys.stdout.write('# Generated by h2py from stdin\n') process(sys.stdin, sys.stdout) else: fp = open(filename, 'r') outfile = os.path.basename(filename) i = string.rfind(outfile, '.') if i > 0: outfile = outfile[:i] outfile = string.upper(outfile) outfile = outfile + '.py' outfp = open(outfile, 'w') outfp.write('# Generated by h2py from %s\n' % filename) filedict = {} for dir in searchdirs: if filename[:len(dir)] == dir: filedict[filename[len(dir)+1:]] = None # no '/' trailing break process(fp, outfp) outfp.close() fp.close() |
outfile = string.upper(outfile) outfile = outfile + '.py' | modname = outfile.upper() outfile = modname + '.py' | def main(): global filedict opts, args = getopt.getopt(sys.argv[1:], 'i:') for o, a in opts: if o == '-i': ignores.append(regex.compile(a)) if not args: args = ['-'] for filename in args: if filename == '-': sys.stdout.write('# Generated by h2py from stdin\n') process(sys.stdin, sys.stdout) else: fp = open(filename, 'r') outfile = os.path.basename(filename) i = string.rfind(outfile, '.') if i > 0: outfile = outfile[:i] outfile = string.upper(outfile) outfile = outfile + '.py' outfp = open(outfile, 'w') outfp.write('# Generated by h2py from %s\n' % filename) filedict = {} for dir in searchdirs: if filename[:len(dir)] == dir: filedict[filename[len(dir)+1:]] = None # no '/' trailing break process(fp, outfp) outfp.close() fp.close() |
if p_include.match(line) >= 0: regs = p_include.regs | match = p_include.match(line) if match: regs = match.regs | stmt = 'def %s(%s): return %s\n' % (macro, arg, body) |
if not filedict.has_key(filename): | if importable.has_key(filename): outfp.write('import %s\n' % importable[filename]) elif not filedict.has_key(filename): | stmt = 'def %s(%s): return %s\n' % (macro, arg, body) |
inclfp = open(dir + '/' + filename, 'r') | inclfp = open(dir + '/' + filename) | stmt = 'def %s(%s): return %s\n' % (macro, arg, body) |
try: execv(tempfile.mktemp(), ()) | try: execv(tempfile.mktemp(), ('blah',)) | def _execvpe(file, args, env=None): if env is not None: func = execve argrest = (args, env) else: func = execv argrest = (args,) env = environ global _notfound head, tail = path.split(file) if head: apply(func, (file,) + argrest) return if env.has_key('PATH'): envpath = env['PATH'] else: envpath = defpath PATH = envpath.split(pathsep) if not _notfound: import tempfile # Exec a file that is guaranteed not to exist try: execv(tempfile.mktemp(), ()) except error, _notfound: pass exc, arg = error, _notfound for dir in PATH: fullname = path.join(dir, file) try: apply(func, (fullname,) + argrest) except error, (errno, msg): if errno != arg[0]: exc, arg = error, (errno, msg) raise exc, arg |
code = co.co_code n = len(code) i = 0 lastname = None while i < n: c = code[i] i = i+1 op = ord(c) if op >= dis.HAVE_ARGUMENT: oparg = ord(code[i]) + ord(code[i+1])*256 i = i+2 if op == IMPORT_NAME: name = lastname = co.co_names[oparg] if not self.badmodules.has_key(lastname): try: self.import_hook(name, m) except ImportError, msg: self.msg(2, "ImportError:", str(msg)) self.badmodules[name] = None elif op == IMPORT_FROM: name = co.co_names[oparg] assert lastname is not None if not self.badmodules.has_key(lastname): try: self.import_hook(lastname, m, [name]) except ImportError, msg: self.msg(2, "ImportError:", str(msg)) fullname = lastname + "." + name self.badmodules[fullname] = None else: lastname = None | self.scan_code(co, m) | def load_module(self, fqname, fp, pathname, (suffix, mode, type)): self.msgin(2, "load_module", fqname, fp and "fp", pathname) if type == imp.PKG_DIRECTORY: m = self.load_package(fqname, pathname) self.msgout(2, "load_module ->", m) return m if type == imp.PY_SOURCE: co = compile(fp.read(), pathname, 'exec') elif type == imp.PY_COMPILED: if fp.read(4) != imp.get_magic(): self.msgout(2, "raise ImportError: Bad magic number", pathname) raise ImportError, "Bad magic number in %s", pathname fp.read(4) co = marshal.load(fp) else: co = None m = self.add_module(fqname) if co: m.__file__ = pathname m.__code__ = co code = co.co_code n = len(code) i = 0 lastname = None while i < n: c = code[i] i = i+1 op = ord(c) if op >= dis.HAVE_ARGUMENT: oparg = ord(code[i]) + ord(code[i+1])*256 i = i+2 if op == IMPORT_NAME: name = lastname = co.co_names[oparg] if not self.badmodules.has_key(lastname): try: self.import_hook(name, m) except ImportError, msg: self.msg(2, "ImportError:", str(msg)) self.badmodules[name] = None elif op == IMPORT_FROM: name = co.co_names[oparg] assert lastname is not None if not self.badmodules.has_key(lastname): try: self.import_hook(lastname, m, [name]) except ImportError, msg: self.msg(2, "ImportError:", str(msg)) fullname = lastname + "." + name self.badmodules[fullname] = None else: lastname = None self.msgout(2, "load_module ->", m) return m |
Output("self->ob_type->tp_base->tp_dealloc((PyObject *)self);") | Output("%s.tp_dealloc((PyObject *)self);", self.basetype) | def outputDealloc(self): Output() Output("static void %s_dealloc(%s *self)", self.prefix, self.objecttype) OutLbrace() self.outputCleanupStructMembers() if self.basetype: Output("self->ob_type->tp_base->tp_dealloc((PyObject *)self);") elif hasattr(self, 'output_tp_free'): # This is a new-style object with tp_free slot Output("self->ob_type->tp_free((PyObject *)self);") else: Output("PyObject_Free((PyObject *)self);") OutRbrace() |
Output("if ((_self = type->tp_alloc(type, 0)) == NULL) return NULL;") | if self.basetype: Output("if (%s.tp_new)", self.basetype) OutLbrace() Output("if ( (*%s.tp_init)(_self, _args, _kwds) == NULL) return NULL;", self.basetype) Dedent() Output("} else {") Indent() Output("if ((_self = type->tp_alloc(type, 0)) == NULL) return NULL;") OutRbrace() else: Output("if ((_self = type->tp_alloc(type, 0)) == NULL) return NULL;") | def output_tp_newBody(self): Output("PyObject *_self;"); Output("%s itself;", self.itselftype); Output("char *kw[] = {\"itself\", 0};") Output() Output("if (!PyArg_ParseTupleAndKeywords(_args, _kwds, \"O&\", kw, %s_Convert, &itself)) return NULL;", self.prefix); Output("if ((_self = type->tp_alloc(type, 0)) == NULL) return NULL;") Output("((%s *)_self)->ob_itself = itself;", self.objecttype) Output("return _self;") |
result = pdb.Pdb.trace_dispatch(self, *args) sys.stdout = save_stdout return result | try: return pdb.Pdb.trace_dispatch(self, *args) finally: sys.stdout = save_stdout | def trace_dispatch(self, *args): # Redirect stdout to the given stream. save_stdout = sys.stdout sys.stdout = self.__out # Call Pdb's trace dispatch method. result = pdb.Pdb.trace_dispatch(self, *args) # Restore stdout. sys.stdout = save_stdout return result |
print "source", nodelist[-1] | def factor(self, nodelist): elt = nodelist[0] t = elt[0] print "source", nodelist[-1] node = self.com_node(nodelist[-1]) # need to handle (unary op)constant here... if t == token.PLUS: node = UnaryAdd(node) node.lineno = elt[2] elif t == token.MINUS: print node node = UnarySub(node) node.lineno = elt[2] elif t == token.TILDE: node = Invert(node) node.lineno = elt[2] return node |
|
print node | | def factor(self, nodelist): elt = nodelist[0] t = elt[0] print "source", nodelist[-1] node = self.com_node(nodelist[-1]) # need to handle (unary op)constant here... if t == token.PLUS: node = UnaryAdd(node) node.lineno = elt[2] elif t == token.MINUS: print node node = UnarySub(node) node.lineno = elt[2] elif t == token.TILDE: node = Invert(node) node.lineno = elt[2] return node |
(?P<escaped>%(delim)s{2}) | %(delim)s(?P<named>%(id)s) | %(delim)s{(?P<braced>%(id)s)} | (?P<invalid>%(delim)s) | %(delim)s(?: (?P<escaped>%(delim)s) | (?P<named>%(id)s) | {(?P<braced>%(id)s)} | (?P<invalid>) ) | def __getitem__(self, key): try: return self._primary[key] except KeyError: return self._secondary[key] |
if froms == '*': froms = theMod.__dict__.keys() for item in froms: (envLocals or envGlobals)[item] = theMod.__dict__[item] | while froms: item = froms[0]; del froms[0] if item == '*': froms = theMod.__dict__.keys() + froms else: try: (envLocals or envGlobals)[item] = theMod.__dict__[item] except KeyError: raise ImportError, ("name '%s' not found in module %s" % (item, theMod.__name__)) | def import_module(name, envLocals=None, envGlobals=None, froms=None, inPkg=None): """Primary service routine implementing 'import' with package nesting.""" # The job is divided into a few distinct steps: # # - Look for either an already loaded module or a file to be loaded. # * if neither loaded module nor prospect file is found, raise an error. # - If we have a file, not an already loaded module: # - Load the file into a module. # - Register the new module and intermediate package stubs. # (We have a module at this point...) # - Bind requested syms (module or specified 'from' defs) in calling env. # - Return the appropriate component. note("import_module: seeking '%s'%s" % (name, ((inPkg and ' (in package %s)' % inPkg.__name__) or ''))) # We need callers environment dict for local path and resulting module # binding. if not (envLocals or envGlobals): envLocals, envGlobals = exterior() modList = theMod = absNm = container = None # Get module obj if one already established, or else module file if not: if inPkg: # We've been invoked with a specific containing package: pkg, pkgPath, pkgNm = inPkg, inPkg.__dict__[PKG_PATH], inPkg.__name__ relNm = name absNm = pkgNm + '.' + name elif name[:PKG_SHORT_NM_LEN+1] != PKG_SHORT_NM + '.': # name is NOT '__.something' - setup to seek according to specified # absolute name. pkg = __python__ pkgPath = sys.path absNm = name relNm = absNm else: # name IS '__.' + something - setup to seek according to relative name, # in current package. relNm = name[len(PKG_SHORT_NM)+1:] # Relative portion of name. try: pkg = envGlobals[PKG_NM] # The immediately containing package. pkgPath = pkg.__dict__[PKG_PATH] if pkg == __python__: # At outermost package. absNm = relNm else: absNm = (pkg.__name__ + '.' + relNm) except KeyError: # Missing package, path, or name. 
note("Can't identify parent package, package name, or pkgpath") pass # ==v # Try to find existing module: if sys.modules.has_key(absNm): note('found ' + absNm + ' already imported') theMod = sys.modules[absNm] else: # Try for builtin or frozen first: theMod = imp.init_builtin(absNm) if theMod: note('found builtin ' + absNm) else: theMod = imp.init_frozen(absNm) if theMod: note('found frozen ' + absNm) if not theMod: if type(pkgPath) == types.StringType: pkgPath = [pkgPath] modList = find_module(relNm, pkgPath, absNm) if not modList: raise ImportError, "module '%s' not found" % absNm # ===X # We have a list of successively nested files leading to the # module, register them as stubs: container = register_module_nesting(modList, pkg) # Load from file if necessary and possible: modNm, modf, path, ty = modList[-1] note('found type ' + modes[ty[2]] + ' - ' + absNm) # Do the load: theMod = load_module(absNm, ty[2], modf, inPkg) # Loaded successfully - promote module to full module status: register_module(theMod, theMod.__name__, pkgPath, pkg) # Have a loaded module, impose designated components, and return # appropriate thing - according to guido: # "Note that for "from spam.ham import bacon" your function should # return the object denoted by 'spam.ham', while for "import # spam.ham" it should return the object denoted by 'spam' -- the # STORE instructions following the import statement expect it this # way." if not froms: # Establish the module defs in the importing name space: (envLocals or envGlobals)[name] = theMod return (container or theMod) else: # Implement 'from': Populate immediate env with module defs: if froms == '*': froms = theMod.__dict__.keys() # resolve '*' for item in froms: (envLocals or envGlobals)[item] = theMod.__dict__[item] return theMod |
def read(self, size=-1, chars=-1): | def read(self, size=-1, chars=-1, firstline=False): | def read(self, size=-1, chars=-1): |
newchars, decodedbytes = self.decode(data, self.errors) | try: newchars, decodedbytes = self.decode(data, self.errors) except UnicodeDecodeError, exc: if firstline: newchars, decodedbytes = self.decode(data[:exc.start], self.errors) lines = newchars.splitlines(True) if len(lines)<=1: raise else: raise | def read(self, size=-1, chars=-1): |
data = self.read(readsize) | data = self.read(readsize, firstline=True) | def readline(self, size=None, keepends=True): |
attrs['script_name'] = sys.argv[0] | attrs['script_name'] = os.path.basename(sys.argv[0]) | def setup (**attrs): """The gateway to the Distutils: do everything your setup script needs to do, in a highly flexible and user-driven way. Briefly: create a Distribution instance; find and parse config files; parse the command line; run each Distutils command found there, customized by the options supplied to 'setup()' (as keyword arguments), in config files, and on the command line. The Distribution instance might be an instance of a class supplied via the 'distclass' keyword argument to 'setup'; if no such class is supplied, then the Distribution class (in dist.py) is instantiated. All other arguments to 'setup' (except for 'cmdclass') are used to set attributes of the Distribution instance. The 'cmdclass' argument, if supplied, is a dictionary mapping command names to command classes. Each command encountered on the command line will be turned into a command class, which is in turn instantiated; any class found in 'cmdclass' is used in place of the default, which is (for command 'foo_bar') class 'foo_bar' in module 'distutils.command.foo_bar'. The command class must provide a 'user_options' attribute which is a list of option specifiers for 'distutils.fancy_getopt'. Any command-line options between the current and the next command are used to set attributes of the current command object. When the entire command-line has been successfully parsed, calls the 'run()' method on each command object in turn. This method will be driven entirely by the Distribution object (which each command object has a reference to, thanks to its constructor), and the command-specific options that became attributes of each command object. """ global _setup_stop_after, _setup_distribution # Determine the distribution class -- either caller-supplied or # our Distribution (see below). klass = attrs.get('distclass') if klass: del attrs['distclass'] else: klass = Distribution if not attrs.has_key('script_name'): attrs['script_name'] = sys.argv[0] if not attrs.has_key('script_args'): attrs['script_args'] = sys.argv[1:] # Create the Distribution instance, using the remaining arguments # (ie. everything except distclass) to initialize it try: _setup_distribution = dist = klass(attrs) except DistutilsSetupError, msg: if attrs.has_key('name'): raise SystemExit, "error in %s setup command: %s" % \ (attrs['name'], msg) else: raise SystemExit, "error in setup command: %s" % msg if _setup_stop_after == "init": return dist # Find and parse the config file(s): they will override options from # the setup script, but be overridden by the command line. dist.parse_config_files() if DEBUG: print "options (after parsing config files):" dist.dump_option_dicts() if _setup_stop_after == "config": return dist # Parse the command line; any command-line errors are the end user's # fault, so turn them into SystemExit to suppress tracebacks. try: ok = dist.parse_command_line() except DistutilsArgError, msg: raise SystemExit, gen_usage(dist.script_name) + "\nerror: %s" % msg if DEBUG: print "options (after parsing command line):" dist.dump_option_dicts() if _setup_stop_after == "commandline": return dist # And finally, run all the commands found on the command line. 
if ok: try: dist.run_commands() except KeyboardInterrupt: raise SystemExit, "interrupted" except (IOError, os.error), exc: error = grok_environment_error(exc) if DEBUG: sys.stderr.write(error + "\n") raise else: raise SystemExit, error except (DistutilsExecError, DistutilsFileError, DistutilsOptionError, CCompilerError), msg: if DEBUG: raise else: raise SystemExit, "error: " + str(msg) return dist |
x = self.dict[headerseen] + "\n " + line.strip() | def readheaders(self): """Read header lines. |
|
conn = self.msg.getheader('connection') if conn: conn = conn.lower() self.will_close = conn.find('close') != -1 or \ ( self.version != 11 and \ not self.msg.getheader('keep-alive') ) else: self.will_close = self.version != 11 and \ not self.msg.getheader('keep-alive') | self.will_close = self._check_close() | def begin(self): if self.msg is not None: # we've already started reading the response return |
"setparameters%r: returned %r" % (config + result)) | "setparameters%r: returned %r" % (config, result)) | def test_setparameters(dsp): # Two configurations for testing: # config1 (8-bit, mono, 8 kHz) should work on even the most # ancient and crufty sound card, but maybe not on special- # purpose high-end hardware # config2 (16-bit, stereo, 44.1kHz) should work on all but the # most ancient and crufty hardware config1 = (ossaudiodev.AFMT_U8, 1, 8000) config2 = (AFMT_S16_NE, 2, 44100) for config in [config1, config2]: (fmt, channels, rate) = config if (dsp.setfmt(fmt) == fmt and dsp.channels(channels) == channels and dsp.speed(rate) == rate): break else: raise RuntimeError("unable to set audio sampling parameters: " "you must have really weird audio hardware") # setparameters() should be able to set this configuration in # either strict or non-strict mode. result = dsp.setparameters(fmt, channels, rate, False) _assert(result == (fmt, channels, rate), "setparameters%r: returned %r" % (config + result)) result = dsp.setparameters(fmt, channels, rate, True) _assert(result == (fmt, channels, rate), "setparameters%r: returned %r" % (config + result)) |
self.sock.send(str) | sendptr = 0 while sendptr < len(str): sendptr = sendptr + self.sock.send(str[sendptr:]) | def send(self, str): """Send `str' to the server.""" if self.debuglevel > 0: print 'send:', `str` if self.sock: try: self.sock.send(str) except socket.error: raise SMTPServerDisconnected('Server not connected') else: raise SMTPServerDisconnected('please run connect() first') |
ref = Res.FSpOpenResFile(dst, 1) | ref = Res.FSpOpenResFile(dst, 2) | def buildone(template, wrapper, src, dst): buildtools.process(template, wrapper, dst, 1) # write source as a PYC resource into dst ref = Res.FSpOpenResFile(dst, 1) try: Res.UseResFile(ref) py_resource.frompyfile(src, "CGI_MAIN", preload=1) finally: Res.CloseResFile(ref) |
d0 = base.replace(minute=3, tzinfo=OperandDependentOffset()) d1 = base.replace(minute=9, tzinfo=OperandDependentOffset()) d2 = base.replace(minute=11, tzinfo=OperandDependentOffset()) for x in d0, d1, d2: for y in d0, d1, d2: got = cmp(x, y) if (x is d0 or x is d1) and (y is d0 or y is d1): expected = 0 elif x is y is d2: expected = 0 elif x is d2: expected = -1 else: assert y is d2 expected = 1 self.assertEqual(got, expected) | if cls is not timetz: d0 = base.replace(minute=3, tzinfo=OperandDependentOffset()) d1 = base.replace(minute=9, tzinfo=OperandDependentOffset()) d2 = base.replace(minute=11, tzinfo=OperandDependentOffset()) for x in d0, d1, d2: for y in d0, d1, d2: got = cmp(x, y) if (x is d0 or x is d1) and (y is d0 or y is d1): expected = 0 elif x is y is d2: expected = 0 elif x is d2: expected = -1 else: assert y is d2 expected = 1 self.assertEqual(got, expected) | def utcoffset(self, t): if t.minute < 10: return t.minute # d0 and d1 equal after adjustment else: return 59 # d2 off in the weeds |
if dt is None or isinstance(dt, time) or dt.tzinfo is None: | if dt is None or dt.tzinfo is None: | def dst(self, dt): if dt is None or isinstance(dt, time) or dt.tzinfo is None: # An exception instead may be sensible here, in one or more of # the cases. return ZERO |
user_pass = base64.encode_string(unquote(user_passw)).strip() req.addheader('Proxy-Authorization', user_pass) | user_pass = base64.encodestring(unquote(user_pass)).strip() req.add_header('Proxy-Authorization', 'Basic '+user_pass) | def proxy_open(self, req, proxy, type): orig_type = req.get_type() type, r_type = splittype(proxy) host, XXX = splithost(r_type) if '@' in host: user_pass, host = host.split('@', 1) user_pass = base64.encode_string(unquote(user_passw)).strip() req.addheader('Proxy-Authorization', user_pass) host = unquote(host) req.set_proxy(host, type) if orig_type == type: # let other handlers take care of it # XXX this only makes sense if the proxy is before the # other handlers return None else: # need to start over, because the other handlers don't # grok the proxy's URL type return self.parent.open(req) |
host = req.get_host() | host = urlparse.urlparse(req.get_full_url())[1] | def do_open(self, http_class, req): host = req.get_host() if not host: raise URLError('no host given') |
h.putheader('Content-type', 'application/x-www-form-urlencoded') h.putheader('Content-length', '%d' % len(data)) | if not req.headers.has_key('Content-type'): h.putheader('Content-type', 'application/x-www-form-urlencoded') if not req.headers.has_key('Content-length'): h.putheader('Content-length', '%d' % len(data)) | def do_open(self, http_class, req): host = req.get_host() if not host: raise URLError('no host given') |
if hasattr(x, 'im_func'): x = x.im_func if hasattr(x, 'func_code'): x = x.func_code if hasattr(x, 'co_code'): disassemble(x) else: raise TypeError, \ "don't know how to disassemble %s objects" % \ type(x).__name__ | raise TypeError, \ "don't know how to disassemble %s objects" % \ type(x).__name__ | def dis(x=None): """Disassemble classes, methods, functions, or code. With no argument, disassemble the last traceback. """ if not x: distb() return if type(x) is types.InstanceType: x = x.__class__ if hasattr(x, '__dict__'): items = x.__dict__.items() items.sort() for name, x1 in items: if type(x1) in (types.MethodType, types.FunctionType, types.CodeType): print "Disassembly of %s:" % name try: dis(x1) except TypeError, msg: print "Sorry:", msg print else: if hasattr(x, 'im_func'): x = x.im_func if hasattr(x, 'func_code'): x = x.func_code if hasattr(x, 'co_code'): disassemble(x) else: raise TypeError, \ "don't know how to disassemble %s objects" % \ type(x).__name__ |
user,pw = self.passwd.find_user_password(realm, host) | user, pw = self.passwd.find_user_password(realm, req.get_full_url()) | def retry_http_basic_auth(self, host, req, realm): user,pw = self.passwd.find_user_password(realm, host) if pw is not None: raw = "%s:%s" % (user, pw) auth = 'Basic %s' % base64.encodestring(raw).strip() if req.headers.get(self.auth_header, None) == auth: return None req.add_header(self.auth_header, auth) return self.parent.open(req) else: return None |
base = base + ', opaque="%s"' % opaque | base += ', opaque="%s"' % opaque | def get_authorization(self, req, chal): try: realm = chal['realm'] nonce = chal['nonce'] qop = chal.get('qop') algorithm = chal.get('algorithm', 'MD5') # mod_digest doesn't send an opaque, even though it isn't # supposed to be optional opaque = chal.get('opaque', None) except KeyError: return None |
base = base + ', digest="%s"' % entdig if algorithm != 'MD5': base = base + ', algorithm="%s"' % algorithm | base += ', digest="%s"' % entdig base += ', algorithm="%s"' % algorithm | def get_authorization(self, req, chal): try: realm = chal['realm'] nonce = chal['nonce'] qop = chal.get('qop') algorithm = chal.get('algorithm', 'MD5') # mod_digest doesn't send an opaque, even though it isn't # supposed to be optional opaque = chal.get('opaque', None) except KeyError: return None |
base = base + ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce) | base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce) | def get_authorization(self, req, chal): try: realm = chal['realm'] nonce = chal['nonce'] qop = chal.get('qop') algorithm = chal.get('algorithm', 'MD5') # mod_digest doesn't send an opaque, even though it isn't # supposed to be optional opaque = chal.get('opaque', None) except KeyError: return None |
if type(ob)==types.ClassType: | if type(ob) in (types.ClassType, types.TypeType): | def get_arg_text(ob): """Get a string describing the arguments for the given object""" argText = "" if ob is not None: argOffset = 0 if type(ob)==types.ClassType: # Look for the highest __init__ in the class chain. fob = _find_constructor(ob) if fob is None: fob = lambda: None else: argOffset = 1 elif type(ob)==types.MethodType: # bit of a hack for methods - turn it into a function # but we drop the "self" param. fob = ob.im_func argOffset = 1 else: fob = ob # Try and build one for Python defined functions if type(fob) in [types.FunctionType, types.LambdaType]: try: realArgs = fob.func_code.co_varnames[argOffset:fob.func_code.co_argcount] defaults = fob.func_defaults or [] defaults = list(map(lambda name: "=%s" % repr(name), defaults)) defaults = [""] * (len(realArgs)-len(defaults)) + defaults items = map(lambda arg, dflt: arg+dflt, realArgs, defaults) if fob.func_code.co_flags & 0x4: items.append("...") if fob.func_code.co_flags & 0x8: items.append("***") argText = ", ".join(items) argText = "(%s)" % argText except: pass # See if we can use the docstring doc = getattr(ob, "__doc__", "") if doc: doc = doc.lstrip() pos = doc.find("\n") if pos < 0 or pos > 70: pos = 70 if argText: argText += "\n" argText += doc[:pos] return argText |
lines = f.readlines() | lines = [] line = f.readline() while line: lines.append(line.strip()) line = f.readline() | def read(self): opener = URLopener() f = opener.open(self.url) lines = f.readlines() self.errcode = opener.errcode if self.errcode == 401 or self.errcode == 403: self.disallow_all = 1 _debug("disallow all") elif self.errcode >= 400: self.allow_all = 1 _debug("allow all") elif self.errcode == 200 and lines: _debug("parse lines") self.parse(lines) |
line = line.strip() | def parse(self, lines): """parse the input lines from a robot.txt file. We allow that a user-agent: line is not preceded by one or more blank lines.""" state = 0 linenumber = 0 entry = Entry() |
|
self.tries = 0 self.maxtries = 10 | def __init__(self, *args): apply(urllib.FancyURLopener.__init__, (self,) + args) self.errcode = 200 self.tries = 0 self.maxtries = 10 |
|
def http_error_302(self, url, fp, errcode, errmsg, headers, data=None): self.tries += 1 if self.tries >= self.maxtries: return self.http_error_default(url, fp, 500, "Internal Server Error: Redirect Recursion", headers) result = urllib.FancyURLopener.http_error_302(self, url, fp, errcode, errmsg, headers, data) self.tries = 0 return result | def http_error_default(self, url, fp, errcode, errmsg, headers): self.errcode = errcode return urllib.FancyURLopener.http_error_default(self, url, fp, errcode, errmsg, headers) |
|
formlength = formlength - 8 - chunk.chunksize if chunk.chunksize & 1: formlength = formlength - 1 | def initfp(self, file): self._file = file self._version = 0 self._decomp = None self._markers = [] self._soundpos = 0 form = self._file.read(4) if form != 'FORM': raise Error, 'file does not start with FORM id' formlength = _read_long(self._file) if formlength <= 0: raise Error, 'invalid FORM chunk data size' formdata = self._file.read(4) formlength = formlength - 4 if formdata == 'AIFF': self._aifc = 0 elif formdata == 'AIFC': self._aifc = 1 else: raise Error, 'not an AIFF or AIFF-C file' self._comm_chunk_read = 0 while formlength > 0: self._ssnd_seek_needed = 1 #DEBUG: SGI's soundfiler has a bug. There should # be no need to check for EOF here. try: chunk = Chunk().init(self._file) except EOFError: if formlength == 8: print 'Warning: FORM chunk size too large' formlength = 0 break raise EOFError # different error, raise exception formlength = formlength - 8 - chunk.chunksize if chunk.chunksize & 1: formlength = formlength - 1 if chunk.chunkname == 'COMM': self._read_comm_chunk(chunk) self._comm_chunk_read = 1 elif chunk.chunkname == 'SSND': self._ssnd_chunk = chunk dummy = chunk.read(8) self._ssnd_seek_needed = 0 elif chunk.chunkname == 'FVER': self._version = _read_long(chunk) elif chunk.chunkname == 'MARK': self._readmark(chunk) elif chunk.chunkname in _skiplist: pass else: raise Error, 'unrecognized chunk type '+chunk.chunkname if formlength > 0: chunk.skip() if not self._comm_chunk_read or not self._ssnd_chunk: raise Error, 'COMM chunk and/or SSND chunk missing' if self._aifc and self._decomp: params = [CL.ORIGINAL_FORMAT, 0, \ CL.BITS_PER_COMPONENT, 0, \ CL.FRAME_RATE, self._framerate] if self._nchannels == AL.MONO: params[1] = CL.MONO else: params[1] = CL.STEREO_INTERLEAVED if self._sampwidth == AL.SAMPLE_8: params[3] = 8 elif self._sampwidth == AL.SAMPLE_16: params[3] = 16 else: params[3] = 24 self._decomp.SetParams(params) return self |
|
level = 0 if "absolute_import" in self.futures else -1 | level = 0 if self.graph.checkFlag(CO_FUTURE_ABSIMPORT) else -1 | def visitImport(self, node): self.set_lineno(node) level = 0 if "absolute_import" in self.futures else -1 for name, alias in node.names: if VERSION > 1: self.emit('LOAD_CONST', level) self.emit('LOAD_CONST', None) self.emit('IMPORT_NAME', name) mod = name.split(".")[0] if alias: self._resolveDots(name) self.storeName(alias) else: self.storeName(mod) |
if level == 0 and "absolute_import" not in self.futures: | if level == 0 and not self.graph.checkFlag(CO_FUTURE_ABSIMPORT): | def visitFrom(self, node): self.set_lineno(node) level = node.level if level == 0 and "absolute_import" not in self.futures: level = -1 fromlist = map(lambda (name, alias): name, node.names) if VERSION > 1: self.emit('LOAD_CONST', level) self.emit('LOAD_CONST', tuple(fromlist)) self.emit('IMPORT_NAME', node.modname) for name, alias in node.names: if VERSION > 1: if name == '*': self.namespace = 0 self.emit('IMPORT_STAR') # There can only be one name w/ from ... import * assert len(node.names) == 1 return else: self.emit('IMPORT_FROM', name) self._resolveDots(name) self.storeName(alias or name) else: self.emit('IMPORT_FROM', name) self.emit('POP_TOP') |
if self.addr == types.TupleType: | if type(self.addr) == types.TupleType: | def __repr__ (self): try: status = [] if self.accepting and self.addr: status.append ('listening') elif self.connected: status.append ('connected') if self.addr: if self.addr == types.TupleType: status.append ('%s:%d' % self.addr) else: status.append (self.addr) return '<%s %s at %x>' % (self.__class__.__name__, ' '.join (status), id (self)) except: pass |
import httplib tries = 0 while tries<5: connection = httplib.HTTP(self.host) connection.putrequest("GET", self.path) connection.putheader("Host", self.host) connection.endheaders() status, text, mime = connection.getreply() if status in [301,302] and mime: tries = tries + 1 newurl = mime.get("Location", mime.get("Uri", "")) newurl = urlparse.urljoin(self.url, newurl) self.set_url(newurl) else: break if status==401 or status==403: | opener = URLopener() f = opener.open(self.url) lines = f.readlines() self.errcode = opener.errcode if self.errcode == 401 or self.errcode == 403: | def read(self): import httplib tries = 0 while tries<5: connection = httplib.HTTP(self.host) connection.putrequest("GET", self.path) connection.putheader("Host", self.host) connection.endheaders() status, text, mime = connection.getreply() if status in [301,302] and mime: tries = tries + 1 newurl = mime.get("Location", mime.get("Uri", "")) newurl = urlparse.urljoin(self.url, newurl) self.set_url(newurl) else: break if status==401 or status==403: self.disallow_all = 1 elif status>=400: self.allow_all = 1 else: # status < 400 self.parse(connection.getfile().readlines()) |
elif status>=400: | _debug("disallow all") elif self.errcode >= 400: | def read(self): import httplib tries = 0 while tries<5: connection = httplib.HTTP(self.host) connection.putrequest("GET", self.path) connection.putheader("Host", self.host) connection.endheaders() status, text, mime = connection.getreply() if status in [301,302] and mime: tries = tries + 1 newurl = mime.get("Location", mime.get("Uri", "")) newurl = urlparse.urljoin(self.url, newurl) self.set_url(newurl) else: break if status==401 or status==403: self.disallow_all = 1 elif status>=400: self.allow_all = 1 else: # status < 400 self.parse(connection.getfile().readlines()) |
else: self.parse(connection.getfile().readlines()) | _debug("allow all") elif self.errcode == 200 and lines: _debug("parse lines") self.parse(lines) | def read(self): import httplib tries = 0 while tries<5: connection = httplib.HTTP(self.host) connection.putrequest("GET", self.path) connection.putheader("Host", self.host) connection.endheaders() status, text, mime = connection.getreply() if status in [301,302] and mime: tries = tries + 1 newurl = mime.get("Location", mime.get("Uri", "")) newurl = urlparse.urljoin(self.url, newurl) self.set_url(newurl) else: break if status==401 or status==403: self.disallow_all = 1 elif status>=400: self.allow_all = 1 else: # status < 400 self.parse(connection.getfile().readlines()) |
_debug("Checking robot.txt allowance for\n%s\n%s" % (useragent, url)) | _debug("Checking robot.txt allowance for:\n user agent: %s\n url: %s" % (useragent, url)) | def can_fetch(self, useragent, url): """using the parsed robots.txt decide if useragent can fetch url""" _debug("Checking robot.txt allowance for\n%s\n%s" % (useragent, url)) if self.disallow_all: return 0 if self.allow_all: return 1 # search for given user agent matches # the first match counts useragent = useragent.lower() url = urllib.quote(urlparse.urlparse(url)[2]) for entry in self.entries: if entry.applies_to(useragent): return entry.allowance(url) # agent not found ==> access granted return 1 |
useragent = useragent.lower() url = urllib.quote(urlparse.urlparse(url)[2]) | url = urllib.quote(urlparse.urlparse(url)[2]) or "/" | def can_fetch(self, useragent, url): """using the parsed robots.txt decide if useragent can fetch url""" _debug("Checking robot.txt allowance for\n%s\n%s" % (useragent, url)) if self.disallow_all: return 0 if self.allow_all: return 1 # search for given user agent matches # the first match counts useragent = useragent.lower() url = urllib.quote(urlparse.urlparse(url)[2]) for entry in self.entries: if entry.applies_to(useragent): return entry.allowance(url) # agent not found ==> access granted return 1 |
"check if this entry applies to the specified agent" | useragent = useragent.split("/")[0].lower() | def applies_to(self, useragent): "check if this entry applies to the specified agent" for agent in self.useragents: if agent=="*": return 1 if re.match(agent, useragent): return 1 return 0 |
if agent=="*": | if agent=='*': | def applies_to(self, useragent): "check if this entry applies to the specified agent" for agent in self.useragents: if agent=="*": return 1 if re.match(agent, useragent): return 1 return 0 |
if re.match(agent, useragent): | agent = agent.lower() if re.search(re.escape(useragent), agent): | def applies_to(self, useragent): "check if this entry applies to the specified agent" for agent in self.useragents: if agent=="*": return 1 if re.match(agent, useragent): return 1 return 0 |
if len(sys.argv) <= 1: rp.set_url('http://www.musi-cal.com/robots.txt') rp.read() else: rp.parse(open(sys.argv[1]).readlines()) print rp.can_fetch('*', 'http://www.musi-cal.com/') print rp.can_fetch('Musi-Cal-Robot/1.0', | rp.set_url('http://www.musi-cal.com/robots.txt') rp.read() _check(rp.can_fetch('*', 'http://www.musi-cal.com/'), 1) _check(rp.can_fetch('', 'http://www.musi-cal.com/'), 0) _check(rp.can_fetch('CherryPickerSE', | def _test(): global debug import sys rp = RobotFileParser() debug = 1 if len(sys.argv) <= 1: rp.set_url('http://www.musi-cal.com/robots.txt') rp.read() else: rp.parse(open(sys.argv[1]).readlines()) print rp.can_fetch('*', 'http://www.musi-cal.com/') print rp.can_fetch('Musi-Cal-Robot/1.0', 'http://www.musi-cal.com/cgi-bin/event-search' '?city=San+Francisco') |
'?city=San+Francisco') | '?city=San+Francisco'), 0) _check(rp.can_fetch('CherryPickerSE/1.0', 'http://www.musi-cal.com/cgi-bin/event-search' '?city=San+Francisco'), 0) _check(rp.can_fetch('CherryPickerSE/1.5', 'http://www.musi-cal.com/cgi-bin/event-search' '?city=San+Francisco'), 0) _check(rp.can_fetch('ExtractorPro', 'http://www.musi-cal.com/blubba'), 0) _check(rp.can_fetch('extractorpro', 'http://www.musi-cal.com/blubba'), 0) _check(rp.can_fetch('toolpak/1.1', 'http://www.musi-cal.com/blubba'), 0) _check(rp.can_fetch('spam', 'http://www.musi-cal.com/search'), 0) _check(rp.can_fetch('spam', 'http://www.musi-cal.com/Musician/me'), 1) _check(rp.can_fetch('spam', 'http://www.musi-cal.com/'), 1) _check(rp.can_fetch('spam', 'http://www.musi-cal.com/'), 1) rp.set_url('http://www.lycos.com/robots.txt') rp.read() _check(rp.can_fetch('Mozilla', 'http://www.lycos.com/search'), 1) | def _test(): global debug import sys rp = RobotFileParser() debug = 1 if len(sys.argv) <= 1: rp.set_url('http://www.musi-cal.com/robots.txt') rp.read() else: rp.parse(open(sys.argv[1]).readlines()) print rp.can_fetch('*', 'http://www.musi-cal.com/') print rp.can_fetch('Musi-Cal-Robot/1.0', 'http://www.musi-cal.com/cgi-bin/event-search' '?city=San+Francisco') |
def grid_slaves(self, master, row=None, column=None): args = (master,) | def grid_slaves(self, row=None, column=None): args = () | def grid_slaves(self, master, row=None, column=None): args = (master,) if row: args = args + ('-row', row) if column: args = args + ('-column', column) return map(self._nametowidget, self.tk.splitlist( apply(self.tk.call, ('grid', 'slaves', self._w) + args))) |
return self.tk.call(self._w, 'entrycget', '-' + option) | return self.tk.call(self._w, 'entrycget', index, '-' + option) | def entrycget(self, index, option): return self.tk.call(self._w, 'entrycget', '-' + option) |
if _active: | if _active is not None: | def __del__(self): # In case the child hasn't been waited on, check if it's done. self.poll(_deadstate=sys.maxint) if self.sts < 0: if _active: # Child is still running, keep us alive until we can wait on it. _active.append(self) |
if size < 0: size = sys.maxint bufs = [] readsize = min(100, size) | if size < 0: size = sys.maxint readsize = self.min_readsize else: readsize = size bufs = "" | def readline(self, size=-1): if size < 0: size = sys.maxint bufs = [] readsize = min(100, size) # Read from the file in small chunks while True: if size == 0: return "".join(bufs) # Return resulting line |
if size == 0: return "".join(bufs) | if size == 0: return bufs | def readline(self, size=-1): if size < 0: size = sys.maxint bufs = [] readsize = min(100, size) # Read from the file in small chunks while True: if size == 0: return "".join(bufs) # Return resulting line |
if size is not None: if i==-1 and len(c) > size: i=size-1 elif size <= i: i = size -1 | def readline(self, size=-1): if size < 0: size = sys.maxint bufs = [] readsize = min(100, size) # Read from the file in small chunks while True: if size == 0: return "".join(bufs) # Return resulting line |
|
bufs.append(c[:i+1]) self._unread(c[i+1:]) return ''.join(bufs) bufs.append(c) size = size - len(c) readsize = min(size, readsize * 2) | if size <= i: i = size - 1 self._unread(c[i+1:]) return bufs + c[:i+1] else: if len(c) > size: i = size - 1 bufs = bufs + c size = size - len(c) readsize = min(size, int(readsize * 1.1)) if readsize > self.min_readsize: self.min_readsize = readsize | def readline(self, size=-1): if size < 0: size = sys.maxint bufs = [] readsize = min(100, size) # Read from the file in small chunks while True: if size == 0: return "".join(bufs) # Return resulting line |
def check(a, b): if a != b: print '*** check failed: %s != %s' % (repr(a), repr(b)) else: print '%s == %s: OK' % (a, b) | import test.test_support, unittest | def check(a, b): if a != b: print '*** check failed: %s != %s' % (repr(a), repr(b)) else: print '%s == %s: OK' % (a, b) |
check(unicode('abc', codecname), u'abc') check(unicode('xdef', codecname), u'abcdef') check(unicode('defx', codecname), u'defabc') check(unicode('dxf', codecname), u'dabcf') check(unicode('dxfx', codecname), u'dabcfabc') | class CharmapCodecTest(unittest.TestCase): def test_constructorx(self): self.assertEquals(unicode('abc', codecname), u'abc') self.assertEquals(unicode('xdef', codecname), u'abcdef') self.assertEquals(unicode('defx', codecname), u'defabc') self.assertEquals(unicode('dxf', codecname), u'dabcf') self.assertEquals(unicode('dxfx', codecname), u'dabcfabc') | def check(a, b): if a != b: print '*** check failed: %s != %s' % (repr(a), repr(b)) else: print '%s == %s: OK' % (a, b) |
check(u'abc'.encode(codecname), 'abc') check(u'xdef'.encode(codecname), 'abcdef') check(u'defx'.encode(codecname), 'defabc') check(u'dxf'.encode(codecname), 'dabcf') check(u'dxfx'.encode(codecname), 'dabcfabc') | def test_encodex(self): self.assertEquals(u'abc'.encode(codecname), 'abc') self.assertEquals(u'xdef'.encode(codecname), 'abcdef') self.assertEquals(u'defx'.encode(codecname), 'defabc') self.assertEquals(u'dxf'.encode(codecname), 'dabcf') self.assertEquals(u'dxfx'.encode(codecname), 'dabcfabc') | def check(a, b): if a != b: print '*** check failed: %s != %s' % (repr(a), repr(b)) else: print '%s == %s: OK' % (a, b) |
check(unicode('ydef', codecname), u'def') check(unicode('defy', codecname), u'def') check(unicode('dyf', codecname), u'df') check(unicode('dyfy', codecname), u'df') | def test_constructory(self): self.assertEquals(unicode('ydef', codecname), u'def') self.assertEquals(unicode('defy', codecname), u'def') self.assertEquals(unicode('dyf', codecname), u'df') self.assertEquals(unicode('dyfy', codecname), u'df') | def check(a, b): if a != b: print '*** check failed: %s != %s' % (repr(a), repr(b)) else: print '%s == %s: OK' % (a, b) |
try: unicode('abc\001', codecname) except UnicodeError: print '\\001 maps to undefined: OK' else: print '*** check failed: \\001 does not map to undefined' | def test_maptoundefined(self): self.assertRaises(UnicodeError, unicode, 'abc\001', codecname) def test_main(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(CharmapCodecTest)) test.test_support.run_suite(suite) if __name__ == "__main__": test_main() | def check(a, b): if a != b: print '*** check failed: %s != %s' % (repr(a), repr(b)) else: print '%s == %s: OK' % (a, b) |
def testWriteXML(): pass | def testWriteXML(): str = '<a b="c"/>' dom = parseString(str) domstr = dom.toxml() dom.unlink() confirm(str == domstr) | def testWriteXML(): pass |
"Strange! filename %s has two different module names" % \ (key, modules[key], other_module[key]) | "Strange! filename %s has two different module " \ "names: %s and %s" % \ (key, modules[key], other_modules[key]) | def update(self, other): """Merge in the data from another CoverageResults""" counts = self.counts other_counts = other.counts modules = self.modules other_modules = other.modules |
save_counts = 0): | save_counts = 0, summary = 0, coverdir = None): | def create_results_log(results, dirname = ".", show_missing = 1, save_counts = 0): import re # turn the counts data ("(filename, lineno) = count") into something # accessible on a per-file basis per_file = {} for filename, lineno in results.counts.keys(): lines_hit = per_file[filename] = per_file.get(filename, {}) lines_hit[lineno] = results.counts[(filename, lineno)] # try and merge existing counts and modules file from dirname try: counts = marshal.load(open(os.path.join(dirname, "counts"))) modules = marshal.load(open(os.path.join(dirname, "modules"))) results.update(results.__class__(counts, modules)) except IOError: pass # there are many places where this is insufficient, like a blank # line embedded in a multiline string. blank = re.compile(r'^\s*(#.*)?$') # generate file paths for the coverage files we are going to write... fnlist = [] tfdir = tempfile.gettempdir() for key in per_file.keys(): filename = key # skip some "files" we don't care about... if filename == "<string>": continue # are these caused by code compiled using exec or something? if filename.startswith(tfdir): continue # XXX this is almost certainly not portable!!! fndir = os.path.dirname(filename) if filename[:1] == os.sep: coverpath = os.path.join(dirname, "."+fndir) else: coverpath = os.path.join(dirname, fndir) if filename.endswith(".pyc") or filename.endswith(".pyo"): filename = filename[:-1] # Get the original lines from the .py file try: lines = open(filename, 'r').readlines() except IOError, err: sys.stderr.write("%s: Could not open %s for reading " \ "because: %s - skipping\n" % \ ("trace", `filename`, err.strerror)) continue modulename = os.path.split(results.modules[key])[1] # build list file name by appending a ".cover" to the module name # and sticking it into the specified directory listfilename = os.path.join(coverpath, modulename + ".cover") #sys.stderr.write("modulename: %(modulename)s\n" # "filename: %(filename)s\n" # "coverpath: %(coverpath)s\n" # "listfilename: %(listfilename)s\n" # "dirname: %(dirname)s\n" # % locals()) try: outfile = open(listfilename, 'w') except IOError, err: sys.stderr.write( '%s: Could not open %s for writing because: %s" \ "- skipping\n' % ("trace", `listfilename`, err.strerror)) continue # If desired, get a list of the line numbers which represent # executable content (returned as a dict for better lookup speed) if show_missing: executable_linenos = find_executable_linenos(filename) else: executable_linenos = {} lines_hit = per_file[key] for i in range(len(lines)): line = lines[i] # do the blank/comment match to try to mark more lines # (help the reader find stuff that hasn't been covered) if lines_hit.has_key(i+1): # count precedes the lines that we captured outfile.write('%5d: ' % lines_hit[i+1]) elif blank.match(line): # blank lines and comments are preceded by dots outfile.write(' . 
') else: # lines preceded by no marks weren't hit # Highlight them if so indicated, unless the line contains # '#pragma: NO COVER' (it is possible to embed this into # the text as a non-comment; no easy fix) if executable_linenos.has_key(i+1) and \ string.find(lines[i], string.join(['#pragma', 'NO COVER'])) == -1: outfile.write('>>>>>> ') else: outfile.write(' '*7) outfile.write(string.expandtabs(lines[i], 8)) outfile.close() if save_counts: # try and store counts and module info into dirname try: marshal.dump(results.counts, open(os.path.join(dirname, "counts"), "w")) marshal.dump(results.modules, open(os.path.join(dirname, "modules"), "w")) except IOError, err: sys.stderr.write("cannot save counts/modules " \ "files because %s" % err.strerror) |
fndir = os.path.dirname(filename) if filename[:1] == os.sep: coverpath = os.path.join(dirname, "."+fndir) else: coverpath = os.path.join(dirname, fndir) | modulename = os.path.split(results.modules[key])[1] | def create_results_log(results, dirname = ".", show_missing = 1, save_counts = 0): import re # turn the counts data ("(filename, lineno) = count") into something # accessible on a per-file basis per_file = {} for filename, lineno in results.counts.keys(): lines_hit = per_file[filename] = per_file.get(filename, {}) lines_hit[lineno] = results.counts[(filename, lineno)] # try and merge existing counts and modules file from dirname try: counts = marshal.load(open(os.path.join(dirname, "counts"))) modules = marshal.load(open(os.path.join(dirname, "modules"))) results.update(results.__class__(counts, modules)) except IOError: pass # there are many places where this is insufficient, like a blank # line embedded in a multiline string. blank = re.compile(r'^\s*(#.*)?$') # generate file paths for the coverage files we are going to write... fnlist = [] tfdir = tempfile.gettempdir() for key in per_file.keys(): filename = key # skip some "files" we don't care about... if filename == "<string>": continue # are these caused by code compiled using exec or something? if filename.startswith(tfdir): continue # XXX this is almost certainly not portable!!! fndir = os.path.dirname(filename) if filename[:1] == os.sep: coverpath = os.path.join(dirname, "."+fndir) else: coverpath = os.path.join(dirname, fndir) if filename.endswith(".pyc") or filename.endswith(".pyo"): filename = filename[:-1] # Get the original lines from the .py file try: lines = open(filename, 'r').readlines() except IOError, err: sys.stderr.write("%s: Could not open %s for reading " \ "because: %s - skipping\n" % \ ("trace", `filename`, err.strerror)) continue modulename = os.path.split(results.modules[key])[1] # build list file name by appending a ".cover" to the module name # and sticking it into the specified directory listfilename = os.path.join(coverpath, modulename + ".cover") #sys.stderr.write("modulename: %(modulename)s\n" # "filename: %(filename)s\n" # "coverpath: %(coverpath)s\n" # "listfilename: %(listfilename)s\n" # "dirname: %(dirname)s\n" # % locals()) try: outfile = open(listfilename, 'w') except IOError, err: sys.stderr.write( '%s: Could not open %s for writing because: %s" \ "- skipping\n' % ("trace", `listfilename`, err.strerror)) continue # If desired, get a list of the line numbers which represent # executable content (returned as a dict for better lookup speed) if show_missing: executable_linenos = find_executable_linenos(filename) else: executable_linenos = {} lines_hit = per_file[key] for i in range(len(lines)): line = lines[i] # do the blank/comment match to try to mark more lines # (help the reader find stuff that hasn't been covered) if lines_hit.has_key(i+1): # count precedes the lines that we captured outfile.write('%5d: ' % lines_hit[i+1]) elif blank.match(line): # blank lines and comments are preceded by dots outfile.write(' . 
') else: # lines preceded by no marks weren't hit # Highlight them if so indicated, unless the line contains # '#pragma: NO COVER' (it is possible to embed this into # the text as a non-comment; no easy fix) if executable_linenos.has_key(i+1) and \ string.find(lines[i], string.join(['#pragma', 'NO COVER'])) == -1: outfile.write('>>>>>> ') else: outfile.write(' '*7) outfile.write(string.expandtabs(lines[i], 8)) outfile.close() if save_counts: # try and store counts and module info into dirname try: marshal.dump(results.counts, open(os.path.join(dirname, "counts"), "w")) marshal.dump(results.modules, open(os.path.join(dirname, "modules"), "w")) except IOError, err: sys.stderr.write("cannot save counts/modules " \ "files because %s" % err.strerror) |
sys.stderr.write("%s: Could not open %s for reading " \ "because: %s - skipping\n" % \ ("trace", `filename`, err.strerror)) continue modulename = os.path.split(results.modules[key])[1] listfilename = os.path.join(coverpath, modulename + ".cover") | print >> sys.stderr, "trace: Could not open %s for reading " \ "because: %s - skipping" % (`filename`, err.strerror) continue | def create_results_log(results, dirname = ".", show_missing = 1, save_counts = 0): import re # turn the counts data ("(filename, lineno) = count") into something # accessible on a per-file basis per_file = {} for filename, lineno in results.counts.keys(): lines_hit = per_file[filename] = per_file.get(filename, {}) lines_hit[lineno] = results.counts[(filename, lineno)] # try and merge existing counts and modules file from dirname try: counts = marshal.load(open(os.path.join(dirname, "counts"))) modules = marshal.load(open(os.path.join(dirname, "modules"))) results.update(results.__class__(counts, modules)) except IOError: pass # there are many places where this is insufficient, like a blank # line embedded in a multiline string. blank = re.compile(r'^\s*(#.*)?$') # generate file paths for the coverage files we are going to write... fnlist = [] tfdir = tempfile.gettempdir() for key in per_file.keys(): filename = key # skip some "files" we don't care about... if filename == "<string>": continue # are these caused by code compiled using exec or something? if filename.startswith(tfdir): continue # XXX this is almost certainly not portable!!! fndir = os.path.dirname(filename) if filename[:1] == os.sep: coverpath = os.path.join(dirname, "."+fndir) else: coverpath = os.path.join(dirname, fndir) if filename.endswith(".pyc") or filename.endswith(".pyo"): filename = filename[:-1] # Get the original lines from the .py file try: lines = open(filename, 'r').readlines() except IOError, err: sys.stderr.write("%s: Could not open %s for reading " \ "because: %s - skipping\n" % \ ("trace", `filename`, err.strerror)) continue modulename = os.path.split(results.modules[key])[1] # build list file name by appending a ".cover" to the module name # and sticking it into the specified directory listfilename = os.path.join(coverpath, modulename + ".cover") #sys.stderr.write("modulename: %(modulename)s\n" # "filename: %(filename)s\n" # "coverpath: %(coverpath)s\n" # "listfilename: %(listfilename)s\n" # "dirname: %(dirname)s\n" # % locals()) try: outfile = open(listfilename, 'w') except IOError, err: sys.stderr.write( '%s: Could not open %s for writing because: %s" \ "- skipping\n' % ("trace", `listfilename`, err.strerror)) continue # If desired, get a list of the line numbers which represent # executable content (returned as a dict for better lookup speed) if show_missing: executable_linenos = find_executable_linenos(filename) else: executable_linenos = {} lines_hit = per_file[key] for i in range(len(lines)): line = lines[i] # do the blank/comment match to try to mark more lines # (help the reader find stuff that hasn't been covered) if lines_hit.has_key(i+1): # count precedes the lines that we captured outfile.write('%5d: ' % lines_hit[i+1]) elif blank.match(line): # blank lines and comments are preceded by dots outfile.write(' . 
') else: # lines preceded by no marks weren't hit # Highlight them if so indicated, unless the line contains # '#pragma: NO COVER' (it is possible to embed this into # the text as a non-comment; no easy fix) if executable_linenos.has_key(i+1) and \ string.find(lines[i], string.join(['#pragma', 'NO COVER'])) == -1: outfile.write('>>>>>> ') else: outfile.write(' '*7) outfile.write(string.expandtabs(lines[i], 8)) outfile.close() if save_counts: # try and store counts and module info into dirname try: marshal.dump(results.counts, open(os.path.join(dirname, "counts"), "w")) marshal.dump(results.modules, open(os.path.join(dirname, "modules"), "w")) except IOError, err: sys.stderr.write("cannot save counts/modules " \ "files because %s" % err.strerror) |
modulename = frame.f_globals["__name__"] | try: modulename = frame.f_globals["__name__"] except KeyError: modulename = None | def trace(self, frame, why, arg): if why == 'line': # something is fishy about getting the file name filename = frame.f_globals.get("__file__", None) if filename is None: filename = frame.f_code.co_filename modulename = frame.f_globals["__name__"] |
modulename = frame.f_globals["__name__"] | try: modulename = frame.f_globals["__name__"] except KeyError: modulename = None | def trace(self, frame, why, arg): if why == 'line': filename = frame.f_code.co_filename modulename = frame.f_globals["__name__"] |
sys.stderr.write("%s: %s\n" % (sys.argv[0], msg)) | print >> sys.stderr, "%s: %s" % (sys.argv[0], msg) | def _err_exit(msg): sys.stderr.write("%s: %s\n" % (sys.argv[0], msg)) sys.exit(1) |