rem (string, lengths 0 to 322k) | add (string, lengths 0 to 2.05M) | context (string, lengths 8 to 228k) |
---|---|---|
host, port = splitport(host) if port is None: port = ftplib.FTP_PORT user, host = splituser(host) if user: user, passwd = splitpasswd(user) else: passwd = None host = unquote(host) user = unquote(user or '') passwd = unquote(passwd or '') | def ftp_open(self, req): host = req.get_host() if not host: raise IOError, ('ftp error', 'no host given') # XXX handle custom username & password try: host = socket.gethostbyname(host) except socket.error, msg: raise URLError(msg) host, port = splitport(host) if port is None: port = ftplib.FTP_PORT path, attrs = splitattr(req.get_selector()) dirs = path.split('/') dirs = map(unquote, dirs) dirs, file = dirs[:-1], dirs[-1] if dirs and not dirs[0]: dirs = dirs[1:] user = passwd = '' # XXX try: fw = self.connect_ftp(user, passwd, host, port, dirs) type = file and 'I' or 'D' for attr in attrs: attr, value = splitattr(attr) if attr.lower() == 'type' and \ value in ('a', 'A', 'i', 'I', 'd', 'D'): type = value.upper() fp, retrlen = fw.retrfile(file, type) headers = "" mtype = mimetypes.guess_type(req.get_full_url())[0] if mtype: headers += "Content-type: %s\n" % mtype if retrlen is not None and retrlen >= 0: headers += "Content-length: %d\n" % retrlen sf = StringIO(headers) headers = mimetools.Message(sf) return addinfourl(fp, headers, req.get_full_url()) except ftplib.all_errors, msg: raise IOError, ('ftp error', msg), sys.exc_info()[2] |
host, port = splitport(host) if port is None: port = ftplib.FTP_PORT | def ftp_open(self, req): host = req.get_host() if not host: raise IOError, ('ftp error', 'no host given') # XXX handle custom username & password try: host = socket.gethostbyname(host) except socket.error, msg: raise URLError(msg) host, port = splitport(host) if port is None: port = ftplib.FTP_PORT path, attrs = splitattr(req.get_selector()) dirs = path.split('/') dirs = map(unquote, dirs) dirs, file = dirs[:-1], dirs[-1] if dirs and not dirs[0]: dirs = dirs[1:] user = passwd = '' # XXX try: fw = self.connect_ftp(user, passwd, host, port, dirs) type = file and 'I' or 'D' for attr in attrs: attr, value = splitattr(attr) if attr.lower() == 'type' and \ value in ('a', 'A', 'i', 'I', 'd', 'D'): type = value.upper() fp, retrlen = fw.retrfile(file, type) headers = "" mtype = mimetypes.guess_type(req.get_full_url())[0] if mtype: headers += "Content-type: %s\n" % mtype if retrlen is not None and retrlen >= 0: headers += "Content-length: %d\n" % retrlen sf = StringIO(headers) headers = mimetools.Message(sf) return addinfourl(fp, headers, req.get_full_url()) except ftplib.all_errors, msg: raise IOError, ('ftp error', msg), sys.exc_info()[2] |
user = passwd = '' | def ftp_open(self, req): host = req.get_host() if not host: raise IOError, ('ftp error', 'no host given') # XXX handle custom username & password try: host = socket.gethostbyname(host) except socket.error, msg: raise URLError(msg) host, port = splitport(host) if port is None: port = ftplib.FTP_PORT path, attrs = splitattr(req.get_selector()) dirs = path.split('/') dirs = map(unquote, dirs) dirs, file = dirs[:-1], dirs[-1] if dirs and not dirs[0]: dirs = dirs[1:] user = passwd = '' # XXX try: fw = self.connect_ftp(user, passwd, host, port, dirs) type = file and 'I' or 'D' for attr in attrs: attr, value = splitattr(attr) if attr.lower() == 'type' and \ value in ('a', 'A', 'i', 'I', 'd', 'D'): type = value.upper() fp, retrlen = fw.retrfile(file, type) headers = "" mtype = mimetypes.guess_type(req.get_full_url())[0] if mtype: headers += "Content-type: %s\n" % mtype if retrlen is not None and retrlen >= 0: headers += "Content-length: %d\n" % retrlen sf = StringIO(headers) headers = mimetools.Message(sf) return addinfourl(fp, headers, req.get_full_url()) except ftplib.all_errors, msg: raise IOError, ('ftp error', msg), sys.exc_info()[2] |
print " setting options:" for (option, (source, value)) in options.items(): print " %s = %s (from %s)" % (option, value, source) if not hasattr(cmd_obj, option): raise DistutilsOptionError, \ ("%s: command '%s' has no such option '%s'") % \ (source, command, option) setattr(cmd_obj, option, value) | self._set_command_options(cmd_obj, options) | def get_command_obj (self, command, create=1): """Return the command object for 'command'. Normally this object is cached on a previous call to 'get_command_obj()'; if no comand object for 'command' is in the cache, then we either create and return it (if 'create' is true) or return None. """ cmd_obj = self.command_obj.get(command) if not cmd_obj and create: print "Distribution.get_command_obj(): " \ "creating '%s' command object" % command |
return EOFError, "Reached EOF" | raise EOFError, "Reached EOF" | def _read(self, size=1024): if self.fileobj is None: raise EOFError, "Reached EOF" |
install = self.reinitialize_command('install') | install = self.reinitialize_command('install', reinit_subcommands=1) | def run (self): if (sys.platform != "win32" and (self.distribution.has_ext_modules() or self.distribution.has_c_libraries())): raise DistutilsPlatformError \ ("distribution contains extensions and/or C libraries; " "must be compiled on a Windows 32 platform") |
prefix = PREFIX | prefix = None | def main(): # overridable context prefix = PREFIX # settable with -p option exec_prefix = None # settable with -P option extensions = [] path = sys.path # output files frozen_c = 'frozen.c' config_c = 'config.c' target = 'a.out' # normally derived from script name makefile = 'Makefile' # parse command line try: opts, args = getopt.getopt(sys.argv[1:], 'e:p:P:') except getopt.error, msg: usage('getopt error: ' + str(msg)) # proces option arguments for o, a in opts: if o == '-e': extensions.append(a) if o == '-p': prefix = a if o == '-P': exec_prefix = a # default exec_prefix if exec_prefix is None: exec_prefix = EXEC_PREFIX if exec_prefix is None: exec_prefix = prefix # locations derived from options binlib = os.path.join(exec_prefix, 'lib/python1.4/config') incldir = os.path.join(prefix, 'include/python1.4') config_c_in = os.path.join(binlib, 'config.c.in') frozenmain_c = os.path.join(binlib, 'frozenmain.c') getpath_c = os.path.join(binlib, 'getpath.c') supp_sources = [frozenmain_c, getpath_c] makefile_in = os.path.join(binlib, 'Makefile') defines = ['-DPYTHONPATH=\\"$(PYTHONPATH)\\"'] includes = ['-I' + incldir, '-I' + binlib] # sanity check of directories and files for dir in [prefix, exec_prefix, binlib, incldir] + extensions: if not os.path.exists(dir): usage('needed directory %s not found' % dir) if not os.path.isdir(dir): usage('%s: not a directory' % dir) for file in [config_c_in, makefile_in] + supp_sources: if not os.path.exists(file): usage('needed file %s not found' % file) if not os.path.isfile(file): usage('%s: not a plain file' % file) for dir in extensions: setup = os.path.join(dir, 'Setup') if not os.path.exists(setup): usage('needed file %s not found' % setup) if not os.path.isfile(setup): usage('%s: not a plain file' % setup) # check that enough arguments are passed if not args: usage('at least one filename argument required') # check that the script name ends in ".py" if args[0][-3:] != ".py": usage('the script name must have a .py suffix') # check that file arguments exist for arg in args: if not os.path.exists(arg): usage('argument %s not found' % arg) if not os.path.isfile(arg): usage('%s: not a plain file' % arg) # process non-option arguments scriptfile = args[0] modules = args[1:] # derive target name from script name base = os.path.basename(scriptfile) base, ext = os.path.splitext(base) if base: if base != scriptfile: target = base else: target = base + '.bin' # Actual work starts here... 
dict = findmodules.findmodules(scriptfile, modules, path) names = dict.keys() names.sort() print "Modules being frozen:" for name in names: print '\t', name backup = frozen_c + '~' try: os.rename(frozen_c, backup) except os.error: backup = None outfp = open(frozen_c, 'w') try: makefreeze.makefreeze(outfp, dict) finally: outfp.close() if backup: if cmp.cmp(backup, frozen_c): sys.stderr.write('%s not changed, not written\n' % frozen_c) os.rename(backup, frozen_c) builtins = [] unknown = [] mods = dict.keys() mods.sort() for mod in mods: if dict[mod] == '<builtin>': builtins.append(mod) elif dict[mod] == '<unknown>': unknown.append(mod) addfiles = [] if unknown: addfiles, addmods = \ checkextensions.checkextensions(unknown, extensions) for mod in addmods: unknown.remove(mod) builtins = builtins + addmods if unknown: sys.stderr.write('Warning: unknown modules remain: %s\n' % string.join(unknown)) builtins.sort() infp = open(config_c_in) backup = config_c + '~' try: os.rename(config_c, backup) except os.error: backup = None outfp = open(config_c, 'w') try: makeconfig.makeconfig(infp, outfp, builtins) finally: outfp.close() infp.close() if backup: if cmp.cmp(backup, config_c): sys.stderr.write('%s not changed, not written\n' % config_c) os.rename(backup, config_c) cflags = defines + includes + ['$(OPT)'] libs = [] for n in 'Modules', 'Python', 'Objects', 'Parser': n = 'lib%s.a' % n n = os.path.join(binlib, n) libs.append(n) makevars = parsesetup.getmakevars(makefile_in) somevars = {} for key in makevars.keys(): somevars[key] = makevars[key] somevars['CFLAGS'] = string.join(cflags) # override files = ['$(OPT)', '$(LDFLAGS)', config_c, frozen_c] + \ supp_sources + addfiles + libs + \ ['$(MODLIBS)', '$(LIBS)', '$(SYSLIBS)'] backup = makefile + '~' try: os.rename(makefile, backup) except os.error: backup = None outfp = open(makefile, 'w') try: makemakefile.makemakefile(outfp, somevars, files, target) finally: outfp.close() if backup: if not cmp.cmp(backup, makefile): print 'previous Makefile saved as', backup else: sys.stderr.write('%s not changed, not written\n' % makefile) os.rename(backup, makefile) # Done! print 'Now run make to build the target:', target |
opts, args = getopt.getopt(sys.argv[1:], 'e:p:P:') | opts, args = getopt.getopt(sys.argv[1:], 'e:o:p:P:') | def main(): # overridable context prefix = PREFIX # settable with -p option exec_prefix = None # settable with -P option extensions = [] path = sys.path # output files frozen_c = 'frozen.c' config_c = 'config.c' target = 'a.out' # normally derived from script name makefile = 'Makefile' # parse command line try: opts, args = getopt.getopt(sys.argv[1:], 'e:p:P:') except getopt.error, msg: usage('getopt error: ' + str(msg)) # proces option arguments for o, a in opts: if o == '-e': extensions.append(a) if o == '-p': prefix = a if o == '-P': exec_prefix = a # default exec_prefix if exec_prefix is None: exec_prefix = EXEC_PREFIX if exec_prefix is None: exec_prefix = prefix # locations derived from options binlib = os.path.join(exec_prefix, 'lib/python1.4/config') incldir = os.path.join(prefix, 'include/python1.4') config_c_in = os.path.join(binlib, 'config.c.in') frozenmain_c = os.path.join(binlib, 'frozenmain.c') getpath_c = os.path.join(binlib, 'getpath.c') supp_sources = [frozenmain_c, getpath_c] makefile_in = os.path.join(binlib, 'Makefile') defines = ['-DPYTHONPATH=\\"$(PYTHONPATH)\\"'] includes = ['-I' + incldir, '-I' + binlib] # sanity check of directories and files for dir in [prefix, exec_prefix, binlib, incldir] + extensions: if not os.path.exists(dir): usage('needed directory %s not found' % dir) if not os.path.isdir(dir): usage('%s: not a directory' % dir) for file in [config_c_in, makefile_in] + supp_sources: if not os.path.exists(file): usage('needed file %s not found' % file) if not os.path.isfile(file): usage('%s: not a plain file' % file) for dir in extensions: setup = os.path.join(dir, 'Setup') if not os.path.exists(setup): usage('needed file %s not found' % setup) if not os.path.isfile(setup): usage('%s: not a plain file' % setup) # check that enough arguments are passed if not args: usage('at least one filename argument required') # check that the script name ends in ".py" if args[0][-3:] != ".py": usage('the script name must have a .py suffix') # check that file arguments exist for arg in args: if not os.path.exists(arg): usage('argument %s not found' % arg) if not os.path.isfile(arg): usage('%s: not a plain file' % arg) # process non-option arguments scriptfile = args[0] modules = args[1:] # derive target name from script name base = os.path.basename(scriptfile) base, ext = os.path.splitext(base) if base: if base != scriptfile: target = base else: target = base + '.bin' # Actual work starts here... 
dict = findmodules.findmodules(scriptfile, modules, path) names = dict.keys() names.sort() print "Modules being frozen:" for name in names: print '\t', name backup = frozen_c + '~' try: os.rename(frozen_c, backup) except os.error: backup = None outfp = open(frozen_c, 'w') try: makefreeze.makefreeze(outfp, dict) finally: outfp.close() if backup: if cmp.cmp(backup, frozen_c): sys.stderr.write('%s not changed, not written\n' % frozen_c) os.rename(backup, frozen_c) builtins = [] unknown = [] mods = dict.keys() mods.sort() for mod in mods: if dict[mod] == '<builtin>': builtins.append(mod) elif dict[mod] == '<unknown>': unknown.append(mod) addfiles = [] if unknown: addfiles, addmods = \ checkextensions.checkextensions(unknown, extensions) for mod in addmods: unknown.remove(mod) builtins = builtins + addmods if unknown: sys.stderr.write('Warning: unknown modules remain: %s\n' % string.join(unknown)) builtins.sort() infp = open(config_c_in) backup = config_c + '~' try: os.rename(config_c, backup) except os.error: backup = None outfp = open(config_c, 'w') try: makeconfig.makeconfig(infp, outfp, builtins) finally: outfp.close() infp.close() if backup: if cmp.cmp(backup, config_c): sys.stderr.write('%s not changed, not written\n' % config_c) os.rename(backup, config_c) cflags = defines + includes + ['$(OPT)'] libs = [] for n in 'Modules', 'Python', 'Objects', 'Parser': n = 'lib%s.a' % n n = os.path.join(binlib, n) libs.append(n) makevars = parsesetup.getmakevars(makefile_in) somevars = {} for key in makevars.keys(): somevars[key] = makevars[key] somevars['CFLAGS'] = string.join(cflags) # override files = ['$(OPT)', '$(LDFLAGS)', config_c, frozen_c] + \ supp_sources + addfiles + libs + \ ['$(MODLIBS)', '$(LIBS)', '$(SYSLIBS)'] backup = makefile + '~' try: os.rename(makefile, backup) except os.error: backup = None outfp = open(makefile, 'w') try: makemakefile.makemakefile(outfp, somevars, files, target) finally: outfp.close() if backup: if not cmp.cmp(backup, makefile): print 'previous Makefile saved as', backup else: sys.stderr.write('%s not changed, not written\n' % makefile) os.rename(backup, makefile) # Done! print 'Now run make to build the target:', target |
if exec_prefix is None: exec_prefix = EXEC_PREFIX if exec_prefix is None: | if not exec_prefix: if prefix: | def main(): # overridable context prefix = PREFIX # settable with -p option exec_prefix = None # settable with -P option extensions = [] path = sys.path # output files frozen_c = 'frozen.c' config_c = 'config.c' target = 'a.out' # normally derived from script name makefile = 'Makefile' # parse command line try: opts, args = getopt.getopt(sys.argv[1:], 'e:p:P:') except getopt.error, msg: usage('getopt error: ' + str(msg)) # proces option arguments for o, a in opts: if o == '-e': extensions.append(a) if o == '-p': prefix = a if o == '-P': exec_prefix = a # default exec_prefix if exec_prefix is None: exec_prefix = EXEC_PREFIX if exec_prefix is None: exec_prefix = prefix # locations derived from options binlib = os.path.join(exec_prefix, 'lib/python1.4/config') incldir = os.path.join(prefix, 'include/python1.4') config_c_in = os.path.join(binlib, 'config.c.in') frozenmain_c = os.path.join(binlib, 'frozenmain.c') getpath_c = os.path.join(binlib, 'getpath.c') supp_sources = [frozenmain_c, getpath_c] makefile_in = os.path.join(binlib, 'Makefile') defines = ['-DPYTHONPATH=\\"$(PYTHONPATH)\\"'] includes = ['-I' + incldir, '-I' + binlib] # sanity check of directories and files for dir in [prefix, exec_prefix, binlib, incldir] + extensions: if not os.path.exists(dir): usage('needed directory %s not found' % dir) if not os.path.isdir(dir): usage('%s: not a directory' % dir) for file in [config_c_in, makefile_in] + supp_sources: if not os.path.exists(file): usage('needed file %s not found' % file) if not os.path.isfile(file): usage('%s: not a plain file' % file) for dir in extensions: setup = os.path.join(dir, 'Setup') if not os.path.exists(setup): usage('needed file %s not found' % setup) if not os.path.isfile(setup): usage('%s: not a plain file' % setup) # check that enough arguments are passed if not args: usage('at least one filename argument required') # check that the script name ends in ".py" if args[0][-3:] != ".py": usage('the script name must have a .py suffix') # check that file arguments exist for arg in args: if not os.path.exists(arg): usage('argument %s not found' % arg) if not os.path.isfile(arg): usage('%s: not a plain file' % arg) # process non-option arguments scriptfile = args[0] modules = args[1:] # derive target name from script name base = os.path.basename(scriptfile) base, ext = os.path.splitext(base) if base: if base != scriptfile: target = base else: target = base + '.bin' # Actual work starts here... 
dict = findmodules.findmodules(scriptfile, modules, path) names = dict.keys() names.sort() print "Modules being frozen:" for name in names: print '\t', name backup = frozen_c + '~' try: os.rename(frozen_c, backup) except os.error: backup = None outfp = open(frozen_c, 'w') try: makefreeze.makefreeze(outfp, dict) finally: outfp.close() if backup: if cmp.cmp(backup, frozen_c): sys.stderr.write('%s not changed, not written\n' % frozen_c) os.rename(backup, frozen_c) builtins = [] unknown = [] mods = dict.keys() mods.sort() for mod in mods: if dict[mod] == '<builtin>': builtins.append(mod) elif dict[mod] == '<unknown>': unknown.append(mod) addfiles = [] if unknown: addfiles, addmods = \ checkextensions.checkextensions(unknown, extensions) for mod in addmods: unknown.remove(mod) builtins = builtins + addmods if unknown: sys.stderr.write('Warning: unknown modules remain: %s\n' % string.join(unknown)) builtins.sort() infp = open(config_c_in) backup = config_c + '~' try: os.rename(config_c, backup) except os.error: backup = None outfp = open(config_c, 'w') try: makeconfig.makeconfig(infp, outfp, builtins) finally: outfp.close() infp.close() if backup: if cmp.cmp(backup, config_c): sys.stderr.write('%s not changed, not written\n' % config_c) os.rename(backup, config_c) cflags = defines + includes + ['$(OPT)'] libs = [] for n in 'Modules', 'Python', 'Objects', 'Parser': n = 'lib%s.a' % n n = os.path.join(binlib, n) libs.append(n) makevars = parsesetup.getmakevars(makefile_in) somevars = {} for key in makevars.keys(): somevars[key] = makevars[key] somevars['CFLAGS'] = string.join(cflags) # override files = ['$(OPT)', '$(LDFLAGS)', config_c, frozen_c] + \ supp_sources + addfiles + libs + \ ['$(MODLIBS)', '$(LIBS)', '$(SYSLIBS)'] backup = makefile + '~' try: os.rename(makefile, backup) except os.error: backup = None outfp = open(makefile, 'w') try: makemakefile.makemakefile(outfp, somevars, files, target) finally: outfp.close() if backup: if not cmp.cmp(backup, makefile): print 'previous Makefile saved as', backup else: sys.stderr.write('%s not changed, not written\n' % makefile) os.rename(backup, makefile) # Done! print 'Now run make to build the target:', target |
binlib = os.path.join(exec_prefix, 'lib/python1.4/config') incldir = os.path.join(prefix, 'include/python1.4') | version = sys.version[:3] binlib = os.path.join(exec_prefix, 'lib/python%s/config' % version) incldir = os.path.join(prefix, 'include/python%s' % version) | def main(): # overridable context prefix = PREFIX # settable with -p option exec_prefix = None # settable with -P option extensions = [] path = sys.path # output files frozen_c = 'frozen.c' config_c = 'config.c' target = 'a.out' # normally derived from script name makefile = 'Makefile' # parse command line try: opts, args = getopt.getopt(sys.argv[1:], 'e:p:P:') except getopt.error, msg: usage('getopt error: ' + str(msg)) # proces option arguments for o, a in opts: if o == '-e': extensions.append(a) if o == '-p': prefix = a if o == '-P': exec_prefix = a # default exec_prefix if exec_prefix is None: exec_prefix = EXEC_PREFIX if exec_prefix is None: exec_prefix = prefix # locations derived from options binlib = os.path.join(exec_prefix, 'lib/python1.4/config') incldir = os.path.join(prefix, 'include/python1.4') config_c_in = os.path.join(binlib, 'config.c.in') frozenmain_c = os.path.join(binlib, 'frozenmain.c') getpath_c = os.path.join(binlib, 'getpath.c') supp_sources = [frozenmain_c, getpath_c] makefile_in = os.path.join(binlib, 'Makefile') defines = ['-DPYTHONPATH=\\"$(PYTHONPATH)\\"'] includes = ['-I' + incldir, '-I' + binlib] # sanity check of directories and files for dir in [prefix, exec_prefix, binlib, incldir] + extensions: if not os.path.exists(dir): usage('needed directory %s not found' % dir) if not os.path.isdir(dir): usage('%s: not a directory' % dir) for file in [config_c_in, makefile_in] + supp_sources: if not os.path.exists(file): usage('needed file %s not found' % file) if not os.path.isfile(file): usage('%s: not a plain file' % file) for dir in extensions: setup = os.path.join(dir, 'Setup') if not os.path.exists(setup): usage('needed file %s not found' % setup) if not os.path.isfile(setup): usage('%s: not a plain file' % setup) # check that enough arguments are passed if not args: usage('at least one filename argument required') # check that the script name ends in ".py" if args[0][-3:] != ".py": usage('the script name must have a .py suffix') # check that file arguments exist for arg in args: if not os.path.exists(arg): usage('argument %s not found' % arg) if not os.path.isfile(arg): usage('%s: not a plain file' % arg) # process non-option arguments scriptfile = args[0] modules = args[1:] # derive target name from script name base = os.path.basename(scriptfile) base, ext = os.path.splitext(base) if base: if base != scriptfile: target = base else: target = base + '.bin' # Actual work starts here... 
dict = findmodules.findmodules(scriptfile, modules, path) names = dict.keys() names.sort() print "Modules being frozen:" for name in names: print '\t', name backup = frozen_c + '~' try: os.rename(frozen_c, backup) except os.error: backup = None outfp = open(frozen_c, 'w') try: makefreeze.makefreeze(outfp, dict) finally: outfp.close() if backup: if cmp.cmp(backup, frozen_c): sys.stderr.write('%s not changed, not written\n' % frozen_c) os.rename(backup, frozen_c) builtins = [] unknown = [] mods = dict.keys() mods.sort() for mod in mods: if dict[mod] == '<builtin>': builtins.append(mod) elif dict[mod] == '<unknown>': unknown.append(mod) addfiles = [] if unknown: addfiles, addmods = \ checkextensions.checkextensions(unknown, extensions) for mod in addmods: unknown.remove(mod) builtins = builtins + addmods if unknown: sys.stderr.write('Warning: unknown modules remain: %s\n' % string.join(unknown)) builtins.sort() infp = open(config_c_in) backup = config_c + '~' try: os.rename(config_c, backup) except os.error: backup = None outfp = open(config_c, 'w') try: makeconfig.makeconfig(infp, outfp, builtins) finally: outfp.close() infp.close() if backup: if cmp.cmp(backup, config_c): sys.stderr.write('%s not changed, not written\n' % config_c) os.rename(backup, config_c) cflags = defines + includes + ['$(OPT)'] libs = [] for n in 'Modules', 'Python', 'Objects', 'Parser': n = 'lib%s.a' % n n = os.path.join(binlib, n) libs.append(n) makevars = parsesetup.getmakevars(makefile_in) somevars = {} for key in makevars.keys(): somevars[key] = makevars[key] somevars['CFLAGS'] = string.join(cflags) # override files = ['$(OPT)', '$(LDFLAGS)', config_c, frozen_c] + \ supp_sources + addfiles + libs + \ ['$(MODLIBS)', '$(LIBS)', '$(SYSLIBS)'] backup = makefile + '~' try: os.rename(makefile, backup) except os.error: backup = None outfp = open(makefile, 'w') try: makemakefile.makemakefile(outfp, somevars, files, target) finally: outfp.close() if backup: if not cmp.cmp(backup, makefile): print 'previous Makefile saved as', backup else: sys.stderr.write('%s not changed, not written\n' % makefile) os.rename(backup, makefile) # Done! print 'Now run make to build the target:', target |
files = ['$(OPT)', '$(LDFLAGS)', config_c, frozen_c] + \ | files = ['$(OPT)', '$(LDFLAGS)', base_config_c, base_frozen_c] + \ | def main(): # overridable context prefix = PREFIX # settable with -p option exec_prefix = None # settable with -P option extensions = [] path = sys.path # output files frozen_c = 'frozen.c' config_c = 'config.c' target = 'a.out' # normally derived from script name makefile = 'Makefile' # parse command line try: opts, args = getopt.getopt(sys.argv[1:], 'e:p:P:') except getopt.error, msg: usage('getopt error: ' + str(msg)) # proces option arguments for o, a in opts: if o == '-e': extensions.append(a) if o == '-p': prefix = a if o == '-P': exec_prefix = a # default exec_prefix if exec_prefix is None: exec_prefix = EXEC_PREFIX if exec_prefix is None: exec_prefix = prefix # locations derived from options binlib = os.path.join(exec_prefix, 'lib/python1.4/config') incldir = os.path.join(prefix, 'include/python1.4') config_c_in = os.path.join(binlib, 'config.c.in') frozenmain_c = os.path.join(binlib, 'frozenmain.c') getpath_c = os.path.join(binlib, 'getpath.c') supp_sources = [frozenmain_c, getpath_c] makefile_in = os.path.join(binlib, 'Makefile') defines = ['-DPYTHONPATH=\\"$(PYTHONPATH)\\"'] includes = ['-I' + incldir, '-I' + binlib] # sanity check of directories and files for dir in [prefix, exec_prefix, binlib, incldir] + extensions: if not os.path.exists(dir): usage('needed directory %s not found' % dir) if not os.path.isdir(dir): usage('%s: not a directory' % dir) for file in [config_c_in, makefile_in] + supp_sources: if not os.path.exists(file): usage('needed file %s not found' % file) if not os.path.isfile(file): usage('%s: not a plain file' % file) for dir in extensions: setup = os.path.join(dir, 'Setup') if not os.path.exists(setup): usage('needed file %s not found' % setup) if not os.path.isfile(setup): usage('%s: not a plain file' % setup) # check that enough arguments are passed if not args: usage('at least one filename argument required') # check that the script name ends in ".py" if args[0][-3:] != ".py": usage('the script name must have a .py suffix') # check that file arguments exist for arg in args: if not os.path.exists(arg): usage('argument %s not found' % arg) if not os.path.isfile(arg): usage('%s: not a plain file' % arg) # process non-option arguments scriptfile = args[0] modules = args[1:] # derive target name from script name base = os.path.basename(scriptfile) base, ext = os.path.splitext(base) if base: if base != scriptfile: target = base else: target = base + '.bin' # Actual work starts here... 
dict = findmodules.findmodules(scriptfile, modules, path) names = dict.keys() names.sort() print "Modules being frozen:" for name in names: print '\t', name backup = frozen_c + '~' try: os.rename(frozen_c, backup) except os.error: backup = None outfp = open(frozen_c, 'w') try: makefreeze.makefreeze(outfp, dict) finally: outfp.close() if backup: if cmp.cmp(backup, frozen_c): sys.stderr.write('%s not changed, not written\n' % frozen_c) os.rename(backup, frozen_c) builtins = [] unknown = [] mods = dict.keys() mods.sort() for mod in mods: if dict[mod] == '<builtin>': builtins.append(mod) elif dict[mod] == '<unknown>': unknown.append(mod) addfiles = [] if unknown: addfiles, addmods = \ checkextensions.checkextensions(unknown, extensions) for mod in addmods: unknown.remove(mod) builtins = builtins + addmods if unknown: sys.stderr.write('Warning: unknown modules remain: %s\n' % string.join(unknown)) builtins.sort() infp = open(config_c_in) backup = config_c + '~' try: os.rename(config_c, backup) except os.error: backup = None outfp = open(config_c, 'w') try: makeconfig.makeconfig(infp, outfp, builtins) finally: outfp.close() infp.close() if backup: if cmp.cmp(backup, config_c): sys.stderr.write('%s not changed, not written\n' % config_c) os.rename(backup, config_c) cflags = defines + includes + ['$(OPT)'] libs = [] for n in 'Modules', 'Python', 'Objects', 'Parser': n = 'lib%s.a' % n n = os.path.join(binlib, n) libs.append(n) makevars = parsesetup.getmakevars(makefile_in) somevars = {} for key in makevars.keys(): somevars[key] = makevars[key] somevars['CFLAGS'] = string.join(cflags) # override files = ['$(OPT)', '$(LDFLAGS)', config_c, frozen_c] + \ supp_sources + addfiles + libs + \ ['$(MODLIBS)', '$(LIBS)', '$(SYSLIBS)'] backup = makefile + '~' try: os.rename(makefile, backup) except os.error: backup = None outfp = open(makefile, 'w') try: makemakefile.makemakefile(outfp, somevars, files, target) finally: outfp.close() if backup: if not cmp.cmp(backup, makefile): print 'previous Makefile saved as', backup else: sys.stderr.write('%s not changed, not written\n' % makefile) os.rename(backup, makefile) # Done! print 'Now run make to build the target:', target |
makemakefile.makemakefile(outfp, somevars, files, target) | makemakefile.makemakefile(outfp, somevars, files, base_target) | def main(): # overridable context prefix = PREFIX # settable with -p option exec_prefix = None # settable with -P option extensions = [] path = sys.path # output files frozen_c = 'frozen.c' config_c = 'config.c' target = 'a.out' # normally derived from script name makefile = 'Makefile' # parse command line try: opts, args = getopt.getopt(sys.argv[1:], 'e:p:P:') except getopt.error, msg: usage('getopt error: ' + str(msg)) # proces option arguments for o, a in opts: if o == '-e': extensions.append(a) if o == '-p': prefix = a if o == '-P': exec_prefix = a # default exec_prefix if exec_prefix is None: exec_prefix = EXEC_PREFIX if exec_prefix is None: exec_prefix = prefix # locations derived from options binlib = os.path.join(exec_prefix, 'lib/python1.4/config') incldir = os.path.join(prefix, 'include/python1.4') config_c_in = os.path.join(binlib, 'config.c.in') frozenmain_c = os.path.join(binlib, 'frozenmain.c') getpath_c = os.path.join(binlib, 'getpath.c') supp_sources = [frozenmain_c, getpath_c] makefile_in = os.path.join(binlib, 'Makefile') defines = ['-DPYTHONPATH=\\"$(PYTHONPATH)\\"'] includes = ['-I' + incldir, '-I' + binlib] # sanity check of directories and files for dir in [prefix, exec_prefix, binlib, incldir] + extensions: if not os.path.exists(dir): usage('needed directory %s not found' % dir) if not os.path.isdir(dir): usage('%s: not a directory' % dir) for file in [config_c_in, makefile_in] + supp_sources: if not os.path.exists(file): usage('needed file %s not found' % file) if not os.path.isfile(file): usage('%s: not a plain file' % file) for dir in extensions: setup = os.path.join(dir, 'Setup') if not os.path.exists(setup): usage('needed file %s not found' % setup) if not os.path.isfile(setup): usage('%s: not a plain file' % setup) # check that enough arguments are passed if not args: usage('at least one filename argument required') # check that the script name ends in ".py" if args[0][-3:] != ".py": usage('the script name must have a .py suffix') # check that file arguments exist for arg in args: if not os.path.exists(arg): usage('argument %s not found' % arg) if not os.path.isfile(arg): usage('%s: not a plain file' % arg) # process non-option arguments scriptfile = args[0] modules = args[1:] # derive target name from script name base = os.path.basename(scriptfile) base, ext = os.path.splitext(base) if base: if base != scriptfile: target = base else: target = base + '.bin' # Actual work starts here... 
dict = findmodules.findmodules(scriptfile, modules, path) names = dict.keys() names.sort() print "Modules being frozen:" for name in names: print '\t', name backup = frozen_c + '~' try: os.rename(frozen_c, backup) except os.error: backup = None outfp = open(frozen_c, 'w') try: makefreeze.makefreeze(outfp, dict) finally: outfp.close() if backup: if cmp.cmp(backup, frozen_c): sys.stderr.write('%s not changed, not written\n' % frozen_c) os.rename(backup, frozen_c) builtins = [] unknown = [] mods = dict.keys() mods.sort() for mod in mods: if dict[mod] == '<builtin>': builtins.append(mod) elif dict[mod] == '<unknown>': unknown.append(mod) addfiles = [] if unknown: addfiles, addmods = \ checkextensions.checkextensions(unknown, extensions) for mod in addmods: unknown.remove(mod) builtins = builtins + addmods if unknown: sys.stderr.write('Warning: unknown modules remain: %s\n' % string.join(unknown)) builtins.sort() infp = open(config_c_in) backup = config_c + '~' try: os.rename(config_c, backup) except os.error: backup = None outfp = open(config_c, 'w') try: makeconfig.makeconfig(infp, outfp, builtins) finally: outfp.close() infp.close() if backup: if cmp.cmp(backup, config_c): sys.stderr.write('%s not changed, not written\n' % config_c) os.rename(backup, config_c) cflags = defines + includes + ['$(OPT)'] libs = [] for n in 'Modules', 'Python', 'Objects', 'Parser': n = 'lib%s.a' % n n = os.path.join(binlib, n) libs.append(n) makevars = parsesetup.getmakevars(makefile_in) somevars = {} for key in makevars.keys(): somevars[key] = makevars[key] somevars['CFLAGS'] = string.join(cflags) # override files = ['$(OPT)', '$(LDFLAGS)', config_c, frozen_c] + \ supp_sources + addfiles + libs + \ ['$(MODLIBS)', '$(LIBS)', '$(SYSLIBS)'] backup = makefile + '~' try: os.rename(makefile, backup) except os.error: backup = None outfp = open(makefile, 'w') try: makemakefile.makemakefile(outfp, somevars, files, target) finally: outfp.close() if backup: if not cmp.cmp(backup, makefile): print 'previous Makefile saved as', backup else: sys.stderr.write('%s not changed, not written\n' % makefile) os.rename(backup, makefile) # Done! print 'Now run make to build the target:', target |
print 'Now run make to build the target:', target | if odir: print 'Now run make in', odir, print 'to build the target:', base_target else: print 'Now run make to build the target:', base_target | def main(): # overridable context prefix = PREFIX # settable with -p option exec_prefix = None # settable with -P option extensions = [] path = sys.path # output files frozen_c = 'frozen.c' config_c = 'config.c' target = 'a.out' # normally derived from script name makefile = 'Makefile' # parse command line try: opts, args = getopt.getopt(sys.argv[1:], 'e:p:P:') except getopt.error, msg: usage('getopt error: ' + str(msg)) # proces option arguments for o, a in opts: if o == '-e': extensions.append(a) if o == '-p': prefix = a if o == '-P': exec_prefix = a # default exec_prefix if exec_prefix is None: exec_prefix = EXEC_PREFIX if exec_prefix is None: exec_prefix = prefix # locations derived from options binlib = os.path.join(exec_prefix, 'lib/python1.4/config') incldir = os.path.join(prefix, 'include/python1.4') config_c_in = os.path.join(binlib, 'config.c.in') frozenmain_c = os.path.join(binlib, 'frozenmain.c') getpath_c = os.path.join(binlib, 'getpath.c') supp_sources = [frozenmain_c, getpath_c] makefile_in = os.path.join(binlib, 'Makefile') defines = ['-DPYTHONPATH=\\"$(PYTHONPATH)\\"'] includes = ['-I' + incldir, '-I' + binlib] # sanity check of directories and files for dir in [prefix, exec_prefix, binlib, incldir] + extensions: if not os.path.exists(dir): usage('needed directory %s not found' % dir) if not os.path.isdir(dir): usage('%s: not a directory' % dir) for file in [config_c_in, makefile_in] + supp_sources: if not os.path.exists(file): usage('needed file %s not found' % file) if not os.path.isfile(file): usage('%s: not a plain file' % file) for dir in extensions: setup = os.path.join(dir, 'Setup') if not os.path.exists(setup): usage('needed file %s not found' % setup) if not os.path.isfile(setup): usage('%s: not a plain file' % setup) # check that enough arguments are passed if not args: usage('at least one filename argument required') # check that the script name ends in ".py" if args[0][-3:] != ".py": usage('the script name must have a .py suffix') # check that file arguments exist for arg in args: if not os.path.exists(arg): usage('argument %s not found' % arg) if not os.path.isfile(arg): usage('%s: not a plain file' % arg) # process non-option arguments scriptfile = args[0] modules = args[1:] # derive target name from script name base = os.path.basename(scriptfile) base, ext = os.path.splitext(base) if base: if base != scriptfile: target = base else: target = base + '.bin' # Actual work starts here... 
dict = findmodules.findmodules(scriptfile, modules, path) names = dict.keys() names.sort() print "Modules being frozen:" for name in names: print '\t', name backup = frozen_c + '~' try: os.rename(frozen_c, backup) except os.error: backup = None outfp = open(frozen_c, 'w') try: makefreeze.makefreeze(outfp, dict) finally: outfp.close() if backup: if cmp.cmp(backup, frozen_c): sys.stderr.write('%s not changed, not written\n' % frozen_c) os.rename(backup, frozen_c) builtins = [] unknown = [] mods = dict.keys() mods.sort() for mod in mods: if dict[mod] == '<builtin>': builtins.append(mod) elif dict[mod] == '<unknown>': unknown.append(mod) addfiles = [] if unknown: addfiles, addmods = \ checkextensions.checkextensions(unknown, extensions) for mod in addmods: unknown.remove(mod) builtins = builtins + addmods if unknown: sys.stderr.write('Warning: unknown modules remain: %s\n' % string.join(unknown)) builtins.sort() infp = open(config_c_in) backup = config_c + '~' try: os.rename(config_c, backup) except os.error: backup = None outfp = open(config_c, 'w') try: makeconfig.makeconfig(infp, outfp, builtins) finally: outfp.close() infp.close() if backup: if cmp.cmp(backup, config_c): sys.stderr.write('%s not changed, not written\n' % config_c) os.rename(backup, config_c) cflags = defines + includes + ['$(OPT)'] libs = [] for n in 'Modules', 'Python', 'Objects', 'Parser': n = 'lib%s.a' % n n = os.path.join(binlib, n) libs.append(n) makevars = parsesetup.getmakevars(makefile_in) somevars = {} for key in makevars.keys(): somevars[key] = makevars[key] somevars['CFLAGS'] = string.join(cflags) # override files = ['$(OPT)', '$(LDFLAGS)', config_c, frozen_c] + \ supp_sources + addfiles + libs + \ ['$(MODLIBS)', '$(LIBS)', '$(SYSLIBS)'] backup = makefile + '~' try: os.rename(makefile, backup) except os.error: backup = None outfp = open(makefile, 'w') try: makemakefile.makemakefile(outfp, somevars, files, target) finally: outfp.close() if backup: if not cmp.cmp(backup, makefile): print 'previous Makefile saved as', backup else: sys.stderr.write('%s not changed, not written\n' % makefile) os.rename(backup, makefile) # Done! print 'Now run make to build the target:', target |
sys.stdout.flush() os.system("/depot/gnu/plat/bin/rlog -r %s </dev/null 2>&1" % self.name) | p = os.popen("/depot/gnu/plat/bin/rlog -r %s </dev/null 2>&1" % self.name) output = p.read() p.close() print cgi.escape(output) | def do_info(self): |
sys.stdout.flush() os.system("/depot/gnu/plat/bin/rlog %s </dev/null 2>&1" % self.name) | p = os.popen("/depot/gnu/plat/bin/rlog %s </dev/null 2>&1" % self.name) output = p.read() p.close() print cgi.escape(output) | def do_rlog(self): |
print "domain=%s;" % os.environ['HTTP_HOST'], | print "domain=%s;" % hostname, | def set_cookie(self, author, email): |
<TEXTAREA COLS=80 ROWS=20 NAME=text>""" % title | <TEXTAREA COLS=80 ROWS=20 NAME=text>""" % self.escape(title) | def showedit(self, name, title, text): |
""" % (author, email, self.log) | """ % (self.escape(author), self.escape(email), self.escape(self.log)) def escape(self, s): import regsub if '&' in s: s = regsub.gsub("&", "&", s) if '<' in s: s = regsub.gsub("<", "<", s) if '>' in s: s = regsub.gsub(">", ">", s) if '"' in s: s = regsub.gsub('"', """, s) return s | def showedit(self, name, title, text): |
while url[-1] in ");:,.?": | while url[-1] in ");:,.?'\"": | def translate(self, text): |
url = cgi.escape(url) | url = self.escape(url) | def translate(self, text): |
pos = f.tell() | pos = int(f.tell()) | def _addval(self, val): f = _open(self._datfile, 'rb+') f.seek(0, 2) pos = f.tell() |
fp.write("\t\tif _arguments.has_key('errn'):\n") | fp.write("\t\tif _arguments.get('errn', 0):\n") | fp.write("\tdef %s(self, "%funcname) |
return split(p)[1] | i = p.rfind('/') + 1 return p[i:] | def basename(p): """Returns the final component of a pathname""" return split(p)[1] |
return split(p)[0] | i = p.rfind('/') + 1 head = p[:i] if head and head != '/'*len(head): head = head.rstrip('/') return head | def dirname(p): """Returns the directory component of a pathname""" return split(p)[0] |
testclasses = (WichmannHill_TestBasicOps, | testclasses = [WichmannHill_TestBasicOps, | def test_main(verbose=None): testclasses = (WichmannHill_TestBasicOps, MersenneTwister_TestBasicOps, HardwareRandom_TestBasicOps, TestDistributions, TestModule) test_support.run_unittest(*testclasses) # verify reference counting import sys if verbose and hasattr(sys, "gettotalrefcount"): counts = [None] * 5 for i in xrange(len(counts)): test_support.run_unittest(*testclasses) counts[i] = sys.gettotalrefcount() print counts |
HardwareRandom_TestBasicOps, | def test_main(verbose=None): testclasses = (WichmannHill_TestBasicOps, MersenneTwister_TestBasicOps, HardwareRandom_TestBasicOps, TestDistributions, TestModule) test_support.run_unittest(*testclasses) # verify reference counting import sys if verbose and hasattr(sys, "gettotalrefcount"): counts = [None] * 5 for i in xrange(len(counts)): test_support.run_unittest(*testclasses) counts[i] = sys.gettotalrefcount() print counts |
TestModule) | TestModule] if random._urandom is not None: testclasses.append(HardwareRandom_TestBasicOps) | def test_main(verbose=None): testclasses = (WichmannHill_TestBasicOps, MersenneTwister_TestBasicOps, HardwareRandom_TestBasicOps, TestDistributions, TestModule) test_support.run_unittest(*testclasses) # verify reference counting import sys if verbose and hasattr(sys, "gettotalrefcount"): counts = [None] * 5 for i in xrange(len(counts)): test_support.run_unittest(*testclasses) counts[i] = sys.gettotalrefcount() print counts |
print '\nmorefindertools version %s\nTests coming up' %__version__ | print '\nmorefindertools version %s\nTests coming up...' %__version__ | def _test2(): print '\nmorefindertools version %s\nTests coming up' %__version__ import os import random # miscellaneous print '\tfilesharing on?', filesharing() # is file sharing on, off, starting up? print '\tOS version', OSversion() # the version of the system software # set the soundvolume in a simple way print '\tSystem beep volume' for i in range(0, 7): volumelevel(i) MacOS.SysBeep() # Finder's windows, file location, file attributes open("@findertoolstest", "w") f = macfs.FSSpec("@findertoolstest").as_pathname() reveal(f) # reveal this file in a Finder window select(f) # select this file base, file = os.path.split(f) closewindow(base) # close the window this file is in (opened by reveal) openwindow(base) # open it again windowview(base, 1) # set the view by list label(f, 2) # set the label of this file to something orange print '\tlabel', label(f) # get the label of this file # the file location only works in a window with icon view! print 'Random locations for an icon' windowview(base, 0) # set the view by icon windowsize(base, (600, 600)) for i in range(50): location(f, (random.randint(10, 590), random.randint(10, 590))) windowsize(base, (200, 400)) windowview(base, 1) # set the view by icon orgpos = windowposition(base) print 'Animated window location' for i in range(10): pos = (100+i*10, 100+i*10) windowposition(base, pos) print '\twindow position', pos windowposition(base, orgpos) # park it where it was before print 'Put a comment in file', f, ':' print '\t', comment(f) # print the Finder comment this file has s = 'This is a comment no one reads!' comment(f, s) # set the Finder comment |
if '@' in localename: | if '@' in code: | def _parse_localename(localename): """ Parses the locale code for localename and returns the result as tuple (language code, encoding). The localename is normalized and passed through the locale alias engine. A ValueError is raised in case the locale name cannot be parsed. The language code corresponds to RFC 1766. code and encoding can be None in case the values cannot be determined or are unknown to this implementation. """ code = normalize(localename) if '@' in localename: # Deal with locale modifiers code, modifier = code.split('@') if modifier == 'euro' and '.' not in code: # Assume Latin-9 for @euro locales. This is bogus, # since some systems may use other encodings for these # locales. Also, we ignore other modifiers. return code, 'iso-8859-15' if '.' in code: return tuple(code.split('.')[:2]) elif code == 'C': return None, None raise ValueError, 'unknown locale: %s' % localename |
0x0404: "zh_TW", 0x0804: "zh_CN", | 0x0436: "af_ZA", 0x041c: "sq_AL", 0x0401: "ar_SA", 0x0801: "ar_IQ", 0x0c01: "ar_EG", 0x1001: "ar_LY", 0x1401: "ar_DZ", 0x1801: "ar_MA", 0x1c01: "ar_TN", 0x2001: "ar_OM", 0x2401: "ar_YE", 0x2801: "ar_SY", 0x2c01: "ar_JO", 0x3001: "ar_LB", 0x3401: "ar_KW", 0x3801: "ar_AE", 0x3c01: "ar_BH", 0x4001: "ar_QA", 0x042b: "hy_AM", 0x042c: "az_AZ", 0x082c: "az_AZ", 0x042d: "eu_ES", 0x0423: "be_BY", 0x0445: "bn_IN", 0x201a: "bs_BA", 0x141a: "bs_BA", 0x047e: "br_FR", 0x0402: "bg_BG", 0x0403: "ca_ES", 0x0004: "zh_CHS", 0x0404: "zh_TW", 0x0804: "zh_CN", 0x0c04: "zh_HK", 0x1004: "zh_SG", 0x1404: "zh_MO", 0x7c04: "zh_CHT", 0x041a: "hr_HR", 0x101a: "hr_BA", 0x0405: "cs_CZ", | def getpreferredencoding(do_setlocale = True): """Return the charset that the user is likely using, according to the system configuration.""" if do_setlocale: oldloc = setlocale(LC_CTYPE) setlocale(LC_CTYPE, "") result = nl_langinfo(CODESET) setlocale(LC_CTYPE, oldloc) return result else: return nl_langinfo(CODESET) |
0x0413: "nl_NL", 0x0409: "en_US", 0x0809: "en_UK", 0x0c09: "en_AU", 0x1009: "en_CA", 0x1409: "en_NZ", 0x1809: "en_IE", 0x1c09: "en_ZA", | 0x048c: "gbz_AF", 0x0465: "div_MV", 0x0413: "nl_NL", 0x0813: "nl_BE", 0x0409: "en_US", 0x0809: "en_GB", 0x0c09: "en_AU", 0x1009: "en_CA", 0x1409: "en_NZ", 0x1809: "en_IE", 0x1c09: "en_ZA", 0x2009: "en_JA", 0x2409: "en_CB", 0x2809: "en_BZ", 0x2c09: "en_TT", 0x3009: "en_ZW", 0x3409: "en_PH", 0x0425: "et_EE", 0x0438: "fo_FO", 0x0464: "fil_PH", | def getpreferredencoding(do_setlocale = True): """Return the charset that the user is likely using, according to the system configuration.""" if do_setlocale: oldloc = setlocale(LC_CTYPE) setlocale(LC_CTYPE, "") result = nl_langinfo(CODESET) setlocale(LC_CTYPE, oldloc) return result else: return nl_langinfo(CODESET) |
0x040c: "fr_FR", 0x080c: "fr_BE", 0x0c0c: "fr_CA", 0x100c: "fr_CH", 0x0407: "de_DE", | 0x040c: "fr_FR", 0x080c: "fr_BE", 0x0c0c: "fr_CA", 0x100c: "fr_CH", 0x140c: "fr_LU", 0x180c: "fr_MC", 0x0462: "fy_NL", 0x0456: "gl_ES", 0x0437: "ka_GE", 0x0407: "de_DE", 0x0807: "de_CH", 0x0c07: "de_AT", 0x1007: "de_LU", 0x1407: "de_LI", | def getpreferredencoding(do_setlocale = True): """Return the charset that the user is likely using, according to the system configuration.""" if do_setlocale: oldloc = setlocale(LC_CTYPE) setlocale(LC_CTYPE, "") result = nl_langinfo(CODESET) setlocale(LC_CTYPE, oldloc) return result else: return nl_langinfo(CODESET) |
0x040d: "iw_IL", | 0x0447: "gu_IN", 0x040d: "he_IL", 0x0439: "hi_IN", 0x040e: "hu_HU", | def getpreferredencoding(do_setlocale = True): """Return the charset that the user is likely using, according to the system configuration.""" if do_setlocale: oldloc = setlocale(LC_CTYPE) setlocale(LC_CTYPE, "") result = nl_langinfo(CODESET) setlocale(LC_CTYPE, oldloc) return result else: return nl_langinfo(CODESET) |
0x0410: "it_IT", 0x0411: "ja_JA", 0x0414: "no_NO", 0x0816: "pt_PT", 0x0c0a: "es_ES", 0x0441: "sw_KE", 0x041d: "sv_SE", 0x081d: "sv_FI", | 0x0421: "id_ID", 0x045d: "iu_CA", 0x085d: "iu_CA", 0x083c: "ga_IE", 0x0434: "xh_ZA", 0x0435: "zu_ZA", 0x0410: "it_IT", 0x0810: "it_CH", 0x0411: "ja_JP", 0x044b: "kn_IN", 0x043f: "kk_KZ", 0x0457: "kok_IN", 0x0412: "ko_KR", 0x0440: "ky_KG", 0x0426: "lv_LV", 0x0427: "lt_LT", 0x046e: "lb_LU", 0x042f: "mk_MK", 0x043e: "ms_MY", 0x083e: "ms_BN", 0x044c: "ml_IN", 0x043a: "mt_MT", 0x0481: "mi_NZ", 0x047a: "arn_CL", 0x044e: "mr_IN", 0x047c: "moh_CA", 0x0450: "mn_MN", 0x0461: "ne_NP", 0x0414: "nb_NO", 0x0814: "nn_NO", 0x0482: "oc_FR", 0x0448: "or_IN", 0x0463: "ps_AF", 0x0429: "fa_IR", 0x0415: "pl_PL", 0x0416: "pt_BR", 0x0816: "pt_PT", 0x0446: "pa_IN", 0x046b: "quz_BO", 0x086b: "quz_EC", 0x0c6b: "quz_PE", 0x0418: "ro_RO", 0x0417: "rm_CH", 0x0419: "ru_RU", 0x243b: "smn_FI", 0x103b: "smj_NO", 0x143b: "smj_SE", 0x043b: "se_NO", 0x083b: "se_SE", 0x0c3b: "se_FI", 0x203b: "sms_FI", 0x183b: "sma_NO", 0x1c3b: "sma_SE", 0x044f: "sa_IN", 0x0c1a: "sr_SP", 0x1c1a: "sr_BA", 0x081a: "sr_SP", 0x181a: "sr_BA", 0x046c: "ns_ZA", 0x0432: "tn_ZA", 0x041b: "sk_SK", 0x0424: "sl_SI", 0x040a: "es_ES", 0x080a: "es_MX", 0x0c0a: "es_ES", 0x100a: "es_GT", 0x140a: "es_CR", 0x180a: "es_PA", 0x1c0a: "es_DO", 0x200a: "es_VE", 0x240a: "es_CO", 0x280a: "es_PE", 0x2c0a: "es_AR", 0x300a: "es_EC", 0x340a: "es_CL", 0x380a: "es_UR", 0x3c0a: "es_PY", 0x400a: "es_BO", 0x440a: "es_SV", 0x480a: "es_HN", 0x4c0a: "es_NI", 0x500a: "es_PR", 0x0441: "sw_KE", 0x041d: "sv_SE", 0x081d: "sv_FI", 0x045a: "syr_SY", 0x0449: "ta_IN", 0x0444: "tt_RU", 0x044a: "te_IN", 0x041e: "th_TH", | def getpreferredencoding(do_setlocale = True): """Return the charset that the user is likely using, according to the system configuration.""" if do_setlocale: oldloc = setlocale(LC_CTYPE) setlocale(LC_CTYPE, "") result = nl_langinfo(CODESET) setlocale(LC_CTYPE, oldloc) return result else: return nl_langinfo(CODESET) |
self.close() | try: self.close() except: pass | def __del__(self): self.close() |
chars = self.text.get("1.0", "end-1c") | def writefile(self, filename): self.fixlastline() try: f = open(filename, "w") chars = self.text.get("1.0", "end-1c") f.write(chars) f.close() ## print "saved to", `filename` return True except IOError, msg: tkMessageBox.showerror("I/O Error", str(msg), master=self.text) return False |
finfo = macfs.FSSpec(name).GetFInfo() | finfo = FSSpec(name).FSpGetFInfo() | def getfileinfo(name): finfo = macfs.FSSpec(name).GetFInfo() dir, file = os.path.split(name) # XXXX Get resource/data sizes fp = open(name, 'rb') fp.seek(0, 2) dlen = fp.tell() fp = openrf(name, '*rb') fp.seek(0, 2) rlen = fp.tell() return file, finfo, dlen, rlen |
else: | except ImportError: | def openrsrc(name, *mode): if not mode: mode = '*rb' else: mode = '*' + mode[0] return openrf(name, mode) |
fss = macfs.FSSpec(ofname) | fss = FSSpec(ofname) | def __init__(self, (name, finfo, dlen, rlen), ofp): if type(ofp) == type(''): ofname = ofp ofp = open(ofname, 'w') if os.name == 'mac': fss = macfs.FSSpec(ofname) fss.SetCreatorType('BnHq', 'TEXT') ofp.write('(This file must be converted with BinHex 4.0)\n\n:') hqxer = _Hqxcoderengine(ofp) self.ofp = _Rlecoderengine(hqxer) self.crc = 0 if finfo is None: finfo = FInfo() self.dlen = dlen self.rlen = rlen self._writeinfo(name, finfo) self.state = _DID_HEADER |
ofss = macfs.FSSpec(out) | ofss = FSSpec(out) | def hexbin(inp, out): """(infilename, outfilename) - Decode binhexed file""" ifp = HexBin(inp) finfo = ifp.FInfo if not out: out = ifp.FName if os.name == 'mac': ofss = macfs.FSSpec(out) out = ofss.as_pathname() ofp = open(out, 'wb') # XXXX Do translation on non-mac systems while 1: d = ifp.read(128000) if not d: break ofp.write(d) ofp.close() ifp.close_data() d = ifp.read_rsrc(128000) if d: ofp = openrsrc(out, 'wb') ofp.write(d) while 1: d = ifp.read_rsrc(128000) if not d: break ofp.write(d) ofp.close() if os.name == 'mac': nfinfo = ofss.GetFInfo() nfinfo.Creator = finfo.Creator nfinfo.Type = finfo.Type nfinfo.Flags = finfo.Flags ofss.SetFInfo(nfinfo) ifp.close() |
self.hashcode = random.randrange(1000000000) def __hash__(self): return self.hashcode | def __hash__(self): return 42 | def __init__(self, i): # Comparison outcomes are determined by the value of i. self.i = i |
XXX | def test_one(n): global mutate, dict1, dict2, dict1keys, dict2keys # Fill the dicts without mutating them. mutate = 0 dict1keys = fill_dict(dict1, range(n), n) dict2keys = fill_dict(dict2, range(n), n) # Enable mutation, then compare the dicts so long as they have the # same size. mutate = 1 if verbose: print "trying w/ lengths", len(dict1), len(dict2), while dict1 and len(dict1) == len(dict2): if verbose: print ".", c = dict1 == dict2 XXX # Can't figure out how to make this work if verbose: print |
referenced = C() a = C() | referenced = A() a = A() | def callback(*args): pass |
a = C() a.wrc = weakref.ref(referenced, callback) | a = A() weakref.ref(referenced, callback) | def callback(*args): pass |
def writestr(self, zinfo, bytes): | def writestr(self, zinfo_or_arcname, bytes): | def writestr(self, zinfo, bytes): """Write a file into the archive. The contents is the string 'bytes'.""" self._writecheck(zinfo) zinfo.file_size = len(bytes) # Uncompressed size zinfo.CRC = binascii.crc32(bytes) # CRC-32 checksum if zinfo.compress_type == ZIP_DEFLATED: co = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15) bytes = co.compress(bytes) + co.flush() zinfo.compress_size = len(bytes) # Compressed size else: zinfo.compress_size = zinfo.file_size zinfo.header_offset = self.fp.tell() # Start of header bytes self.fp.write(zinfo.FileHeader()) zinfo.file_offset = self.fp.tell() # Start of file bytes self.fp.write(bytes) if zinfo.flag_bits & 0x08: # Write CRC and file sizes after the file data self.fp.write(struct.pack("<lll", zinfo.CRC, zinfo.compress_size, zinfo.file_size)) self.filelist.append(zinfo) self.NameToInfo[zinfo.filename] = zinfo |
'bytes'.""" | 'bytes'. 'zinfo_or_arcname' is either a ZipInfo instance or the name of the file in the archive.""" if not isinstance(zinfo_or_arcname, ZipInfo): zinfo = ZipInfo(filename=zinfo_or_arcname, date_time=time.localtime(time.time())) zinfo.compress_type = self.compression else: zinfo = zinfo_or_arcname | def writestr(self, zinfo, bytes): """Write a file into the archive. The contents is the string 'bytes'.""" self._writecheck(zinfo) zinfo.file_size = len(bytes) # Uncompressed size zinfo.CRC = binascii.crc32(bytes) # CRC-32 checksum if zinfo.compress_type == ZIP_DEFLATED: co = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION, zlib.DEFLATED, -15) bytes = co.compress(bytes) + co.flush() zinfo.compress_size = len(bytes) # Compressed size else: zinfo.compress_size = zinfo.file_size zinfo.header_offset = self.fp.tell() # Start of header bytes self.fp.write(zinfo.FileHeader()) zinfo.file_offset = self.fp.tell() # Start of file bytes self.fp.write(bytes) if zinfo.flag_bits & 0x08: # Write CRC and file sizes after the file data self.fp.write(struct.pack("<lll", zinfo.CRC, zinfo.compress_size, zinfo.file_size)) self.filelist.append(zinfo) self.NameToInfo[zinfo.filename] = zinfo |
urllib.urlopen, "http://www.sadflkjsasadf.com/") | urllib.urlopen, "http://www.python.invalid/") | def test_bad_address(self): # Make sure proper exception is raised when connecting to a bogus # address. self.assertRaises(IOError, urllib.urlopen, "http://www.sadflkjsasadf.com/") |
for (func, cmd) in self.sub_commands: | for (func, cmd_name) in self.sub_commands: | def run (self): |
self.run_peer (cmd) | self.run_peer (cmd_name) | def run (self): |
for (func, cmd) in self.sub_commands: | for (func, cmd_name) in self.sub_commands: | def get_outputs (self): # This command doesn't have any outputs of its own, so just # get the outputs of all its sub-commands. outputs = [] for (func, cmd) in self.sub_commands: if func is None or func(): outputs.extend (self.run_peer (cmd)) |
outputs.extend (self.run_peer (cmd)) | cmd = self.find_peer (cmd_name) outputs.extend (cmd.get_outputs()) | def get_outputs (self): # This command doesn't have any outputs of its own, so just # get the outputs of all its sub-commands. outputs = [] for (func, cmd) in self.sub_commands: if func is None or func(): outputs.extend (self.run_peer (cmd)) |
self.write("Unsupported characters in input") | self.write("Unsupported characters in input\n") | def runsource(self, source): "Extend base class method: Stuff the source in the line cache first" filename = self.stuffsource(source) self.more = 0 self.save_warnings_filters = warnings.filters[:] warnings.filterwarnings(action="error", category=SyntaxWarning) if isinstance(source, types.UnicodeType): import IOBinding try: source = source.encode(IOBinding.encoding) except UnicodeError: self.tkconsole.resetoutput() self.write("Unsupported characters in input") return try: # InteractiveInterpreter.runsource() calls its runcode() method, # which is overridden (see below) return InteractiveInterpreter.runsource(self, source, filename) finally: if self.save_warnings_filters is not None: warnings.filters[:] = self.save_warnings_filters self.save_warnings_filters = None |
s = s.strip() lines = s.split('\n') prefix = self.text.get("insert linestart","insert").rstrip() if prefix and prefix[-1]==':': | prefix = self.text.get("insert linestart", "insert") if prefix.rstrip().endswith(':'): | def recall(self, s, event): self.text.undo_block_start() try: self.text.tag_remove("sel", "1.0", "end") self.text.mark_set("insert", "end-1c") s = s.strip() lines = s.split('\n') prefix = self.text.get("insert linestart","insert").rstrip() if prefix and prefix[-1]==':': self.newline_and_indent_event(event) self.text.insert("insert",lines[0].strip()) if len(lines) > 1: self.newline_and_indent_event(event) for line in lines[1:]: self.text.insert("insert", line.strip()) self.newline_and_indent_event(event) finally: self.text.see("insert") self.text.undo_block_stop() |
self.text.insert("insert",lines[0].strip()) | prefix = self.text.get("insert linestart", "insert") self.text.insert("insert", lines[0].strip()) | def recall(self, s, event): self.text.undo_block_start() try: self.text.tag_remove("sel", "1.0", "end") self.text.mark_set("insert", "end-1c") s = s.strip() lines = s.split('\n') prefix = self.text.get("insert linestart","insert").rstrip() if prefix and prefix[-1]==':': self.newline_and_indent_event(event) self.text.insert("insert",lines[0].strip()) if len(lines) > 1: self.newline_and_indent_event(event) for line in lines[1:]: self.text.insert("insert", line.strip()) self.newline_and_indent_event(event) finally: self.text.see("insert") self.text.undo_block_stop() |
self.newline_and_indent_event(event) | orig_base_indent = re.search(r'^([ \t]*)', lines[0]).group(0) new_base_indent = re.search(r'^([ \t]*)', prefix).group(0) | def recall(self, s, event): self.text.undo_block_start() try: self.text.tag_remove("sel", "1.0", "end") self.text.mark_set("insert", "end-1c") s = s.strip() lines = s.split('\n') prefix = self.text.get("insert linestart","insert").rstrip() if prefix and prefix[-1]==':': self.newline_and_indent_event(event) self.text.insert("insert",lines[0].strip()) if len(lines) > 1: self.newline_and_indent_event(event) for line in lines[1:]: self.text.insert("insert", line.strip()) self.newline_and_indent_event(event) finally: self.text.see("insert") self.text.undo_block_stop() |
self.text.insert("insert", line.strip()) self.newline_and_indent_event(event) | if line.startswith(orig_base_indent): line = new_base_indent + line[len(orig_base_indent):] self.text.insert('insert', '\n'+line.rstrip()) | def recall(self, s, event): self.text.undo_block_start() try: self.text.tag_remove("sel", "1.0", "end") self.text.mark_set("insert", "end-1c") s = s.strip() lines = s.split('\n') prefix = self.text.get("insert linestart","insert").rstrip() if prefix and prefix[-1]==':': self.newline_and_indent_event(event) self.text.insert("insert",lines[0].strip()) if len(lines) > 1: self.newline_and_indent_event(event) for line in lines[1:]: self.text.insert("insert", line.strip()) self.newline_and_indent_event(event) finally: self.text.see("insert") self.text.undo_block_stop() |
import pdb ; pdb.set_trace() | def extractarg(self, part): mode = "InMode" if self.asplit.match(part) < 0: self.error("Indecipherable argument: %s", `part`) import pdb ; pdb.set_trace() return ("unknown", part, mode) type, name, array = self.asplit.group('type', 'name', 'array') if array: # array matches an optional [] after the argument name type = type + " ptr " type = regsub.gsub("\*", " ptr ", type) type = string.strip(type) type = regsub.gsub("[ \t]+", "_", type) return self.modifyarg(type, name, mode) |
|
t = test() | t = Test() | def str(self): return str(self) |
return beta * self.stdgamma(alpha) def stdgamma(self, alpha, *args): | if alpha <= 0.0 or beta <= 0.0: raise ValueError, 'gammavariate: alpha and beta must be > 0.0' | def gammavariate(self, alpha, beta): # beta times standard gamma return beta * self.stdgamma(alpha) |
if alpha <= 0.0: raise ValueError, 'stdgamma: alpha must be > 0.0' | def stdgamma(self, alpha, *args): # *args for Py2.2 compatiblity random = self.random if alpha <= 0.0: raise ValueError, 'stdgamma: alpha must be > 0.0' |
|
return x | return x * beta | def stdgamma(self, alpha, *args): # *args for Py2.2 compatiblity random = self.random if alpha <= 0.0: raise ValueError, 'stdgamma: alpha must be > 0.0' |
return -_log(u) | return -_log(u) * beta | def stdgamma(self, alpha, *args): # *args for Py2.2 compatiblity random = self.random if alpha <= 0.0: raise ValueError, 'stdgamma: alpha must be > 0.0' |
return x | return x * beta def stdgamma(self, alpha, ainv, bbb, ccc): import warnings warnings.warn("The stdgamma function is deprecated; " "use gammavariate() instead", DeprecationWarning) return self.gammavariate(alpha, 1.0) | def stdgamma(self, alpha, *args): # *args for Py2.2 compatiblity random = self.random if alpha <= 0.0: raise ValueError, 'stdgamma: alpha must be > 0.0' |
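With the gammavariate change above, both parameters are validated up front and the result is scaled by beta, while stdgamma survives only as a deprecated shim. A short sketch of the resulting public behaviour:

import random

sample = random.gammavariate(2.0, 3.0)    # mean is alpha * beta = 6.0
try:
    random.gammavariate(0.0, 1.0)
except ValueError:
    pass                                  # alpha and beta must both be > 0.0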
def _test(N=200): | def _test(N=20000): | def _test(N=200): print 'TWOPI =', TWOPI print 'LOG4 =', LOG4 print 'NV_MAGICCONST =', NV_MAGICCONST print 'SG_MAGICCONST =', SG_MAGICCONST _test_generator(N, 'random()') _test_generator(N, 'normalvariate(0.0, 1.0)') _test_generator(N, 'lognormvariate(0.0, 1.0)') _test_generator(N, 'cunifvariate(0.0, 1.0)') _test_generator(N, 'expovariate(1.0)') _test_generator(N, 'vonmisesvariate(0.0, 1.0)') _test_generator(N, 'gammavariate(0.5, 1.0)') _test_generator(N, 'gammavariate(0.9, 1.0)') _test_generator(N, 'gammavariate(1.0, 1.0)') _test_generator(N, 'gammavariate(2.0, 1.0)') _test_generator(N, 'gammavariate(20.0, 1.0)') _test_generator(N, 'gammavariate(200.0, 1.0)') _test_generator(N, 'gauss(0.0, 1.0)') _test_generator(N, 'betavariate(3.0, 3.0)') _test_generator(N, 'paretovariate(1.0)') _test_generator(N, 'weibullvariate(1.0, 1.0)') # Test jumpahead. s = getstate() jumpahead(N) r1 = random() # now do it the slow way setstate(s) for i in range(N): random() r2 = random() if r1 != r2: raise ValueError("jumpahead test failed " + `(N, r1, r2)`) |
re-inserted are alwyas appended to the header list. | re-inserted are always appended to the header list. | def values(self): """Return a list of all the message's header values. |
re-inserted are alwyas appended to the header list. | re-inserted are always appended to the header list. | def items(self): """Get all the message's header fields and values. |
compile(file) | compileFile(file) | def wrap_aug(node): return wrapper[node.__class__](node) |
port = int(host[i+1:]) | try: port = int(host[i+1:]) except ValueError, msg: raise socket.error, str(msg) | def _set_hostport(self, host, port): if port is None: i = host.find(':') if i >= 0: port = int(host[i+1:]) host = host[:i] else: port = self.default_port self.host = host self.port = port |
print time.strftime("expires=%a, %d-%b-%x %X GMT", gmt) | print time.strftime("expires=%a, %d-%b-%y %X GMT", gmt) | def send_my_cookie(ui): name = COOKIE_NAME value = "%s/%s/%s" % (ui.author, ui.email, ui.password) import urllib value = urllib.quote(value) then = now + COOKIE_LIFETIME gmt = time.gmtime(then) print "Set-Cookie: %s=%s; path=/cgi-bin/;" % (name, value), print time.strftime("expires=%a, %d-%b-%x %X GMT", gmt) |
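The cookie-expiry fix above swaps the locale-dependent %x date directive for the two-digit %y year that the header format expects. A self-contained sketch, with the lifetime value as a placeholder:

import time

COOKIE_LIFETIME = 3600                    # placeholder: one hour
gmt = time.gmtime(time.time() + COOKIE_LIFETIME)
expires = time.strftime("expires=%a, %d-%b-%y %X GMT", gmt)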
s = pickle.dumps(d) e = pickle.loads(s) self.assertNotEqual(id(d), id(e)) self.assertEqual(list(d), list(e)) | for i in (0, 1, 2): s = pickle.dumps(d, i) e = pickle.loads(s) self.assertNotEqual(id(d), id(e)) self.assertEqual(list(d), list(e)) def test_pickle_recursive(self): d = deque('abc') d.append(d) for i in (0, 1, 2): e = pickle.loads(pickle.dumps(d, i)) self.assertNotEqual(id(d), id(e)) self.assertEqual(id(e), id(e[-1])) | def test_pickle(self): d = deque(xrange(200)) s = pickle.dumps(d) e = pickle.loads(s) self.assertNotEqual(id(d), id(e)) self.assertEqual(list(d), list(e)) |
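The deque tests above exercise pickling across protocols 0 through 2, including a deque that contains a reference to itself. A compact version of the same round trip:

import pickle
from collections import deque

d = deque(range(5))
d.append(d)                               # the deque now refers to itself
for proto in (0, 1, 2):
    e = pickle.loads(pickle.dumps(d, proto))
    assert e is not d and e[-1] is e      # the self-reference is rebuilt on load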
test_mesg = 'From: %s@localhost\nSubject: IMAP4 test\n\ndata...\n' % USER | test_mesg = 'From: %(user)s@localhost%(lf)sSubject: IMAP4 test%(lf)s%(lf)sdata...%(lf)s' % {'user':USER, 'lf':CRLF} | def print_log(): _mesg('last %d IMAP4 interactions:' % len(_cmd_log)) for secs,line in _cmd_log: _mesg(line, secs) |
if auth: h.putheader('Authorization: Basic %s' % auth) | if auth: h.putheader('Authorization', 'Basic %s' % auth) | def open_https(self, url, data=None): """Use HTTPS protocol.""" import httplib user_passwd = None if isinstance(url, str): host, selector = splithost(url) if host: user_passwd, host = splituser(host) host = unquote(host) realhost = host else: host, selector = url urltype, rest = splittype(selector) url = rest user_passwd = None if urltype.lower() != 'https': realhost = None else: realhost, rest = splithost(rest) if realhost: user_passwd, realhost = splituser(realhost) if user_passwd: selector = "%s://%s%s" % (urltype, realhost, rest) #print "proxy via https:", host, selector if not host: raise IOError, ('https error', 'no host given') if user_passwd: import base64 auth = base64.encodestring(user_passwd).strip() else: auth = None h = httplib.HTTPS(host, 0, key_file=self.key_file, cert_file=self.cert_file) if data is not None: h.putrequest('POST', selector) h.putheader('Content-type', 'application/x-www-form-urlencoded') h.putheader('Content-length', '%d' % len(data)) else: h.putrequest('GET', selector) if auth: h.putheader('Authorization: Basic %s' % auth) if realhost: h.putheader('Host', realhost) for args in self.addheaders: h.putheader(*args) h.endheaders() if data is not None: h.send(data) errcode, errmsg, headers = h.getreply() fp = h.getfile() if errcode == 200: return addinfourl(fp, headers, "https:" + url) else: if data is None: return self.http_error(url, fp, errcode, errmsg, headers) else: return self.http_error(url, fp, errcode, errmsg, headers, data) |
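The Authorization fix above passes the header name and value to putheader() as two arguments instead of a single colon-joined string. A sketch of building the credential value, with 'user:password' standing in for real credentials:

import base64

auth = base64.encodestring('user:password').strip()
header_name, header_value = 'Authorization', 'Basic %s' % auth
# used as h.putheader(header_name, header_value) on an httplib connection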
sys.stdout.writelines(restored) | for line in restored: print line, | def restore(which): restored = difflib.restore(sys.stdin.readlines(), which) sys.stdout.writelines(restored) |
name = string.join(string.split(name, '-'), '_') | name = '_'.join(name.split('-')) | def open(self, fullurl, data=None): """Use URLopener().open(file) instead of open(file, 'r').""" fullurl = unwrap(toBytes(fullurl)) if self.tempcache and self.tempcache.has_key(fullurl): filename, headers = self.tempcache[fullurl] fp = open(filename, 'rb') return addinfourl(fp, headers, fullurl) urltype, url = splittype(fullurl) if not urltype: urltype = 'file' if self.proxies.has_key(urltype): proxy = self.proxies[urltype] urltype, proxyhost = splittype(proxy) host, selector = splithost(proxyhost) url = (host, fullurl) # Signal special case to open_*() else: proxy = None name = 'open_' + urltype self.type = urltype if '-' in name: # replace - with _ name = string.join(string.split(name, '-'), '_') if not hasattr(self, name): if proxy: return self.open_unknown_proxy(proxy, fullurl, data) else: return self.open_unknown(fullurl, data) try: if data is None: return getattr(self, name)(url) else: return getattr(self, name)(url, data) except socket.error, msg: raise IOError, ('socket error', msg), sys.exc_info()[2] |
if string.lower(urltype) != 'http': | if urltype.lower() != 'http': | def open_http(self, url, data=None): """Use HTTP protocol.""" import httplib user_passwd = None if type(url) is types.StringType: host, selector = splithost(url) if host: user_passwd, host = splituser(host) host = unquote(host) realhost = host else: host, selector = url urltype, rest = splittype(selector) url = rest user_passwd = None if string.lower(urltype) != 'http': realhost = None else: realhost, rest = splithost(rest) if realhost: user_passwd, realhost = splituser(realhost) if user_passwd: selector = "%s://%s%s" % (urltype, realhost, rest) #print "proxy via http:", host, selector if not host: raise IOError, ('http error', 'no host given') if user_passwd: import base64 auth = string.strip(base64.encodestring(user_passwd)) else: auth = None h = httplib.HTTP(host) if data is not None: h.putrequest('POST', selector) h.putheader('Content-type', 'application/x-www-form-urlencoded') h.putheader('Content-length', '%d' % len(data)) else: h.putrequest('GET', selector) if auth: h.putheader('Authorization', 'Basic %s' % auth) if realhost: h.putheader('Host', realhost) for args in self.addheaders: apply(h.putheader, args) h.endheaders() if data is not None: h.send(data + '\r\n') errcode, errmsg, headers = h.getreply() fp = h.getfile() if errcode == 200: return addinfourl(fp, headers, "http:" + url) else: if data is None: return self.http_error(url, fp, errcode, errmsg, headers) else: return self.http_error(url, fp, errcode, errmsg, headers, data) |
auth = string.strip(base64.encodestring(user_passwd)) | auth = base64.encodestring(user_passwd).strip() | def open_http(self, url, data=None): """Use HTTP protocol.""" import httplib user_passwd = None if type(url) is types.StringType: host, selector = splithost(url) if host: user_passwd, host = splituser(host) host = unquote(host) realhost = host else: host, selector = url urltype, rest = splittype(selector) url = rest user_passwd = None if string.lower(urltype) != 'http': realhost = None else: realhost, rest = splithost(rest) if realhost: user_passwd, realhost = splituser(realhost) if user_passwd: selector = "%s://%s%s" % (urltype, realhost, rest) #print "proxy via http:", host, selector if not host: raise IOError, ('http error', 'no host given') if user_passwd: import base64 auth = string.strip(base64.encodestring(user_passwd)) else: auth = None h = httplib.HTTP(host) if data is not None: h.putrequest('POST', selector) h.putheader('Content-type', 'application/x-www-form-urlencoded') h.putheader('Content-length', '%d' % len(data)) else: h.putrequest('GET', selector) if auth: h.putheader('Authorization', 'Basic %s' % auth) if realhost: h.putheader('Host', realhost) for args in self.addheaders: apply(h.putheader, args) h.endheaders() if data is not None: h.send(data + '\r\n') errcode, errmsg, headers = h.getreply() fp = h.getfile() if errcode == 200: return addinfourl(fp, headers, "http:" + url) else: if data is None: return self.http_error(url, fp, errcode, errmsg, headers) else: return self.http_error(url, fp, errcode, errmsg, headers, data) |
if string.lower(urltype) != 'https': | if urltype.lower() != 'https': | def open_https(self, url, data=None): """Use HTTPS protocol.""" import httplib user_passwd = None if type(url) in types.StringTypes: host, selector = splithost(url) if host: user_passwd, host = splituser(host) host = unquote(host) realhost = host else: host, selector = url urltype, rest = splittype(selector) url = rest user_passwd = None if string.lower(urltype) != 'https': realhost = None else: realhost, rest = splithost(rest) if realhost: user_passwd, realhost = splituser(realhost) if user_passwd: selector = "%s://%s%s" % (urltype, realhost, rest) #print "proxy via https:", host, selector if not host: raise IOError, ('https error', 'no host given') if user_passwd: import base64 auth = string.strip(base64.encodestring(user_passwd)) else: auth = None h = httplib.HTTPS(host, 0, key_file=self.key_file, cert_file=self.cert_file) if data is not None: h.putrequest('POST', selector) h.putheader('Content-type', 'application/x-www-form-urlencoded') h.putheader('Content-length', '%d' % len(data)) else: h.putrequest('GET', selector) if auth: h.putheader('Authorization: Basic %s' % auth) if realhost: h.putheader('Host', realhost) for args in self.addheaders: apply(h.putheader, args) h.endheaders() if data is not None: h.send(data + '\r\n') errcode, errmsg, headers = h.getreply() fp = h.getfile() if errcode == 200: return addinfourl(fp, headers, url) else: if data is None: return self.http_error(url, fp, errcode, errmsg, headers) else: return self.http_error(url, fp, errcode, errmsg, headers, data) |
auth = string.strip(base64.encodestring(user_passwd)) | auth = base64.encodestring(user_passwd).strip() | def open_https(self, url, data=None): """Use HTTPS protocol.""" import httplib user_passwd = None if type(url) in types.StringTypes: host, selector = splithost(url) if host: user_passwd, host = splituser(host) host = unquote(host) realhost = host else: host, selector = url urltype, rest = splittype(selector) url = rest user_passwd = None if string.lower(urltype) != 'https': realhost = None else: realhost, rest = splithost(rest) if realhost: user_passwd, realhost = splituser(realhost) if user_passwd: selector = "%s://%s%s" % (urltype, realhost, rest) #print "proxy via https:", host, selector if not host: raise IOError, ('https error', 'no host given') if user_passwd: import base64 auth = string.strip(base64.encodestring(user_passwd)) else: auth = None h = httplib.HTTPS(host, 0, key_file=self.key_file, cert_file=self.cert_file) if data is not None: h.putrequest('POST', selector) h.putheader('Content-type', 'application/x-www-form-urlencoded') h.putheader('Content-length', '%d' % len(data)) else: h.putrequest('GET', selector) if auth: h.putheader('Authorization: Basic %s' % auth) if realhost: h.putheader('Host', realhost) for args in self.addheaders: apply(h.putheader, args) h.endheaders() if data is not None: h.send(data + '\r\n') errcode, errmsg, headers = h.getreply() fp = h.getfile() if errcode == 200: return addinfourl(fp, headers, url) else: if data is None: return self.http_error(url, fp, errcode, errmsg, headers) else: return self.http_error(url, fp, errcode, errmsg, headers, data) |
dirs = string.splitfields(path, '/') | dirs = path.split('/') | def open_ftp(self, url): """Use FTP protocol.""" host, path = splithost(url) if not host: raise IOError, ('ftp error', 'no host given') host, port = splitport(host) user, host = splituser(host) if user: user, passwd = splitpasswd(user) else: passwd = None host = unquote(host) user = unquote(user or '') passwd = unquote(passwd or '') host = socket.gethostbyname(host) if not port: import ftplib port = ftplib.FTP_PORT else: port = int(port) path, attrs = splitattr(path) path = unquote(path) dirs = string.splitfields(path, '/') dirs, file = dirs[:-1], dirs[-1] if dirs and not dirs[0]: dirs = dirs[1:] if dirs and not dirs[0]: dirs[0] = '/' key = user, host, port, string.join(dirs, '/') # XXX thread unsafe! if len(self.ftpcache) > MAXFTPCACHE: # Prune the cache, rather arbitrarily for k in self.ftpcache.keys(): if k != key: v = self.ftpcache[k] del self.ftpcache[k] v.close() try: if not self.ftpcache.has_key(key): self.ftpcache[key] = \ ftpwrapper(user, passwd, host, port, dirs) if not file: type = 'D' else: type = 'I' for attr in attrs: attr, value = splitvalue(attr) if string.lower(attr) == 'type' and \ value in ('a', 'A', 'i', 'I', 'd', 'D'): type = string.upper(value) (fp, retrlen) = self.ftpcache[key].retrfile(file, type) if retrlen is not None and retrlen >= 0: import mimetools, StringIO headers = mimetools.Message(StringIO.StringIO( 'Content-Length: %d\n' % retrlen)) else: headers = noheaders() return addinfourl(fp, headers, "ftp:" + url) except ftperrors(), msg: raise IOError, ('ftp error', msg), sys.exc_info()[2] |
key = user, host, port, string.join(dirs, '/') | key = user, host, port, '/'.join(dirs) | def open_ftp(self, url): """Use FTP protocol.""" host, path = splithost(url) if not host: raise IOError, ('ftp error', 'no host given') host, port = splitport(host) user, host = splituser(host) if user: user, passwd = splitpasswd(user) else: passwd = None host = unquote(host) user = unquote(user or '') passwd = unquote(passwd or '') host = socket.gethostbyname(host) if not port: import ftplib port = ftplib.FTP_PORT else: port = int(port) path, attrs = splitattr(path) path = unquote(path) dirs = string.splitfields(path, '/') dirs, file = dirs[:-1], dirs[-1] if dirs and not dirs[0]: dirs = dirs[1:] if dirs and not dirs[0]: dirs[0] = '/' key = user, host, port, string.join(dirs, '/') # XXX thread unsafe! if len(self.ftpcache) > MAXFTPCACHE: # Prune the cache, rather arbitrarily for k in self.ftpcache.keys(): if k != key: v = self.ftpcache[k] del self.ftpcache[k] v.close() try: if not self.ftpcache.has_key(key): self.ftpcache[key] = \ ftpwrapper(user, passwd, host, port, dirs) if not file: type = 'D' else: type = 'I' for attr in attrs: attr, value = splitvalue(attr) if string.lower(attr) == 'type' and \ value in ('a', 'A', 'i', 'I', 'd', 'D'): type = string.upper(value) (fp, retrlen) = self.ftpcache[key].retrfile(file, type) if retrlen is not None and retrlen >= 0: import mimetools, StringIO headers = mimetools.Message(StringIO.StringIO( 'Content-Length: %d\n' % retrlen)) else: headers = noheaders() return addinfourl(fp, headers, "ftp:" + url) except ftperrors(), msg: raise IOError, ('ftp error', msg), sys.exc_info()[2] |
if string.lower(attr) == 'type' and \ | if attr.lower() == 'type' and \ | def open_ftp(self, url): """Use FTP protocol.""" host, path = splithost(url) if not host: raise IOError, ('ftp error', 'no host given') host, port = splitport(host) user, host = splituser(host) if user: user, passwd = splitpasswd(user) else: passwd = None host = unquote(host) user = unquote(user or '') passwd = unquote(passwd or '') host = socket.gethostbyname(host) if not port: import ftplib port = ftplib.FTP_PORT else: port = int(port) path, attrs = splitattr(path) path = unquote(path) dirs = string.splitfields(path, '/') dirs, file = dirs[:-1], dirs[-1] if dirs and not dirs[0]: dirs = dirs[1:] if dirs and not dirs[0]: dirs[0] = '/' key = user, host, port, string.join(dirs, '/') # XXX thread unsafe! if len(self.ftpcache) > MAXFTPCACHE: # Prune the cache, rather arbitrarily for k in self.ftpcache.keys(): if k != key: v = self.ftpcache[k] del self.ftpcache[k] v.close() try: if not self.ftpcache.has_key(key): self.ftpcache[key] = \ ftpwrapper(user, passwd, host, port, dirs) if not file: type = 'D' else: type = 'I' for attr in attrs: attr, value = splitvalue(attr) if string.lower(attr) == 'type' and \ value in ('a', 'A', 'i', 'I', 'd', 'D'): type = string.upper(value) (fp, retrlen) = self.ftpcache[key].retrfile(file, type) if retrlen is not None and retrlen >= 0: import mimetools, StringIO headers = mimetools.Message(StringIO.StringIO( 'Content-Length: %d\n' % retrlen)) else: headers = noheaders() return addinfourl(fp, headers, "ftp:" + url) except ftperrors(), msg: raise IOError, ('ftp error', msg), sys.exc_info()[2] |
type = string.upper(value) | type = value.upper() | def open_ftp(self, url): """Use FTP protocol.""" host, path = splithost(url) if not host: raise IOError, ('ftp error', 'no host given') host, port = splitport(host) user, host = splituser(host) if user: user, passwd = splitpasswd(user) else: passwd = None host = unquote(host) user = unquote(user or '') passwd = unquote(passwd or '') host = socket.gethostbyname(host) if not port: import ftplib port = ftplib.FTP_PORT else: port = int(port) path, attrs = splitattr(path) path = unquote(path) dirs = string.splitfields(path, '/') dirs, file = dirs[:-1], dirs[-1] if dirs and not dirs[0]: dirs = dirs[1:] if dirs and not dirs[0]: dirs[0] = '/' key = user, host, port, string.join(dirs, '/') # XXX thread unsafe! if len(self.ftpcache) > MAXFTPCACHE: # Prune the cache, rather arbitrarily for k in self.ftpcache.keys(): if k != key: v = self.ftpcache[k] del self.ftpcache[k] v.close() try: if not self.ftpcache.has_key(key): self.ftpcache[key] = \ ftpwrapper(user, passwd, host, port, dirs) if not file: type = 'D' else: type = 'I' for attr in attrs: attr, value = splitvalue(attr) if string.lower(attr) == 'type' and \ value in ('a', 'A', 'i', 'I', 'd', 'D'): type = string.upper(value) (fp, retrlen) = self.ftpcache[key].retrfile(file, type) if retrlen is not None and retrlen >= 0: import mimetools, StringIO headers = mimetools.Message(StringIO.StringIO( 'Content-Length: %d\n' % retrlen)) else: headers = noheaders() return addinfourl(fp, headers, "ftp:" + url) except ftperrors(), msg: raise IOError, ('ftp error', msg), sys.exc_info()[2] |
[type, data] = string.split(url, ',', 1) | [type, data] = url.split(',', 1) | def open_data(self, url, data=None): """Use "data" URL.""" # ignore POSTed data # # syntax of data URLs: # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data # mediatype := [ type "/" subtype ] *( ";" parameter ) # data := *urlchar # parameter := attribute "=" value import StringIO, mimetools, time try: [type, data] = string.split(url, ',', 1) except ValueError: raise IOError, ('data error', 'bad data URL') if not type: type = 'text/plain;charset=US-ASCII' semi = string.rfind(type, ';') if semi >= 0 and '=' not in type[semi:]: encoding = type[semi+1:] type = type[:semi] else: encoding = '' msg = [] msg.append('Date: %s'%time.strftime('%a, %d %b %Y %T GMT', time.gmtime(time.time()))) msg.append('Content-type: %s' % type) if encoding == 'base64': import base64 data = base64.decodestring(data) else: data = unquote(data) msg.append('Content-length: %d' % len(data)) msg.append('') msg.append(data) msg = string.join(msg, '\n') f = StringIO.StringIO(msg) headers = mimetools.Message(f, 0) f.fileno = None # needed for addinfourl return addinfourl(f, headers, url) |
semi = string.rfind(type, ';') | semi = type.rfind(';') | def open_data(self, url, data=None): """Use "data" URL.""" # ignore POSTed data # # syntax of data URLs: # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data # mediatype := [ type "/" subtype ] *( ";" parameter ) # data := *urlchar # parameter := attribute "=" value import StringIO, mimetools, time try: [type, data] = string.split(url, ',', 1) except ValueError: raise IOError, ('data error', 'bad data URL') if not type: type = 'text/plain;charset=US-ASCII' semi = string.rfind(type, ';') if semi >= 0 and '=' not in type[semi:]: encoding = type[semi+1:] type = type[:semi] else: encoding = '' msg = [] msg.append('Date: %s'%time.strftime('%a, %d %b %Y %T GMT', time.gmtime(time.time()))) msg.append('Content-type: %s' % type) if encoding == 'base64': import base64 data = base64.decodestring(data) else: data = unquote(data) msg.append('Content-length: %d' % len(data)) msg.append('') msg.append(data) msg = string.join(msg, '\n') f = StringIO.StringIO(msg) headers = mimetools.Message(f, 0) f.fileno = None # needed for addinfourl return addinfourl(f, headers, url) |
msg = string.join(msg, '\n') | msg = '\n'.join(msg) | def open_data(self, url, data=None): """Use "data" URL.""" # ignore POSTed data # # syntax of data URLs: # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data # mediatype := [ type "/" subtype ] *( ";" parameter ) # data := *urlchar # parameter := attribute "=" value import StringIO, mimetools, time try: [type, data] = string.split(url, ',', 1) except ValueError: raise IOError, ('data error', 'bad data URL') if not type: type = 'text/plain;charset=US-ASCII' semi = string.rfind(type, ';') if semi >= 0 and '=' not in type[semi:]: encoding = type[semi+1:] type = type[:semi] else: encoding = '' msg = [] msg.append('Date: %s'%time.strftime('%a, %d %b %Y %T GMT', time.gmtime(time.time()))) msg.append('Content-type: %s' % type) if encoding == 'base64': import base64 data = base64.decodestring(data) else: data = unquote(data) msg.append('Content-length: %d' % len(data)) msg.append('') msg.append(data) msg = string.join(msg, '\n') f = StringIO.StringIO(msg) headers = mimetools.Message(f, 0) f.fileno = None # needed for addinfourl return addinfourl(f, headers, url) |
if string.lower(scheme) == 'basic': | if scheme.lower() == 'basic': | def http_error_401(self, url, fp, errcode, errmsg, headers, data=None): """Error 401 -- authentication required. See this URL for a description of the basic authentication scheme: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt""" if headers.has_key('www-authenticate'): stuff = headers['www-authenticate'] import re match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff) if match: scheme, realm = match.groups() if string.lower(scheme) == 'basic': name = 'retry_' + self.type + '_basic_auth' if data is None: return getattr(self,name)(url, realm) else: return getattr(self,name)(url, realm, data) |
i = string.find(host, '@') + 1 | i = host.find('@') + 1 | def retry_http_basic_auth(self, url, realm, data=None): host, selector = splithost(url) i = string.find(host, '@') + 1 host = host[i:] user, passwd = self.get_user_passwd(host, realm, i) if not (user or passwd): return None host = user + ':' + passwd + '@' + host newurl = 'http://' + host + selector if data is None: return self.open(newurl) else: return self.open(newurl, data) |
i = string.find(host, '@') + 1 | i = host.find('@') + 1 | def retry_https_basic_auth(self, url, realm, data=None): host, selector = splithost(url) i = string.find(host, '@') + 1 host = host[i:] user, passwd = self.get_user_passwd(host, realm, i) if not (user or passwd): return None host = user + ':' + passwd + '@' + host newurl = '//' + host + selector return self.open_https(newurl) |
key = realm + '@' + string.lower(host) | key = realm + '@' + host.lower() | def get_user_passwd(self, host, realm, clear_cache = 0): key = realm + '@' + string.lower(host) if self.auth_cache.has_key(key): if clear_cache: del self.auth_cache[key] else: return self.auth_cache[key] user, passwd = self.prompt_user_passwd(host, realm) if user or passwd: self.auth_cache[key] = (user, passwd) return user, passwd |
if reason[:3] != '550': | if str(reason)[:3] != '550': | def retrfile(self, file, type): import ftplib self.endtransfer() if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1 else: cmd = 'TYPE ' + type; isdir = 0 try: self.ftp.voidcmd(cmd) except ftplib.all_errors: self.init() self.ftp.voidcmd(cmd) conn = None if file and not isdir: # Use nlst to see if the file exists at all try: self.ftp.nlst(file) except ftplib.error_perm, reason: raise IOError, ('ftp error', reason), sys.exc_info()[2] # Restore the transfer mode! self.ftp.voidcmd(cmd) # Try to retrieve as a file try: cmd = 'RETR ' + file conn = self.ftp.ntransfercmd(cmd) except ftplib.error_perm, reason: if reason[:3] != '550': raise IOError, ('ftp error', reason), sys.exc_info()[2] if not conn: # Set transfer mode to ASCII! self.ftp.voidcmd('TYPE A') # Try a directory listing if file: cmd = 'LIST ' + file else: cmd = 'LIST' conn = self.ftp.ntransfercmd(cmd) self.busy = 1 # Pass back both a suitably decorated object and a retrieval length return (addclosehook(conn[0].makefile('rb'), self.endtransfer), conn[1]) |
i = string.rfind(basepath, '/') | i = basepath.rfind('/') | def basejoin(base, url): """Utility to combine a URL with a base URL to form a new URL.""" type, path = splittype(url) if type: # if url is complete (i.e., it contains a type), return it return url host, path = splithost(path) type, basepath = splittype(base) # inherit type from base if host: # if url contains host, just inherit type if type: return type + '://' + host + path else: # no type inherited, so url must have started with // # just return it return url host, basepath = splithost(basepath) # inherit host basepath, basetag = splittag(basepath) # remove extraneous cruft basepath, basequery = splitquery(basepath) # idem if path[:1] != '/': # non-absolute path name if path[:1] in ('#', '?'): # path is just a tag or query, attach to basepath i = len(basepath) else: # else replace last component i = string.rfind(basepath, '/') if i < 0: # basepath not absolute if host: # host present, make absolute basepath = '/' else: # else keep non-absolute basepath = '' else: # remove last file component basepath = basepath[:i+1] # Interpret ../ (important because of symlinks) while basepath and path[:3] == '../': path = path[3:] i = string.rfind(basepath[:-1], '/') if i > 0: basepath = basepath[:i+1] elif i == 0: basepath = '/' break else: basepath = '' path = basepath + path if type and host: return type + '://' + host + path elif type: return type + ':' + path elif host: return '//' + host + path # don't know what this means else: return path |
i = string.rfind(basepath[:-1], '/') | i = basepath[:-1].rfind('/') | def basejoin(base, url): """Utility to combine a URL with a base URL to form a new URL.""" type, path = splittype(url) if type: # if url is complete (i.e., it contains a type), return it return url host, path = splithost(path) type, basepath = splittype(base) # inherit type from base if host: # if url contains host, just inherit type if type: return type + '://' + host + path else: # no type inherited, so url must have started with // # just return it return url host, basepath = splithost(basepath) # inherit host basepath, basetag = splittag(basepath) # remove extraneous cruft basepath, basequery = splitquery(basepath) # idem if path[:1] != '/': # non-absolute path name if path[:1] in ('#', '?'): # path is just a tag or query, attach to basepath i = len(basepath) else: # else replace last component i = string.rfind(basepath, '/') if i < 0: # basepath not absolute if host: # host present, make absolute basepath = '/' else: # else keep non-absolute basepath = '' else: # remove last file component basepath = basepath[:i+1] # Interpret ../ (important because of symlinks) while basepath and path[:3] == '../': path = path[3:] i = string.rfind(basepath[:-1], '/') if i > 0: basepath = basepath[:i+1] elif i == 0: basepath = '/' break else: basepath = '' path = basepath + path if type and host: return type + '://' + host + path elif type: return type + ':' + path elif host: return '//' + host + path # don't know what this means else: return path |
url = string.strip(url) | url = url.strip() | def unwrap(url): """unwrap('<URL:type://host/path>') --> 'type://host/path'.""" url = string.strip(url) if url[:1] == '<' and url[-1:] == '>': url = string.strip(url[1:-1]) if url[:4] == 'URL:': url = string.strip(url[4:]) return url |
url = string.strip(url[1:-1]) if url[:4] == 'URL:': url = string.strip(url[4:]) | url = url[1:-1].strip() if url[:4] == 'URL:': url = url[4:].strip() | def unwrap(url): """unwrap('<URL:type://host/path>') --> 'type://host/path'.""" url = string.strip(url) if url[:1] == '<' and url[-1:] == '>': url = string.strip(url[1:-1]) if url[:4] == 'URL:': url = string.strip(url[4:]) return url |
if not port: raise string.atoi_error, "no digits" nport = string.atoi(port) except string.atoi_error: | if not port: raise ValueError, "no digits" nport = int(port) except ValueError: | def splitnport(host, defport=-1): """Split host and port, returning numeric port. Return given default port if no ':' found; defaults to -1. Return numerical port if a valid number are found after ':'. Return None if ':' but not a valid number.""" global _nportprog if _nportprog is None: import re _nportprog = re.compile('^(.*):(.*)$') match = _nportprog.match(host) if match: host, port = match.group(1, 2) try: if not port: raise string.atoi_error, "no digits" nport = string.atoi(port) except string.atoi_error: nport = None return host, nport return host, defport |
words = string.splitfields(url, ';') | words = url.split(';') | def splitattr(url): """splitattr('/path;attr1=value1;attr2=value2;...') -> '/path', ['attr1=value1', 'attr2=value2', ...].""" words = string.splitfields(url, ';') return words[0], words[1:] |
myatoi = string.atoi list = string.split(s, '%') | myatoi = int list = s.split('%') | def unquote(s): """unquote('abc%20def') -> 'abc def'.""" mychr = chr myatoi = string.atoi list = string.split(s, '%') res = [list[0]] myappend = res.append del list[0] for item in list: if item[1:2]: try: myappend(mychr(myatoi(item[:2], 16)) + item[2:]) except: myappend('%' + item) else: myappend('%' + item) return string.join(res, "") |
return string.join(res, "") | return "".join(res) | def unquote(s): """unquote('abc%20def') -> 'abc def'.""" mychr = chr myatoi = string.atoi list = string.split(s, '%') res = [list[0]] myappend = res.append del list[0] for item in list: if item[1:2]: try: myappend(mychr(myatoi(item[:2], 16)) + item[2:]) except: myappend('%' + item) else: myappend('%' + item) return string.join(res, "") |
s = string.join(string.split(s, '+'), ' ') | s = ' '.join(s.split('+')) | def unquote_plus(s): """unquote('%7e/abc+def') -> '~/abc def'""" if '+' in s: # replace '+' with ' ' s = string.join(string.split(s, '+'), ' ') return unquote(s) |
return string.join(res, '') | return ''.join(res) | def _fast_quote(s): global _fast_safe if _fast_safe is None: _fast_safe = {} for c in _fast_safe_test: _fast_safe[c] = c res = list(s) for i in range(len(res)): c = res[i] if not _fast_safe.has_key(c): res[i] = '%%%02x' % ord(c) return string.join(res, '') |
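The run of urllib changes above replaces string-module helpers with the equivalent str methods. A side-by-side sketch using placeholder values:

name = '_'.join('open-ftp'.split('-'))           # string.join(string.split(s, '-'), '_')
lowered = 'Basic'.lower()                        # string.lower(s)
stripped = '  <URL:ftp://host/path>  '.strip()   # string.strip(s)
at = 'user@host'.find('@') + 1                   # string.find(s, '@') + 1
joined = ''.join(['%20', 'a'])                   # string.join(res, '')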