os.system('invoke-rc.d webcleaner reload')
from wc import daemon
daemon.reload()
def onCmdProxyReload (self, sender, sel, ptr):
    os.system('invoke-rc.d webcleaner reload')
    debug(GUI, "webcleaner reload")
    return 1
self.assertEqual(rating_range("1"), None)
def test_rating_range (self):
    """
    Test range parsing.
    """
    # rating_range (range)
    rating_range = wc.filter.rating.category.intrange_from_string
    self.assertEqual(rating_range(""), (None, None))
    self.assertEqual(rating_range("-"), (None, None))
    self.assertEqual(rating_range("1"), (1, None))
    self.assertEqual(rating_range("1-"), (1, None))
    self.assertEqual(rating_range("-1"), (None, 1))
    self.assertEqual(rating_range("1-1"), (1, 1))
    self.assertEqual(rating_range("1"), None)
    self.assertEqual(rating_range("-1-"), None)
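A minimal sketch of a parser matching these assertions (the real wc.filter.rating.category.intrange_from_string may well differ):

def intrange_from_string (s):
    """Parse "lo-hi" into a (lo, hi) tuple; None means unbounded.
       Malformed input like "-1-" yields None."""
    parts = s.split("-")
    if len(parts) == 1:
        # "" or "1"
        return (int(parts[0]) if parts[0] else None, None)
    if len(parts) != 2:
        # too many dashes, e.g. "-1-"
        return None
    lo, hi = parts
    return (int(lo) if lo else None, int(hi) if hi else None)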
self.js_script = ''
self.js_script = u''
def __init__ (self, url, opts):
    # True if javascript has to be filtered
    self.javascript = opts['javascript']
    self.level = opts.get('level', 0)
    self.comments = opts['comments']
    self.url = url or "unknown"
    self.js_src = False
    self.js_script = ''
    # HttpProxyClient object used in background downloads,
    # has self.jsScriptData as handler
    self.js_client = None
    # gets set by parent parser
    self.htmlparser = None
    # HtmlParser used in background downloads
    self.js_htmlparser = None
    if self.javascript:
        self.js_env = wc.js.jslib.JSEnv()
        #self.js_env.setBrowser(opts['browser'])
    self.js_output = 0
    self.js_popup = 0
data = self.js_htmlparser.getoutput()
data = unicode(self.js_htmlparser.getoutput())
def js_end_script (self, item):
    """</script> was encountered"""
    wc.log.debug(wc.LOG_JS, "%s js_end_script %s", self, item)
    self.htmlparser.debugbuf(wc.LOG_JS)
    if len(self.htmlparser.tagbuf) < 2:
        assert False, "parser %s must have script start and content " \
                      "tags in tag buffer" % self.htmlparser
    if self.js_output:
        try:
            self.js_htmlparser.feed('')
            self.js_htmlparser.flush()
        except wc.filter.FilterWait:
            wc.log.debug(wc.LOG_JS, "%s JS subprocessor is waiting", self)
            self.htmlparser.state = ('wait', 'recursive script')
            self.htmlparser.waited = 1
            wc.proxy.make_timer(1, lambda: self.js_end_script(item))
            return
        self.js_htmlparser.debugbuf(wc.LOG_JS)
        assert not self.js_htmlparser.inbuf.getvalue()
        assert not self.js_htmlparser.waitbuf
        assert len(self.htmlparser.tagbuf) >= 2, \
               "too small buffer %s" % self.htmlparser.tagbuf
        data = self.js_htmlparser.getoutput()
        self.htmlparser.tagbuf[-2:-2] = \
            [[wc.filter.rules.RewriteRule.DATA, data]]+self.js_htmlparser.tagbuf
        self.htmlparser.debugbuf(wc.LOG_JS)
        self.js_htmlparser = None
    if self.js_popup or self.js_output:
        # either the javascript part popped up some windows or
        # it wrote something with document.write()
        # in both cases the javascript is deleted
        # This could potentially delete too much as there might be
        # valid JS functions defined that get used by other scripts.
        # In this case use an exception url in the Javascript filter
        # rule.
        del self.htmlparser.tagbuf[-1]
        del self.htmlparser.tagbuf[-1]
    elif not self.filter_end_element(item[1]):
        self.htmlparser.tagbuf.append(item)
    self.htmlparser.state = ('parse',)
    wc.log.debug(wc.LOG_JS, "%s switching back to parse with", self)
    self.htmlparser.debugbuf(wc.LOG_JS)
"\n<!--\n%s\n//-->\n" % wc.js.escape_js(script)
u"\n<!--\n%s\n//-->\n" % wc.js.escape_js(script)
def js_end_element (self, item):
    """parse generated html for scripts"""
    wc.log.debug(wc.LOG_JS, "%s js_end_element buf %r",
                 self, self.htmlparser.tagbuf)
    if len(self.htmlparser.tagbuf) < 2:
        # syntax error, ignore
        wc.log.warn(wc.LOG_JS, "JS syntax error, self.tagbuf %r",
                    self.htmlparser.tagbuf)
        return
    if self.js_src:
        wc.log.debug(wc.LOG_JS, "JS src, self.tagbuf %r",
                     self.htmlparser.tagbuf)
        del self.htmlparser.tagbuf[-1]
        if len(self.htmlparser.tagbuf) < 2:
            # syntax error, ignore
            wc.log.warn(wc.LOG_JS, "JS end, self.tagbuf %s",
                        self.htmlparser.tagbuf)
            return
    if len(self.htmlparser.tagbuf) > 2 and \
       self.htmlparser.tagbuf[-3][0] == wc.filter.rules.RewriteRule.STARTTAG and \
       self.htmlparser.tagbuf[-3][1] == 'script':
        del self.htmlparser.tagbuf[-1]
    if len(self.htmlparser.tagbuf) < 2 or \
       self.htmlparser.tagbuf[-1][0] != wc.filter.rules.RewriteRule.DATA or \
       self.htmlparser.tagbuf[-2][0] != wc.filter.rules.RewriteRule.STARTTAG or \
       self.htmlparser.tagbuf[-2][1] != 'script':
        # syntax error, ignore
        return
    js_ok, js_lang = wc.js.get_js_data(self.htmlparser.tagbuf[-2][2])
    if not js_ok:
        # no JavaScript, add end tag and ignore
        self.htmlparser.tagbuf.append(item)
        return
    ver = wc.js.get_js_ver(js_lang)
    # get script data
    script = self.htmlparser.tagbuf[-1][1].strip()
    # remove html comments
    script = wc.js.remove_html_comments(script)
    if not script:
        # again, ignore an empty script
        del self.htmlparser.tagbuf[-1]
        del self.htmlparser.tagbuf[-1]
        return
    # put correctly quoted script data into buffer
    self.htmlparser.tagbuf[-1][1] = \
        "\n<!--\n%s\n//-->\n" % wc.js.escape_js(script)
    # execute script
    self.jsScript(script, ver, item)
[wc.filter.rules.RewriteRule.ENDTAG, "script"])
[wc.filter.rules.RewriteRule.ENDTAG, u"script"])
def jsScriptData (self, data, url, ver):
    """Callback for loading <script src=""> data in the background
       If downloading is finished, data is None"""
    assert self.htmlparser.state[0] == 'wait', "non-wait state"
    wc.log.debug(wc.LOG_JS, "%s jsScriptData %r", self, data)
    if data is None:
        if not self.js_script:
            wc.log.warn(wc.LOG_JS, "empty JavaScript src %s", url)
            self.js_script = u"// "+\
                _("error fetching script from %r") % url
        self.htmlparser.tagbuf.append(
            [wc.filter.rules.RewriteRule.STARTTAG, "script",
             {'type': 'text/javascript'}])
        # norm html comments
        script = wc.js.remove_html_comments(self.js_script)
        script = u"\n<!--\n%s\n//-->\n" % wc.js.escape_js(script)
        self.htmlparser.tagbuf.append(
            [wc.filter.rules.RewriteRule.DATA, script])
        # Note: <script src=""> could be missing an end tag,
        # but now we need one. Look later for a duplicate </script>.
        self.htmlparser.tagbuf.append(
            [wc.filter.rules.RewriteRule.ENDTAG, "script"])
        self.js_script = u''
        self.htmlparser.state = ('parse',)
        wc.log.debug(wc.LOG_JS, "%s switching back to parse with", self)
        self.htmlparser.debugbuf(wc.LOG_JS)
    else:
        wc.log.debug(wc.LOG_JS, "JS read %d <= %s", len(data), url)
        self.js_script += data
self.connected = False
make_timer(secs, self.set_readable)

def set_readable (self):
    self.connected = True
oldstate, self.state = self.state, 'client'
make_timer(secs, lambda: self.set_readable(oldstate))

def set_readable (self, state):
    self.state = state
def set_unreadable (self, secs):
    self.connected = False
    make_timer(secs, self.set_readable)
""" Return the line number of s[index]. Lines are assumed to be separated by the ASCII character '\\n'.
r""" Return the line number of s[index] or zero on errors. Lines are assumed to be separated by the ASCII character '\n'.
def get_line_number (s, index):
    """
    Return the line number of s[index].
    Lines are assumed to be separated by the ASCII character '\\n'.
    """
    i = 0
    if index < 0:
        index = 0
    line = 1
    while i < index:
        if s[i] == '\n':
            line += 1
        i += 1
    return line
index = 0
return 0
def get_line_number (s, index):
    """
    Return the line number of s[index].
    Lines are assumed to be separated by the ASCII character '\\n'.
    """
    i = 0
    if index < 0:
        index = 0
    line = 1
    while i < index:
        if s[i] == '\n':
            line += 1
        i += 1
    return line
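The character-by-character loop above is equivalent to counting newlines in s[0:index]; a shorter formulation (a sketch, not the project's code) delegates the scan to str.count:

def get_line_number (s, index):
    """Return the 1-based line number of s[index]."""
    # count newlines before index; clamp negative indexes like the loop does
    return s.count('\n', 0, max(index, 0)) + 1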
return u"%.2f GB"
return u"%.2f GB" % b
def strsize (b):
    """
    Return human representation of bytes b. A negative number of bytes
    raises a value error.
    """
    if b < 0:
        raise ValueError("Invalid negative byte number")
    if b == 1:
        return u"%d Byte" % b
    if b < 1024:
        return u"%d Bytes" % b
    b /= 1024.0
    if b < 1024:
        return u"%.2f kB" % b
    b /= 1024.0
    if b < 1024:
        return u"%.2f MB" % b
    b /= 1024.0
    return u"%.2f GB"
if duration > 60: duration = duration / 60
if duration >= 60: duration /= 60
def strduration (duration):
    """
    Return translated and formatted time duration.
    """
    name = _("seconds")
    if duration > 60:
        duration = duration / 60
        name = _("minutes")
    if duration > 60:
        duration = duration / 60
        name = _("hours")
    return u" %.3f %s" % (duration, name)
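For illustration (assuming English translations from _): Python 2 floors integer division, so the corrected "duration /= 60" keeps whole numbers for int input, while a float argument preserves the fraction the "%.3f" format expects:

strduration(90.0)    # -> u" 1.500 minutes"
strduration(7200.0)  # -> u" 2.000 hours"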
def get_exe_bytes (self):
    if win_cross_compiling:
        bv = "7.1"
        directory = os.path.dirname(distutils.command.__file__)
        filename = os.path.join(directory, "wininst-%s.exe" % bv)
        return open(filename, "rb").read()
    return super(MyBdistWininst, self).get_exe_bytes()
def run (self):
    if (sys.platform != "win32" and not win_cross_compiling and
        (self.distribution.has_ext_modules() or
         self.distribution.has_c_libraries())):
        raise DistutilsPlatformError \
              ("distribution contains extensions and/or C libraries; "
               "must be compiled on a Windows 32 platform")
libraries.append("python%d.%d" % tuple(sys.version_info[0:2]))
libraries.append("python%s" % get_python_version())
def run (self):
    if (sys.platform != "win32" and not win_cross_compiling and
        (self.distribution.has_ext_modules() or
         self.distribution.has_c_libraries())):
        raise DistutilsPlatformError \
              ("distribution contains extensions and/or C libraries; "
               "must be compiled on a Windows 32 platform")
if service in blurb:
if blurb.find(service) != -1:
def check_pics (rule, labellist):
    """parse and check pics labels according to given PicsRule
       return None if no rating is exceeded
       return non-empty match message if some rating exceeds the
       configured rating level
    """
    last = 0
    for mo in ratings(labellist):
        rating = mo.group('rating')
        debug(NIGHTMARE, "PICS rating", rating)
        # the blurb contains the service name and options
        blurb = labellist[last:mo.start()].lower()
        debug(NIGHTMARE, "PICS blurb", blurb)
        last = mo.end()
        for service, sdata in services.items():
            if service in blurb:
                msg = check_service(rating, sdata['categories'],
                                    sdata['name'], rule.options[service])
                if msg:
                    return msg
    return None
sdata['name'], rule.options[service])
sdata['name'], rule.ratings[service])
def check_pics (rule, labellist):
    """parse and check pics labels according to given PicsRule
       return None if no rating is exceeded
       return non-empty match message if some rating exceeds the
       configured rating level
    """
    last = 0
    for mo in ratings(labellist):
        rating = mo.group('rating')
        debug(NIGHTMARE, "PICS rating", rating)
        # the blurb contains the service name and options
        blurb = labellist[last:mo.start()].lower()
        debug(NIGHTMARE, "PICS blurb", blurb)
        last = mo.end()
        for service, sdata in services.items():
            if service in blurb:
                msg = check_service(rating, sdata['categories'],
                                    sdata['name'], rule.options[service])
                if msg:
                    return msg
    return None
for category, value in options:
for category, value in options.items():
def check_service (rating, categories, name, options):
    """find given categories in rating and compare the according option
       value with the rating value. If one of the ratings exceed its
       option value, return a non-empty message, else return None.
    """
    for category, value in options:
        category_label = categories[category]
        msg = check_pics_option(rating, category_label, value,
                                "%s %s" % (name, category))
        if msg:
            return msg
    return None
def check_pics_option (rating, category_label, option, category) {
def check_pics_option (rating, category_label, option, category):
def check_pics_option (rating, category_label, option, category) {
    """find the given label in rating and compare the value with option.
       If the rating exceeds the option, a non-empty message is returned,
       else None"""
    mo = re.search(r'%s\s+(?P<val>\d+)' % category_label, rating)
    if not mo:
        # label not found
        return None
    # get the rating value
    rating = int(mo.group("val"))
    if rating > option:
        return i18n._("PICS %s match") % category
    return None
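An illustration of the matching logic, using the corrected ":" header and made-up rating data:

# the rating string contains "violence 3"; the configured maximum is 2
msg = check_pics_option("violence 3", "violence", 2, "ICRA violence")
# re.search(r'violence\s+(?P<val>\d+)') extracts val=3; since 3 > 2,
# msg == i18n._("PICS %s match") % "ICRA violence"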
class MyInstall (install):
class MyInstall (install, object):
def p (path):
    """norm a path name to platform specific notation"""
    return os.path.normpath(path)
if os.name=="nt": self.install_nt_service() def state_nt_service (self, name): import win32serviceutil return win32serviceutil.QueryServiceStatus(name)[1] def install_nt_service (self): from wc import win32start, AppName, Configuration import win32serviceutil oldargs = sys.argv sys.argv = ['webcleaner', 'install'] win32serviceutil.HandleCommandLine(win32start.ProxyService) state = self.state_nt_service(AppName) while state==win32service.SERVICE_START_PENDING: time.sleep(1) state = self.state_nt_service(AppName) if state==win32service.SERVICE_RUNNING: sys.argv = ['webcleaner', 'stop'] win32serviceutil.HandleCommandLine(win32start.ProxyService) state = self.state_nt_service(AppName) while state==win32service.SERVICE_STOP_PENDING: time.sleep(1) state = self.state_nt_service(AppName) sys.argv = ['webcleaner', 'start'] win32serviceutil.HandleCommandLine(win32start.ProxyService) sys.argv = oldargs config = Configuration() config_url = "http://localhost:%d/" % config['port'] import time, webbrowser time.sleep(5) webbrowser.open(config_url)
def run (self):
    super(MyInstall, self).run()
    # we have to write a configuration file because we need the
    # <install_data> directory (and other stuff like author, url, ...)
    data = []
    for d in ['purelib', 'platlib', 'lib', 'headers', 'scripts', 'data']:
        attr = 'install_%s' % d
        if self.root:
            # cut off root path prefix
            val = getattr(self, attr)[len(self.root):]
        else:
            val = getattr(self, attr)
        if attr == "install_data":
            base = os.path.join(val, 'share', 'webcleaner')
            data.append('config_dir = %r' % \
                        os.path.normcase(os.path.join(base, 'config')))
            data.append('template_dir = %r' % \
                        os.path.normcase(os.path.join(base, 'templates')))
        data.append("%s = %r" % (attr, val))
    from pprint import pformat
    data.append('outputs = %s' % pformat(self.get_outputs()))
    self.distribution.create_conf_file(self.install_lib, data)
    # install proxy service
    if os.name == "nt":
        self.install_nt_service()
scripts = ['webcleaner'],
scripts = scripts,
def create_batch_file (self, directory, data, filename):
    filename = os.path.join(directory, filename)
    # write the batch file
    util.execute(write_file, (filename, data),
                 "creating %s" % filename,
                 self.verbose >= 1, self.dry_run)
self.assertEqual(data, unchunker.decode(s))
self.assertEqual(data, unchunker.process(s))
def testUnchunk (self):
    unchunker = wc.proxy.decoder.UnchunkStream.UnchunkStream()
    data = "a"*0x30
    s = "000000000030\r\n%s\r\n0\r\n\r\n" % data
    self.assertEqual(data, unchunker.decode(s))
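The test string follows standard HTTP/1.1 chunked encoding: a hexadecimal size line, CRLF, payload, CRLF, then a zero-size terminator chunk. A minimal decoder sketch of the same idea (UnchunkStream's actual interface differs; note the decode/process rename above):

def unchunk (s):
    """Decode a complete chunked-encoded string (sketch, ignores trailers)."""
    out = []
    while True:
        line, s = s.split("\r\n", 1)
        size = int(line.split(";")[0], 16)  # chunk extensions follow ';'
        if size == 0:
            break
        out.append(s[:size])
        s = s[size+2:]  # skip payload plus its trailing CRLF
    return "".join(out)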
if _rating_delete(url):
try:
    del rating_store[url]
    rating_store.write()
def _form_delete ():
    global url
    if _rating_delete(url):
        info['ratingdeleted'] = True
    else:
        error['ratingdeleted'] = True
else:
except:
def _form_delete ():
    global url
    if _rating_delete(url):
        info['ratingdeleted'] = True
    else:
        error['ratingdeleted'] = True
generic = form.has_key('generic'):
generic = form.has_key('generic')
def _form_generic (form):
    """Check generic validity"""
    global generic
    generic = form.has_key('generic'):
    return True
self._key = _winreg.OpenKey(key, sub_key)
self._key = OpenKey(key, sub_key)
def __init__ (self, key, sub_key):
    self._key = _winreg.OpenKey(key, sub_key)
    self.closed = False
val = _winreg.QueryValueEx(self._key, key)
val = QueryValueEx(self._key, key)
def __getitem__ (self, key):
    if type(key) != StringType:
        raise TypeError, "key type must be string"
    try:
        val = _winreg.QueryValueEx(self._key, key)
    except WindowsError:
        raise IndexError, "subkey %s not found" % key
    return val[0]
keys.append(key_handle(self._key, _winreg.EnumKey(self._key, i)))
keys.append(key_handle(self._key, EnumKey(self._key, i)))
def subkeys (self):
    """get the list of subkeys as key_handle objects"""
    i = 0
    keys = []
    while 1:
        try:
            #print repr(EnumKey(self._key, i))
            keys.append(key_handle(self._key, _winreg.EnumKey(self._key, i)))
        except EnvironmentError:
            break
        i += 1
    return keys
return _winreg.QueryInfoKey(self._key)[0]
return QueryInfoKey(self._key)[0]
def __len__ (self):
    return _winreg.QueryInfoKey(self._key)[0]
_winreg.SetValueEx(self._key, key, value[0], value[1])
SetValueEx(self._key, key, value[0], value[1])
def __setitem__ (self, key, value):
    """Set a registry key value. key is the key name, value is a tuple
       (type, val). For available types see the _winreg module
       documentation."""
    key = self.__getitem__(key)
    _winreg.SetValueEx(self._key, key, value[0], value[1])
_winreg.CloseKey(self._key)
CloseKey(self._key)
def close (self):
    _winreg.CloseKey(self._key)
    self.closed = True
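A hypothetical use of this key_handle wrapper, assuming Python 2's _winreg module (the registry path is illustrative):

import _winreg
key = key_handle(_winreg.HKEY_LOCAL_MACHINE, r"SOFTWARE\Microsoft")
print len(key)             # subkey count via QueryInfoKey
for sub in key.subkeys():  # key_handle objects built from EnumKey names
    sub.close()
key.close()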
FXButton(f, i18n._("Add\tAdd hostname and networks that are not filtered.\nNetworks can be either in a.b.d.c/n or a.b.c.d/e.f.g.h format."), None, self, ConfWindow.ID_NOPROXYFOR_ADD) FXButton(f, i18n._("Edit"), None, self, ConfWindow.ID_NOPROXYFOR_EDIT) FXButton(f, i18n._("Remove"), None, self, ConfWindow.ID_NOPROXYFOR_REMOVE)
FXButton(f, i18n._("Add\tAdd hostname and networks that are not filtered.\nNetworks can be either in a.b.d.c/n or a.b.c.d/e.f.g.h format."), None, self, ConfWindow.ID_NOFILTERHOSTS_ADD) FXButton(f, i18n._("Edit"), None, self, ConfWindow.ID_NOFILTERHOSTS_EDIT) FXButton(f, i18n._("Remove"), None, self, ConfWindow.ID_NOFILTERHOSTS_REMOVE)
def proxySettings (self, tabbook):
    """generate the proxy setting tab"""
    FXTabItem(tabbook, i18n._("P&roxy Settings"), None)
    proxy = FXVerticalFrame(tabbook, FRAME_THICK|FRAME_RAISED)
    proxy_top = FXHorizontalFrame(proxy,
                                  LAYOUT_FILL_X|LAYOUT_FILL_Y|LAYOUT_SIDE_TOP)
def onUpdFilterHosts (self, sender, sel, ptr):
def onUpdNoFilterHosts (self, sender, sel, ptr):
def onUpdFilterHosts (self, sender, sel, ptr):
    i = self.nofilterlist.getCurrentItem()
    if i < 0:
        sender.disable()
    elif self.nofilterlist.isItemSelected(i):
        sender.enable()
    else:
        sender.disable()
    return 1
self.nofilterhosts = ip.strhosts2map(self.nofilterhosts)
self.allowedhosts = ip.strhosts2map(self.allowedhosts)
self.nofilterhosts = ip.map2hosts(self.nofilterhosts)
self.allowedhosts = ip.map2hosts(self.allowedhosts)
def readconfig (self):
    """read the configuration from disc"""
    debug(GUI, "reading config")
    self.config = Configuration()
    for key in ['version','port','parentproxy','parentproxyport',
hosts = sort_seq(ip.map2hosts(self.nofilterhosts))
hosts = sort_seq(self.nofilterhosts)
def toxml (self):
    s = """<?xml version="1.0"?>
hosts = sort_seq(ip.map2hosts(self.allowedhosts))
hosts = sort_seq(self.allowedhosts)
def toxml (self):
    s = """<?xml version="1.0"?>
import wc.proxy
wc.proxy.mainloop(handle=handle)
from wc.proxy import mainloop
mainloop(handle=handle)
def startfunc (handle=None):
    # init logging
    initlog(os.path.join(ConfigDir, "logging.conf"))
    # we run single-threaded, decrease check interval
    sys.setcheckinterval(500)
    # support reload on posix systems
    if os.name == 'posix':
        import signal
        signal.signal(signal.SIGHUP, reload_config)
    # drop privileges
    os.chdir("/")
    # for web configuration, we cannot drop privileges
    #if os.geteuid()==0:
    #    import pwd, grp
    #    try:
    #        pentry = pwd.getpwnam("nobody")
    #        pw_uid = 2
    #        nobody = pentry[pw_uid]
    #        gentry = grp.getgrnam("nogroup")
    #        gr_gid = 2
    #        nogroup = gentry[gr_gid]
    #        os.setgid(nogroup)
    #        os.setuid(nobody)
    #    except KeyError:
    #        warn(WC, "could not drop root privileges, user nobody "+\
    #             "and/or group nogroup not found")
    #        pass
    # read configuration
    global config
    config = Configuration()
    config.init_filter_modules()
    # start the proxy
    import wc.proxy
    wc.proxy.mainloop(handle=handle)
p.parse(file(filename))
p.parse(file(filename), self)
def read_filterconf (self):
    """read filter rules"""
    # filter configuration
    for filename in filterconf_files():
        p = ZapperParser(filename)
        p.parse(file(filename))
        self['folderrules'].append(p.folder)
    self.sort()
def parse (self, fp):
def parse (self, fp, _config):
    self.config = _config
def parse (self, fp):
    debug(WC, "Parsing %s", self.filename)
    try:
        self.p.ParseFile(fp)
    except xml.parsers.expat.ExpatError:
        error(WC, "Error parsing %s", self.filename)
        raise
self.config = _config
super(WConfigParser, self).parse(fp)
super(WConfigParser, self).parse(fp, _config)
def parse (self, fp, _config):
    self.config = _config
    super(WConfigParser, self).parse(fp)
    self.config['configfile'] = self.filename
    self.config['filters'].sort()
except IOError, msg:
except IOError:
def iswriteable (fname):
    if os.path.isdir(fname) or os.path.islink(fname):
        return False
    try:
        if os.path.exists(fname):
            f = file(fname, 'a')
            f.close()
            return True
        else:
            f = file(fname, 'w')
            f.close()
            os.remove(fname)
            return True
    except IOError, msg:
        pass
    return False
def reload_config (signum, frame):
def reload_config (*dummy):
    """reload configuration function with dummy params for
       (signum, frame) from the signal handler prototype
    """
def startfunc (handle=None):
    # init logging
    initlog(os.path.join(ConfigDir, "logging.conf"))
    # we run single-threaded, decrease check interval
    sys.setcheckinterval(500)
    # support reload on posix systems
    if os.name == 'posix':
        import signal
        signal.signal(signal.SIGHUP, reload_config)
    # drop privileges
    os.chdir("/")
    # for web configuration, we cannot drop privileges
    #if os.geteuid()==0:
    #    import pwd, grp
    #    try:
    #        pentry = pwd.getpwnam("nobody")
    #        pw_uid = 2
    #        nobody = pentry[pw_uid]
    #        gentry = grp.getgrnam("nogroup")
    #        gr_gid = 2
    #        nogroup = gentry[gr_gid]
    #        os.setgid(nogroup)
    #        os.setuid(nobody)
    #    except KeyError:
    #        warn(WC, "could not drop root privileges, user nobody "+\
    #             "and/or group nogroup not found")
    #        pass
    # read configuration
    global config
    config = Configuration()
    config.init_filter_modules()
    # start the proxy
    import wc.proxy
    wc.proxy.mainloop(handle=handle)
def parse (self, fp, config):
    self.config = config
def parse (self, fp, _config):
    self.config = _config
def parse (self, fp, config):
    self.config = config
    super(WConfigParser, self).parse(fp)
    self.config['configfile'] = self.filename
    self.config['filters'].sort()
self['noproxyfor'] = {}
self['allowedhosts'] = {}
self['noproxyfor'] = [{}, [], {}]
self['allowedhosts'] = [{}, [], {}]
def reset (self):
    """Reset to default values"""
    self['port'] = 8080
    self['proxyuser'] = ""
    self['proxypass'] = ""
    self['parentproxy'] = ""
    self['parentproxyport'] = 3128
    self['parentproxyuser'] = ""
    self['parentproxypass'] = ""
    self['logfile'] = ""
    self['strict_whitelist'] = 0
    self['debuglevel'] = 0
    self['rules'] = []
    self['filters'] = []
    self['filterlist'] = [[],[],[],[],[],[],[],[],[],[]]
    self['colorize'] = 0
    self['noproxyfor'] = {}
    self['allowedhosts'] = {}
    self['starttime'] = time.time()
    self['requests'] = {'valid':0, 'error':0, 'blocked':0}
    self['local_sockets_only'] = 0
    self['localip'] = socket.gethostbyname(socket.gethostname())
    self['mime_content_rewriting'] = []
    self['headersave'] = 100
    self['showerrors'] = None
control = AnsiControl.get(ctype, '')+";"
control = AnsiControl.get(control, '')+";"
def esc_ansicolor (color):
    """convert a named color definition to an escaped ANSI color"""
    control = ''
    if ";" in color:
        control, color = color.split(";", 1)
        control = AnsiControl.get(ctype, '')+";"
    cnum = AnsiColor.get(color, '0')
    return AnsiEsc % (control+cnum)
n = 0
n = 0L
def addr2bin (addr):
    if type(addr) == type(0):
        return addr
    bytes = addr.split('.')
    if len(bytes) != 4:
        raise ValueError, 'bad IP address'
    n = 0
    for byte in bytes:
        n = n<<8 | int(byte)
    return n
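For example (and the reason for the 0L seed added above: on old 32-bit Python 2 builds, plain-int shifts could overflow instead of promoting to long):

# assuming the long-seeded variant (n = 0L):
addr2bin('127.0.0.1')        # -> 2130706433L == 0x7f000001
addr2bin('255.255.255.255')  # -> 4294967295L, too large for a plain
                             #    int on 32-bit builds before the fix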
wc.log.error(wc.LOG_JS, "JS error at %s", self.url)
wc.log.error(wc.LOG_JS, msg)
wc.log.debug(wc.LOG_JS, "JS error at %s", self.url)
wc.log.debug(wc.LOG_JS, msg.rstrip())
def js_process_error (self, msg):
    """
    Process javascript syntax error.
    """
    wc.log.error(wc.LOG_JS, "JS error at %s", self.url)
    wc.log.error(wc.LOG_JS, msg)
print >>log, wc.i18n._("adding new filter %s"), filename
print >>log, wc.i18n._("adding new filter %s")%filename
def update_filter (wconfig, dryrun=False, log=None):
    """Update the given configuration object with .zap files found at
       baseurl. If dryrun is True, only print out the changes but do nothing
       throws IOError on error
    """
    chg = False
    baseurl = wconfig['baseurl']+"filter/"
    url = baseurl+"filter-md5sums.txt"
    try:
        page = open_url(url)
    except IOError, msg:
        print >>log, "error fetching %s:"%url, msg
        return chg
    # remember all local config files
    filemap = {}
    for filename in wc.filterconf_files(wconfig.filterdir):
        filemap[os.path.basename(filename)] = filename
    # read md5sums
    for line in page.read().splitlines():
        if "<" in line:
            print >>log, "error fetching", url
            return chg
        if not line:
            continue
        md5sum, filename = line.split()
        assert filename.endswith('.zap')
        fullname = os.path.join(wc.ConfigDir, filename)
        # compare checksums
        if filemap.has_key(filename):
            f = file(fullname)
            data = f.read()
            digest = list(md5.new(data).digest())
            f.close()
            digest = "".join([ "%0.2x"%ord(c) for c in digest ])
            if digest==md5sum:
                print >>log, wc.i18n._("filter %s not changed, ignoring")%filename
                continue
            print >>log, wc.i18n._("updating filter %s")%filename
        else:
            print >>log, wc.i18n._("adding new filter %s"), filename
        # parse new filter
        url = baseurl+filename
        page = open_url(url)
        p = wc.ZapperParser(fullname, wconfig, compile_data=False)
        p.parse(fp=page)
        page.close()
        if wconfig.merge_folder(p.folder, dryrun=dryrun, log=log):
            chg = True
    url = baseurl+"extern-md5sums.txt"
    try:
        page = open_url(url)
    except IOError, msg:
        print >>log, wc.i18n._("error fetching %s:")%url, msg
        return chg
    lines = page.read().splitlines()
    page.close()
    for line in lines:
        if "<" in line:
            print >>log, wc.i18n._("error fetching %s:")%url, wc.i18n._("invalid content")
            return chg
        if not line:
            continue
        md5sum, filename = line.split()
        # XXX UNIX-generated md5sum filenames with subdirs are not portable
        fullname = os.path.join(wc.ConfigDir, filename)
        # compare checksums
        if os.path.exists(fullname):
            f = file(fullname)
            data = f.read()
            digest = list(md5.new(data).digest())
            f.close()
            digest = "".join([ "%0.2x"%ord(c) for c in digest ])
            if digest==md5sum:
                print >>log, wc.i18n._("extern filter %s not changed, ignoring")%filename
                continue
            print >>log, wc.i18n._("updating extern filter %s")%filename
        else:
            print >>log, wc.i18n._("adding new extern filter %s")%filename
        chg = True
        if not dryrun:
            url = baseurl+filename
            try:
                page = open_url(url)
            except IOError, msg:
                print >>log, wc.i18n._("error fetching %s:")%url, msg
                continue
            data = page.read()
            if not data:
                print >>log, wc.i18n._("error fetching %s:")%url, \
                      wc.i18n._("got no data")
                continue
            f = file(fullname, 'wb')
            f.write(data)
            f.close()
    return chg
version = "0.14",
version = "0.15",
def create_conf_file (self, directory, data=[]):
    data.insert(0, "# this file is automatically created by setup.py")
    filename = os.path.join(directory, self.config_file)
    # add metadata
    metanames = dir(self.metadata) + \
                ['fullname', 'contact', 'contact_email']
    for name in metanames:
        method = "get_" + name
        cmd = "%s = %s" % (name, `getattr(self.metadata, method)()`)
        data.append(cmd)
    util.execute(write_file, (filename, data),
                 "creating %s" % filename,
                 self.verbose >= 1, self.dry_run)
val = getattr(self, attr)[len(self.root):]
cutoff = len(self.root)
if self.root.endswith(os.sep):
    cutoff -= 1
val = getattr(self, attr)[cutoff:]
def run (self):
    super(MyInstall, self).run()
    # we have to write a configuration file because we need the
    # <install_data> directory (and other stuff like author, url, ...)
    data = []
    for d in ['purelib', 'platlib', 'lib', 'headers', 'scripts', 'data']:
        attr = 'install_%s' % d
        if self.root:
            # cut off root path prefix
            val = getattr(self, attr)[len(self.root):]
        else:
            val = getattr(self, attr)
        if attr == "install_data":
            base = os.path.join(val, 'share', 'webcleaner')
            data.append('config_dir = %r' % \
                        os.path.normcase(os.path.join(base, 'config')))
            data.append('template_dir = %r' % \
                        os.path.normcase(os.path.join(base, 'templates')))
        data.append("%s = %r" % (attr, val))
    from pprint import pformat
    data.append('outputs = %s' % pformat(self.get_outputs()))
    self.distribution.create_conf_file(self.install_lib, data)
context=None, target_language=None, default=None):
def translate (self, domain, msgid, mapping=None,
               context=None, target_language=None, default=None):
    """Interpolates and translate TAL expression."""
    _msg = self.gettext(msgid)
    wc.log.debug(wc.LOG_TAL, "TRANSLATED %r %r", msgid, _msg)
    return wc.webgui.TAL.TALInterpreter.interpolate(_msg, mapping)
['webcleaner.bat', 'filtertest', 'filtertest.html']),
['webcleaner.bat']),
def create_conf_file (self, directory, data=[]):
    data.insert(0, "# this file is automatically created by setup.py")
    filename = os.path.join(directory, self.config_file)
    # add metadata
    metanames = dir(self.metadata) + \
                ['fullname', 'contact', 'contact_email']
    for name in metanames:
        method = "get_" + name
        cmd = "%s = %s" % (name, `getattr(self.metadata, method)()`)
        data.append(cmd)
    util.execute(write_file, (filename, data),
                 "creating %s" % filename,
                 self.verbose >= 1, self.dry_run)
fp = file(os.path.join(os.getcwd(), "templates", "classic", "macros", "rules.html"))
fp = file(os.path.join(os.getcwd(), "test", "html", "taltest.html"))
def get_context ():
    # init and return TALES context
    context = simpleTALES.Context()
    context.addGlobal("parameter", "hullabulla")
    return context
headers = WcMessage()
headers = wc.http.header.WcMessage()
def get_wc_client_headers (host):
    """
    Get default webcleaner proxy request headers.
    """
    headers = WcMessage()
    headers['Host'] = '%s\r' % host
    headers['Accept-Encoding'] = 'gzip;q=1.0, deflate;q=0.9, identity;q=0.5\r'
    headers['Connection'] = 'Keep-Alive\r'
    headers['Keep-Alive'] = 'timeout=300\r'
    headers['User-Agent'] = 'Calzilla/6.0\r'
    return headers
for key, value in nonces.items():
for nonce, value in nonces.items():
def check_nonces ():
    # deprecate old nonces
    for key, value in nonces.items():
        noncetime = time.time() - value
        if noncetime > max_noncesecs:
            del nonces[nonce]
reaminder = remainder.strip()
remainder = remainder.strip()
def parse_ntlm_challenge (challenge):
    """parse both type0 and type2 challenges"""
    if "," in challenge:
        chal, remainder = challenge.split(",", 1)
    else:
        chal, remainder = challenge, ""
    chal = chal.strip()
    reaminder = remainder.strip()
    if not chal:
        # empty challenge (type0) encountered
        res = {'type': NTLMSSP_INIT}
    else:
        msg = base64.decodestring(chal)
        res = parse_message2(msg)
        if not res:
            warn(AUTH, "invalid NTLM challenge %s", `msg`)
    return res, remainder
kind = "string"
if "c" in kind[7:]: kind = "stringnocase" else: kind = "string"
def _kind (self, full_type, endian):
    if endian == 'local':
        kind = full_type
    else:
        kind = full_type[2:]
if kind.startswith("ldate-"):
elif kind.startswith("ldate-"):
def _kind (self, full_type, endian):
    if endian == 'local':
        kind = full_type
    else:
        kind = full_type[2:]
elif kind == "string" and (result[pos] in string.ascii_letters or result[pos] in string.digits): data.append(ord(result[pos])*1L) pos +=1
def _data (self, kind, result):
    pos = 0
    data = list('')
    while pos < len(result):
        if convert.is_c_escape(result[pos:]):
            # \0 is not a number it is the null string
            if result[pos+1] == '0':
                data.append(result[pos])
                data.append(0L)
            # \rnt are special
            else:
                data.append(result[pos:pos+2])
            pos += 2
        elif kind == "string" and (result[pos] in string.ascii_letters
                                   or result[pos] in string.digits):
            data.append(ord(result[pos])*1L)
            pos += 1
        else:
            base = convert.which_base(result[pos:])
if kind == "string":
if kind.startswith("string"):
def _length (self, kind, data):
    # Calculate the size of the data to read in the file
    if kind == "string":
        replace = ""
        for i in data:
            # except: Too lazy to handle the '\r' and co otherwise
            try:
                replace += chr(i)
            except:
                replace += '*'
        # This is for "\0"
        replace = replace.replace('*\0','*')
        # This is for two "\"
        replace = replace.replace('\\\\','*')
        # This is for the remaining "\{whatever}"
        replace = replace.replace('\\','')
        length = len(replace)
    else:
        length = self.data_size[kind]
    return length
index += 1
def read_magic (self, magic_file):
    self.magic = []
if kind == 'string':
    if self._is_null_string(data):
        success = True
    elif len(data) == len(extract):
        success = True
        for index in range(len(data)):
            if ord(extract[index]) != data[index]:
                success = False
if kind.startswith('string'):
    success = (data == value)
def classify (self, f):
    if not self.entries:
        raise StandardError("Not initialised properly")
    # Are we still looking for the ruleset to apply or are we in a rule
    found_rule = False
    # When we found the rule, what is the level that we successfull passed
    in_level = 0
    # If we failed part of the rule there is no point looking for higher
    # level subrule
    allow_next = 0
    # String provided by the successfull rule
    result = ""
except (Failed, IOError):
except (Failed, IOError), msg:
def classify (self, f):
    if not self.entries:
        raise StandardError("Not initialised properly")
    # Are we still looking for the ruleset to apply or are we in a rule
    found_rule = False
    # When we found the rule, what is the level that we successfull passed
    in_level = 0
    # If we failed part of the rule there is no point looking for higher
    # level subrule
    allow_next = 0
    # String provided by the successfull rule
    result = ""
label = Label(master, text=msg % d, anchor=tk.W, justify=tk.LEFT)
label = tk.Label(master, text=msg % d, anchor=tk.W, justify=tk.LEFT)
def body(self, master):
    d = {"appname": wc.AppName}
    msg = _("""The administrator password protects the web
label = Label(master, text=_("Password:"))
label = tk.Label(master, text=_("Password:"))
def body(self, master):
    d = {"appname": wc.AppName}
    msg = _("""The administrator password protects the web
self.pass_entry = Entry(master)
self.pass_entry = tk.Entry(master)
def body(self, master):
    d = {"appname": wc.AppName}
    msg = _("""The administrator password protects the web
wc.log.debug(wc.LOG_DNS, "%s switching to TCP", self)
def handle_timeout (self):
    # The DNS server hasn't responded to us, or we've lost the
    # packet somewhere, so let's try it again, unless the retry
    # count is too large. Each time we retry, we increase the
    # timeout (see send_dns_request).
    if not self.callback:
        return # It's already handled, so ignore this
    wc.log.warn(wc.LOG_DNS, "%s DNS timeout", self)
    if not self.connected:
        self.callback(self.hostname,
                      DnsResponse('error', 'timed out connecting'))
        self.callback = None
        return
    self.retries += 1
    if (not self.tcp and dns_accepts_tcp.get(self.nameserver, True)
        and self.retries == 1):
        # Switch to TCP
        self.TIMEOUT = 20
        self.close()
        self.tcp = True
        self.establish_connection()
    elif self.retries < 5:
        self.send_dns_request()
    elif not self.tcp and self.retries < 12:
        self.send_dns_request()
    else:
        if self.callback:
            self.callback(self.hostname, DnsResponse('error', 'timed out'))
            self.callback = None
        if self.connected:
            self.close()
wc.log.warn(wc.LOG_DNS, '%s was no response to %s',
wc.log.warn(wc.LOG_DNS, 'Wrong response %s to query %s',
def process_read (self):
    if not self.callback:
        self.close()
    # Assume that the entire answer comes in one packet
    if self.tcp:
        if len(self.recv_buffer) < 2:
            return
        header = self.recv_buffer[:2]
        (l,) = struct.unpack("!H", header)
        if len(self.recv_buffer) < 2+l:
            return
        self.read(2) # header
        wire = self.read(l)
        try:
            self.socket.shutdown(1)
        except socket.error:
            pass
    else:
        wire = self.read(1024)
    response = wc.dns.message.from_wire(wire,
                                        keyring=self.query.keyring,
                                        request_mac=self.query.mac)
    wc.log.debug(wc.LOG_DNS, "got DNS response %s", response)
    if not self.query.is_response(response):
        wc.log.warn(wc.LOG_DNS, '%s was no response to %s',
                    response, self.query)
        # Oops, this doesn't answer the right question. This can
        # happen because we're using UDP, and UDP replies might end
        # up in the wrong place: open conn A, send question to A,
        # timeout, send question to A, receive answer, close our
        # object, then open a new conn B, send question to B,
        # but get the OLD answer to A as a reply. This doesn't happen
        # with TCP but then TCP is slower.
pattern = re.sub(r"([^.])\*([^?])", r"\1[^/]*\2", pattern)
pattern = re.sub(r"([^.])\*([^?]|$)", r"\1[^/]*\2", pattern)
def convert_adzapper_pattern (pattern):
    pattern = pattern.replace(".", "\\.")
    pattern = pattern.replace("?", "\\?")
    pattern = pattern.replace("**", ".*?")
    pattern = re.sub(r"([^.])\*([^?])", r"\1[^/]*\2", pattern)
    return pattern
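The regex change above matters for patterns ending in "*": with ([^?]) the substitution requires a character after the star, so a trailing wildcard was left as a bare "*". A hypothetical pattern illustrates both variants:

# old: re.sub(r"([^.])\*([^?])", ...) leaves the trailing "*" untouched
# new: re.sub(r"([^.])\*([^?]|$)", ...) also rewrites it at end-of-string
convert_adzapper_pattern("ads.example.com/banner*")
# old -> 'ads\\.example\\.com/banner*'      (bad: '*' quantifies 'r')
# new -> 'ads\\.example\\.com/banner[^/]*'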
url="%(url)s"
url="%(url)s"/>
def write_allow (zapfile, adclass, pattern):
    #print "%s allow %s" % (adclass, `pattern`)
    d = get_rule_dict(adclass, pattern)
    zapfile.write("""<allow title="%(title)s"
 desc="%(desc)s"
 url="%(url)s"
url="%(url)s" """ % d)
url="%(url)s""" % d)
def write_block (zapfile, adclass, pattern, replacement=None):
    #print "%s block %s => %s" % (adclass, `pattern`, `replacement`)
    d = get_rule_dict(adclass, pattern)
    zapfile.write("""<block title="%(title)s"
 desc="%(desc)s"
 url="%(url)s" """ % d)
    if replacement is not None:
        zapfile.write(">%s</block>" % xmlify(replacement))
    else:
        zapfile.write("/>")
    zapfile.write("\n")
zapfile.write(">%s</block>" % xmlify(replacement))
zapfile.write("\">%s</block>" % xmlify(replacement))
def write_block (zapfile, adclass, pattern, replacement=None):
    #print "%s block %s => %s" % (adclass, `pattern`, `replacement`)
    d = get_rule_dict(adclass, pattern)
    zapfile.write("""<block title="%(title)s"
 desc="%(desc)s"
 url="%(url)s" """ % d)
    if replacement is not None:
        zapfile.write(">%s</block>" % xmlify(replacement))
    else:
        zapfile.write("/>")
    zapfile.write("\n")
zapfile.write("/>")
zapfile.write("\"/>")
def write_block (zapfile, adclass, pattern, replacement=None):
    #print "%s block %s => %s" % (adclass, `pattern`, `replacement`)
    d = get_rule_dict(adclass, pattern)
    zapfile.write("""<block title="%(title)s"
 desc="%(desc)s"
 url="%(url)s" """ % d)
    if replacement is not None:
        zapfile.write(">%s</block>" % xmlify(replacement))
    else:
        zapfile.write("/>")
    zapfile.write("\n")
newfunc.__doc__ += func.__doc__
if func.__doc__ is not None:
    newfunc.__doc__ += func.__doc__
def newfunc (*args, **kwargs):
    """
    Print deprecated warning and execute original function.
    """
    warnings.warn("Call to deprecated function %s." % func.__name__,
                  category=DeprecationWarning)
    return func(*args, **kwargs)
newfunc.__doc__ += func.__doc__
if func.__doc__ is not None:
    newfunc.__doc__ += func.__doc__
def newfunc (*args, **kwargs):
    """
    Execute function synchronized.
    """
    lock.acquire(True) # blocking
    try:
        return func(*args, **kwargs)
    finally:
        lock.release()
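A plausible enclosing decorator for this newfunc (a sketch; the surrounding wc decorator code is not shown in this row):

import threading

def synchronized (lock):
    """Decorator factory: serialize all calls to func through lock."""
    def decorator (func):
        def newfunc (*args, **kwargs):
            """
            Execute function synchronized.
            """
            lock.acquire(True) # blocking
            try:
                return func(*args, **kwargs)
            finally:
                lock.release()
        if func.__doc__ is not None:
            newfunc.__doc__ += func.__doc__
        return newfunc
    return decorator

@synchronized(threading.Lock())
def critical ():
    pass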
newfunc.__doc__ = func.__doc__
if func.__doc__ is not None: newfunc.__doc__ = func.__doc__
def newfunc (*args, **kwargs):
    """
    Raise NotImplementedError
    """
    raise NotImplementedError("%s not implemented" % func.__name__)
lines = get_file_data(rule.file)
lines = self.get_file_data(rule.file)
def add_blockdomains (self, rule):
    print "blockdomains", rule.file
    lines = get_file_data(rule.file)
    for line in lines:
        line = line.strip()
        if not line or line[0] == '#':
            continue
        self.blocked_domains.append(line)
lines = get_file_data(rule.file)
lines = self.get_file_data(rule.file)
def add_blockurls (self, rule):
    print "blockurls", rule.file
    lines = get_file_data(rule.file)
    for line in lines:
        line = line.strip()
        if not line or line[0] == '#':
            continue
        self.blocked_urls.append(line.split("/", 1))
if urltuple[1] == _block:
if urlTuple[1] == _block:
def blocked (self, urlTuple):
    # check blocked domains
    for _block in self.blocked_domains:
        debug(NIGHTMARE, "block domain", _block)
        if urltuple[1] == _block:
            return 0
    # check blocked urls
    for _block in self.blocked_urls:
        debug(NIGHTMARE, "block url", _block)
        if urlTuple[1] == _block[0] and urlTuple[2].startswith(_block[1]):
            return 0
    # check block patterns
    for _block in self.block:
        match = 1
        for i in range(len(urlTuple)):
            if _block[i]:
                debug(NIGHTMARE, "block pattern", _block[i].pattern)
                if not _block[i].search(urlTuple[i]):
                    debug(NIGHTMARE, "no match")
                    match = 0
        if match and not self.allowed(urlTuple):
            debug(HURT_ME_PLENTY, "blocked", urlTuple, "with", _block[-1])
            return _block[-1]
    return None
p = wc.configuration.ZapperParser(fullname, compile_data=False)
parserclass = wc.configuration.confparse.ZapperParser
p = parserclass(fullname, compile_data=False)
def update_filter (wconfig, dryrun=False, log=None):
    """
    Update the given configuration object with .zap files found at
    baseurl. If dryrun is True, only print out the changes but do nothing.
    @raise: IOError
    """
    print >> log, _("updating filters"), "..."
    chg = False
    baseurl = wconfig['baseurl']+"filter/"
    url = baseurl+"filter-md5sums.txt"
    try:
        page = open_url(url)
    except IOError, msg:
        print >> log, _("error fetching %s") % url, msg
        print >> log, "...", _("done")
        return chg
    # remember all local config files
    filemap = {}
    for filename in wc.configuration.filterconf_files(wconfig.filterdir):
        filemap[os.path.basename(filename)] = filename
    # read md5sums
    for line in page.read().splitlines():
        if "<" in line:
            print >> log, _("error fetching %s") % url
            print >> log, "...", _("done")
            return chg
        if not line:
            continue
        md5sum, filename = line.split()
        assert filename.endswith('.zap')
        fullname = os.path.join(wconfig.configdir, filename)
        # compare checksums
        if filemap.has_key(filename):
            f = file(fullname)
            data = f.read()
            digest = list(md5.new(data).digest())
            f.close()
            digest = "".join([ "%0.2x"%ord(c) for c in digest ])
            if digest == md5sum:
                print >> log, \
                      _("filter %s not changed, ignoring") % filename
                continue
            print >> log, _("updating filter %s") % filename
        else:
            print >> log, _("adding new filter %s") % filename
        # parse new filter
        url = baseurl + filename
        page = open_url(url)
        p = wc.configuration.ZapperParser(fullname, compile_data=False)
        p.parse(fp=page)
        page.close()
        # compare version compatibility
        if wconfig['configversion'][0] != p.folder.configversion[0]:
            print >> log, _("Incompatible folder version %s, must be %s") % \
                  (wconfig['configversion'], p.folder.configversion)
        if wconfig.merge_folder(p.folder, dryrun=dryrun, log=log):
            chg = True
    url = baseurl + "extern-md5sums.txt"
    try:
        page = open_url(url)
    except IOError, msg:
        print >> log, _("error fetching %s:") % url, msg
        print >> log, "...", _("done")
        return chg
    lines = page.read().splitlines()
    page.close()
    for line in lines:
        if "<" in line:
            print >> log, _("error fetching %s:") % url, \
                  _("invalid content")
            print >> log, "...", _("done")
            return chg
        if not line:
            continue
        md5sum, filename = line.split()
        # XXX UNIX-generated md5sum filenames with subdirs are not portable
        fullname = os.path.join(wconfig.configdir, filename)
        # compare checksums
        if os.path.exists(fullname):
            f = file(fullname)
            data = f.read()
            digest = list(md5.new(data).digest())
            f.close()
            digest = "".join([ "%0.2x"%ord(c) for c in digest ])
            if digest == md5sum:
                print >> log, \
                      _("extern filter %s not changed, ignoring") % filename
                continue
            print >> log, _("updating extern filter %s") % filename
        else:
            print >> log, _("adding new extern filter %s") % filename
        chg = True
        if not dryrun:
            url = baseurl+filename
            try:
                page = open_url(url)
            except IOError, msg:
                print >> log, _("error fetching %s:") % url, msg
                continue
            data = page.read()
            if not data:
                print >> log, _("error fetching %s:") % url, \
                      _("got no data")
                continue
            f = file(fullname, 'wb')
            f.write(data)
            f.close()
    print >> log, "...", _("done")
    return chg
self.macintosh = os.name == 'mac' or \
                 (os.name == 'posix' and sys.platform.startswith('darwin'))
def __init__ (self):
    self.in_winhelp = False # inside object tag calling WinHelp
    # running on MacOS or MacOSX
    self.macintosh = os.name == 'mac' or \
                     (os.name == 'posix' and sys.platform.startswith('darwin'))
if attrs.has_key('content') and self.macintosh:
    if attrs.get('http-equiv', '').lower() == 'refresh':
if attrs.has_key('content'):
    refresh = attrs.get('http-equiv', attrs.get('name', ''))
    if refresh.lower() == 'refresh':
def meta_start (self, attrs, htmlfilter):
    """
    Check <meta> start tag.
    """
    if attrs.has_key('content') and self.macintosh:
        # prevent CVE-2002-0153
        if attrs.get('http-equiv', '').lower() == 'refresh':
            url = attrs['content'].lower()
            if ";" in url:
                url = url.split(";", 1)[1]
            if url.startswith('url='):
                url = url[4:]
            if url.startswith('file:/'):
                msg = "%s %r\n Detected and prevented local file " \
                      "redirection"
                wc.log.warn(wc.LOG_FILTER, msg, htmlfilter, attrs['content'])
                del attrs['content']
if ";" in url: url = url.split(";", 1)[1] if url.startswith('url='): url = url[4:] if url.startswith('file:/'): msg = "%s %r\n Detected and prevented local file " \ "redirection" wc.log.warn(wc.LOG_FILTER, msg, htmlfilter, attrs['content']) del attrs['content']
url = wc.strformat.stripall(url)
for url in url.split(";url="):
    if not url.startswith('http://'):
        msg = "%s %r\n Detected invalid redirection."
        wc.log.warn(wc.LOG_FILTER, msg, htmlfilter, attrs['content'])
        del attrs['content']
        break
def meta_start (self, attrs, htmlfilter):
    """
    Check <meta> start tag.
    """
    if attrs.has_key('content') and self.macintosh:
        # prevent CVE-2002-0153
        if attrs.get('http-equiv', '').lower() == 'refresh':
            url = attrs['content'].lower()
            if ";" in url:
                url = url.split(";", 1)[1]
            if url.startswith('url='):
                url = url[4:]
            if url.startswith('file:/'):
                msg = "%s %r\n Detected and prevented local file " \
                      "redirection"
                wc.log.warn(wc.LOG_FILTER, msg, htmlfilter, attrs['content'])
                del attrs['content']
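For illustration, the kind of header the check above is meant to catch (CVE-2002-0153; the path is made up):

attrs = {'http-equiv': 'refresh',
         'content': '0;url=file:/c:/windows/win.ini'}
# meta_start() splits off "0;", strips the "url=" prefix, sees the
# "file:/" scheme, logs a warning and deletes attrs['content'].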
title = "[%s] %s" % (self.rule.get_name(), title) sender.setText(title) debug(BRING_IT_ON, "Rule title changed")
tmptitle = "[%s] %s" % (self.rule.get_name(), title) else: tmptitle = title sender.setText(tmptitle)
def onCmdTitle (self, sender, sel, ptr):
    title = sender.getText().strip()
    if not title:
        error(i18n._("empty title"))
        sender.setText(self.rule.title)
        return 1
    self.rule.title = title
    self.getApp().dirty = 1
    if self.rule.get_name() != "folder":
        title = "[%s] %s" % (self.rule.get_name(), title)
    sender.setText(title)
    debug(BRING_IT_ON, "Rule title changed")
    # send message to main window for treelist updating
    win = self.getApp().getMainWindow()
    win.handle(sender, MKUINT(win.ID_TITLE, SEL_COMMAND), ptr)
    return 1
"blocked url %s by rule %s", url, sid) if isinstance(blocked, basestring):
"blocked url %s with %s by rule %s", url, blocked, sid) if isinstance(blocked, str):
def doit (self, data, attrs):
    """
    Investigate request data for a block.
scheme = "http" return 'GET %s://localhost:%d%s HTTP/1.1' % (scheme, port, doc)
if not doc.startswith("http://"):
    doc = "http://localhost:%d%s" % (port, doc)
return 'GET %s HTTP/1.1' % doc
def doit (self, data, attrs):
    """
    Investigate request data for a block.
def ignorableWhitespace(self, d):
    """handler for ignorable whitespace"""
    self.buffer_append_data([DATA, d])
def ignorableWhitespace(self, d):
    """handler for ignorable whitespace"""
    self.buffer_append_data([DATA, d])
if '"' in val: s += " %s='%s'"%(name,val) else: s += ' %s="%s"'%(name,val)
s += ' %s' % name
if val:
    if val.find('"') != -1:
        s += "='%s'" % val
    else:
        s += '="%s"' % val
def buffer2data (self):
    """Append all tags of the buffer to the data"""
    for n in self.buffer:
        if n[0] == DATA:
            self.data += n[1]
        elif n[0] == COMMENT:
            self.data += "<!--%s-->" % n[1]
        elif n[0] == STARTTAG:
            s = "<"+n[1]
            for name, val in n[2].items():
                if '"' in val:
                    s += " %s='%s'" % (name, val)
                else:
                    s += ' %s="%s"' % (name, val)
            self.data += s+">"
        elif n[0] == ENDTAG:
            self.data += "</%s>" % n[1]
        else:
            error("unknown buffer element %s" % n[0])
    self.buffer = []
print "XXX", lang, msg
def add_i18n_context (context, lang):
    # language and i18n
    context_add(context, "lang", lang)
    try:
        translator = wc.get_translator(lang, translatorklass=Translator)
    except IOError, msg:
        print "XXX", lang, msg
        translator = NullTranslator()
    context_add(context, "i18n", translator)
_msg = TALInterpreter.interpolate(_msg, mapping)
return _msg
return wc.webgui.TAL.TALInterpreter.interpolate(_msg, mapping)
def translate (self, domain, msgid, mapping=None,
               context=None, target_language=None, default=None):
    _msg = self.gettext(msgid)
    wc.log.debug(wc.LOG_TAL, "TRANSLATE %s %s %s %s",
                 msgid, _msg, mapping, context)
    _msg = TALInterpreter.interpolate(_msg, mapping)
    return _msg
def _buf_append_data (self, data):
def buf_append_data (self, data):
def _buf_append_data (self, data):
    """we have to make sure that we have no two following DATA things
       in the tag buffer. Why? To be 100% sure that an ENCLOSED match
       really matches enclosed data.
    """
    #self._debug(NIGHTMARE, "buf_append_data")
    if data[0] == DATA and self.buf and self.buf[-1][0] == DATA:
        self.buf[-1][1] += data[1]
    else:
        self.buf.append(data)
// error fetching script from 'http://imadoofus.org/notfound.js'
// error fetching script from u'http://imadoofus.org/notfound.js'
def testScriptSrc4 (self):
    self.filt(
self.client.server_close()
self.client.server_close(self)
def put_response (self, data, protocol, status, msg, headers):
    response = "%s %d %s" % (protocol, status, msg)
    self.client.server_response(self, response, status, headers)
    self.client.server_content(data)
    self.client.server_close()
wc.proxy.HEADERS.append((self.url, 1, self.headers.headers))
def process_headers (self):
    # Headers are terminated by a blank line .. now in the regexp,
    # we want to say it's either a newline at the beginning of
    # the document, or it's a lot of headers followed by two newlines.
    # The cleaner alternative would be to read one line at a time
    # until we get to a blank line...
    m = re.match(r'^((?:[^\r\n]+\r?\n)*\r?\n)', self.recv_buffer)
    if not m:
        return
self.attrs = initStateObjects(self.headers, self.url)
def process_headers (self):
    # Headers are terminated by a blank line .. now in the regexp,
    # we want to say it's either a newline at the beginning of
    # the document, or it's a lot of headers followed by two newlines.
    # The cleaner alternative would be to read one line at a time
    # until we get to a blank line...
    m = re.match(r'^((?:[^\r\n]+\r?\n)*\r?\n)', self.recv_buffer)
    if not m:
        return
rating_store = _get_rating_store(_Storage)
rating_store = _get_ratings()
def _calc_ratings_display ():
    """
    Calculate current set of ratings to display.
    """
    global ratings_display, rating_modified
    urls = rating_store.keys()
    urls.sort()
    ratings_display = urls[curindex:curindex+_entries_per_page]
    rating_modified.clear()
    for _url in ratings_display:
        t = _strtime(rating_store[_url].modified)
        rating_modified[_url] = t.replace(u" ", u"&nbsp;")
if url in rating_store:
    rating = rating_store[url]
else:
    rating = _Rating(url, generic)
rating = _Rating(url, generic)
def _form_apply ():
    """
    Store changed ratings.
    """
    if url in rating_store:
        rating = rating_store[url]
    else:
        rating = _Rating(url, generic)
    rating.remove_categories()
    for catname, value in values.items():
        category = _get_category(catname)
        if category.iterable:
            value = [x for x in value if value[x]][0]
        else:
            value = _intrange_from_string(value)
            if value is None:
                error['ratingupdated'] = True
                return
        rating.add_category_value(category, value)
    rating_store[url] = rating
    try:
        rating_store.write()
        info['ratingupdated'] = True
    except:
        error['ratingupdated'] = True
except SSL.WantReadError:
wc.log.debug(wc.LOG_NET, 'data %r', data)
except SSL.WantReadError, err:
    wc.log.debug(wc.LOG_NET, '%s want read error %s', self, err)
def handle_read (self):
    """read data from SSL connection, put it into recv_buffer and call
       process_read"""
    assert self.connected
    wc.log.debug(wc.LOG_PROXY, '%s SslConnection.handle_read', self)
    if len(self.recv_buffer) > wc.proxy.Connection.MAX_BUFSIZE:
        wc.log.warn(wc.LOG_PROXY, '%s read buffer full', self)
        return
    try:
        data = self.socket.read(wc.proxy.Connection.RECV_BUFSIZE)
    except SSL.WantReadError:
        # you _are_ already reading, stupid
        return
    except SSL.WantWriteError:
        # you want to write? here you go
        self.handle_write()
        return
    except SSL.WantX509LookupError, err:
        wc.log.exception(wc.LOG_PROXY, "%s ssl read message", self)
        return
    except SSL.ZeroReturnError, err:
        wc.log.debug(wc.LOG_PROXY, "%s ssl finished successfully", self)
        self.delayed_close()
        return
    except SSL.Error, err:
        wc.log.exception(wc.LOG_PROXY, "read error %s", err)
        self.handle_error('read error')
        return
    if not data:
        # It's been closed, and handle_close has been called
        wc.log.debug(wc.LOG_PROXY, "%s closed, got empty data", self)
        return
    wc.log.debug(wc.LOG_NET, '%s <= read %d', self, len(data))
    wc.log.debug(wc.LOG_NET, 'data %r', data)
    self.recv_buffer += data
    self.process_read()
except SSL.WantWriteError:
except SSL.WantWriteError, err:
    wc.log.debug(wc.LOG_NET, '%s want write error %s', self, err)
def handle_read (self):
    """read data from SSL connection, put it into recv_buffer and call
       process_read"""
    assert self.connected
    wc.log.debug(wc.LOG_PROXY, '%s SslConnection.handle_read', self)
    if len(self.recv_buffer) > wc.proxy.Connection.MAX_BUFSIZE:
        wc.log.warn(wc.LOG_PROXY, '%s read buffer full', self)
        return
    try:
        data = self.socket.read(wc.proxy.Connection.RECV_BUFSIZE)
    except SSL.WantReadError:
        # you _are_ already reading, stupid
        return
    except SSL.WantWriteError:
        # you want to write? here you go
        self.handle_write()
        return
    except SSL.WantX509LookupError, err:
        wc.log.exception(wc.LOG_PROXY, "%s ssl read message", self)
        return
    except SSL.ZeroReturnError, err:
        wc.log.debug(wc.LOG_PROXY, "%s ssl finished successfully", self)
        self.delayed_close()
        return
    except SSL.Error, err:
        wc.log.exception(wc.LOG_PROXY, "read error %s", err)
        self.handle_error('read error')
        return
    if not data:
        # It's been closed, and handle_close has been called
        wc.log.debug(wc.LOG_PROXY, "%s closed, got empty data", self)
        return
    wc.log.debug(wc.LOG_NET, '%s <= read %d', self, len(data))
    wc.log.debug(wc.LOG_NET, 'data %r', data)
    self.recv_buffer += data
    self.process_read()
wc.log.exception(wc.LOG_PROXY, "%s ssl read message", self)
wc.log.exception(wc.LOG_PROXY, "%s ssl read message %s", self, err)
def handle_read (self):
    """read data from SSL connection, put it into recv_buffer and call
       process_read"""
    assert self.connected
    wc.log.debug(wc.LOG_PROXY, '%s SslConnection.handle_read', self)
    if len(self.recv_buffer) > wc.proxy.Connection.MAX_BUFSIZE:
        wc.log.warn(wc.LOG_PROXY, '%s read buffer full', self)
        return
    try:
        data = self.socket.read(wc.proxy.Connection.RECV_BUFSIZE)
    except SSL.WantReadError:
        # you _are_ already reading, stupid
        return
    except SSL.WantWriteError:
        # you want to write? here you go
        self.handle_write()
        return
    except SSL.WantX509LookupError, err:
        wc.log.exception(wc.LOG_PROXY, "%s ssl read message", self)
        return
    except SSL.ZeroReturnError, err:
        wc.log.debug(wc.LOG_PROXY, "%s ssl finished successfully", self)
        self.delayed_close()
        return
    except SSL.Error, err:
        wc.log.exception(wc.LOG_PROXY, "read error %s", err)
        self.handle_error('read error')
        return
    if not data:
        # It's been closed, and handle_close has been called
        wc.log.debug(wc.LOG_PROXY, "%s closed, got empty data", self)
        return
    wc.log.debug(wc.LOG_NET, '%s <= read %d', self, len(data))
    wc.log.debug(wc.LOG_NET, 'data %r', data)
    self.recv_buffer += data
    self.process_read()
wc.log.debug(wc.LOG_PROXY, "%s ssl finished successfully", self)
wc.log.debug(wc.LOG_PROXY, "%s ssl finished successfully (%s)", self, err)
def handle_read (self):
    """read data from SSL connection, put it into recv_buffer and call
       process_read"""
    assert self.connected
    wc.log.debug(wc.LOG_PROXY, '%s SslConnection.handle_read', self)
    if len(self.recv_buffer) > wc.proxy.Connection.MAX_BUFSIZE:
        wc.log.warn(wc.LOG_PROXY, '%s read buffer full', self)
        return
    try:
        data = self.socket.read(wc.proxy.Connection.RECV_BUFSIZE)
    except SSL.WantReadError:
        # you _are_ already reading, stupid
        return
    except SSL.WantWriteError:
        # you want to write? here you go
        self.handle_write()
        return
    except SSL.WantX509LookupError, err:
        wc.log.exception(wc.LOG_PROXY, "%s ssl read message", self)
        return
    except SSL.ZeroReturnError, err:
        wc.log.debug(wc.LOG_PROXY, "%s ssl finished successfully", self)
        self.delayed_close()
        return
    except SSL.Error, err:
        wc.log.exception(wc.LOG_PROXY, "read error %s", err)
        self.handle_error('read error')
        return
    if not data:
        # It's been closed, and handle_close has been called
        wc.log.debug(wc.LOG_PROXY, "%s closed, got empty data", self)
        return
    wc.log.debug(wc.LOG_NET, '%s <= read %d', self, len(data))
    wc.log.debug(wc.LOG_NET, 'data %r', data)
    self.recv_buffer += data
    self.process_read()