rem
stringlengths
0
322k
add
stringlengths
0
2.05M
context
stringlengths
8
228k
lines.append(fix_install_path(line))
lines.append(fix_install_path(line.rstrip()))
def fix_configdata ():
    """
    Fix install and config paths in the config file.

    Rewrites the generated _webcleaner2_configdata.py in site-packages:
    every install_*/config_*/template_* assignment line is passed through
    fix_install_path(); all other lines are copied unchanged.
    """
    name = "_webcleaner2_configdata.py"
    conffile = os.path.join(sys.prefix, "Lib", "site-packages", name)
    lines = []
    for line in file(conffile):
        if line.startswith("install_") or \
           line.startswith("config_") or \
           line.startswith("template_"):
            # strip the trailing newline so it does not end up embedded
            # inside the rewritten value
            # NOTE(review): assumes fix_install_path() restores the line
            # ending -- confirm against its implementation
            lines.append(fix_install_path(line.rstrip()))
        else:
            lines.append(line)
    f = file(conffile, "w")
    try:
        f.write("".join(lines))
    finally:
        # close even if the write fails
        f.close()
frame = tk.Frame(top)
frame = tk.Frame(root)
def do_quit (event=None): root.destroy()
import tkMessageBox answer = tkMessageBox.askyesno(_("%s config purge") % wc.AppName, _("""There are local filter rules in the configuration directory. Do you want to remove them? They can be re-used in other installations of %s, but are useless otherwise.""") % wc.AppName) if answer:
import Tkinter as tk root = tk.Tk() def do_ok (event=None):
def purge_tempfiles (): """ Ask if user wants to purge local config files. """ import wc files = glob.glob(os.path.join(wc.ConfigDir, "local_*.zap")) if not files: return import tkMessageBox answer = tkMessageBox.askyesno(_("%s config purge") % wc.AppName, _("""There are local filter rules in the configuration directory.
if msg==rating.MISSING:
if msg==MISSING:
def process_headers (self): """look for headers and process them if found""" # Headers are terminated by a blank line .. now in the regexp, # we want to say it's either a newline at the beginning of # the document, or it's a lot of headers followed by two newlines. # The cleaner alternative would be to read one line at a time # until we get to a blank line... m = re.match(r'^((?:[^\r\n]+\r?\n)*\r?\n)', self.recv_buffer) if not m: return # get headers fp = StringIO(self.read(m.end())) msg = WcMessage(fp) # put unparsed data (if any) back to the buffer msg.rewindbody() self.recv_buffer = fp.read() + self.recv_buffer debug(PROXY, "%s server headers\n%s", self, msg) if self.statuscode==100: # it's a Continue request, so go back to waiting for headers # XXX for HTTP/1.1 clients, forward this self.state = 'response' return http_ver = serverpool.http_versions[self.addr] if http_ver >= (1,1): self.persistent = not has_header_value(msg, 'Connection', 'Close') elif http_ver >= (1,0): self.persistent = has_header_value(msg, 'Connection', 'Keep-Alive') else: self.persistent = False self.attrs = get_filterattrs(self.url, [FILTER_RESPONSE_HEADER], headers=msg) try: self.headers = applyfilter(FILTER_RESPONSE_HEADER, msg, "finish", self.attrs) except FilterRating, msg: debug(PROXY, "%s FilterRating from header: %s", self, msg) msg = str(msg) if msg==rating.MISSING: self.show_rating_config(msg) else: self.show_rating_deny(msg) return server_set_headers(self.headers) self.bytes_remaining = server_set_encoding_headers(self.headers, self.is_rewrite(), self.decoders, self.bytes_remaining) # 304 Not Modified does not send any type info, because it was cached if self.statuscode!=304: # copy decoders decoders = [ d.__class__() for d in self.decoders] data = self.recv_buffer for decoder in decoders: data = decoder.decode(data) data += flush_decoders(decoders) server_set_content_headers(self.headers, data, self.document, self.mime, self.url) # XXX <doh> #if not 
self.headers.has_key('Content-Length'): # self.headers['Connection'] = 'close\r' #remove_headers(self.headers, ['Keep-Alive']) # XXX </doh> if self.statuscode in (204, 304) or self.method == 'HEAD': # These response codes indicate no content self.client.server_response(self, self.response, self.statuscode, self.headers) self.data_written = True self.state = 'recycle' else: self.state = 'content' self.attrs = get_filterattrs(self.url, _response_filters, headers=msg) debug(PROXY, "%s filtered headers %s", self, self.headers)
wc.log.info(wc.LOG_PROXY, '%s connect error %s', self, strerr)
wc.log.info(wc.LOG_PROXY, '%s connect(%s) error %s', self, addr, strerr)
def check_connect (self, addr): """ Check if the connection is etablished. See also http://cr.yp.to/docs/connect.html and connect(2) manpage. """ wc.log.debug(wc.LOG_PROXY, '%s check connect', self) self.connect_checks += 1 if self.connect_checks >= 50: wc.log.info(wc.LOG_PROXY, '%s connect timed out', self) self.handle_close() return try: (r, w, e) = select.select([], [self.fileno()], [], 0.2) except select.error, why: # not yet ready return if self.fileno() not in w: # not yet ready return err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) if err == 0: self.addr = addr self.connected = True wc.log.debug(wc.LOG_PROXY, '%s connected', self) self.handle_connect() elif err in (errno.EINPROGRESS, errno.EWOULDBLOCK): wc.proxy.make_timer(0.2, lambda a=addr: self.check_connect(addr)) else: strerr = errno.errorcode[err] wc.log.info(wc.LOG_PROXY, '%s connect error %s', self, strerr) self.handle_close()
self.headers['Content-Encoding'] = gm[1]
if gm[1] in _fix_content_types: self.headers['Content-Encoding'] = gm[1]
def process_headers (self): # Headers are terminated by a blank line .. now in the regexp, # we want to say it's either a newline at the beginning of # the document, or it's a lot of headers followed by two newlines. # The cleaner alternative would be to read one line at a time # until we get to a blank line... m = re.match(r'^((?:[^\r\n]+\r?\n)*\r?\n)', self.recv_buffer) if not m: return # handle continue requests (XXX should be in process_response?) response = self.response.split() if response and response[1] == '100': # it's a Continue request, so go back to waiting for headers # XXX for HTTP/1.1 clients, forward this self.state = 'response' return # filter headers self.headers = applyfilter(FILTER_RESPONSE_HEADER, rfc822.Message(StringIO(self.read(m.end()))), attrs=self.nofilter) #debug(HURT_ME_PLENTY, "S/Headers", `self.headers.headers`) # check content-type against our own guess gm = mimetypes.guess_type(self.document, None) if gm[0]: # guessed an own content type if not self.headers.has_key('Content-Type'): self.headers['Content-Type'] = gm[0] print >>sys.stderr, _("Warning: %s guessed Content-Type (%s)") % \ (self.url, gm[0]) elif self.headers.get('Content-Type') != gm[0]: print >>sys.stderr, _("Warning: %s guessed Content-Type (%s) != server Content-Type (%s)") % \ (self.url, gm[0], self.headers.get('Content-Type')) self.headers['Content-Type'] = gm[0] if gm[1]: # guessed an own encoding type if not self.headers.has_key('Content-Encoding'): self.headers['Content-Encoding'] = gm[1] print >>sys.stderr, _("Warning: %s guessed Content-Encoding (%s)") % \ (self.url, gm[1]) elif self.headers.get('Content-Encoding') != gm[1]: print >>sys.stderr, _("Warning: %s guessed Content-Encoding (%s) != server Content-Encoding (%s)") % \ (self.url, gm[1], self.headers.get('Content-Encoding')) self.headers['Content-Encoding'] = gm[1] # will content be rewritten? 
rewrite = None for ro in config['mime_content_rewriting']: if ro.match(self.headers.get('Content-Type', '')): rewrite = "True" break # add client accept-encoding value self.headers['Accept-Encoding'] = self.client.compress if self.headers.has_key('Content-Length'): self.bytes_remaining = int(self.headers['Content-Length']) #debug(HURT_ME_PLENTY, "%d bytes remaining"%self.bytes_remaining) if rewrite: remove_headers(self.headers, ['Content-Length']) else: self.bytes_remaining = None
debug("Socket sent %r" % data)
debug("Socket sent %d bytes: %r" % (len(data), data))
def socket_send (sock, data):
    """
    Send all data to the socket.
    """
    sock.sendall(data)
    # log the byte count too; large payloads are hard to eyeball in %r
    debug("Socket sent %d bytes: %r" % (len(data), data))
debug("Socket read %r" % data)
debug("Socket read %d bytes: %r" % (len(data), data))
def socket_read (sock):
    """
    Read data from socket until no more data is available.
    """
    data = ""
    while wc.proxy.readable_socket(sock):
        s = sock.recv(8192)
        if not s:
            break
        data += s
    # log the byte count too; large payloads are hard to eyeball in %r
    debug("Socket read %d bytes: %r" % (len(data), data))
    return data
debug("Socket file read %r" % data)
debug("Socket file read %d bytes: %r" % (len(data), data))
def socketfile_read (sock):
    """
    Read data from a socket file object until no more data is available.

    Returns the data read plus whatever remains in the file object's
    internal read buffer.
    """
    data = ""
    while wc.proxy.readable_socket(sock):
        s = sock.read(1)
        if not s:
            break
        data += s
    # log the byte count too; large payloads are hard to eyeball in %r
    debug("Socket file read %d bytes: %r" % (len(data), data))
    # NOTE(review): relies on the private _rbuf attribute of the socket
    # file object -- verify against the Python version in use
    return data + sock._rbuf
o HTTP/0.9 - 1.1
o HTTP/1.1 support
def create_conf_file(self, directory, data=None):
    """
    Create the configuration file in the given directory.

    data is an optional list of extra lines; a generated-file banner is
    prepended and project metadata (fullname, contact, ...) appended.
    """
    # avoid a mutable default argument: the old ``data=[]`` default was
    # shared across calls and accumulated lines via insert()/append()
    if data is None:
        data = []
    data.insert(0, "# this file is automatically created by setup.py")
    filename = os.path.join(directory, self.config_file)
    # add metadata
    metanames = dir(self.metadata) + \
                ['fullname', 'contact', 'contact_email']
    for name in metanames:
        method = "get_" + name
        cmd = "%s = %s" % (name, repr(getattr(self.metadata, method)()))
        data.append(cmd)
    util.execute(write_file, (filename, data),
                 "creating %s" % filename,
                 self.verbose >= 1, self.dry_run)
fp = file(self.filename, 'wb') pickle.dump(self.cache, fp, 1) fp.close()
wc.log.debug(wc.LOG_RATING, "Write ratings to %r", self.filename) def callback (fp, obj): pickle.dump(obj, fp, 1) wc.fileutil.write_save(self.filename, self.cache, callback=callback)
def write (self):
    """
    Write the pickled cache to self.filename on disk.
    """
    fp = open(self.filename, 'wb')
    try:
        # protocol 1: binary pickle format
        pickle.dump(self.cache, fp, 1)
    finally:
        # close the file even if pickling fails (the old code leaked
        # the handle on error)
        fp.close()
if self.headers.get('Content-Type', '').lower().startswith('application/x-httpd-php'): self.headers['Content-Type'] = 'text/html'
def check_headers (self): """add missing content-type and/or encoding headers if needed""" # 304 Not Modified does not send any type or encoding info, # because this info was cached if self.statuscode == '304': return # check content-type against our own guess i = self.document.find('?') if i>0: document = self.document[:i] else: document = self.document gm = mimetypes.guess_type(document, None) if gm[0]: # guessed an own content type if self.headers.get('Content-Type') is None: print >>sys.stderr, "Warning: add Content-Type %s to %s" % \ (`gm[0]`, `self.url`) self.headers['Content-Type'] = gm[0] # fix some content types elif not self.headers['Content-Type'].startswith(gm[0]) and \ gm[0] in _fix_content_types: print >>sys.stderr, "Warning: change Content-Type from %s to %s in %s" % \ (`self.headers['Content-Type']`, `gm[0]`, `self.url`) self.headers['Content-Type'] = gm[0] if gm[1] and gm[1] in _fix_content_encodings: # guessed an own encoding type if self.headers.get('Content-Encoding') is None: self.headers['Content-Encoding'] = gm[1] print >>sys.stderr, "Warning: add Content-Encoding %s to %s" % \ (`gm[1]`, `self.url`) elif self.headers.get('Content-Encoding') != gm[1]: print >>sys.stderr, "Warning: change Content-Encoding from %s to %s in %s" % \ (`self.headers['Content-Encoding']`, `gm[1]`, `self.url`) self.headers['Content-Encoding'] = gm[1]
sock = wc.objproxy.Proxy(sock)
def create_socket (family, socktype, proto=0):
    """
    Create a socket with the given family, type and protocol and
    remember those creation parameters as attributes on the object.
    """
    sock = socket.socket(family, socktype, proto=proto)
    # remember the creation parameters for later inspection
    sock.family = family
    sock.socktype = socktype
    sock.proto = proto
    # XXX disable custom timeouts for now
    #sock.settimeout(wc.configuration.config['timeout'])
    inet_families = [socket.AF_INET]
    if has_ipv6:
        inet_families.append(socket.AF_INET6)
    if socktype == socket.SOCK_STREAM and family in inet_families:
        # disable NAGLE algorithm, which means sending pending data
        # immediately, possibly wasting bandwidth but improving
        # responsiveness for fast networks
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    return sock
return self.socket.accept()
res = self.socket.accept() if res is not None: sock = wc.objproxy.Proxy(res[0]) sock.family = self.socket.family sock.socktype = self.socket.socktype sock.proto = self.socket.proto return (sock, res[1])
def accept (self): """ Accept a new connection on the socket.
debug(FILTER, "rule %s filter_tag", self.title) debug(FILTER, "original tag %r attrs %s", tag, attrs) debug(FILTER, "replace %s with %r", num_part(self.part), self.replacement)
def filter_tag (self, tag, attrs):
    """
    Return filtered tag data for given tag and attributes.

    The result is a parser token list: either [DATA, text] or
    [STARTTAG, name, attrs], depending on which part of the tag
    this rule replaces.
    """
    if self.part == COMPLETE:
        return [DATA, ""]
    if self.part == TAGNAME:
        return [STARTTAG, self.replacement, attrs]
    if self.part == TAG:
        return [DATA, self.replacement]
    if self.part == ENCLOSED:
        return [STARTTAG, tag, attrs]
    newattrs = {}
    # look for matching tag attributes
    for attr, val in attrs.items():
        ro = self.attrs_ro.get(attr)
        if ro:
            mo = ro.search(val)
            if mo:
                if self.part == ATTR:
                    # replace complete attr, and make it possible
                    # for replacement to generate multiple attributes,
                    # eg "a=b c=d"
                    # XXX this is limited, but works so far
                    # XXX split does not honor quotes
                    for f in self.replacement.split():
                        # BUGFIX: check each token (not the whole
                        # replacement) for a name=value form, and use the
                        # token itself as the attribute name otherwise
                        if '=' in f:
                            k, v = f.split('=', 1)
                            newattrs[k] = mo.expand(v)
                        else:
                            newattrs[f] = None
                elif self.part == ATTRVAL:
                    # backreferences are replaced
                    newattrs[attr] = mo.expand(self.replacement)
                else:
                    error(FILTER, "Invalid part value %s", self.part)
                continue
        # nothing matched, just append the attribute as is
        newattrs[attr] = val
    return [STARTTAG, tag, newattrs]
debug(FILTER, "filtered tag %s attrs %s", tag, newattrs)
def filter_tag (self, tag, attrs): """return filtered tag data for given tag and attributes""" #debug(FILTER, "rule %s filter_tag", self.title) #debug(FILTER, "original tag %r attrs %s", tag, attrs) #debug(FILTER, "replace %s with %r", num_part(self.part), self.replacement) if self.part==COMPLETE: return [DATA, ""] if self.part==TAGNAME: return [STARTTAG, self.replacement, attrs] if self.part==TAG: return [DATA, self.replacement] if self.part==ENCLOSED: return [STARTTAG, tag, attrs] newattrs = {} # look for matching tag attributes for attr,val in attrs.items(): ro = self.attrs_ro.get(attr) if ro: mo = ro.search(val) if mo: if self.part==ATTR: # replace complete attr, and make it possible # for replacement to generate multiple attributes, # eg "a=b c=d" # XXX this is limited, but works so far # XXX split does not honor quotes for f in self.replacement.split(): if '=' in self.replacement: k,v = f.split('=') newattrs[k] = mo.expand(v) else: newattrs[self.replacement] = None elif self.part==ATTRVAL: # backreferences are replaced newattrs[attr] = mo.expand(self.replacement) else: error(FILTER, "Invalid part value %s", self.part) continue # nothing matched, just append the attribute as is newattrs[attr] = val #debug(FILTER, "filtered tag %s attrs %s", tag, newattrs) return [STARTTAG, tag, newattrs]
debug(FILTER, "rule %s filter_complete", self.title) debug(FILTER, "original buffer %r", buf) debug(FILTER, "part %s", num_part(self.part))
def filter_complete (self, i, buf):
    """replace complete tag data in buf with replacement"""
    part = self.part
    if part == COMPLETE:
        # everything from the start tag onwards becomes the replacement
        buf[i:] = [[DATA, self.replacement]]
    elif part == TAG:
        # start and end tag are each replaced by the replacement data
        buf[i] = [DATA, self.replacement]
        buf[-1] = [DATA, self.replacement]
    elif part == TAGNAME:
        # rename start and end tag, dropping all attributes
        buf[i] = [STARTTAG, self.replacement, {}]
        buf[-1] = [ENDTAG, self.replacement]
    elif part == ENCLOSED:
        # only the data between start and end tag is replaced
        buf[i+1:-1] = [[DATA, self.replacement]]
if not (self.attrs or self.part!=COMPLETE or self.replacement or \ self.enclosed): return s+"/>\n"
def toxml (self):
    """
    Rule data as XML for storing.
    """
    s = super(RewriteRule, self).toxml()
    if self.tag != 'a':
        s += '\n tag="%s"' % self.tag
    if not (self.attrs or self.part != COMPLETE or self.replacement or
            self.enclosed):
        # nothing else to store, close the element right away
        return s + "/>\n"
    s += ">\n"
    for key, val in self.attrs.items():
        s += "<attr"
        if key != 'href':
            s += ' name="%s"' % key
        if val:
            s += ">" + xmlify(val) + "</attr>\n"
        else:
            s += "/>\n"
    if self.enclosed:
        s += "<enclosed>" + xmlify(self.enclosed) + "</enclosed>\n"
    # only emit a <replacement> element when it carries information;
    # the old code always wrote an (empty) element
    if self.part != COMPLETE or self.replacement:
        s += "<replacement"
        s += ' part="%s"' % num_part(self.part)
        if self.replacement:
            s += '>' + xmlify(self.replacement) + "</replacement>\n"
        else:
            s += "/>\n"
    s += self.matchestoxml()
    s += "</%s>" % self.get_name()
    return s
s += "<replacement" s += ' part="%s"' % num_part(self.part) if self.replacement: s += '>'+xmlify(self.replacement)+"</replacement>\n" else: s += "/>\n"
if self.part!=COMPLETE or self.replacement: s += "<replacement" s += ' part="%s"' % num_part(self.part) if self.replacement: s += '>'+xmlify(self.replacement)+"</replacement>\n" else: s += "/>\n"
def toxml (self): """Rule data as XML for storing""" s = super(RewriteRule, self).toxml() if self.tag!='a': s += '\n tag="%s"' % self.tag if not (self.attrs or self.part!=COMPLETE or self.replacement or \ self.enclosed): return s+"/>\n" s += ">\n" for key, val in self.attrs.items(): s += "<attr" if key!='href': s += ' name="%s"' % key if val: s += ">"+xmlify(val)+"</attr>\n" else: s += "/>\n" if self.enclosed: s += "<enclosed>"+xmlify(self.enclosed)+"</enclosed>\n" s += "<replacement" s += ' part="%s"' % num_part(self.part) if self.replacement: s += '>'+xmlify(self.replacement)+"</replacement>\n" else: s += "/>\n" s += self.matchestoxml() s += "</%s>" % self.get_name() return s
def __init__ (self):
def __init__ (self, debug=0):
def __init__ (self):
    """initialize the internal parser"""
    # htmlsax.parser receives this object as its handler, so parser
    # callbacks are dispatched to methods of this instance
    # (presumably start/end tag and data events -- confirm in htmlsax)
    self.parser = htmlsax.parser(self)
raise ValueError, "Invalid hostname type %r" % hostname
raise ValueError, "invalid hostname type %r" % hostname
def coerce_hostname (hostname):
    """
    Assure that hostname is a plain string.
    """
    if isinstance(hostname, unicode):
        # XXX encode?
        return str(hostname)
    if not isinstance(hostname, str):
        raise ValueError("Invalid hostname type %r" % hostname)
    return hostname
wc.proxy.make_timer(30, self.handle_connect_timeout)
def establish_connection (self):
    """
    Connect to the configured nameserver, via TCP or UDP.
    """
    family = self.get_family(self.nameserver)
    if self.tcp:
        socktype = socket.SOCK_STREAM
    else:
        socktype = socket.SOCK_DGRAM
    self.create_socket(family, socktype)
    self.connect((self.nameserver, self.PORT))
    if self.tcp:
        # TCP connect completes asynchronously: guard with a timeout;
        # the request itself goes out from handle_connect()
        wc.proxy.make_timer(30, self.handle_connect_timeout)
    else:
        # UDP is connectionless, issue the request immediately
        self.send_dns_request()
self.send_dns_request()
wc.proxy.make_timer(30, self.handle_connect_timeout)
def establish_connection (self): family = self.get_family(self.nameserver) if self.tcp: self.create_socket(family, socket.SOCK_STREAM) self.connect((self.nameserver, self.PORT)) wc.proxy.make_timer(30, self.handle_connect_timeout) else: self.create_socket(family, socket.SOCK_DGRAM) self.connect((self.nameserver, self.PORT)) self.send_dns_request()
dns_accepts_tcp[self.nameserver] = True
if self.tcp: dns_accepts_tcp[self.nameserver] = True
def handle_connect (self):
    """
    Called when the connection is established.
    """
    if self.tcp:
        # only record TCP capability for actual TCP connections; the
        # old code marked the nameserver unconditionally
        dns_accepts_tcp[self.nameserver] = True
    self.send_dns_request()
wc.log.debug(wc.LOG_DNS, "sending DNS query %s", self.query)
wc.log.debug(wc.LOG_DNS, "%s sending DNS query %s", self, wc.strformat.indent(self.query))
def send_dns_request (self): # Issue the request and set a timeout if not self.callback: # Only issue if we have someone waiting return
wc.log.debug(wc.LOG_DNS, "got DNS response %s", response)
def process_read (self): if not self.callback: self.close() # Assume that the entire answer comes in one packet if self.tcp: if len(self.recv_buffer) < 2: return header = self.recv_buffer[:2] (l,) = struct.unpack("!H", header) if len(self.recv_buffer) < 2+l: return self.read(2) # header wire = self.read(l) try: self.socket.shutdown(1) except socket.error: pass else: wire = self.read(1024) response = wc.dns.message.from_wire( wire, keyring=self.query.keyring, request_mac=self.query.mac) wc.log.debug(wc.LOG_DNS, "got DNS response %s", response) if not self.query.is_response(response): wc.log.warn(wc.LOG_DNS, 'Wrong response %s to query %s', response, self.query) # Oops, this doesn't answer the right question. This can # happen because we're using UDP, and UDP replies might end # up in the wrong place: open conn A, send question to A, # timeout, send question to A, receive answer, close our # object, then open a new conn B, send question to B, # but get the OLD answer to A as a reply. This doesn't happen # with TCP but then TCP is slower.
wc.log.warn(wc.LOG_DNS, 'Wrong response %s to query %s', response, self.query)
wc.log.warn(wc.LOG_DNS, '%s wrong response %s to query %s', self, wc.strformat.indent(response), wc.strformat.indent(self.query))
def process_read (self): if not self.callback: self.close() # Assume that the entire answer comes in one packet if self.tcp: if len(self.recv_buffer) < 2: return header = self.recv_buffer[:2] (l,) = struct.unpack("!H", header) if len(self.recv_buffer) < 2+l: return self.read(2) # header wire = self.read(l) try: self.socket.shutdown(1) except socket.error: pass else: wire = self.read(1024) response = wc.dns.message.from_wire( wire, keyring=self.query.keyring, request_mac=self.query.mac) wc.log.debug(wc.LOG_DNS, "got DNS response %s", response) if not self.query.is_response(response): wc.log.warn(wc.LOG_DNS, 'Wrong response %s to query %s', response, self.query) # Oops, this doesn't answer the right question. This can # happen because we're using UDP, and UDP replies might end # up in the wrong place: open conn A, send question to A, # timeout, send question to A, receive answer, close our # object, then open a new conn B, send question to B, # but get the OLD answer to A as a reply. This doesn't happen # with TCP but then TCP is slower.
'Truncated TCP DNS packet: %s from %s for %r', response, self.nameserver, self.hostname) self.handle_error("dns error: truncated TCP packet")
'%s truncated TCP DNS packet from %s', self, self.nameserver) self.handle_error("DNS error: truncated TCP packet")
def process_read (self): if not self.callback: self.close() # Assume that the entire answer comes in one packet if self.tcp: if len(self.recv_buffer) < 2: return header = self.recv_buffer[:2] (l,) = struct.unpack("!H", header) if len(self.recv_buffer) < 2+l: return self.read(2) # header wire = self.read(l) try: self.socket.shutdown(1) except socket.error: pass else: wire = self.read(1024) response = wc.dns.message.from_wire( wire, keyring=self.query.keyring, request_mac=self.query.mac) wc.log.debug(wc.LOG_DNS, "got DNS response %s", response) if not self.query.is_response(response): wc.log.warn(wc.LOG_DNS, 'Wrong response %s to query %s', response, self.query) # Oops, this doesn't answer the right question. This can # happen because we're using UDP, and UDP replies might end # up in the wrong place: open conn A, send question to A, # timeout, send question to A, receive answer, close our # object, then open a new conn B, send question to B, # but get the OLD answer to A as a reply. This doesn't happen # with TCP but then TCP is slower.
'truncated UDP DNS packet from %s for %r', self.nameserver, self.hostname)
'%s truncated UDP DNS packet from %s', self, self.nameserver,)
def process_read (self): if not self.callback: self.close() # Assume that the entire answer comes in one packet if self.tcp: if len(self.recv_buffer) < 2: return header = self.recv_buffer[:2] (l,) = struct.unpack("!H", header) if len(self.recv_buffer) < 2+l: return self.read(2) # header wire = self.read(l) try: self.socket.shutdown(1) except socket.error: pass else: wire = self.read(1024) response = wc.dns.message.from_wire( wire, keyring=self.query.keyring, request_mac=self.query.mac) wc.log.debug(wc.LOG_DNS, "got DNS response %s", response) if not self.query.is_response(response): wc.log.warn(wc.LOG_DNS, 'Wrong response %s to query %s', response, self.query) # Oops, this doesn't answer the right question. This can # happen because we're using UDP, and UDP replies might end # up in the wrong place: open conn A, send question to A, # timeout, send question to A, receive answer, close our # object, then open a new conn B, send question to B, # but get the OLD answer to A as a reply. This doesn't happen # with TCP but then TCP is slower.
wc.log.debug(wc.LOG_DNS, "DNS answer %s", answer)
wc.log.debug(wc.LOG_DNS, "%s DNS answer %s", self, wc.strformat.indent(answer))
def process_read (self): if not self.callback: self.close() # Assume that the entire answer comes in one packet if self.tcp: if len(self.recv_buffer) < 2: return header = self.recv_buffer[:2] (l,) = struct.unpack("!H", header) if len(self.recv_buffer) < 2+l: return self.read(2) # header wire = self.read(l) try: self.socket.shutdown(1) except socket.error: pass else: wire = self.read(1024) response = wc.dns.message.from_wire( wire, keyring=self.query.keyring, request_mac=self.query.mac) wc.log.debug(wc.LOG_DNS, "got DNS response %s", response) if not self.query.is_response(response): wc.log.warn(wc.LOG_DNS, 'Wrong response %s to query %s', response, self.query) # Oops, this doesn't answer the right question. This can # happen because we're using UDP, and UDP replies might end # up in the wrong place: open conn A, send question to A, # timeout, send question to A, receive answer, close our # object, then open a new conn B, send question to B, # but get the OLD answer to A as a reply. This doesn't happen # with TCP but then TCP is slower.
wc.log.warn(wc.LOG_DNS, "No answer: %s", response)
wc.log.warn(wc.LOG_DNS, "%s no answer", self)
def process_read (self): if not self.callback: self.close() # Assume that the entire answer comes in one packet if self.tcp: if len(self.recv_buffer) < 2: return header = self.recv_buffer[:2] (l,) = struct.unpack("!H", header) if len(self.recv_buffer) < 2+l: return self.read(2) # header wire = self.read(l) try: self.socket.shutdown(1) except socket.error: pass else: wire = self.read(1024) response = wc.dns.message.from_wire( wire, keyring=self.query.keyring, request_mac=self.query.mac) wc.log.debug(wc.LOG_DNS, "got DNS response %s", response) if not self.query.is_response(response): wc.log.warn(wc.LOG_DNS, 'Wrong response %s to query %s', response, self.query) # Oops, this doesn't answer the right question. This can # happen because we're using UDP, and UDP replies might end # up in the wrong place: open conn A, send question to A, # timeout, send question to A, receive answer, close our # object, then open a new conn B, send question to B, # but get the OLD answer to A as a reply. This doesn't happen # with TCP but then TCP is slower.
for key, item in form.items():
for key in form:
def get_prefix_vals (form, prefix): """return a list of (key, value) pairs where ``prefix+key'' is a valid form field""" res = [] for key, item in form.items(): if key.startswith(prefix): res.append(key[len(prefix):], get_item_value(item)) return res
res.append(key[len(prefix):], get_item_value(item))
res.append(key[len(prefix):], get_item_value(form[key]))
def get_prefix_vals (form, prefix):
    """return a list of (key, value) pairs where ``prefix+key''
       is a valid form field"""
    res = []
    for key in form:
        if key.startswith(prefix):
            # append a single (key, value) tuple; the old code passed
            # two arguments to list.append(), which raises TypeError
            res.append((key[len(prefix):], get_item_value(form[key])))
    return res
warn(PROXY, i18n._("set Content-Type from %s to %s in %s"), `str(ct)`, `self.mime`, `self.url`)
warn(PROXY, i18n._("add Content-Type %s in %s"), `self.mime`, `self.url`)
def check_headers (self): """add missing content-type and/or encoding headers""" # 304 Not Modified does not send any type or encoding info, # because this info was cached if self.statuscode == '304': return # check content-type against our own guess i = self.document.find('?') if i>0: document = self.document[:i] else: document = self.document gm = mimetypes.guess_type(document, None) ct = self.headers.get('Content-Type', None) if self.mime: if ct is None: warn(PROXY, i18n._("set Content-Type from %s to %s in %s"), `str(ct)`, `self.mime`, `self.url`) self.headers['Content-Type'] = "%s\r"%self.mime elif not ct.startswith(self.mime): i = ct.find(';') if i== -1: val = self.mime else: val = self.mime + ct[i] warn(PROXY, i18n._("set Content-Type from %s to %s in %s"), `str(ct)`, `val`, `self.url`) self.headers['Content-Type'] = "%s\r"%val elif gm[0]: # guessed an own content type if ct is None: warn(PROXY, i18n._("add Content-Type %s to %s"), `gm[0]`, `self.url`) self.headers['Content-Type'] = "%s\r"%gm[0] # fix some content types elif not ct.startswith(gm[0]) and \ gm[0] in _fix_content_types: warn(PROXY, i18n._("change Content-Type from %s to %s in %s"), `ct`, `gm[0]`, `self.url`) self.headers['Content-Type'] = "%s\r"%gm[0] if gm[1] and gm[1] in _fix_content_encodings: ce = self.headers.get('Content-Encoding', None) # guessed an own encoding type if ce is None: self.headers['Content-Encoding'] = "%s\r"%gm[1] warn(PROXY, i18n._("add Content-Encoding %s to %s"), `gm[1]`, `self.url`) elif ce != gm[1]: warn(PROXY, i18n._("change Content-Encoding from %s to %s in %s"), `ce`, `gm[1]`, `self.url`) self.headers['Content-Encoding'] = "%s\r"%gm[1] # hmm, fix application/x-httpd-php* if self.headers.get('Content-Type', '').lower().startswith('application/x-httpd-php'): warn(PROXY, i18n._("fix x-httpd-php Content-Type")) self.headers['Content-Type'] = 'text/html\r'
val = self.mime + ct[i]
val = self.mime + ct[i:]
def check_headers (self): """add missing content-type and/or encoding headers""" # 304 Not Modified does not send any type or encoding info, # because this info was cached if self.statuscode == '304': return # check content-type against our own guess i = self.document.find('?') if i>0: document = self.document[:i] else: document = self.document gm = mimetypes.guess_type(document, None) ct = self.headers.get('Content-Type', None) if self.mime: if ct is None: warn(PROXY, i18n._("set Content-Type from %s to %s in %s"), `str(ct)`, `self.mime`, `self.url`) self.headers['Content-Type'] = "%s\r"%self.mime elif not ct.startswith(self.mime): i = ct.find(';') if i== -1: val = self.mime else: val = self.mime + ct[i] warn(PROXY, i18n._("set Content-Type from %s to %s in %s"), `str(ct)`, `val`, `self.url`) self.headers['Content-Type'] = "%s\r"%val elif gm[0]: # guessed an own content type if ct is None: warn(PROXY, i18n._("add Content-Type %s to %s"), `gm[0]`, `self.url`) self.headers['Content-Type'] = "%s\r"%gm[0] # fix some content types elif not ct.startswith(gm[0]) and \ gm[0] in _fix_content_types: warn(PROXY, i18n._("change Content-Type from %s to %s in %s"), `ct`, `gm[0]`, `self.url`) self.headers['Content-Type'] = "%s\r"%gm[0] if gm[1] and gm[1] in _fix_content_encodings: ce = self.headers.get('Content-Encoding', None) # guessed an own encoding type if ce is None: self.headers['Content-Encoding'] = "%s\r"%gm[1] warn(PROXY, i18n._("add Content-Encoding %s to %s"), `gm[1]`, `self.url`) elif ce != gm[1]: warn(PROXY, i18n._("change Content-Encoding from %s to %s in %s"), `ce`, `gm[1]`, `self.url`) self.headers['Content-Encoding'] = "%s\r"%gm[1] # hmm, fix application/x-httpd-php* if self.headers.get('Content-Type', '').lower().startswith('application/x-httpd-php'): warn(PROXY, i18n._("fix x-httpd-php Content-Type")) self.headers['Content-Type'] = 'text/html\r'
return wc.i18n.get_translator(configdata.name, get_locdir(), lang,
return wc.i18n.get_translator(configdata.name, get_locdir(), [lang],
def get_translator (lang, translatorklass=None, fallbackklass=None):
    """
    Return translator class for the given language.
    """
    # wc.i18n.get_translator() expects a list of languages, not a
    # single language string
    return wc.i18n.get_translator(configdata.name, get_locdir(), [lang],
                                  translatorklass=translatorklass,
                                  fallback=True,
                                  fallbackklass=fallbackklass)
if name=='replacer':
if name=='replace':
def fill_data (self, data, name):
    """
    Append character data of a <replace> element to the stored
    replacement text.
    """
    # the XML element is named 'replace' (matching the tag written by
    # toxml), not 'replacer'
    if name == 'replace':
        self.replace += data
return s+">"+xmlify(self.replace)+"</replacer>"
return s+">"+xmlify(self.replace)+"</replace>"
def toxml (self):
elif not ct.startswith(mime):
elif not ct.startswith(mime) and mime.startswith('text/html'):
def server_set_content_headers (headers, content, document, mime, url): """add missing content-type headers""" # document can have query parameters at the end, remove them i = document.find('?') if i>0: document = document[:i] # check content-type against our own guess if not mime and not headers.has_key('Transfer-Encoding') and content: # note: recognizing a mime type here fixes exploits like # CVE-2002-0025 and CVE-2002-0024 try: mime = wc.magic.classify(StringIO.StringIO(content)) except StandardError, msg: wc.log.error(wc.LOG_PROXY, "Could not classify %r: %s", url, msg) ct = headers.get('Content-Type', None) if mime: if ct is None: wc.log.warn(wc.LOG_PROXY, wc.i18n._("add Content-Type %r in %r"), mime, url) headers['Content-Type'] = "%s\r"%mime elif not ct.startswith(mime): i = ct.find(';') if i != -1 and mime.startswith('text'): # add charset information val = mime + ct[i:] else: val = mime wc.log.warn(wc.LOG_PROXY, wc.i18n._("set Content-Type from %r to %r in %r"), str(ct), val, url) headers['Content-Type'] = "%s\r"%val else: gm = mimetypes.guess_type(document, None) if gm[0]: # guessed an own content type if ct is None: wc.log.warn(wc.LOG_PROXY, wc.i18n._("add Content-Type %r to %r"), gm[0], url) headers['Content-Type'] = "%s\r"%gm[0] # hmm, fix application/x-httpd-php* if headers.get('Content-Type', '').lower().startswith('application/x-httpd-php'): wc.log.warn(wc.LOG_PROXY, wc.i18n._("fix x-httpd-php Content-Type")) headers['Content-Type'] = 'text/html\r'
if headers.get('Content-Type', '').lower().startswith('application/x-httpd-php'): wc.log.warn(wc.LOG_PROXY, wc.i18n._("fix x-httpd-php Content-Type")) headers['Content-Type'] = 'text/html\r'
def server_set_content_headers (headers, content, document, mime, url): """add missing content-type headers""" # document can have query parameters at the end, remove them i = document.find('?') if i>0: document = document[:i] # check content-type against our own guess if not mime and not headers.has_key('Transfer-Encoding') and content: # note: recognizing a mime type here fixes exploits like # CVE-2002-0025 and CVE-2002-0024 try: mime = wc.magic.classify(StringIO.StringIO(content)) except StandardError, msg: wc.log.error(wc.LOG_PROXY, "Could not classify %r: %s", url, msg) ct = headers.get('Content-Type', None) if mime: if ct is None: wc.log.warn(wc.LOG_PROXY, wc.i18n._("add Content-Type %r in %r"), mime, url) headers['Content-Type'] = "%s\r"%mime elif not ct.startswith(mime): i = ct.find(';') if i != -1 and mime.startswith('text'): # add charset information val = mime + ct[i:] else: val = mime wc.log.warn(wc.LOG_PROXY, wc.i18n._("set Content-Type from %r to %r in %r"), str(ct), val, url) headers['Content-Type'] = "%s\r"%val else: gm = mimetypes.guess_type(document, None) if gm[0]: # guessed an own content type if ct is None: wc.log.warn(wc.LOG_PROXY, wc.i18n._("add Content-Type %r to %r"), gm[0], url) headers['Content-Type'] = "%s\r"%gm[0] # hmm, fix application/x-httpd-php* if headers.get('Content-Type', '').lower().startswith('application/x-httpd-php'): wc.log.warn(wc.LOG_PROXY, wc.i18n._("fix x-httpd-php Content-Type")) headers['Content-Type'] = 'text/html\r'
import tempfile, urllib2
def download_adzapper_file (): """download adzapper url into temporary file and return filename""" import tempfile, urllib2 fd, tmpfile = tempfile.mkstemp(".txt", "adzapper_", DOWNLOAD, text=True) f = os.fdopen(fd, "w") try: urldata = urllib2.urlopen(ADZAPPER_URL) f.write(urldata.read()) finally: f.close() return tmpfile
from os.path import exists, isfile, isdir, basename, dirname import errno if not isfile(src):
if not os.path.isfile(src):
def move (src, dst): """move src to dst, possibly overwriting file2""" from os.path import exists, isfile, isdir, basename, dirname import errno if not isfile(src): raise IOError, "can't move %s: not a regular file" % `src` if isdir(dst): dst = os.path.join(dst, basename(src)) elif exists(dst): raise IOError, "can't move %s: destination %s already exists" % \ (`src`, `dst`) if not isdir(dirname(dst)): raise IOError, "can't move %s: destination %s not a valid path" % \ (`src`, `dst`) copy_it = 0 try: os.rename(src, dst) except os.error, (num, msg): if num == errno.EXDEV: copy_it = 1 else: raise IOError, "couldn't move %s to %s: %s" % (`src`, `dst`, msg) if copy_it: copy_file(src, dst) try: os.unlink(src) except os.error, (num, msg): try: os.unlink(dst) except os.error: pass raise IOError, ("couldn't move %s to %s by copy/delete: " + "delete %s failed: %s") % \ (`src`, `dst`, `src`, msg)
if isdir(dst): dst = os.path.join(dst, basename(src)) elif exists(dst):
if os.path.isdir(dst): dst = os.path.join(dst, os.path.basename(src)) elif os.path.exists(dst):
def move (src, dst): """move src to dst, possibly overwriting file2""" from os.path import exists, isfile, isdir, basename, dirname import errno if not isfile(src): raise IOError, "can't move %s: not a regular file" % `src` if isdir(dst): dst = os.path.join(dst, basename(src)) elif exists(dst): raise IOError, "can't move %s: destination %s already exists" % \ (`src`, `dst`) if not isdir(dirname(dst)): raise IOError, "can't move %s: destination %s not a valid path" % \ (`src`, `dst`) copy_it = 0 try: os.rename(src, dst) except os.error, (num, msg): if num == errno.EXDEV: copy_it = 1 else: raise IOError, "couldn't move %s to %s: %s" % (`src`, `dst`, msg) if copy_it: copy_file(src, dst) try: os.unlink(src) except os.error, (num, msg): try: os.unlink(dst) except os.error: pass raise IOError, ("couldn't move %s to %s by copy/delete: " + "delete %s failed: %s") % \ (`src`, `dst`, `src`, msg)
if not isdir(dirname(dst)):
if not os.path.isdir(os.path.dirname(dst)):
def move (src, dst): """move src to dst, possibly overwriting file2""" from os.path import exists, isfile, isdir, basename, dirname import errno if not isfile(src): raise IOError, "can't move %s: not a regular file" % `src` if isdir(dst): dst = os.path.join(dst, basename(src)) elif exists(dst): raise IOError, "can't move %s: destination %s already exists" % \ (`src`, `dst`) if not isdir(dirname(dst)): raise IOError, "can't move %s: destination %s not a valid path" % \ (`src`, `dst`) copy_it = 0 try: os.rename(src, dst) except os.error, (num, msg): if num == errno.EXDEV: copy_it = 1 else: raise IOError, "couldn't move %s to %s: %s" % (`src`, `dst`, msg) if copy_it: copy_file(src, dst) try: os.unlink(src) except os.error, (num, msg): try: os.unlink(dst) except os.error: pass raise IOError, ("couldn't move %s to %s by copy/delete: " + "delete %s failed: %s") % \ (`src`, `dst`, `src`, msg)
"title_en": wc.Xmlutils.xmlquote("AdZapper filters"), "title_de": wc.Xmlutils.xmlquote("AdZapper Filter"), "desc_en": wc.Xmlutils.xmlquote("Automatically generated by adzap2wc.py from %s on %s"%(ADZAPPER_URL, date)), "desc_de": wc.Xmlutils.xmlquote("Automatisch erzeugt von adzap2wc.py aus %s am %s"%(ADZAPPER_URL, date)),
"title_en": wc.XmlUtils.xmlquote("AdZapper filters"), "title_de": wc.XmlUtils.xmlquote("AdZapper Filter"), "desc_en": wc.XmlUtils.xmlquote("Automatically generated by adzap2wc.py from %s on %s"%(ADZAPPER_URL, date)), "desc_de": wc.XmlUtils.xmlquote("Automatisch erzeugt von adzap2wc.py aus %s am %s"%(ADZAPPER_URL, date)),
def write_filters (res, filename): if os.path.exists(filename): remove(filename) zapfile = file(filename, 'w') d = { "charset": wc.ConfigCharset, "title_en": wc.Xmlutils.xmlquote("AdZapper filters"), "title_de": wc.Xmlutils.xmlquote("AdZapper Filter"), "desc_en": wc.Xmlutils.xmlquote("Automatically generated by adzap2wc.py from %s on %s"%(ADZAPPER_URL, date)), "desc_de": wc.Xmlutils.xmlquote("Automatisch erzeugt von adzap2wc.py aus %s am %s"%(ADZAPPER_URL, date)), } zapfile.write("""<?xml version="1.0" encoding="%(charset)s"?>
zapfile.write(" <replacement>%s</replacement>"%wc.Xmlutils.xmlquote(convert_adzapper_replace(replace)))
zapfile.write(" <replacement>%s</replacement>"%wc.XmlUtils.xmlquote(convert_adzapper_replace(replace)))
def write_block (zapfile, adclass, pattern, lineno, replace=None): #print "%s block %s => %s" % (adclass, `pattern`, `replacement`) d = get_rule_dict(adclass, pattern, lineno, replace=replace) zapfile.write("""<block url="%(url)s""" % d) zapfile.write("\"") if adclass=='PRINT': zapfile.write("\n disable=\"1\">") zapfile.write("""
'title_en': wc.Xmlutils.xmlquote("%s filter line %d" % (adclass, lineno)), 'title_de': wc.Xmlutils.xmlquote("%s Filter Zeile %d" % (adclass, lineno)), 'desc_en': wc.Xmlutils.xmlquote("AdZapper pattern:\n%s." % pattern), 'desc_de': wc.Xmlutils.xmlquote("AdZapper Pattern:\n%s." % pattern), 'url': wc.Xmlutils.xmlquote(convert_adzapper_pattern(pattern)),
'title_en': wc.XmlUtils.xmlquote("%s filter line %d" % (adclass, lineno)), 'title_de': wc.XmlUtils.xmlquote("%s Filter Zeile %d" % (adclass, lineno)), 'desc_en': wc.XmlUtils.xmlquote("AdZapper pattern:\n%s." % pattern), 'desc_de': wc.XmlUtils.xmlquote("AdZapper Pattern:\n%s." % pattern), 'url': wc.XmlUtils.xmlquote(convert_adzapper_pattern(pattern)),
def get_rule_dict (adclass, pattern, lineno, replace=None): d = { 'title_en': wc.Xmlutils.xmlquote("%s filter line %d" % (adclass, lineno)), 'title_de': wc.Xmlutils.xmlquote("%s Filter Zeile %d" % (adclass, lineno)), 'desc_en': wc.Xmlutils.xmlquote("AdZapper pattern:\n%s." % pattern), 'desc_de': wc.Xmlutils.xmlquote("AdZapper Pattern:\n%s." % pattern), 'url': wc.Xmlutils.xmlquote(convert_adzapper_pattern(pattern)), } if replace is not None: d['desc_en'] += wc.Xmlutils.xmlquote("\nReplacement:\n%s." % replace) d['desc_de'] += wc.Xmlutils.xmlquote("\nErsatzwert:\n%s." % replace) return d
d['desc_en'] += wc.Xmlutils.xmlquote("\nReplacement:\n%s." % replace) d['desc_de'] += wc.Xmlutils.xmlquote("\nErsatzwert:\n%s." % replace)
d['desc_en'] += wc.XmlUtils.xmlquote("\nReplacement:\n%s." % replace) d['desc_de'] += wc.XmlUtils.xmlquote("\nErsatzwert:\n%s." % replace)
def get_rule_dict (adclass, pattern, lineno, replace=None): d = { 'title_en': wc.Xmlutils.xmlquote("%s filter line %d" % (adclass, lineno)), 'title_de': wc.Xmlutils.xmlquote("%s Filter Zeile %d" % (adclass, lineno)), 'desc_en': wc.Xmlutils.xmlquote("AdZapper pattern:\n%s." % pattern), 'desc_de': wc.Xmlutils.xmlquote("AdZapper Pattern:\n%s." % pattern), 'url': wc.Xmlutils.xmlquote(convert_adzapper_pattern(pattern)), } if replace is not None: d['desc_en'] += wc.Xmlutils.xmlquote("\nReplacement:\n%s." % replace) d['desc_de'] += wc.Xmlutils.xmlquote("\nErsatzwert:\n%s." % replace) return d
cat = c.getHelpText() self.rule.ratings[service][cat] = 1
def onCmdService (self, sender, sel, ptr): """enable/disable a PICS service""" service = sender.getHelpText() widgets = self.widgets[service].values() # enable this service if sender.getCheck(): self.rule.ratings[service] = {} for c in widgets: # gui update c.enable() # rule update cat = c.getHelpText() self.rule.ratings[service][cat] = 1 #v.getValue() # disable this service else: # rule update del self.rule.ratings[service] # gui update for c in widgets: c.setCheck(0) #v.setValue(0) c.disable() #v.disable() self.getApp().dirty = 1 debug(BRING_IT_ON, "Changed rule pics service data") return 1
def __init__ (self, apply_to_mimelist):
def __init__ (self):
def __init__ (self, apply_to_mimelist): """initialize image reducer flags""" super(ImageReducer, self).__init__(apply_to_mimelist) # minimal number of bytes before we start reducing self.minimal_size_bytes = 5120
super(ImageReducer, self).__init__(apply_to_mimelist)
super(ImageReducer, self).__init__()
def __init__ (self, apply_to_mimelist): """initialize image reducer flags""" super(ImageReducer, self).__init__(apply_to_mimelist) # minimal number of bytes before we start reducing self.minimal_size_bytes = 5120
"""The parser has the rules, a data buffer and a rule stack.
"""The parser has filter rules, data buffers and a rule stack. XXX fixme: should make internal functions start with _
def getAttrs (self, headers, url): """We need a separate filter instance for stateful filtering""" rewrites = [] opts = {'comments': 1, 'javascript': 0} for rule in self.rules: if not rule.appliesTo(url): continue if rule.get_name()=='rewrite': rewrites.append(rule) elif rule.get_name()=='nocomments': opts['comments'] = 0 elif rule.get_name()=='javascript': opts['javascript'] = 1 # generate the HTML filter return {'filter': HtmlFilter(rewrites, url, **opts)}
XXX
XXX (to be done)
def getAttrs (self, headers, url): """We need a separate filter instance for stateful filtering""" rewrites = [] opts = {'comments': 1, 'javascript': 0} for rule in self.rules: if not rule.appliesTo(url): continue if rule.get_name()=='rewrite': rewrites.append(rule) elif rule.get_name()=='nocomments': opts['comments'] = 0 elif rule.get_name()=='javascript': opts['javascript'] = 1 # generate the HTML filter return {'filter': HtmlFilter(rewrites, url, **opts)}
if _has_ws(url): print >> sys.stderr, "HtmlFilter[%d]: broken JS url"%self.level,\ `url`, "at", `self.url` return
def jsScriptSrc (self, url, language): """Start a background download for <script src=""> tags""" assert self.state=='parse' ver = 0.0 if language: mo = re.search(r'(?i)javascript(?P<num>\d\.\d)', language) if mo: ver = float(mo.group('num')) url = urlparse.urljoin(self.url, url) #self._debug(HURT_ME_PLENTY, "JS jsScriptSrc", url, ver) self.state = 'wait' self.js_src = 'True' client = HttpProxyClient(self.jsScriptData, (url, ver)) ClientServerMatchmaker(client, "GET %s HTTP/1.1" % url, #request {}, #headers '', #content {'nofilter': None}, 'identity', # compress ) self.waited = "True"
if self.buf[-3][0]==STARTTAG and self.buf[-3][1]=='script':
if len(self.buf) > 2 and \ self.buf[-3][0]==STARTTAG and self.buf[-3][1]=='script':
def jsEndElement (self, item): """parse generated html for scripts""" #self._debug(NIGHTMARE, "jsEndElement buf", self.buf) if len(self.buf)<2: # syntax error, ignore return if self.js_src: del self.buf[-1] if len(self.buf)<2: # syntax error, ignore print >>sys.stderr, "JS end self.buf", self.buf return if self.buf[-3][0]==STARTTAG and self.buf[-3][1]=='script': del self.buf[-1] if len(self.buf)<2 or self.buf[-1][0]!=DATA or \ self.buf[-2][0]!=STARTTAG or self.buf[-2][1]!='script': # syntax error, ignore return # get script data script = self.buf[-1][1].strip() # remove html comments if script.startswith("<!--"): i = script.find('\n') if i==-1: script = script[4:] else: script = script[(i+1):] if script.endswith("-->"): script = script[:-3] if not script: # again, ignore an empty script del self.buf[-1] del self.buf[-1] else: self.jsScript(script, 0.0, item)
super(SetList, self).extend([i for i in x if i not in self])
for i in x: self.append(i)
def extend (self, x): super(SetList, self).extend([i for i in x if i not in self])
if value not in self: super(SetList, self).__setitem__(key, value)
oldvalues = [] for i in range(len(self)): if self[i]==value: oldvalues.append(i) super(SetList, self).__setitem__(key, value) oldvalues.reverse() for i in oldvalues: if i!=key: del self[key]
def __setitem__ (self, key, value): if value not in self: super(SetList, self).__setitem__(key, value)
locdir = os.path.join(configdata.install_data, 'share', 'locale')
locdir = os.path.join(InstallData, 'share', 'locale')
def get_locdir (): """return locale directory""" locdir = os.environ.get('LOCPATH') if locdir is None: locdir = os.path.join(configdata.install_data, 'share', 'locale') return locdir
wc.log.info(wc.LOG_PROXY, '%s connect(%s) error %s', self, addr, strerr)
wc.log.info(wc.LOG_PROXY, '%s connect error %s', self, strerr)
def check_connect (self, addr): """ Check if the connection is etablished. See also http://cr.yp.to/docs/connect.html and connect(2) manpage. """ wc.log.debug(wc.LOG_PROXY, '%s check connect', self) self.connect_checks += 1 if self.connect_checks >= 50: wc.log.info(wc.LOG_PROXY, '%s connect timed out', self) self.handle_close() return try: (r, w, e) = select.select([], [self.fileno()], [], 0.2) except select.error, why: # not yet ready return if self.fileno() not in w: # not yet ready return err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR) if err == 0: self.addr = addr self.connected = True wc.log.debug(wc.LOG_PROXY, '%s connected', self) self.handle_connect() elif err in (errno.EINPROGRESS, errno.EWOULDBLOCK): wc.proxy.make_timer(0.2, lambda a=addr: self.check_connect(addr)) else: strerr = errno.errorcode[err] wc.log.info(wc.LOG_PROXY, '%s connect(%s) error %s', self, addr, strerr) self.handle_close()
command = (sys.executable, 'webcleaner', 'start_nt') ret = os.spawnv(os.P_NOWAIT, command[0], command)
ret = os.spawnv(mode, command[0], command)
def start(): # already running? if os.path.exists(pidfile): raise Exception("webcleaner already started (lock file found). " "Do 'webcleaner stop' first.") try: command = (sys.executable, 'webcleaner', 'start_nt') ret = os.spawnv(os.P_NOWAIT, command[0], command) except OSError, exc: # this seems to happen when the command isn't found print exc raise Exception, \ "command '%s' failed: %s" % (command, exc[-1]) if ret != 0: # and this reflects the command running but failing raise Exception, \ "command '%s' failed with exit status %d" % (command, ret)
if ret != 0:
if ret < 0:
def start(): # already running? if os.path.exists(pidfile): raise Exception("webcleaner already started (lock file found). " "Do 'webcleaner stop' first.") try: command = (sys.executable, 'webcleaner', 'start_nt') ret = os.spawnv(os.P_NOWAIT, command[0], command) except OSError, exc: # this seems to happen when the command isn't found print exc raise Exception, \ "command '%s' failed: %s" % (command, exc[-1]) if ret != 0: # and this reflects the command running but failing raise Exception, \ "command '%s' failed with exit status %d" % (command, ret)
"command '%s' failed with exit status %d" % (command, ret)
"command '%s' killed by signal %d" % (command, -ret)
def start(): # already running? if os.path.exists(pidfile): raise Exception("webcleaner already started (lock file found). " "Do 'webcleaner stop' first.") try: command = (sys.executable, 'webcleaner', 'start_nt') ret = os.spawnv(os.P_NOWAIT, command[0], command) except OSError, exc: # this seems to happen when the command isn't found print exc raise Exception, \ "command '%s' failed: %s" % (command, exc[-1]) if ret != 0: # and this reflects the command running but failing raise Exception, \ "command '%s' failed with exit status %d" % (command, ret)
debug(NIGHTMARE, 'Proxy: data', `data`)
debug(NIGHTMARE, 'Proxy: data', `data[:num_sent]`)
def handle_write (self): assert self.connected num_sent = 0 data = self.send_buffer[:SEND_BUFSIZE] try: num_sent = self.send(data) except socket.error, err: self.handle_error('write error', socket.error, err) return debug(HURT_ME_PLENTY, 'Proxy: wrote', num_sent, '=>', self) debug(NIGHTMARE, 'Proxy: data', `data`) self.send_buffer = self.send_buffer[num_sent:] if self.close_pending and not self.send_buffer: self.close_pending = 0 self.close() return num_sent
wc.log.debug(wc.LOG_RATING, "Write ratings to %r", self.filename)
wc.log.debug(wc.LOG_RATING, "Write ratings to %r.", self.filename)
def write (self): """ Write pickled cache to disk. """ wc.log.debug(wc.LOG_RATING, "Write ratings to %r", self.filename) def callback (fp, obj): pickle.dump(obj, fp, 1) wc.fileutil.write_save(self.filename, self.cache, callback=callback)
wc.fileutil.write_save(self.filename, self.cache, callback=callback)
wc.fileutil.write_file(self.filename, self.cache, callback=callback)
def callback (fp, obj): pickle.dump(obj, fp, 1)
wc.log.debug(wc.LOG_RATING, "Loading ratings from %r", self.filename)
wc.log.debug(wc.LOG_RATING, "Loading ratings from %r.", self.filename)
def load (self): """ Load pickled cache from disk. """ wc.log.debug(wc.LOG_RATING, "Loading ratings from %r", self.filename) if os.path.isfile(self.filename): fp = file(self.filename, 'rb') self.cache = pickle.load(fp) fp.close() # remove invalid entries toremove = [] for url in self.cache: if not wc.url.is_safe_url(url): wc.log.error(wc.LOG_RATING, "Invalid rating url %r", url) toremove.append(url) if toremove: for url in toremove: del self[url] self.write()
context_add(context, "nav", {filename.replace('.', '_'): True})
add_nav_context(context, filename)
def add_default_context (context, filename, lang): """add context variables used by all templates""" # rule macros path, dirs = wc.webgui.get_safe_template_path("macros/rules.html") rulemacros = wc.webgui.templatecache.templates[path] context_add(context, "rulemacros", rulemacros.macros) # standard macros path, dirs = wc.webgui.get_safe_template_path("macros/standard.html") macros = wc.webgui.templatecache.templates[path] context_add(context, "macros", macros.macros) # used by navigation macro context_add(context, "nav", {filename.replace('.', '_'): True}) # page template name context_add(context, "filename", filename) # base url context_add(context, "baseurl", "http://localhost:%d/" % wc.configuration.config['port']) # language context_add(context, "lang", lang) # other available languges otherlanguages = [] for la in wc.i18n.supported_languages: if lang == la: continue otherlanguages.append({'code': la, 'name': wc.i18n.lang_name(la), 'trans': wc.i18n.lang_trans(la, lang), }) context_add(context, "otherlanguages", otherlanguages)
from wc.parser.htmllib import HtmlPrinter from wc.parser import htmlsax
from bk.HtmlParser.htmllib import HtmlPrinter from bk.HtmlParser import htmlsax
def _main (): """USAGE: test/run.sh test/parsefile.py test.html""" import sys if len(sys.argv)!=2: print _main.__doc__ sys.exit(1) if sys.argv[1]=='-': f = sys.stdin else: f = file(sys.argv[1]) from wc.parser.htmllib import HtmlPrinter from wc.parser import htmlsax p = htmlsax.parser(HtmlPrinter()) #p.debug(1) size = 1024 #size = 1 data = f.read(size) while data: p.feed(data) data = f.read(size) p.flush()
p.debug(1)
def _main (): """USAGE: test/run.sh test/parsefile.py test.html""" import sys if len(sys.argv)!=2: print _main.__doc__ sys.exit(1) if sys.argv[1]=='-': f = sys.stdin else: f = file(sys.argv[1]) from wc.parser.htmllib import HtmlPrinter from wc.parser import htmlsax p = htmlsax.parser(HtmlPrinter()) #p.debug(1) size = 1024 #size = 1 data = f.read(size) while data: p.feed(data) data = f.read(size) p.flush()
self.document = url or "unknown"
self.url = url or "unknown"
def __init__ (self, rules, url, **opts): if wc.config['showerrors']: self.error = self._error self.warning = self._warning self.fatalError = self._fatalError HtmlParser.__init__(self) self.rules = rules self.comments = opts['comments'] self.javascript = opts['javascript'] self.data = [] self.rulestack = [] self.buffer = [] self.document = url or "unknown" if self.javascript: self.jsEnv = jslib.new_jsenv() self.popup_counter = 0
self.jsScriptSrc(attrs.get('src'), lang) return
if self.jsScriptSrc(attrs.get('src', ''), lang): return
def jsStartElement (self, tag, attrs): """Check popups for onmouseout and onmouseover. Inline extern javascript sources (only in the same domain)""" changed = 0 for name in ('onmouseover', 'onmouseout'): if attrs.has_key(name) and self.jsPopup(attrs, name): del attrs[name] changed = 1 if tag=='form': name = attrs.get('name', attrs.get('id')) self.jsForm(name, attrs.get('action', ''), attrs.get('target', '')) elif tag=='script': lang = attrs.get('language', '').lower() scrtype = attrs.get('type', '').lower() if scrtype=='text/javascript' or \ lang.startswith('javascript') or \ not (lang or scrtype): self.jsScriptSrc(attrs.get('src'), lang) return self.buffer.append((STARTTAG, tag, attrs))
script = urlutils.open_url(url)
script = urlutils.open_url(url).read()
def jsScriptSrc (self, url, language): if not url: return #debug(HURT_ME_PLENTY, "jsScriptSrc", url, language) try: script = urlutils.open_url(url) except: print >>sys.stderr, "exception fetching script url", `url` return if not script: return ver = 0.0 if language: mo = re.search(r'(?i)javascript(?P<num>\d\.\d)', language) if mo: ver = float(mo.group('num')) self.jsScript(script, ver)
self.jsfilter = HtmlFilter(self.rules, self.document,
self.jsfilter = HtmlFilter(self.rules, self.url,
def jsScript (self, script, ver): """execute given script with javascript version ver""" self.jsEnv.attachListener(self) self.jsfilter = HtmlFilter(self.rules, self.document, comments=self.comments, javascript=self.javascript) self.jsEnv.executeScript(script, ver) self.jsEnv.detachListener(self) self.jsfilter.flush() self.data.append(self.jsfilter.flushbuf()) self.buffer += self.jsfilter.buffer self.rulelist += self.jsfilter.rulelist self.jsfilter = None
self.rulelist += self.jsfilter.rulelist
self.rulestack += self.jsfilter.rulestack
def jsScript (self, script, ver): """execute given script with javascript version ver""" self.jsEnv.attachListener(self) self.jsfilter = HtmlFilter(self.rules, self.document, comments=self.comments, javascript=self.javascript) self.jsEnv.executeScript(script, ver) self.jsEnv.detachListener(self) self.jsfilter.flush() self.data.append(self.jsfilter.flushbuf()) self.buffer += self.jsfilter.buffer self.rulelist += self.jsfilter.rulelist self.jsfilter = None
self.buffer[1][1]=='script'):
self.buffer[-1][1]=='script'):
def jsEndElement (self, tag): """parse generated html for scripts""" if not self.buffer: print >>sys.stderr, "empty buffer on </script>" return if self.buffer[-1][0]!=DATA: print >>sys.stderr, "missing data for </script>", self.buffer[-1:] return script = self.buffer[-1][1].strip() del self.buffer[-1] if not (self.buffer and self.buffer[-1][0]==STARTTAG and \ self.buffer[1][1]=='script'): # there was a <script src="..."> already return del self.buffer[-1] if script.startswith("<!--"): script = script[4:].strip() if not script: return self.jsScript(script, 0.0)
print >> sys.stderr, name, "parsing %s: %s" % (self.document, msg)
print >> sys.stderr, name, "parsing %s: %s" % (self.url, msg)
def errorfun (self, msg, name): print >> sys.stderr, name, "parsing %s: %s" % (self.document, msg)
a = "--&
a = "-->";
def testCommentQuoting4 (self): self.filt(
_form_parentproxypass( base64.encodestring(_getval(form, 'parentproxypass')))
val = _getval(form, 'parentproxypass') if val=='__dummy__': val = "" _form_parentproxypass(base64.encodestring(val))
def _exec_form (form): # reset info/error del info[:] del error[:] # proxy port if form.has_key('port'): _form_proxyport(_getval(form, 'port')) # proxy user if form.has_key('proxyuser'): _form_proxyuser(_getval(form, 'proxyuser').strip()) else: config['proxyuser'] = '' # proxy pass if form.has_key('proxypass'): _form_proxypass(base64.encodestring(_getval(form, 'proxypass').strip())) else: config['proxypass'] = '' # parent proxy host if form.has_key('parentproxy'): _form_parentproxy(_getval(form, 'parentproxy').strip()) else: config['parentproxy'] = '' # parent proxy port if form.has_key('parentproxyport'): _form_parentproxyport(_getval(form, 'parentproxyport')) else: config['parentproxyport'] = 3128 # parent proxy user if form.has_key('parentproxyuser'): _form_parentproxyuser(_getval(form, 'parentproxyuser').strip()) else: config['parentproxyuser'] = '' # parent proxy pass if form.has_key('parentproxypass'): _form_parentproxypass( base64.encodestring(_getval(form, 'parentproxypass'))) else: config['parentproxypass'] = '' # timeout if form.has_key('timeout'): _form_timeout(_getval(form, 'timeout')) else: config['timeout'] = 0 # filter modules _form_filtermodules(form) # allowed hosts if form.has_key('addallowed') and form.has_key('newallowed'): _form_addallowed(_getval(form, 'newallowed').strip()) elif form.has_key('delallowed') and form.has_key('allowedhosts'): _form_delallowed(form) # no filter hosts if form.has_key('addnofilter') and form.has_key('newnofilter'): _form_addnofilter(_getval(form, 'newnofilter').strip()) elif form.has_key('delnofilter') and form.has_key('nofilterhosts'): _form_delnofilter(form) if info: # write changed config config.write_proxyconf() _daemon.reload()
self.pass_entry = tk.Entry(master)
self.pass_entry = tk.Entry(master, show="*")
def body (self, master): d = {"appname": wc.AppName} msg = _("""The administrator password protects the web
if password:
password2 = self.pass2_entry.get() if password != password2: print _("Error, passwords differ.") else:
def apply (self): password = self.pass_entry.get() if password: save_adminpassword(password) else: print _("Not saving empty password.")
else: print _("Not saving empty password.")
def apply (self): password = self.pass_entry.get() if password: save_adminpassword(password) else: print _("Not saving empty password.")
password = base64.b64encode(password) if not password or not wc.strformat.is_ascii(password): print _("Not saving binary password.")
if not wc.strformat.is_ascii(password): print _("Error, password has binary characters.")
def save_adminpassword (password): """ Save new admin password to WebCleaner configuration. Also checks for invalid password format. """ import base64 import wc.strformat password = base64.b64encode(password) if not password or not wc.strformat.is_ascii(password): print _("Not saving binary password.") return config = get_wc_config() config["password"] = password config.write_proxyconf()
self._stream_write("\n")
self._stream_write(ustr("\n"))
def __call__ (self): assert self.level == 0 assert self.scopeLevel == 0 assert self.i18nContext.parent is None self.interpret(self.program) assert self.level == 0 assert self.scopeLevel == 0 assert self.i18nContext.parent is None if self.col > 0: self._stream_write("\n") self.col = 0
append("\n") append(" "*align)
append(ustr("\n")) append(ustr(" ")*align)
def do_startTag (self, (name, attrList), end=">", endlen=1, _len=len): # The bytecode generator does not cause calls to this method # for start tags with no attributes; those are optimized down # to rawtext events. Hence, there is no special "fast path" # for that case. L = ["<", name] append = L.append col = self.col + _len(name) + 1 wrap = self.wrap align = col + 1 if align >= wrap/2: align = 4 # Avoid a narrow column far to the right attrAction = self.dispatch["<attrAction>"] try: for item in attrList: if _len(item) == 2: name, s = item else: # item[2] is the 'action' field: if item[2] in ('metal', 'tal', 'xmlns', 'i18n'): if not self.showtal: continue ok, name, s = self.attrAction(item) else: ok, name, s = attrAction(self, item) if not ok: continue slen = _len(s) if (wrap and col >= align and col + 1 + slen > wrap): append("\n") append(" "*align) col = align + slen else: append(" ") col = col + 1 + slen append(s) append(end) self._stream_write("".join(L)) col = col + endlen finally: self.col = col
append(" ")
append(ustr(" "))
def do_startTag (self, (name, attrList), end=">", endlen=1, _len=len): # The bytecode generator does not cause calls to this method # for start tags with no attributes; those are optimized down # to rawtext events. Hence, there is no special "fast path" # for that case. L = ["<", name] append = L.append col = self.col + _len(name) + 1 wrap = self.wrap align = col + 1 if align >= wrap/2: align = 4 # Avoid a narrow column far to the right attrAction = self.dispatch["<attrAction>"] try: for item in attrList: if _len(item) == 2: name, s = item else: # item[2] is the 'action' field: if item[2] in ('metal', 'tal', 'xmlns', 'i18n'): if not self.showtal: continue ok, name, s = self.attrAction(item) else: ok, name, s = attrAction(self, item) if not ok: continue slen = _len(s) if (wrap and col >= align and col + 1 + slen > wrap): append("\n") append(" "*align) col = align + slen else: append(" ") col = col + 1 + slen append(s) append(end) self._stream_write("".join(L)) col = col + endlen finally: self.col = col
self._stream_write("".join(L))
self._stream_write(ustr("").join(L))
def do_startTag (self, (name, attrList), end=">", endlen=1, _len=len): # The bytecode generator does not cause calls to this method # for start tags with no attributes; those are optimized down # to rawtext events. Hence, there is no special "fast path" # for that case. L = ["<", name] append = L.append col = self.col + _len(name) + 1 wrap = self.wrap align = col + 1 if align >= wrap/2: align = 4 # Avoid a narrow column far to the right attrAction = self.dispatch["<attrAction>"] try: for item in attrList: if _len(item) == 2: name, s = item else: # item[2] is the 'action' field: if item[2] in ('metal', 'tal', 'xmlns', 'i18n'): if not self.showtal: continue ok, name, s = self.attrAction(item) else: ok, name, s = attrAction(self, item) if not ok: continue slen = _len(s) if (wrap and col >= align and col + 1 + slen > wrap): append("\n") append(" "*align) col = align + slen else: append(" ") col = col + 1 + slen append(s) append(end) self._stream_write("".join(L)) col = col + endlen finally: self.col = col
s = '</%s>' % name
s = ustr('</%s>') % name
def do_optTag (self, (name, cexpr, tag_ns, isend, start, program), omit=0): if tag_ns and not self.showtal: return self.no_tag(start, program)
placeholder = '${%s}' % varname
placeholder = ustr('${%s}') % varname
def do_i18nVariable (self, stuff): varname, program, expression = stuff if expression is None: # The value is implicitly the contents of this tag, so we have to # evaluate the mini-program to get the value of the variable. state = self.saveState() try: tmpstream = self.StringIO() self.interpretWithStream(program, tmpstream) value = normalize(tmpstream.getvalue()) finally: self.restoreState(state) else: # Evaluate the value to be associated with the variable in the # i18n interpolation dictionary. value = self.engine.evaluate(expression) # Either the i18n:name tag is nested inside an i18n:translate in which # case the last item on the stack has the i18n dictionary and string # representation, or the i18n:name and i18n:translate attributes are # in the same tag, in which case the i18nStack will be empty. In that # case we can just output the ${name} to the stream i18ndict, srepr = self.i18nStack[-1] i18ndict[varname] = value placeholder = '${%s}' % varname srepr.append(placeholder) self._stream_write(placeholder)
Reteurn default element if key is not in the dict, of if the self[key] evaluates to False.
Return default element if key is not in the dict, or if self[key] evaluates to False.
def get_true (self, key, default): """ Reteurn default element if key is not in the dict, of if the self[key] evaluates to False. """ if key not in self or not self[key]: return default return self[key]
_form_proxyport(form['port'].value)
_form_proxyport(getval(form, 'port'))
def exec_form (form): # reset info/error del info[:] del error[:] # proxy port if form.has_key('port'): _form_proxyport(form['port'].value) # proxy user if form.has_key('proxyuser'): _form_proxyuser(form['proxyuser'].value.strip()) # proxy pass if form.has_key('proxypass'): _form_proxypass(base64.encodestring(form['proxypass'].value.strip())) # parent proxy host if form.has_key('parentproxy'): _form_parentproxy(form['parentproxy'].value.strip()) # parent proxy port if form.has_key('parentproxyport'): _form_parentproxyport(form['parentproxyport'].value) # parent proxy user if form.has_key('parentproxyuser'): _form_parentproxyuser(form['parentproxyuser'].value.strip()) # parent proxy pass if form.has_key('parentproxypass'): _form_parentproxypass( base64.encodestring(form['parentproxypass'].value)) # timeout if form.has_key('timeout'): _form_timeout(form['timeout'].value) # filter modules _form_filtermodules(form) # allowed hosts if form.has_key('addallowed') and form.has_key('newallowed'): _form_addallowed(form['newallowed'].value.strip()) elif form.has_key('delallowed') and form.has_key('allowedhosts'): _form_delallowed(form['allowedhosts']) # no filter hosts if form.has_key('addnofilter') and form.has_key('newnofilter'): _form_addnofilter(form['newnofilter'].value.strip()) elif form.has_key('delnofilter') and form.has_key('nofilterhosts'): _form_delnofilter(form['nofilterhosts']) if info: # write changed config config.write_proxyconf()
_form_proxyuser(form['proxyuser'].value.strip())
_form_proxyuser(getval(form, 'proxyuser').strip())
def exec_form (form): # reset info/error del info[:] del error[:] # proxy port if form.has_key('port'): _form_proxyport(form['port'].value) # proxy user if form.has_key('proxyuser'): _form_proxyuser(form['proxyuser'].value.strip()) # proxy pass if form.has_key('proxypass'): _form_proxypass(base64.encodestring(form['proxypass'].value.strip())) # parent proxy host if form.has_key('parentproxy'): _form_parentproxy(form['parentproxy'].value.strip()) # parent proxy port if form.has_key('parentproxyport'): _form_parentproxyport(form['parentproxyport'].value) # parent proxy user if form.has_key('parentproxyuser'): _form_parentproxyuser(form['parentproxyuser'].value.strip()) # parent proxy pass if form.has_key('parentproxypass'): _form_parentproxypass( base64.encodestring(form['parentproxypass'].value)) # timeout if form.has_key('timeout'): _form_timeout(form['timeout'].value) # filter modules _form_filtermodules(form) # allowed hosts if form.has_key('addallowed') and form.has_key('newallowed'): _form_addallowed(form['newallowed'].value.strip()) elif form.has_key('delallowed') and form.has_key('allowedhosts'): _form_delallowed(form['allowedhosts']) # no filter hosts if form.has_key('addnofilter') and form.has_key('newnofilter'): _form_addnofilter(form['newnofilter'].value.strip()) elif form.has_key('delnofilter') and form.has_key('nofilterhosts'): _form_delnofilter(form['nofilterhosts']) if info: # write changed config config.write_proxyconf()
_form_proxypass(base64.encodestring(form['proxypass'].value.strip()))
_form_proxypass(base64.encodestring(getval(form, 'proxypass').strip()))
def exec_form (form): # reset info/error del info[:] del error[:] # proxy port if form.has_key('port'): _form_proxyport(form['port'].value) # proxy user if form.has_key('proxyuser'): _form_proxyuser(form['proxyuser'].value.strip()) # proxy pass if form.has_key('proxypass'): _form_proxypass(base64.encodestring(form['proxypass'].value.strip())) # parent proxy host if form.has_key('parentproxy'): _form_parentproxy(form['parentproxy'].value.strip()) # parent proxy port if form.has_key('parentproxyport'): _form_parentproxyport(form['parentproxyport'].value) # parent proxy user if form.has_key('parentproxyuser'): _form_parentproxyuser(form['parentproxyuser'].value.strip()) # parent proxy pass if form.has_key('parentproxypass'): _form_parentproxypass( base64.encodestring(form['parentproxypass'].value)) # timeout if form.has_key('timeout'): _form_timeout(form['timeout'].value) # filter modules _form_filtermodules(form) # allowed hosts if form.has_key('addallowed') and form.has_key('newallowed'): _form_addallowed(form['newallowed'].value.strip()) elif form.has_key('delallowed') and form.has_key('allowedhosts'): _form_delallowed(form['allowedhosts']) # no filter hosts if form.has_key('addnofilter') and form.has_key('newnofilter'): _form_addnofilter(form['newnofilter'].value.strip()) elif form.has_key('delnofilter') and form.has_key('nofilterhosts'): _form_delnofilter(form['nofilterhosts']) if info: # write changed config config.write_proxyconf()
_form_parentproxy(form['parentproxy'].value.strip())
_form_parentproxy(getval(form, 'parentproxy').strip())
def exec_form (form): # reset info/error del info[:] del error[:] # proxy port if form.has_key('port'): _form_proxyport(form['port'].value) # proxy user if form.has_key('proxyuser'): _form_proxyuser(form['proxyuser'].value.strip()) # proxy pass if form.has_key('proxypass'): _form_proxypass(base64.encodestring(form['proxypass'].value.strip())) # parent proxy host if form.has_key('parentproxy'): _form_parentproxy(form['parentproxy'].value.strip()) # parent proxy port if form.has_key('parentproxyport'): _form_parentproxyport(form['parentproxyport'].value) # parent proxy user if form.has_key('parentproxyuser'): _form_parentproxyuser(form['parentproxyuser'].value.strip()) # parent proxy pass if form.has_key('parentproxypass'): _form_parentproxypass( base64.encodestring(form['parentproxypass'].value)) # timeout if form.has_key('timeout'): _form_timeout(form['timeout'].value) # filter modules _form_filtermodules(form) # allowed hosts if form.has_key('addallowed') and form.has_key('newallowed'): _form_addallowed(form['newallowed'].value.strip()) elif form.has_key('delallowed') and form.has_key('allowedhosts'): _form_delallowed(form['allowedhosts']) # no filter hosts if form.has_key('addnofilter') and form.has_key('newnofilter'): _form_addnofilter(form['newnofilter'].value.strip()) elif form.has_key('delnofilter') and form.has_key('nofilterhosts'): _form_delnofilter(form['nofilterhosts']) if info: # write changed config config.write_proxyconf()
_form_parentproxyport(form['parentproxyport'].value)
_form_parentproxyport(getval(form, 'parentproxyport'))
def exec_form (form): # reset info/error del info[:] del error[:] # proxy port if form.has_key('port'): _form_proxyport(form['port'].value) # proxy user if form.has_key('proxyuser'): _form_proxyuser(form['proxyuser'].value.strip()) # proxy pass if form.has_key('proxypass'): _form_proxypass(base64.encodestring(form['proxypass'].value.strip())) # parent proxy host if form.has_key('parentproxy'): _form_parentproxy(form['parentproxy'].value.strip()) # parent proxy port if form.has_key('parentproxyport'): _form_parentproxyport(form['parentproxyport'].value) # parent proxy user if form.has_key('parentproxyuser'): _form_parentproxyuser(form['parentproxyuser'].value.strip()) # parent proxy pass if form.has_key('parentproxypass'): _form_parentproxypass( base64.encodestring(form['parentproxypass'].value)) # timeout if form.has_key('timeout'): _form_timeout(form['timeout'].value) # filter modules _form_filtermodules(form) # allowed hosts if form.has_key('addallowed') and form.has_key('newallowed'): _form_addallowed(form['newallowed'].value.strip()) elif form.has_key('delallowed') and form.has_key('allowedhosts'): _form_delallowed(form['allowedhosts']) # no filter hosts if form.has_key('addnofilter') and form.has_key('newnofilter'): _form_addnofilter(form['newnofilter'].value.strip()) elif form.has_key('delnofilter') and form.has_key('nofilterhosts'): _form_delnofilter(form['nofilterhosts']) if info: # write changed config config.write_proxyconf()
_form_parentproxyuser(form['parentproxyuser'].value.strip())
_form_parentproxyuser(getval(form, 'parentproxyuser').strip())
def exec_form (form): # reset info/error del info[:] del error[:] # proxy port if form.has_key('port'): _form_proxyport(form['port'].value) # proxy user if form.has_key('proxyuser'): _form_proxyuser(form['proxyuser'].value.strip()) # proxy pass if form.has_key('proxypass'): _form_proxypass(base64.encodestring(form['proxypass'].value.strip())) # parent proxy host if form.has_key('parentproxy'): _form_parentproxy(form['parentproxy'].value.strip()) # parent proxy port if form.has_key('parentproxyport'): _form_parentproxyport(form['parentproxyport'].value) # parent proxy user if form.has_key('parentproxyuser'): _form_parentproxyuser(form['parentproxyuser'].value.strip()) # parent proxy pass if form.has_key('parentproxypass'): _form_parentproxypass( base64.encodestring(form['parentproxypass'].value)) # timeout if form.has_key('timeout'): _form_timeout(form['timeout'].value) # filter modules _form_filtermodules(form) # allowed hosts if form.has_key('addallowed') and form.has_key('newallowed'): _form_addallowed(form['newallowed'].value.strip()) elif form.has_key('delallowed') and form.has_key('allowedhosts'): _form_delallowed(form['allowedhosts']) # no filter hosts if form.has_key('addnofilter') and form.has_key('newnofilter'): _form_addnofilter(form['newnofilter'].value.strip()) elif form.has_key('delnofilter') and form.has_key('nofilterhosts'): _form_delnofilter(form['nofilterhosts']) if info: # write changed config config.write_proxyconf()
base64.encodestring(form['parentproxypass'].value))
base64.encodestring(getval(form, 'parentproxypass')))
def exec_form (form): # reset info/error del info[:] del error[:] # proxy port if form.has_key('port'): _form_proxyport(form['port'].value) # proxy user if form.has_key('proxyuser'): _form_proxyuser(form['proxyuser'].value.strip()) # proxy pass if form.has_key('proxypass'): _form_proxypass(base64.encodestring(form['proxypass'].value.strip())) # parent proxy host if form.has_key('parentproxy'): _form_parentproxy(form['parentproxy'].value.strip()) # parent proxy port if form.has_key('parentproxyport'): _form_parentproxyport(form['parentproxyport'].value) # parent proxy user if form.has_key('parentproxyuser'): _form_parentproxyuser(form['parentproxyuser'].value.strip()) # parent proxy pass if form.has_key('parentproxypass'): _form_parentproxypass( base64.encodestring(form['parentproxypass'].value)) # timeout if form.has_key('timeout'): _form_timeout(form['timeout'].value) # filter modules _form_filtermodules(form) # allowed hosts if form.has_key('addallowed') and form.has_key('newallowed'): _form_addallowed(form['newallowed'].value.strip()) elif form.has_key('delallowed') and form.has_key('allowedhosts'): _form_delallowed(form['allowedhosts']) # no filter hosts if form.has_key('addnofilter') and form.has_key('newnofilter'): _form_addnofilter(form['newnofilter'].value.strip()) elif form.has_key('delnofilter') and form.has_key('nofilterhosts'): _form_delnofilter(form['nofilterhosts']) if info: # write changed config config.write_proxyconf()
_form_timeout(form['timeout'].value)
_form_timeout(getval(form, 'timeout'))
def exec_form (form): # reset info/error del info[:] del error[:] # proxy port if form.has_key('port'): _form_proxyport(form['port'].value) # proxy user if form.has_key('proxyuser'): _form_proxyuser(form['proxyuser'].value.strip()) # proxy pass if form.has_key('proxypass'): _form_proxypass(base64.encodestring(form['proxypass'].value.strip())) # parent proxy host if form.has_key('parentproxy'): _form_parentproxy(form['parentproxy'].value.strip()) # parent proxy port if form.has_key('parentproxyport'): _form_parentproxyport(form['parentproxyport'].value) # parent proxy user if form.has_key('parentproxyuser'): _form_parentproxyuser(form['parentproxyuser'].value.strip()) # parent proxy pass if form.has_key('parentproxypass'): _form_parentproxypass( base64.encodestring(form['parentproxypass'].value)) # timeout if form.has_key('timeout'): _form_timeout(form['timeout'].value) # filter modules _form_filtermodules(form) # allowed hosts if form.has_key('addallowed') and form.has_key('newallowed'): _form_addallowed(form['newallowed'].value.strip()) elif form.has_key('delallowed') and form.has_key('allowedhosts'): _form_delallowed(form['allowedhosts']) # no filter hosts if form.has_key('addnofilter') and form.has_key('newnofilter'): _form_addnofilter(form['newnofilter'].value.strip()) elif form.has_key('delnofilter') and form.has_key('nofilterhosts'): _form_delnofilter(form['nofilterhosts']) if info: # write changed config config.write_proxyconf()
_form_addallowed(form['newallowed'].value.strip())
_form_addallowed(getval(form, 'newallowed').strip())
def exec_form (form): # reset info/error del info[:] del error[:] # proxy port if form.has_key('port'): _form_proxyport(form['port'].value) # proxy user if form.has_key('proxyuser'): _form_proxyuser(form['proxyuser'].value.strip()) # proxy pass if form.has_key('proxypass'): _form_proxypass(base64.encodestring(form['proxypass'].value.strip())) # parent proxy host if form.has_key('parentproxy'): _form_parentproxy(form['parentproxy'].value.strip()) # parent proxy port if form.has_key('parentproxyport'): _form_parentproxyport(form['parentproxyport'].value) # parent proxy user if form.has_key('parentproxyuser'): _form_parentproxyuser(form['parentproxyuser'].value.strip()) # parent proxy pass if form.has_key('parentproxypass'): _form_parentproxypass( base64.encodestring(form['parentproxypass'].value)) # timeout if form.has_key('timeout'): _form_timeout(form['timeout'].value) # filter modules _form_filtermodules(form) # allowed hosts if form.has_key('addallowed') and form.has_key('newallowed'): _form_addallowed(form['newallowed'].value.strip()) elif form.has_key('delallowed') and form.has_key('allowedhosts'): _form_delallowed(form['allowedhosts']) # no filter hosts if form.has_key('addnofilter') and form.has_key('newnofilter'): _form_addnofilter(form['newnofilter'].value.strip()) elif form.has_key('delnofilter') and form.has_key('nofilterhosts'): _form_delnofilter(form['nofilterhosts']) if info: # write changed config config.write_proxyconf()
_form_delallowed(form['allowedhosts'])
_form_delallowed(form)
def exec_form (form): # reset info/error del info[:] del error[:] # proxy port if form.has_key('port'): _form_proxyport(form['port'].value) # proxy user if form.has_key('proxyuser'): _form_proxyuser(form['proxyuser'].value.strip()) # proxy pass if form.has_key('proxypass'): _form_proxypass(base64.encodestring(form['proxypass'].value.strip())) # parent proxy host if form.has_key('parentproxy'): _form_parentproxy(form['parentproxy'].value.strip()) # parent proxy port if form.has_key('parentproxyport'): _form_parentproxyport(form['parentproxyport'].value) # parent proxy user if form.has_key('parentproxyuser'): _form_parentproxyuser(form['parentproxyuser'].value.strip()) # parent proxy pass if form.has_key('parentproxypass'): _form_parentproxypass( base64.encodestring(form['parentproxypass'].value)) # timeout if form.has_key('timeout'): _form_timeout(form['timeout'].value) # filter modules _form_filtermodules(form) # allowed hosts if form.has_key('addallowed') and form.has_key('newallowed'): _form_addallowed(form['newallowed'].value.strip()) elif form.has_key('delallowed') and form.has_key('allowedhosts'): _form_delallowed(form['allowedhosts']) # no filter hosts if form.has_key('addnofilter') and form.has_key('newnofilter'): _form_addnofilter(form['newnofilter'].value.strip()) elif form.has_key('delnofilter') and form.has_key('nofilterhosts'): _form_delnofilter(form['nofilterhosts']) if info: # write changed config config.write_proxyconf()
_form_addnofilter(form['newnofilter'].value.strip())
_form_addnofilter(getval(form, 'newnofilter').strip())
def exec_form (form): # reset info/error del info[:] del error[:] # proxy port if form.has_key('port'): _form_proxyport(form['port'].value) # proxy user if form.has_key('proxyuser'): _form_proxyuser(form['proxyuser'].value.strip()) # proxy pass if form.has_key('proxypass'): _form_proxypass(base64.encodestring(form['proxypass'].value.strip())) # parent proxy host if form.has_key('parentproxy'): _form_parentproxy(form['parentproxy'].value.strip()) # parent proxy port if form.has_key('parentproxyport'): _form_parentproxyport(form['parentproxyport'].value) # parent proxy user if form.has_key('parentproxyuser'): _form_parentproxyuser(form['parentproxyuser'].value.strip()) # parent proxy pass if form.has_key('parentproxypass'): _form_parentproxypass( base64.encodestring(form['parentproxypass'].value)) # timeout if form.has_key('timeout'): _form_timeout(form['timeout'].value) # filter modules _form_filtermodules(form) # allowed hosts if form.has_key('addallowed') and form.has_key('newallowed'): _form_addallowed(form['newallowed'].value.strip()) elif form.has_key('delallowed') and form.has_key('allowedhosts'): _form_delallowed(form['allowedhosts']) # no filter hosts if form.has_key('addnofilter') and form.has_key('newnofilter'): _form_addnofilter(form['newnofilter'].value.strip()) elif form.has_key('delnofilter') and form.has_key('nofilterhosts'): _form_delnofilter(form['nofilterhosts']) if info: # write changed config config.write_proxyconf()