Columns:
  rem: removed source lines (string, length 0-322k)
  add: added source lines (string, length 0-2.05M)
  context: surrounding source (string, length 8-228k)
"A long integer was expected in the value '%s'" % escape(v)
"A long integer was expected in the value %s" % escape(`v`)
def field2long(v):
    if isinstance(v, (ListType, TupleType)):
        return map(field2long, v)
    v = field2string(v)
    # handle trailing 'L' if present.
    if v[-1:] in ('L', 'l'):
        v = v[:-1]
    if v:
        try:
            return long(v)
        except ValueError:
            raise ValueError, (
                "A long integer was expected in the value '%s'" % escape(v)
                )
    raise ValueError, 'Empty entry when <strong>integer</strong> expected'
raise DateTime.SyntaxError, escape(e)
raise DateTime.SyntaxError, "Invalid DateTime "+escape(`v`)
def field2date(v):
    v = field2string(v)
    try:
        v = DateTime(v)
    except DateTime.SyntaxError, e:
        raise DateTime.SyntaxError, escape(e)
    return v
raise DateTime.SyntaxError, escape(e)
raise DateTime.SyntaxError, "Invalid DateTime "+escape(`v`)
def field2date_international(v):
    v = field2string(v)
    try:
        v = DateTime(v, datefmt="international")
    except DateTime.SyntaxError, e:
        raise DateTime.SyntaxError, escape(e)
    return v
setup(name="zope",
setup(name="zopex30",
def __init__(self, *attrs):
    Distribution.__init__(self, *attrs)
    self.cmdclass['build'] = MyBuilder
    self.cmdclass['build_ext'] = MyExtBuilder
    self.cmdclass['install_lib'] = MyLibInstaller
headers = ["persistent/cPersistence.h", "zope/proxy/proxy.h"], scripts = [],
setup(
    name='Five',
    author='Martijn Faassen',
    packages=['Products.Five'],
    data_files=[['Products/Five', ['Products/Five/*']],
                ['Products/Five/demo', ['Products/Five/demo/*']],
                ['Products/Five/doc', ['Products/Five/doc/*']],
                ['Products/Five/skel', ['Products/Five/skel/*']],
                ['Products/Five/tests', ['Products/Five/tests/*']],
                ],
    )
and not self._isPasswordEncrypted(pw):
and not self._isPasswordEncrypted(password):
def _doAddUser(self, name, password, roles, domains, **kw):
    """Create a new user"""
    if password is not None and self.encrypt_passwords \
    and not self._isPasswordEncrypted(pw):
        password = self._encryptPassword(password)
    self.data[name]=User(name,password,roles,domains)
if sys.platform != 'win32':
    locale.setlocale(locale.LC_ALL, 'de_DE.ISO8859-1')
else:
    locale.setlocale(locale.LC_ALL, 'German_Germany.1252')
words = ['mülltonne waschbär behörde überflieger']
try:
    if sys.platform != 'win32':
        locale.setlocale(locale.LC_ALL, 'de_DE.ISO8859-1')
    else:
        locale.setlocale(locale.LC_ALL, 'German_Germany.1252')
except locale.Error:
    return
expected = ['m\xfclltonne', 'waschb\xe4r', 'beh\xf6rde', '\xfcberflieger']
words = [" ".join(expected)]
def testSplitterLocaleAwareness(self):
    from Products.ZCTextIndex.HTMLSplitter import HTMLWordSplitter
    import locale
    loc = locale.setlocale(locale.LC_ALL)  # get current locale
    # set German locale
    if sys.platform != 'win32':
        locale.setlocale(locale.LC_ALL, 'de_DE.ISO8859-1')
    else:
        locale.setlocale(locale.LC_ALL, 'German_Germany.1252')
    words = ['mülltonne waschbär behörde überflieger']
    words = Splitter().process(words)
    self.assertEqual(
        words, ['mülltonne', 'waschbär', 'behörde', 'überflieger'])
    words = HTMLWordSplitter().process(words)
    self.assertEqual(
        words, ['mülltonne', 'waschbär', 'behörde', 'überflieger'])
    locale.setlocale(locale.LC_ALL, loc)  # restore saved locale
self.assertEqual(
    words, ['mülltonne', 'waschbär', 'behörde', 'überflieger'])
self.assertEqual(words, expected)
def manage_addZCatalog(self, id, title, vocab_id=None, REQUEST=None):
def manage_addZCatalog(self, id, title, vocab_id='create_default_catalog_', REQUEST=None):
def manage_addZCatalog(self, id, title, vocab_id=None, REQUEST=None): """Add a ZCatalog object """ id=str(id) title=str(title) vocab_id=str(vocab_id) if vocab_id == 'create_default_catalog_': vocab_id = None c=ZCatalog(id, title, vocab_id, self) self._setObject(id, c) if REQUEST is not None: return self.manage_main(self, REQUEST)
('View management screens', ('manage_tabs', 'manage_workspace')),
('Change permissions', ('manage_access',) ),
def PersistentClassDict(doc=None, meta_type=None):
    # Build new class dict
    dict={}
    dict.update(Template.__dict__)
    if meta_type is not None:
        dict['meta_type']=dict['__doc__']=meta_type
    if doc is not None:
        dict['__doc__']=doc
    return dict
self._zbases=bases
self._zbases=copy._zbases
def _setBasesHoldOnToYourButts(self, bases):
    # Eeeek
    copy=self.__class__(self.id, self.title, bases)
def index_html(self, id, REQUEST, RESPONSE=None):
def createInObjectManager(self, id, REQUEST, RESPONSE=None):
def index_html(self, id, REQUEST, RESPONSE=None): """ Create Z instance. If called with a RESPONSE, the RESPONSE will be redirected to the management screen of the new instance's parent Folder. Otherwise, the instance will be returned. """ i=mapply(self._zclass_, (), REQUEST) if not hasattr(i, 'id') or not i.id: i.id=id
__call__=index_html
index_html=createInObjectManager

def fromRequest(self, id=None, REQUEST={}):
    i=mapply(self._zclass_, (), REQUEST)
    if id is not None and (not hasattr(i, 'id') or not i.id):
        i.id=id
    return i

def __call__(self, *args, **kw):
    return apply(self._zclass_, args, kw)
h=HTTP()
h.connect(self.host, self.port)
h=HTTP(self.host, self.port)
def __call__(self,*args,**kw):
    method=self.method
    if method=='PUT' and len(args)==1 and not kw:
        query=[args[0]]
        args=()
    else:
        query=[]

    for i in range(len(args)):
        try:
            k=self.args[i]
            if kw.has_key(k): raise TypeError, 'Keyword arg redefined'
            kw[k]=args[i]
        except IndexError:
            raise TypeError, 'Too many arguments'
args += self._get_override("-m", "umask")
args += self._get_override("-m", "umask", oct(self.options.umask))
def do_start(self, arg):
    self.get_status()
    if not self.zd_up:
        args = [
            self.options.python,
            self.options.zdrun,
            ]
        args += self._get_override("-S", "schemafile")
        args += self._get_override("-C", "configfile")
        args += self._get_override("-b", "backofflimit")
        args += self._get_override("-d", "daemon", flag=1)
        args += self._get_override("-f", "forever", flag=1)
        args += self._get_override("-s", "sockname")
        args += self._get_override("-u", "user")
        args += self._get_override("-m", "umask")
        args += self._get_override(
            "-x", "exitcodes", ",".join(map(str, self.options.exitcodes)))
        args += self._get_override("-z", "directory")
        args.extend(self.options.program)
        if self.options.daemon:
            flag = os.P_NOWAIT
        else:
            flag = os.P_WAIT
        os.spawnvp(flag, args[0], args)
    elif not self.zd_pid:
        self.send_action("start")
    else:
        print "daemon process already running; pid=%d" % self.zd_pid
        return
    self.awhile(lambda: self.zd_pid,
                "daemon process started, pid=%(zd_pid)d")
rq=join(rq,'\n')
rq=join(rq,'\r\n')
def _mp_call(self,kw,
             type2suffix={
                 type(1.0): ':float',
                 type(1):   ':int',
                 type(1L):  ':long',
                 type([]):  ':list',
                 type(()):  ':tuple',
                 }
             ):
    # Call a function using the file-upload protcol
s.append('\n')
s.append('\r\n')
def render(self):
    h=self._headers
    s=[]
s.append('Content-Length: %s\n\n' % len(t))
s.append('Content-Length: %s\r\n\r\n' % len(t))
s.append('\n')
s.append('\n')
s.append('\r\n')
s.append('\r\n')
prev=rc[n]
if c > prev: rd.append( (c - prev, (c, prev, n)) )
try:
    prev=rc[n]
    if c > prev: rd.append( (c - prev, (c, prev, n)) )
except: pass
def rcdeltas(self):
    if _v_rcs is None: self.rcsnapshot()
    nc=self.refdict()
    rc=_v_rcs
    rd=[]
    for n, c in nc.items():
        prev=rc[n]
        if c > prev: rd.append( (c - prev, (c, prev, n)) )
    rd.sort()
    rd.reverse()
revid = nvrevid
revid = start_tid = nvrevid
while True:
    rec = c.next()
    if rec is None or rec[0][:8] <> oid:
        end_tid = None
        break
    vid = rec[1][:8]
    if vid == ZERO:
        end_tid = rec[0][8:]
        break
def _search_before(self, c, oid, tid, end_tid):
    # Operates on the cursor created by loadBefore().
    p = c.prev()
    if p is None:
        return None
    key, rec = p
    # If the previous record is for a different oid, then
    # there is no matching record.
    if key[:8] != oid:
        return None
    vid, nvrevid, lrevid = unpack(">8s8s8s", rec[:24])
    if vid == ZERO:
        revid = lrevid
    else:
        revid = nvrevid
    data = self._pickles[oid+revid]
    start_tid = key[8:]
    return data, start_tid, end_tid
start_tid = key[8:]
for base in bases:
    if 'UnIndex' in base:
        found = True
        break
if idx.meta_type == 'PathIndex':
    found = True
else:
    for base in bases:
        if 'UnIndex' in base:
            found = True
            break
def manage_convertIndexes(self, REQUEST=None, RESPONSE=None, URL1=None):
    """Recreate indexes derived from UnIndex because the implementation
    of __len__ changed in Zope 2.8. Pre-Zope 2.7 installation used to
    implement __len__ as persistent attribute of the index instance
    which is totally incompatible with the new extension class
    implementation based on new-style classes.
    """
except LookupError:
except (LookupError, SystemError):
def has_codec(x):
    try:
        codecs.lookup(x)
    except LookupError:
        return 0
    else:
        return 1
'\([\0- ]*\("[^\0- =\"]+"\)\)'),
'\([\0- ]*\("[^"]*"\)\)'),
def parse_params(text,
                 result=None,
                 tag='',
                 unparmre=regex.compile(
                     '\([\0- ]*\([^\0- =\"]+\)\)'),
                 qunparmre=regex.compile(
                     '\([\0- ]*\("[^\0- =\"]+"\)\)'),
                 parmre=regex.compile(
                     '\([\0- ]*\([^\0- =\"]+\)=\([^\0- =\"]+\)\)'),
                 qparmre=regex.compile(
                     '\([\0- ]*\([^\0- =\"]+\)="\([^"]*\)\"\)'),
                 **parms):
    """Parse tag parameters

    The format of tag parameters consists of 1 or more parameter
    specifications separated by whitespace.  Each specification
    consists of an unnamed and unquoted value, a valueless name, or a
    name-value pair.  A name-value pair consists of a name and a
    quoted or unquoted value separated by an '='.

    The input parameter, text, gives the text to be parsed.  The
    keyword parameters give valid parameter names and default values.

    If a specification is not a name-value pair and it is not the
    first specification and it is a valid parameter name, then it is
    treated as a name-value pair with a value as given in the keyword
    argument.  Otherwise, if it is not a name-value pair, it is
    treated as an unnamed value.

    The data are parsed into a dictionary mapping names to values.
    Unnamed values are mapped from the name '""'.  Only one value may
    be given for a name and there may be only one unnamed value. """

    result=result or {}

    if parmre.match(text) >= 0:
        name=lower(parmre.group(2))
        value=parmre.group(3)
        l=len(parmre.group(1))
    elif qparmre.match(text) >= 0:
        name=lower(qparmre.group(2))
        value=qparmre.group(3)
        l=len(qparmre.group(1))
    elif unparmre.match(text) >= 0:
        name=unparmre.group(2)
        l=len(unparmre.group(1))
        if result:
            if parms.has_key(name):
                if parms[name] is None:
                    raise ParseError, (
                        'Attribute %s requires a value' % name, tag)
                result[name]=parms[name]
            else:
                raise ParseError, (
                    'Invalid attribute name, "%s"' % name, tag)
        else:
            result['']=name
        return apply(parse_params,(text[l:],result),parms)
    elif qunparmre.match(text) >= 0:
        name=qunparmre.group(2)
        l=len(qunparmre.group(1))
        if result:
            raise ParseError, (
                'Invalid attribute name, "%s"' % name, tag)
        else:
            result['']=name
        return apply(parse_params,(text[l:],result),parms)
    else:
        if not text or not strip(text):
            return result
        raise ParseError, ('invalid parameter: "%s"' % text, tag)

    if not parms.has_key(name):
        raise ParseError, (
            'Invalid attribute name, "%s"' % name, tag)
    if result.has_key(name):
        p=parms[name]
        if type(p) is not ListType or p:
            raise ParseError, (
                'Duplicate values for attribute "%s"' % name, tag)

    result[name]=value

    text=strip(text[l:])
    if text:
        return apply(parse_params,(text,result),parms)
    else:
        return result
if ts_results:
if mo:
def parse(text,
          result=None,
          keys=None,
          unparmre=re.compile(
              r'([\000- ]*([^\000- ="]+))'),
          parmre=re.compile(
              r'([\000- ]*([^\000- ="]+)=([^\000- ="]+))'),
          qparmre=re.compile(
              r'([\000- ]*([^\000- ="]+)="([^"]*)")'),
          ):

    if result is None:
        result = {}
        keys=[]

    __traceback_info__=text

    mo = parmre.match(text)
    if mo:
        name=mo.group(2)
        value={'default':mo.group(3)}
        l=len(mo.group(1))
    else:
        mo = qparmre.match(text)
        if mo:
            name=mo.group(1)
            value={'default':mo.group(3)}
            l=len(mo.group(2))
        else:
            mo = unparmre.match(text)
            if ts_results:
                name=mo.group(2)
                l=len(mo.group(1))
                value={}
            else:
                if not text or not strip(text):
                    return Args(result,keys)
                raise InvalidParameter, text

    lt=string.find(name,':')
    if lt > 0:
        value['type']=name[lt+1:]
        name=name[:lt]

    result[name]=value
    keys.append(name)

    return parse(text[l:],result,keys)
define = todo.get("define")
def emitEndElement(self, name, isend=0, implied=0):
    todo = self.todoPop()
    if not todo:
        # Shortcut
        if not isend:
            self.emitEndTag(name)
        return
if len(i18nattrlist) == 2:
if len(i18nattrlist) == 1:
    addAttribute(d, i18nattrlist[0], None, position, xml)
elif len(i18nattrlist) == 2:
def addAttribute(dic, attr, msgid, position, xml):
    if not xml:
        attr = attr.lower()
    if attr in dic:
        raise TALError(
            "attribute may only be specified once in i18n:attributes: "
            + attr, position)
    dic[attr] = msgid
args = parse_params(args, name='', type=None, column=None,
args = parse_params(args, name='', expr='', type=None, column=None,
def __init__(self, args):
    args = parse_params(args, name='', type=None, column=None,
                        multiple=1, optional=1, op=None)
    self.__name__ = name_param(args,'sqlvar')
    has_key=args.has_key
    if not has_key('type'):
        raise ParseError, ('the type attribute is required', 'sqltest')
    self.type=t=args['type']
    if not valid_type(t):
        raise ParseError, ('invalid type, %s' % t, 'sqltest')
    if has_key('optional'): self.optional=args['optional']
    if has_key('multiple'): self.multiple=args['multiple']
    if has_key('column'): self.column=args['column']
    else: self.column=self.__name__
self.__name__ = name_param(args,'sqlvar')
has_key=args.has_key
if not has_key('type'):
    raise ParseError, ('the type attribute is required', 'sqltest')
name,expr = name_param(args,'sqlvar',1)
if expr is None: expr=name
else: expr=expr.eval
self.__name__, self.expr = name, expr
self.args=args
if not args.has_key('type'):
    raise ParseError, ('the type attribute is required', 'sqltest')
if has_key('optional'): self.optional=args['optional']
if has_key('multiple'): self.multiple=args['multiple']
if has_key('column'): self.column=args['column']
else: self.column=self.__name__
if has_key('op'):
if args.has_key('op'):
try:
    v = md[name]
except KeyError, key:
    if str(key)==name and self.optional: return ''
    raise KeyError, key, sys.exc_info()[2]
args=self.args
try:
    expr=self.expr
    if type(expr) is type(''): v=md[expr]
    else: v=expr(md)
except KeyError:
    if args.has_key('optional') and args['optional']:
        return ''
    raise 'Missing Input', 'Missing input variable, <em>%s</em>' % name
def render(self, md):
    name=self.__name__
    t=self.type
    try:
        v = md[name]
    except KeyError, key:
        if str(key)==name and self.optional: return ''
        raise KeyError, key, sys.exc_info()[2]

    if type(v) in (ListType, TupleType):
        if len(v) > 1 and not self.multiple:
            raise 'Multiple Values', (
                'multiple values are not allowed for <em>%s</em>' % name)
    else: v=[v]

    vs=[]
    for v in v:
        if not v and type(v) is StringType and t != 'string': continue
        if t=='int':
            try:
                if type(v) is StringType: atoi(v)
                else: v=str(int(v))
            except:
                raise ValueError, (
                    'Invalid integer value for <em>%s</em>' % name)
        elif t=='float':
            if not v and type(v) is StringType: continue
            try:
                if type(v) is StringType: atof(v)
                else: v=str(float(v))
            except:
                raise ValueError, (
                    'Invalid floating-point value for <em>%s</em>' % name)
        else:
            v=str(v)
            v=md.getitem('sql_quote__',0)(v)
            #if find(v,"\'") >= 0: v=join(split(v,"\'"),"''")
            #v="'%s'" % v
        vs.append(v)
if type(v) is StringType: atoi(v)
if type(v) is StringType:
    if v[-1:]=='L': v=v[:-1]
    atoi(v)
except:
except ValueError:
if prev_id in special: raise 'NotSupported', eNotSupported
if prev_id in r:
    raise 'NotSupported', Globals.MessageDialog(
        title='Not Supported',
        message='This item cannot be cut and pasted',
        action ='manage_main')
def manage_paste(self, moniker, clip_id, REQUEST=None):
    """ """
SUBMIT,dtpref_cols,dtpref_rows,REQUEST):
SUBMIT,sql_pref__cols,sql_pref__rows,REQUEST):
def _er(self,title,connection_id,arguments,template,
        SUBMIT,dtpref_cols,dtpref_rows,REQUEST):
    dr,dc = self._size_changes[SUBMIT]
    rows=max(1,atoi(dtpref_rows)+dr)
    cols=max(40,atoi(dtpref_cols)+dc)
    e='Friday, 31-Dec-99 23:59:59 GMT'
    resp=REQUEST['RESPONSE']
    resp.setCookie('dtpref_rows',str(rows),path='/',expires=e)
    resp.setCookie('dtpref_cols',str(cols),path='/',expires=e)
    return self.manage_main(
        self,REQUEST,
        title=title,
        arguments_src=arguments,
        connection_id=connection_id,
        template=template,
        dtpref_cols=cols,dtpref_rows=rows)
rows=max(1,atoi(dtpref_rows)+dr)
cols=max(40,atoi(dtpref_cols)+dc)
rows=max(1,atoi(sql_pref__rows)+dr)
cols=max(40,atoi(sql_pref__cols)+dc)
resp.setCookie('dtpref_rows',str(rows),path='/',expires=e)
resp.setCookie('dtpref_cols',str(cols),path='/',expires=e)
resp.setCookie('sql_pref__rows',str(rows),path='/',expires=e)
resp.setCookie('sql_pref__cols',str(cols),path='/',expires=e)
template=template,
dtpref_cols=cols,dtpref_rows=rows)
src=template,
sql_pref__cols=cols,sql_pref__rows=rows)
SUBMIT='Change',dtpref_cols='50', dtpref_rows='20',
SUBMIT='Change',sql_pref__cols='50', sql_pref__rows='20',
def manage_edit(self,title,connection_id,arguments,template,
                SUBMIT='Change',dtpref_cols='50', dtpref_rows='20',
                REQUEST=None):
SUBMIT,dtpref_cols,dtpref_rows,REQUEST)
SUBMIT,sql_pref__cols,sql_pref__rows,REQUEST)
obj = self.resolve_url(url, REQUEST)
obj = REQUEST.resolve_url(url, REQUEST)
def manage_catalogObject(self, REQUEST, urls=None, blah=None):
    """ index all Zope objects that 'urls' point to """
    if urls:
        for url in urls:
            try:
                # if an error happens here, the catalog will be in
                # an unstable state.  If this happens, ignore the
                # object.
                obj = self.resolve_url(url, REQUEST)
            except:
                continue
            self.catalog_object(obj, url)
obj = self.resolve_url(url, REQUEST)
obj = REQUEST.resolve_url(url, REQUEST)
def manage_uncatalogObject(self, REQUEST, urls=None):
    """ removes Zope object 'urls' from catalog """
obj = self.resolve_url(path, REQUEST)
obj = self.getobject(i, REQUEST)
def manage_catalogReindex(self, REQUEST):
    """ iterate over the whole catalog, deleting inexistent references
    and refreshing objects"""
    items = tuple(self._catalog.uids.items())
return self.manage_main(self, REQUEST,
return self.manage_catalogView(self, REQUEST,
return self.manage_main(self, REQUEST,
return self.manage_catalogView(self, REQUEST,
def manage_catalogFoundItems(self, REQUEST, obj_metatypes=None,
                             obj_ids=None, obj_searchterm=None,
                             obj_expr=None, obj_mtime=None,
                             obj_mspec=None, obj_roles=None,
                             obj_permission=None):
    """ Find object according to search criteria and Catalog them """
    results = self.ZopeFind(REQUEST.PARENTS[1],
                            obj_metatypes=obj_metatypes,
                            obj_ids=obj_ids,
                            obj_searchterm=obj_searchterm,
                            obj_expr=obj_expr,
                            obj_mtime=obj_mtime,
                            obj_mspec=obj_mspec,
                            obj_permission=obj_permission,
                            obj_roles=obj_roles,
                            search_sub=1,
                            REQUEST=REQUEST)
return REQUEST.clone().resolve_url(url)
obj = REQUEST.resolve_url(url)
return obj
def getobject(self, rid, REQUEST=None):
    """ Return a cataloged object given a 'data_record_id_' """
    if REQUEST is None:
        REQUEST=self.REQUEST
    url='%s/%s' %(REQUEST.script, self.getpath(rid))
    return REQUEST.clone().resolve_url(url)
return req.PARENTS[0]
result = req.PARENTS[0]
req.close()
return result

req.close()
def resolve_url(self, path, REQUEST):
    # Attempt to resolve a url into an object in the Zope
    # namespace. The url must be a fully-qualified url. The
    # method will return the requested object if it is found
    # or raise the same HTTP error that would be raised in
    # the case of a real web request. If the passed in url
    # does not appear to describe an object in the system
    # namespace (e.g. the host, port or script name dont
    # match that of the current request), a ValueError will
    # be raised.
from servicemanager import LogMsg
LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE, event,
from servicemanager import LogMsg, EVENTLOG_INFORMATION_TYPE
LogMsg(EVENTLOG_INFORMATION_TYPE, event,
def logmsg(self, event):
    # log a service event using servicemanager.LogMsg
    from servicemanager import LogMsg
    LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE, event,
           (self._svc_name_, " (%s)" % self._svc_display_name_))
"""
""" % sys.argv[0]
def main():
    import getopt
    from string import split
    user=None
    try:
        optlist, args = getopt.getopt(sys.argv[1:],'u:')
        url=args[0]
        u =filter(lambda o: o[0]=='-u', optlist)
        if u:
            [user, pw] = split(u[0][1],':')
        kw={}
        for arg in args[1:]:
            [name,v]=split(arg)
            kw[name]=v
    except:
        print """
Usage: %s [-u username:password] url [name=value ...]

where url is the web resource to call.

The -u option may be used to provide a user name and password.

Optional arguments may be provides as name=value pairs.
"""
        sys.exit(1)

    # The "main" program for this module
    f=BoboFunction(url)
    if user: f.username, f.password = user, pw
    print apply(f,(),kw)
class Catalog(Persistent, Acquisition.Implicit):
class Catalog(Persistent, Acquisition.Implicit, ExtensionClass.Base):
def orify(seq,
          query_map={
              type(regex.compile('')): Query.Regex,
              type(''): Query.String,
              }):
    subqueries=[]
    for q in seq:
        try: q=query_map[type(q)](q)
        except: q=Query.Cmp(q)
        subqueries.append(q)
    return apply(Query.Or,tuple(subqueries))
def __init__(self, brains=None):
def __init__(self, vocabulary=None, brains=None):
def __init__(self, brains=None):
self.lexicon = Lexicon()
if type(vocabulary) is type(''):
    self.lexicon = vocabulary
else:
    self.lexicon = Lexicon()
def __init__(self, brains=None):
if uid not in self.uids.keys(): raise ValueError, "Uncatalog of absent id %s" % `uid`
def uncatalogObject(self, uid):
    """ Uncatalog and object from the Catalog.
    and 'uid' is a unique Catalog identifier
index = self.indexes[i]
index = self.indexes[i].__of__(self)
def _indexedSearch(self, args, sort_index, append, used,
                   IIBType=type(IIBucket()), intSType=type(intSet())):
    """ Iterate through the indexes, applying the query to each one.
    Do some magic to join result sets.  Be intelligent about
    handling intSets and IIBuckets.
    """
('last_modified', 'TextIndex', 'd', None),
('last_modified', 'FieldIndex', 'd', None),
def __init__(self,id,title=None):
    self.id=id
    self.title=title
    self._ztable=ZTablesCore.ZTable(id)
('date', 'TextIndex', 'd', None),
('date', 'FieldIndex', 'd', None),
    if _looping:
        apply(callback, (map,)+args, kw or {})
    else:
        _loop_callbacks.append((callback, args, kw))
finally:
    _loop_lock.release()
    if _looping is not None:
        apply(callback, (_looping,) + args, kw or {})
    else:
        _loop_callbacks.append((callback, args, kw))
finally:
    _loop_lock.release()
def register_loop_callback(callback, args=(), kw=None):
    _loop_lock.acquire()
    try:
        if _looping:
            apply(callback, (map,)+args, kw or {})
        else:
            _loop_callbacks.append((callback, args, kw))
    finally:
        _loop_lock.release()
_looping=1
_looping = map
def _start_loop(map):
    _loop_lock.acquire()
    try:
        global _looping
        _looping=1
        while _loop_callbacks:
            cb, args, kw = _loop_callbacks.pop()
            apply(cb, (map,)+args, kw or {})
    finally:
        _loop_lock.release()
    _looping=0
finally:
    _loop_lock.release()
    _looping = None
finally:
    _loop_lock.release()
def _stop_loop():
    _loop_lock.acquire()
    try:
        global _looping
        _looping=0
    finally:
        _loop_lock.release()
if use_poll:
    poll_fun = asyncore.poll2
else:
    poll_fun = asyncore.poll
    This function functions like the regular asyncore.loop() function
    except that it also triggers ThreadedAsync callback functions
    before starting the loop.
    """
    if use_poll:
        if hasattr(select, 'poll'):
            poll_fun = asyncore.poll3
        else:
            poll_fun = asyncore.poll2
    else:
        poll_fun = asyncore.poll
def loop (timeout=30.0, use_poll=0, map=None):
    if use_poll:
        poll_fun = asyncore.poll2
    else:
        poll_fun = asyncore.poll

    if map is None:
        map=asyncore.socket_map

    _start_loop(map)
    while map:
        poll_fun (timeout, map)
    _stop_loop()
if os.environ.has_key('LOG_ZPUBLISHER_TRACEBACK'):
    zLOG.LOG('zpublisher',zLOG.WARNING,'Traceback:',body)
def exception(self, fatal=0, info=None,
              absuri_match=re.compile(r'\w+://[\w\.]+').match,
              tag_search=re.compile('[a-zA-Z]>').search,
              abort=1
              ):
    if type(info) is type(()) and len(info)==3:
        t, v, tb = info
    else:
        t, v, tb = sys.exc_info()
StorageTestBase.StorageTestBase.setUp(self)
def setUp(self):
    self.open()
    StorageTestBase.StorageTestBase.setUp(self)
if aq_base(container) is not container:
    try:
        roles = container.aq_acquire('__roles__')
if containerbase is container:
    if containerbase is not accessedbase:
        raise Unauthorized(name, value)
else:
    try:
        roles = container.aq_acquire('__roles__')
def validate(self, accessed, container, name, value, context,
             roles=_noroles, getattr=getattr, _noroles=_noroles,
             valid_aq_=('aq_parent','aq_inner', 'aq_explicit')):
pass
if containerbase is not accessedbase:
    raise Unauthorized(name, value)
owner = eo.getOwner()
if owner and not hasattr(owner, 'aq_parent'):
    udb = eo.getOwner(1)[0]
    root = container.getPhysicalRoot()
    udb = root.unrestrictedTraverse(udb)
    owner = owner.__of__(udb)
owner = eo.getWrappedOwner()
if not owner._check_context(container):
    raise Unauthorized(name, value)
if container is not containerbase:
    if not owner._check_context(container):
        raise Unauthorized(name, value)
self.interesting = interesting_normal
def goahead(self, end):
    rawdata = self.rawdata
    i = 0
    n = len(rawdata)
    while i < n:
        match = self.interesting.search(rawdata, i) # < or &
        if match:
            j = match.start()
            self.interesting = interesting_normal
        else:
            j = n
        if i < j: self.handle_data(rawdata[i:j])
        i = self.updatepos(i, j)
        if i == n: break
        if rawdata[i] == '<':
            if starttagopen.match(rawdata, i): # < + letter
                k = self.parse_starttag(i)
            elif endtagopen.match(rawdata, i): # </
                k = self.parse_endtag(i)
            elif commentopen.match(rawdata, i): # <!--
                k = self.parse_comment(i)
            elif piopen.match(rawdata, i): # <?
                k = self.parse_pi(i)
            elif declopen.match(rawdata, i): # <!
                k = self.parse_declaration(i)
            else:
                if i < n-1:
                    raise HTMLParseError(
                        "invalid '<' construct: %s" % `rawdata[i:i+2]`,
                        self.getpos())
                k = -1
            if k < 0:
                if end:
                    raise HTMLParseError("EOF in middle of construct",
                                         self.getpos())
                break
            i = self.updatepos(i, k)
        elif rawdata[i] == '&':
            match = charref.match(rawdata, i)
            if match:
                name = match.group(1)
                self.handle_charref(name)
                k = match.end()
                if rawdata[k-1] != ';':
                    k = k-1
                i = self.updatepos(i, k)
                continue
            match = entityref.match(rawdata, i)
            if match:
                name = match.group(1)
                self.handle_entityref(name)
                k = match.end()
                if rawdata[k-1] != ';':
                    k = k-1
                i = self.updatepos(i, k)
                continue
            if incomplete.match(rawdata, i):
                if end:
                    raise HTMLParseError(
                        "EOF in middle of entity or char ref",
                        self.getpos())
                return -1 # incomplete
            raise HTMLParseError("'&' not part of entity or char ref",
                                 self.getpos())
        else:
            assert 0, "interesting.search() lied"
    # end while
    if end and i < n:
        self.handle_data(rawdata[i:n])
        i = self.updatepos(i, n)
    self.rawdata = rawdata[i:]
s = -1
i = -1
def index(self, src, srckey):
    '''\
    index(src, srckey)
s=getattr(client, 'standard_error_message')
if hasattr(client, 'standard_error_message'):
    s=getattr(client, 'standard_error_message')
else:
    client = client.aq_parent
    s=getattr(client, 'standard_error_message')
def raise_standardErrorMessage(
    self, client=None, REQUEST={},
    error_type=None, error_value=None, tb=None,
    error_tb=None, error_message='',
    tagSearch=regex.compile('[a-zA-Z]>').search):
except: v='Sorry, an error occured'
except: v = error_value or "Sorry, an error occurred"
raise Globals.MessageDialog(
raise MessageDialog(
def _setId(self, id):
    if id != self.id:
        raise Globals.MessageDialog(
            title='Invalid Id',
            message='Cannot change the id of a %s' % self.meta_type,
            action ='./manage_main',)
host_match=regex.compile('[A-Za-z0-9\.\*]*').match
host_match=regex.compile('[-A-Za-z0-9\.\*]*').match
def rolejoin(roles, other):
    dict={}
    for role in roles:
        dict[role]=1
    for role in other:
        dict[role]=1
    roles=dict.keys()
    roles.sort()
    return roles
ServerError='bci.ServerError
ServerError='bci.ServerError'
def marshal_tuple(n,l):
    return join(map(lambda v, n=n: "%s:tuple=%s" % (n,quote(v)),l),'&')
zn='+%d' % localzone
lz='+%d' % localzone
def __init__(self,*args):
    """Return a new date-time object
dt1 = DateTime('%d/%d/%d %d:%d:%f %s' % (
dt1s = '%d/%d/%d %d:%d:%f %s' % (
def testConstructor3(self):
    '''Constructor from date/time string'''
    dt = DateTime()
    dt1 = DateTime('%d/%d/%d %d:%d:%f %s' % (
        dt.year(), dt.month(), dt.day(), dt.hour(),
        dt.minute(), dt.second(), dt.timezone()))
    assert dt.debugCompare(dt1), (dt, dt1)
dt.timezone()))
assert dt.debugCompare(dt1), (dt, dt1)
dt.timezone())
dt1 = DateTime(dt1s)
_compare(dt, dt1)
assert dt.debugCompare(dt1), (dt, dt1)
_compare(dt, dt1)
def testConstructor6(self):
    '''Constructor from year and julian date'''
    dt = DateTime('1980/1/5 12:00:00.050 pm')
    dt1 = DateTime(1980, 5.500000578705)
    assert dt.debugCompare(dt1), (dt, dt1)
assert dt - dt1 == 3000000.0, (dt, dt1)
assert dt - dt1 == 3000000.0, (dt - dt1)
def testY10KDate(self):
    '''Comparison of a Y10K date and a Y2K date'''
    dt = DateTime('10213/09/21')
    dt1 = DateTime(2000, 1, 1)
    assert dt - dt1 == 3000000.0, (dt, dt1)
manage_getPermissionMapping__roles__=PermissionRole('Change permissions')
def manage_getPermissionMapping(self):
    """Return the permission mapping for the object
manage_setPermissionMapping__roles__=PermissionRole('Change permissions')
def manage_setPermissionMapping(self,
                                permission_names=[],
                                class_permissions=[],
                                REQUEST=None):
    """Change the permission mapping
    """
    wrapper=getattr(self, '_permissionMapper', None)
    if wrapper is None: wrapper=PM()
if not hasattr(pack, '_m'): pack._m={}
fd=getattr(pack, '__FactoryDispatcher__', None)
if fd is None:
    class __FactoryDispatcher__(FactoryDispatcher):
        "Factory Dispatcher for a Specific Product"
    fd = pack.__FactoryDispatcher__ = __FactoryDispatcher__
if not hasattr(pack, '_m'): pack._m=fd.__dict__
def registerClass(self, instance_class=None, meta_type='',
                  permission=None, constructors=(),
                  icon=None, permissions=None, legacy=(),
                  ):
    """Register a constructor
setattr(Connection, k, v)
if not k.startswith('__'): setattr(Connection, k, v)
def close(self):
    if self._root_connection is not None:
        raise RuntimeError("Should not close mounted connections directly")
    conns = self._mounted_connections
    if conns:
        for conn in conns.values():
            # Notify the activity monitor
            db = conn.db()
            f = getattr(db, 'getActivityMonitor', None)
            if f is not None:
                am = f()
                if am is not None:
                    am.closedConnection(conn)
            conn._incrgc() # This is a good time to do some GC
            # XXX maybe we ought to call the close callbacks.
            conn._storage = conn._tmp = conn.new_oid = conn._opened = None
            conn._debug_info = ()
            # The mounted connection keeps a reference to
            # its database, but nothing else.
            # Note that mounted connections can not operate
            # independently, so don't use _closeConnection() to
            # return them to the pool.  Only the root connection
            # should be returned.
    # Close this connection only after the mounted connections
    # have been closed.  Otherwise, this connection gets returned
    # to the pool too early and another thread might use this
    # connection before the mounted connections have all been
    # closed.
    self._real_close()
self._nearsec=round(sc)
self._nearsec=math.floor(sc)
def __init__(self,*args):
    """Return a new date-time object
import pdb
pdb.set_trace()
print '1'
def include(name, arguments, options, content, lineno,
            content_offset, block_text, state, state_machine):
    """Include a reST file as part of the content of this reST file."""
    import pdb
    pdb.set_trace()
    print '1'
    if not state.document.settings.file_insertion_enabled:
        warning = state_machine.reporter.warning(
            '"%s" directive disabled.' % name,
            nodes.literal_block(block_text, block_text), line=lineno)
        return [warning]
    source = state_machine.input_lines.source(
        lineno - state_machine.input_offset - 1)
    source_dir = os.path.dirname(os.path.abspath(source))
    path = directives.path(arguments[0])
    path = os.path.normpath(os.path.join(source_dir, path))
    path = utils.relative_path(None, path)
    encoding = options.get('encoding',
                           state.document.settings.input_encoding)
    try:
        state.document.settings.record_dependencies.add(path)
        include_file = io.FileInput(
            source_path=path, encoding=encoding,
            error_handler=state.document.settings.input_encoding_error_handler,
            handle_io_errors=None)
    except IOError, error:
        severe = state_machine.reporter.severe(
            'Problems with "%s" directive path:\n%s: %s.'
            % (name, error.__class__.__name__, error),
            nodes.literal_block(block_text, block_text), line=lineno)
        return [severe]
    try:
        include_text = include_file.read()
    except UnicodeError, error:
        severe = state_machine.reporter.severe(
            'Problem with "%s" directive:\n%s: %s'
            % (name, error.__class__.__name__, error),
            nodes.literal_block(block_text, block_text), line=lineno)
        return [severe]
    if options.has_key('literal'):
        literal_block = nodes.literal_block(include_text, include_text,
                                            source=path)
        literal_block.line = 1
        return literal_block
    else:
        include_lines = statemachine.string2lines(include_text,
                                                  convert_whitespace=1)
        state_machine.insert_input(include_lines, path)
    return []
RESPONSE.setStatus(207)
RESPONSE.setHeader('Content-Type', 'text/xml; charset="utf-8"')
RESPONSE.setBody(result)
return RESPONSE
def PROPFIND(self, REQUEST, RESPONSE):
    """Retrieve properties defined on the resource."""
    self.dav__init(REQUEST, RESPONSE)
    cmd=davcmds.PropFind(REQUEST)
    result=cmd.apply(self)
    RESPONSE.setStatus(207)
    RESPONSE.setHeader('Content-Type', 'text/xml; charset="utf-8"')
    RESPONSE.setBody(result)
    return RESPONSE
    # work around MSIE DAV bug for creation and modified date
    if (REQUEST.get_header('User-Agent') ==
        'Microsoft Data Access Internet Publishing Provider DAV 1.1'):
        result = result.replace('<n:getlastmodified xmlns:n="DAV:">',
                                '<n:getlastmodified xmlns:n="DAV:" xmlns:b="urn:uuid:c2f41010-65b3-11d1-a29f-00aa00c14882/" b:dt="dateTime.rfc1123">')
        result = result.replace('<n:creationdate xmlns:n="DAV:">',
                                '<n:creationdate xmlns:n="DAV:" xmlns:b="urn:uuid:c2f41010-65b3-11d1-a29f-00aa00c14882/" b:dt="dateTime.tz">')
if self.encrypt_passwords and not self._isPasswordEncrypted(pw):
if self.encrypt_passwords and not self._isPasswordEncrypted(password):
def _doChangeUser(self, name, password, roles, domains, **kw):
    user=self.data[name]
    if password is not None:
        if self.encrypt_passwords and not self._isPasswordEncrypted(pw):
            password = self._encryptPassword(password)
        user.__=password
    user.roles=roles
    user.domains=domains
'Access contents information', ('helpValues',)),
('Access contents information', ('helpValues',)),
def get_catalog(self):
    return self.catalog
catch_log_errors()
def setUp(self):
    self.index=SearchIndex.UnTextIndex.UnTextIndex('text')
    self.doc=Dummy(text='this is the time, when all good zopes')
    catch_log_errors()
ignore_log_errors()
def tearDown(self):
    ignore_log_errors()
    get_transaction().abort()
    if hasattr(self, 'jar'):
        self.dbclose()
        cleanDB()
    self.__dict__.clear()
if find(msg,'client'):
if find(msg,'client')>=0:
def __call__(self, REQUEST=None, __ick__=None, src__=0, test__=0, **kw):
    """Call the database method
if stid == tid:
    data = self._pickles.get(oid+stid, txn=txn)
    assert data is not None
    self._update(deltas, data, 1)
else:
if stid <> tid:
def _docommit(self, txn, tid):
    self._pending.put(self._serial, COMMIT, txn)
    deltas = {}
    co = cs = None
    try:
        co = self._oids.cursor(txn=txn)
        cs = self._serials.cursor(txn=txn)
        rec = co.first()
        while rec:
            oid = rec[0]
            rec = co.next()
            # Remove from the serials table all entries with key oid where
            # the serial is not tid.  These are the old revisions of the
            # object.  At the same time, we want to collect the oids of
            # the objects referred to by this revision's pickle, so that
            # later we can decref those reference counts.
            srec = cs.set(oid)
            while srec:
                soid, stid = srec
                if soid <> oid:
                    break
                if stid == tid:
                    # This is the current revision of the object, so
                    # increment the refcounts of all referents
                    data = self._pickles.get(oid+stid, txn=txn)
                    assert data is not None
                    self._update(deltas, data, 1)
                else:
                    # This is the previous revision of the object, so
                    # decref its referents and clean up its pickles.
                    cs.delete()
                    data = self._pickles.get(oid+stid, txn=txn)
                    assert data is not None
                    self._update(deltas, data, -1)
                    self._pickles.delete(oid+stid, txn=txn)
                srec = cs.next_dup()
            # Now add incref deltas for all objects referenced by the new
            # revision of this object.
            data = self._pickles.get(oid+tid, txn=txn)
            assert data is not None
            self._update(deltas, data, 1)
    finally:
        # There's a small window of opportunity for leaking a cursor here,
        # if co.close() were to fail.  In practice this shouldn't happen.
        if co: co.close()
        if cs: cs.close()
    # We're done with this table
    self._pending.truncate(txn)
    # If we're in the middle of a pack, we need to add to the packmark
    # table any objects that were modified in this transaction.
    # Otherwise, there's a race condition where mark might have happened,
    # then the object is added, then sweep runs, deleting the object
    # created in the interrim.
    if self._packing:
        for oid in self._oids.keys():
            self._packmark.put(oid, PRESENT, txn=txn)
    self._oids.truncate(txn)
    # Now, to finish up, we need apply the refcount deltas to the
    # refcounts table, and do recursive collection of all refcount == 0
    # objects.
    while deltas:
        deltas = self._update_refcounts(deltas, txn)
search_type=regex.compile('\(:[a-zA-Z][a-zA-Z0-9_]+\|\.[xy]\)$').search,
search_type=re.compile('(:[a-zA-Z][a-zA-Z0-9_]+|\\.[xy])$').search,
def processInputs(
    self,
    # "static" variables that we want to be local for speed
    SEQUENCE=1,
    DEFAULT=2,
    RECORD=4,
    RECORDS=8,
    REC=12, # RECORD|RECORDS
    EMPTY=16,
    CONVERTED=32,
    hasattr=hasattr, getattr=getattr, setattr=setattr,
    search_type=regex.compile('\(:[a-zA-Z][a-zA-Z0-9_]+\|\.[xy]\)$').search,
    rfind=string.rfind,
    ):
    """Process request inputs
l=search_type(key,l)
mo = search_type(key,l)
if mo:
    l=mo.start(0)
else:
    l=-1
qparmre=regex.compile(
    '\([\0- ]*'
    '\([^\0- ;,=\"]+\)="\([^"]*\)\"'
    '\([\0- ]*[;,]\)?[\0- ]*\)'
    ),
parmre=regex.compile(
    '\([\0- ]*'
    '\([^\0- ;,=\"]+\)=\([^\0- ;,\"]*\)'
    '\([\0- ]*[;,]\)?[\0- ]*\)'
    ),
qparmre=re.compile(
    '([\x00- ]*([^\x00- ;,="]+)="([^"]*)"([\x00- ]*[;,])?[\x00- ]*)'),
parmre=re.compile(
    '([\x00- ]*([^\x00- ;,="]+)=([^\x00- ;,"]*)([\x00- ]*[;,])?[\x00- ]*)'),
def parse_cookie(text,
                 result=None,
                 qparmre=regex.compile(
                     '\([\0- ]*'
                     '\([^\0- ;,=\"]+\)="\([^"]*\)\"'
                     '\([\0- ]*[;,]\)?[\0- ]*\)'
                     ),
                 parmre=regex.compile(
                     '\([\0- ]*'
                     '\([^\0- ;,=\"]+\)=\([^\0- ;,\"]*\)'
                     '\([\0- ]*[;,]\)?[\0- ]*\)'
                     ),
                 acquire=parse_cookie_lock.acquire,
                 release=parse_cookie_lock.release,
                 ):
    if result is None: result={}
    already_have=result.has_key

    acquire()
    try:
        if qparmre.match(text) >= 0:
            # Match quoted correct cookies
            name=qparmre.group(2)
            value=qparmre.group(3)
            l=len(qparmre.group(1))
        elif parmre.match(text) >= 0:
            # Match evil MSIE cookies ;)
            name=parmre.group(2)
            value=parmre.group(3)
            l=len(parmre.group(1))
        else:
            # this may be an invalid cookie.
            # We'll simply bail without raising an error
            # if the cookie is invalid.
            return result
    finally:
        release()

    if not already_have(name): result[name]=value

    return apply(parse_cookie,(text[l:],result))
if qparmre.match(text) >= 0:
mo_q = qparmre.match(text)
if mo_q:
name=qparmre.group(2)
value=qparmre.group(3)
l=len(qparmre.group(1))
elif parmre.match(text) >= 0:
l = len(mo_q.group(1))
name = mo_q.group(2)
value = mo_q.group(3)
else:
name=parmre.group(2)
value=parmre.group(3)
l=len(parmre.group(1))
else:
    return result
mo_p = parmre.match(text)
if mo_p:
    l = len(mo_p.group(1))
    name = mo_p.group(2)
    value = mo_p.group(3)
else:
    return result