rem (stringlengths 0-322k) | add (stringlengths 0-2.05M) | context (stringlengths 8-228k) |
---|---|---|
write_inituser(inituser, user, password) | if user and password: write_inituser(inituser, user, password) | def main(): try: opts, args = getopt.getopt(sys.argv[1:], "hu:d:s:", ["help", "user=", "dir=", "skelsrc="] ) except getopt.GetoptError, msg: usage(sys.stderr, msg) sys.exit(2) script = os.path.abspath(sys.argv[0]) user = None password = None skeltarget = None skelsrc = None for opt, arg in opts: if opt in ("-d", "--dir"): skeltarget = os.path.abspath(os.path.expanduser(arg)) if not skeltarget: usage(sys.stderr, "dir must not be empty") sys.exit(2) if opt in ("-s", "--skelsrc"): skelsrc = os.path.abspath(os.path.expanduser(arg)) if not skelsrc: usage(sys.stderr, "skelsrc must not be empty") sys.exit(2) if opt in ("-h", "--help"): usage(sys.stdout) sys.exit() if opt in ("-u", "--user"): if not arg: usage(sys.stderr, "user must not be empty") sys.exit(2) if not ":" in arg: usage(sys.stderr, "user must be specified as name:password") sys.exit(2) user, password = arg.split(":", 1) if not skeltarget: # interactively ask for skeltarget and initial user name/passwd. # cant set custom instancehome in interactive mode, we default # to skeltarget. skeltarget = instancehome = os.path.abspath( os.path.expanduser(get_skeltarget()) ) instancehome = skeltarget zopehome = os.path.dirname(os.path.dirname(script)) softwarehome = os.path.join(zopehome, "lib", "python") configfile = os.path.join(instancehome, 'etc', 'zope.conf') if skelsrc is None: # default to using stock Zope skeleton source skelsrc = os.path.join(zopehome, "skel") inituser = os.path.join(instancehome, "inituser") if not (user or os.path.exists(inituser)): user, password = get_inituser() # we need to distinguish between python.exe and pythonw.exe under # Windows in order to make Zope run using python.exe when run in a # console window and pythonw.exe when run as a service, so we do a bit # of sniffing here. psplit = os.path.split(sys.executable) exedir = os.path.join(*psplit[:-1]) pythonexe = os.path.join(exedir, 'python.exe') pythonwexe = os.path.join(exedir, 'pythonw.exe') if ( os.path.isfile(pythonwexe) and os.path.isfile(pythonexe) and (sys.executable in [pythonwexe, pythonexe]) ): # we're using a Windows build with both python.exe and pythonw.exe # in the same directory PYTHON = pythonexe PYTHONW = pythonwexe else: # we're on UNIX or we have a nonstandard Windows setup PYTHON = PYTHONW = sys.executable kw = { "PYTHON":PYTHON, "PYTHONW":PYTHONW, "INSTANCE_HOME": instancehome, "SOFTWARE_HOME": softwarehome, "ZOPE_HOME": zopehome, } copyzopeskel.copyskel(skelsrc, skeltarget, None, None, **kw) write_inituser(inituser, user, password) |
reverse=0) | reverse=1) | def __init__(self, blocks): tname, args, section = blocks[0] args=parse_params(args, name='', start='1',end='-1',size='10', orphan='3',overlap='1',mapping=1, skip_unauthorized=1, previous=1, next=1, expr='', sort='', reverse=0) self.args=args has_key=args.has_key |
self.reverse=1 self.sort='' else: self.reverse=0 | self.reverse=args['reverse'] | def __init__(self, blocks): tname, args, section = blocks[0] args=parse_params(args, name='', start='1',end='-1',size='10', orphan='3',overlap='1',mapping=1, skip_unauthorized=1, previous=1, next=1, expr='', sort='', reverse=0) self.args=args has_key=args.has_key |
if self.sort is not None: sequence=self.sort_sequence(sequence) | if self.sort is not None: sequence=self.sort_sequence(sequence) if self.reverse is not None: sequence.reverse() | def renderwb(self, md): expr=self.expr name=self.__name__ if expr is None: sequence=md[name] cache={ name: sequence } else: sequence=expr(md) cache=None |
if self.sort is not None: sequence=self.sort_sequence(sequence) | if self.sort is not None: sequence=self.sort_sequence(sequence) if self.reverse is not None: sequence.reverse() | def renderwob(self, md): expr=self.expr name=self.__name__ if expr is None: sequence=md[name] cache={ name: sequence } else: sequence=expr(md) cache=None |
if self.reverse: s.reverse() |  | def sort_sequence(self, sequence): |
imp = getattr(self.aq_parent, '%s__roles__' % self.__name__) return imp.__of__(self) | imp = getattr(self.aq_inner.aq_parent, '%s__roles__' % self.__name__) if hasattr(imp, '__of__'): return imp.__of__(self) return imp | def _get__roles__(self): imp = getattr(self.aq_parent, '%s__roles__' % self.__name__) return imp.__of__(self) |
try: self.ids.remove(Id) except: pass | try: self.ids.remove(documentId) except KeyError: pass | def unindex_object(self,documentId): try: self.ids.remove(Id) except: pass |
id=id.strip() |  | def _setProperty(self, id, value, type='string'): # for selection and multiple selection properties # the value argument indicates the select variable # of the property id=id.strip() self._wrapperCheck(value) if not self.valid_property_id(id): raise 'Bad Request', 'Invalid or duplicate property id' |
self._setProperty(id, value, type) | self._setProperty(id.strip(), value, type) | def manage_addProperty(self, id, value, type, REQUEST=None): """Add a new property via the web. Sets a new property with the given id, type, and value.""" if type_converters.has_key(type): value=type_converters[type](value) self._setProperty(id, value, type) if REQUEST is not None: return self.manage_propertiesForm(self, REQUEST) |
self._context = context | self._context_ref = ref(context) | def __init__(self, name, seq, context): ZTUtils.Iterator.__init__(self, seq) self.name = name self._context = context |
self._context.setLocal(self.name, self.item) | context = self._context_ref() if context is not None: context.setLocal(self.name, self.item) | def next(self): if ZTUtils.Iterator.next(self): self._context.setLocal(self.name, self.item) return 1 return 0 |
lexicon_id = lexicon_id or extra.lexicon_idp | lexicon_id = lexicon_id or extra.lexicon_id | def __init__(self, id, extra=None, caller=None, index_factory=None, field_name=None, lexicon_id=None): self.id = id |
{'icon':icon, 'label':'Properties', 'action':'manage_main', 'target':'manage_main'}, {'icon':icon, 'label':'Try It', 'action':'', 'target':'manage_main'}, {'icon':'AccessControl/AccessControl_icon.gif', 'label':'Access Control', 'action':'manage_rolesForm', 'target':'manage_main'}, | {'label':'Properties', 'action':'manage_main'}, {'label':'Try It', 'action':''}, {'label':'Access Control', 'action':'manage_access'}, | def add(self, id, title, external_name, REQUEST=None): """Add an external method to a folder""" names=split(external_name,'.') module, function = join(names[:-1],'.'), names[-1] i=ExternalMethod(id,title,module,function) self._setObject(id,i) return self.manage_main(self,REQUEST) |
dbtab.databases.update(getattr(DB, 'databases', {})) DB.databases = dbtab.databases else: DB = ZODB.DB(m.Storage, databases=dbtab.databases) | databases.update(getattr(DB, 'databases', {})) DB.databases = databases else: DB = ZODB.DB(m.Storage, databases=databases) | def startup(): global app # Import products OFS.Application.import_products() configuration = getConfiguration() # Open the database dbtab = configuration.dbtab try: # Try to use custom storage try: m=imp.find_module('custom_zodb',[configuration.testinghome]) except: m=imp.find_module('custom_zodb',[configuration.instancehome]) except: # if there is no custom_zodb, use the config file specified databases DB = dbtab.getDatabase('/', is_root=1) else: m=imp.load_module('Zope2.custom_zodb', m[0], m[1], m[2]) sys.modules['Zope2.custom_zodb']=m if hasattr(m,'DB'): DB=m.DB dbtab.databases.update(getattr(DB, 'databases', {})) DB.databases = dbtab.databases else: DB = ZODB.DB(m.Storage, databases=dbtab.databases) Globals.BobobaseName = DB.getName() if DB.getActivityMonitor() is None: from ZODB.ActivityMonitor import ActivityMonitor DB.setActivityMonitor(ActivityMonitor()) Globals.DB = DB # Ick, this is temporary until we come up with some registry Zope2.DB = DB # Hook for providing multiple transaction object manager undo support: Globals.UndoManager=DB Globals.opened.append(DB) import ClassFactory DB.classFactory = ClassFactory.ClassFactory # "Log on" as system user newSecurityManager(None, AccessControl.User.system) # Set up the "app" object that automagically opens # connections app = App.ZApplication.ZApplicationWrapper( DB, 'Application', OFS.Application.Application, (), Globals.VersionNameName) Zope2.bobo_application = app # Initialize the app object application = app() OFS.Application.initialize(application) if Globals.DevelopmentMode: # Set up auto-refresh. from App.RefreshFuncs import setupAutoRefresh setupAutoRefresh(application._p_jar) application._p_jar.close() # "Log off" as system user noSecurityManager() global startup_time startup_time = asctime() Zope2.zpublisher_transactions_manager = TransactionsManager() Zope2.zpublisher_exception_hook = zpublisher_exception_hook Zope2.zpublisher_validated_hook = validated_hook Zope2.__bobo_before__ = noSecurityManager |
path to the ZCatalog (since two Zope object's cannot have the same | path to the ZCatalog (since two Zope objects cannot have the same | def manage_addZCatalog(id, title, vocab_id=None): """ Add a ZCatalog object. 'vocab_id' is the name of a Vocabulary object this catalog should use. A value of None will cause the Catalog to create its own private vocabulary. """ |
% lexicon.getId()) | % repr(lexicon)) | def getLexicon(self): """Get the lexicon for this index """ if hasattr(aq_base(self), 'lexicon'): # Fix up old ZCTextIndexes by removing direct lexicon ref # and changing it to an ID lexicon = getattr(aq_parent(aq_inner(self)), self.lexicon.getId()) self.lexicon_id = lexicon.getId() del self.lexicon |
e.registerType('provider', TALESProviderExpression) | e.registerType('provider', Z2ProviderExpression) | def createZopeEngine(): e = ZopeEngine() e.iteratorFactory = PathIterator for pt in ZopePathExpr._default_type_names: e.registerType(pt, ZopePathExpr) e.registerType('string', StringExpr) e.registerType('python', ZRPythonExpr.PythonExpr) e.registerType('not', NotExpr) e.registerType('defer', DeferExpr) e.registerType('lazy', LazyExpr) e.registerType('provider', TALESProviderExpression) e.registerBaseName('modules', SecureModuleImporter) return e |
def setProperty(self, id, value, type='string', meta=None): | def _setProperty(self, id, value, type='string', meta=None): | def setProperty(self, id, value, type='string', meta=None): # Set a new property with the given id, value and optional type. # Note that different property sets may support different typing # systems. if not self.valid_property_id(id): raise 'Bad Request', 'Invalid property id.' self=self.v_self() if meta is None: meta={} prop={'id':id, 'type':type, 'meta':meta} self._properties=self._properties+(prop,) setattr(self, id, value) |
def updateProperty(self, id, value): | def _updateProperty(self, id, value): | def updateProperty(self, id, value): # Update the value of an existing property. If value is a string, # an attempt will be made to convert the value to the type of the # existing property. if not self.hasProperty(id): raise 'Bad Request', 'The property %s does not exist.' % id if type(value)==type(''): proptype=self.propertyInfo(id).get('type', 'string') if type_converters.has_key(proptype): value=type_converters[proptype](value) setattr(self.v_self(), id, value) |
def delProperty(self, id): | def _delProperty(self, id): | def delProperty(self, id): # Delete the property with the given id. If a property with the # given id does not exist, a ValueError is raised. if not self.hasProperty(id): raise ValueError, 'The property %s does not exist.' % id self=self.v_self() delattr(self, id) self._properties=tuple(filter(lambda i, n=id: i['id'] != n, self._properties)) |
self.setProperty(id, value, type) | self._setProperty(id, value, type) | def manage_addProperty(self, id, value, type, REQUEST=None): """Add a new property via the web. Sets a new property with the given id, type, and value.""" if type_converters.has_key(type): value=type_converters[type](value) self.setProperty(id, value, type) if REQUEST is not None: return self.manage_propertiesForm(self, REQUEST) |
vself.updateProperty(name, value) | vself._updateProperty(name, value) | def manage_changeProperties(self, REQUEST=None, **kw): """Change existing object properties by passing either a mapping object of name:value pairs {'foo':6} or passing name=value parameters.""" if REQUEST is None: props={} else: props=REQUEST if kw: for name, value in kw.items(): props[name]=value propdict=self.propdict() vself=self.v_self() for name, value in props.items(): if self.hasProperty(name): if not 'w' in propdict[name].get('mode', 'wd'): raise 'BadRequest', '%s cannot be changed.' % name vself.updateProperty(name, value) if REQUEST is not None: return MessageDialog( title ='Success!', message='Your changes have been saved.', action ='manage_propertiesForm') |
self.delProperty(id) | self._delProperty(id) | def manage_delProperties(self, ids=None, REQUEST=None): """Delete one or more properties specified by 'ids'.""" if ids is None: return MessageDialog( title='No property specified', message='No properties were specified!', action ='./manage_propertiesForm',) propdict=self.propdict() vself=self.v_self() if hasattr(vself, '_reserved_names'): nd=vself._reserved_names else: nd=() for id in ids: if not propdict.has_key(id): raise 'BadRequest', ( 'The property <em>%s</em> does not exist.' % id) if (not 'd' in propdict[id].get('mode', 'wd')) or (id in nd): return MessageDialog( title ='Cannot delete %s' % id, message='The property <em>%s</em> cannot be deleted.' % id, action ='manage_propertiesForm') self.delProperty(id) if REQUEST is not None: return self.manage_propertiesForm(self, REQUEST) |
def setProperty(self, id, value, type='string', meta=None): | def _setProperty(self, id, value, type='string', meta=None): | def setProperty(self, id, value, type='string', meta=None): raise ValueError, 'Property cannot be set.' |
def updateProperty(self, id, value): | def _updateProperty(self, id, value): | def updateProperty(self, id, value): raise ValueError, 'Property cannot be set.' |
def delProperty(self, id): | def _delProperty(self, id): | def delProperty(self, id): raise ValueError, 'Property cannot be deleted.' |
if find(s,'\\') < 0 or (find(s,'\\t') < 0 and find(s,'\\n') < 0): return s | if find(s,'\\') < 0 and (find(s,'\\t') < 0 and find(s,'\\n') < 0): return s | def parse_text(s): if find(s,'\\') < 0 or (find(s,'\\t') < 0 and find(s,'\\n') < 0): return s r=[] for x in split(s,'\\\\'): x=join(split(x,'\\n'),'\n') r.append(join(split(x,'\\t'),'\t')) return join(r,'\\') |
warnings.warn('Using the zLOG module is deprecated (to be removed in ' | warnings.warn('The zLOG package is deprecated and will be removed in ' | def LOG(subsystem, severity, summary, detail='', error=None, reraise=None): """Log some information The required arguments are: subsystem -- The subsystem generating the message (e.g. ZODB) severity -- The "severity" of the event. This may be an integer or a floating point number. Logging back ends may consider the int() of this value to be significant. For example, a backend may consider any severity whos integer value is WARNING to be a warning. summary -- A short summary of the event detail -- A detailed description error -- A three-element tuple consisting of an error type, value, and traceback. If provided, then a summary of the error is added to the detail. reraise -- If provided with a true value, then the error given by error is reraised. """ warnings.warn('Using the zLOG module is deprecated (to be removed in ' 'Zope 2.12. Use the Python logging module instead.', DeprecationWarning, stacklevel=2) log_write(subsystem, severity, summary, detail, error) if reraise and error: raise error[0], error[1], error[2] |
return '\n <n:src>%s</n:src>\n' \ ' <n:dst>%s/document_src</n:dst>\n ' % (url, url) | return '\n <n:link>\n' \ ' <n:src>%s</n:src>\n' \ ' <n:dst>%s/document_src</n:dst>\n' \ ' </n:link>\n ' % (url, url) | def dav__source(self): vself=self.v_self() if hasattr(vself, 'meta_type') and vself.meta_type in \ ('Document', 'DTML Document', 'DTML Method'): url=vself.absolute_url() return '\n <n:src>%s</n:src>\n' \ ' <n:dst>%s/document_src</n:dst>\n ' % (url, url) return '' |
i._init(smtpHost=smtp_host, smtpPort=smtp_port) | i._init(smtp_host=smtp_host, smtp_port=smtp_port) | def add(self, id, title='', smtp_host=None, smtp_port=25, REQUEST=None): ' add a MailHost into the system ' i=MailHost() #create new mail host i.id=id #give it id i.title=title #title i._init(smtpHost=smtp_host, smtpPort=smtp_port) self._setObject(id,i) #register it if REQUEST: return self.manage_main(self,REQUEST) |
def _init(self, smtpHost, smtpPort): self.smtpHost=smtpHost self.smtpPort=smtpPort def manage_makeChanges(self,title,smtpHost,smtpPort, REQUEST=None): | def _init(self, smtp_host, smtp_port): self.smtp_host=smtp_host self.smtp_port=smtp_port def manage_makeChanges(self,title,smtp_host,smtp_port, REQUEST=None): | def _init(self, smtpHost, smtpPort): self.smtpHost=smtpHost self.smtpPort=smtpPort |
self.smtpHost=smtpHost self.smtpPort=smtpPort | self.smtp_host=smtp_host self.smtp_port=smtp_port | def manage_makeChanges(self,title,smtpHost,smtpPort, REQUEST=None): 'make the changes' self.title=title self.smtpHost=smtpHost self.smtpPort=smtpPort if REQUEST: return MessageDialog( title ='Changed %s' % self.__name__, message='%s has been updated' % self.id, action =REQUEST['URL2']+'/manage_main', target ='manage_main') |
mailserver = SMTP(trueself.smtpHost, trueself.smtpPort) | mailserver = SMTP(trueself.smtp_host, trueself.smtp_port) | def sendTemplate(trueself, self, messageTemplate, statusTemplate=None, mto=None, mfrom=None, encode=None, REQUEST=None): 'render a mail template, then send it...' mtemplate = getattr(self, messageTemplate) messageText = mtemplate(self, trueself.REQUEST) messageText=_encode(messageText, encode) headers = extractheaders(messageText) if mto: headers['to'] = mto if mfrom: headers['from'] = mfrom for requiredHeader in ('to', 'from'): if not headers.has_key(requiredHeader): raise MailHostError,"Message missing SMTP Header '%s'"\ % requiredHeader mailserver = SMTP(trueself.smtpHost, trueself.smtpPort) mailserver.sendmail(headers['from'], headers['to'], messageText) |
smtpserver = SMTP(self.smtpHost, self.smtpPort) | smtpserver = SMTP(self.smtp_host, self.smtp_port) | def send(self, messageText, mto=None, mfrom=None, subject=None, encode=None): headers = extractheaders(messageText) if not headers['subject']: messageText="subject: %s\n%s" % (subject or '[No Subject]', messageText) if mto: if type(mto) is type('s'): mto=map(string.strip, string.split(mto,',')) headers['to'] = filter(truth, mto) if mfrom: headers['from'] = mfrom for requiredHeader in ('to', 'from'): if not headers.has_key(requiredHeader): raise MailHostError,"Message missing SMTP Header '%s'"\ % requiredHeader messageText=_encode(messageText, encode) smtpserver = SMTP(self.smtpHost, self.smtpPort) smtpserver.sendmail(headers['from'],headers['to'], messageText) |
smtpserver = SMTP(self.smtpHost, self.smtpPort) | smtpserver = SMTP(self.smtp_host, self.smtp_port) | def scheduledSend(self, messageText, mto=None, mfrom=None, subject=None, encode=None): headers = extractheaders(messageText) |
mailserver = SMTP(self.smtphost, self.smtpport) | mailserver = SMTP(self.smtp_host, self.smtp_port) | def simple_send(self, mto, mfrom, subject, body): body="from: %s\nto: %s\nsubject: %s\n\n%s" % ( mfrom, mto, subject, body) mailserver = SMTP(self.smtphost, self.smtpport) mailserver.sendmail(mfrom, mto, body) |
verify = getpass.getpass("Vefify password: ") | verify = getpass.getpass("Verify password: ") | def main(argv): short_options = ':u:p:e:d:' long_options = ['username=', 'password=', 'encoding=', 'domains='] usage = """%s [options] filename |
''.string.join(combined).rstrip() | ''.join(combined).rstrip() | def fancy_replace(a, alo, ahi, b, blo, bhi): if TRACE: print '*** fancy_replace', alo, ahi, blo, bhi dump('>', a, alo, ahi) dump('<', b, blo, bhi) # don't synch up unless the lines have a similarity score of at # least cutoff; best_ratio tracks the best score seen so far best_ratio, cutoff = 0.74, 0.75 cruncher = SequenceMatcher(IS_CHARACTER_JUNK) eqi, eqj = None, None # 1st indices of equal lines (if any) # search for the pair that matches best without being identical # (identical lines must be junk lines, & we don't want to synch up # on junk -- unless we have to) for j in xrange(blo, bhi): bj = b[j] cruncher.set_seq2(bj) for i in xrange(alo, ahi): ai = a[i] if ai == bj: if eqi is None: eqi, eqj = i, j continue cruncher.set_seq1(ai) # computing similarity is expensive, so use the quick # upper bounds first -- have seen this speed up messy # compares by a factor of 3. # note that ratio() is only expensive to compute the first # time it's called on a sequence pair; the expensive part # of the computation is cached by cruncher if cruncher.real_quick_ratio() > best_ratio and \ cruncher.quick_ratio() > best_ratio and \ cruncher.ratio() > best_ratio: best_ratio, best_i, best_j = cruncher.ratio(), i, j if best_ratio < cutoff: # no non-identical "pretty close" pair if eqi is None: # no identical pair either -- treat it as a straight replace plain_replace(a, alo, ahi, b, blo, bhi) return # no close pair, but an identical pair -- synch up on that best_i, best_j, best_ratio = eqi, eqj, 1.0 else: # there's a close pair, so forget the identical pair (if any) eqi = None # a[best_i] very similar to b[best_j]; eqi is None iff they're not # identical if TRACE: print '*** best_ratio', best_ratio, best_i, best_j dump('>', a, best_i, best_i+1) dump('<', b, best_j, best_j+1) # pump out diffs from before the synch point fancy_helper(a, alo, best_i, b, blo, best_j) # do intraline marking on the synch pair aelt, belt = a[best_i], b[best_j] if eqi is None: # pump out a '-', '+', '?' triple for the synched lines; atags = btags = "" cruncher.set_seqs(aelt, belt) for tag, ai1, ai2, bj1, bj2 in cruncher.get_opcodes(): la, lb = ai2 - ai1, bj2 - bj1 if tag == 'replace': atags = atags + '.' * la btags = btags + '.' * lb elif tag == 'delete': atags = atags + '.' * la elif tag == 'insert': btags = btags + '.' * lb elif tag == 'equal': atags = atags + ' ' * la btags = btags + ' ' * lb else: raise ValueError, 'unknown tag ' + `tag` la, lb = len(atags), len(btags) if la < lb: atags = atags + ' ' * (lb - la) elif lb < la: btags = btags + ' ' * (la - lb) combined = map(lambda x,y: _combine[x+y], atags, btags) print '-', aelt, '+', belt, '?', \ ''.string.join(combined).rstrip() else: # the synch pair is identical print ' ', aelt, # pump out diffs from after the synch point fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi) |
{'id':'getlastmodified', 'mode':'r'}, | def v_self(self): return self.aq_parent.aq_parent |
|
vself=self.v_self() if hasattr(vself, '_p_mtime'): return rfc1123_date(vself._p_mtime) return '' | return rfc1123_date(self.v_self()._p_mtime) | def dav__getlastmodified(self): vself=self.v_self() if hasattr(vself, '_p_mtime'): return rfc1123_date(vself._p_mtime) return '' |
print "<%s>" % tag | def finish_starttag(self, tag, attrs): self.scan_xmlns(attrs) if tag in EMPTY_HTML_TAGS: print "<%s>" % tag self.pop_xmlns() elif tag in CLOSING_BLOCK_LEVEL_HTML_TAGS: close_to = -1 for i in range(len(self.tagstack)): t = self.tagstack[i] if t in CLOSING_BLOCK_LEVEL_HTML_TAGS: close_to = i elif t in BLOCK_LEVEL_HTML_TAGS: close_to = -1 self._close_to_level(close_to) self.tagstack.append(tag) elif tag in PARA_LEVEL_HTML_TAGS + BLOCK_LEVEL_HTML_TAGS: close_to = -1 for i in range(len(self.tagstack)): if self.tagstack[i] in BLOCK_LEVEL_HTML_TAGS: close_to = -1 elif self.tagstack[i] in PARA_LEVEL_HTML_TAGS: if close_to == -1: close_to = i self.tagstack.append(tag) self._close_to_level(close_to) else: self.tagstack.append(tag) self.gen.emitStartTag(tag, attrs) |
|
self._close_to_level(close_to) | def finish_starttag(self, tag, attrs): self.scan_xmlns(attrs) if tag in EMPTY_HTML_TAGS: print "<%s>" % tag self.pop_xmlns() elif tag in CLOSING_BLOCK_LEVEL_HTML_TAGS: close_to = -1 for i in range(len(self.tagstack)): t = self.tagstack[i] if t in CLOSING_BLOCK_LEVEL_HTML_TAGS: close_to = i elif t in BLOCK_LEVEL_HTML_TAGS: close_to = -1 self._close_to_level(close_to) self.tagstack.append(tag) elif tag in PARA_LEVEL_HTML_TAGS + BLOCK_LEVEL_HTML_TAGS: close_to = -1 for i in range(len(self.tagstack)): if self.tagstack[i] in BLOCK_LEVEL_HTML_TAGS: close_to = -1 elif self.tagstack[i] in PARA_LEVEL_HTML_TAGS: if close_to == -1: close_to = i self.tagstack.append(tag) self._close_to_level(close_to) else: self.tagstack.append(tag) self.gen.emitStartTag(tag, attrs) |
|
self.finish_endtag(t) | self.finish_endtag(t, implied=1) | def _close_to_level(self, close_to): if close_to > -1: closing = self.tagstack[close_to:] closing.reverse() for t in closing: self.finish_endtag(t) |
def finish_endtag(self, tag): if tag not in EMPTY_HTML_TAGS: assert tag in self.tagstack while self.tagstack[-1] != tag: self.finish_endtag(self.tagstack[-1]) self.tagstack.pop() self.pop_xmlns() self.gen.emitEndTag(tag) | def finish_endtag(self, tag, implied=0): if tag in EMPTY_HTML_TAGS: return assert tag in self.tagstack while self.tagstack[-1] != tag: self.finish_endtag(self.tagstack[-1], implied=1) self.tagstack.pop() self.pop_xmlns() if implied \ and tag in TIGHTEN_IMPLICIT_CLOSE_TAGS \ and self.gen.program \ and self.gen.program[-1][0] == "rawtext": data = self.gen.program.pop()[1] prefix = string.rstrip(data) white = data[len(prefix):] if data: self.gen.emitRawText(prefix) self.gen.emitEndTag(tag) if white: self.gen.emitRawText(white) else: self.gen.emitEndTag(tag) | def finish_endtag(self, tag): if tag not in EMPTY_HTML_TAGS: assert tag in self.tagstack while self.tagstack[-1] != tag: self.finish_endtag(self.tagstack[-1]) self.tagstack.pop() self.pop_xmlns() self.gen.emitEndTag(tag) |
try: error_type=error_type.__name__ except: pass | if hasattr(error_type, '__name__'): error_type=error_type.__name__ | def raise_standardErrorMessage( self, client=None, REQUEST={}, error_type=None, error_value=None, tb=None, error_tb=None, error_message='', tagSearch=regex.compile('[a-zA-Z]>').search): |
def test(*args, **kw): | def debug(*args, **kw): | def test(*args, **kw): return apply(ZPublisher.test,('Zope',)+args, kw) |
self.setCookie(name,value) def expireCookie(self, name): | cookies=self.cookies if cookies.has_key(name): cookie=cookies[name] else: cookie=cookies[name]={} if cookie.has_key('value'): cookie['value']='%s:%s' % (cookie['value'], value) else: cookie['value']=value def expireCookie(self, name, **kw): | def appendCookie(self, name, value): |
that has already passed. ''' self.setCookie(name,'deleted', max_age=0) | that has already passed. Note that some clients require a path to be specified - this path must exactly match the path given when creating the cookie. The path can be specified as a keyword argument. ''' dict={'max_age':0, 'expires':'Wed, 31-Dec-97 23:59:59 GMT'} for k, v in kw.items(): dict[k]=v apply(Response.setCookie, (self, name, 'deleted'), dict) | def expireCookie(self, name): |
cookie='set-cookie: %s="%s"' % (name,attrs['value']) | cookie='Set-Cookie: %s="%s"' % (name, attrs['value']) | def _cookie_list(self): |
if name=='expires': cookie = '%s; Expires="%s"' % (cookie,v) elif name=='domain': cookie = '%s; Domain="%s"' % (cookie,v) | name=lower(name) if name=='expires': cookie = '%s; Expires=%s' % (cookie,v) elif name=='domain': cookie = '%s; Domain=%s' % (cookie,v) | def _cookie_list(self): |
elif name=='max_age': cookie = '%s; Max-Age="%s"' % (cookie,v) elif name=='comment': cookie = '%s; Comment="%s"' % (cookie,v) | elif name=='max_age': cookie = '%s; Max-Age=%s' % (cookie,v) elif name=='comment': cookie = '%s; Comment=%s' % (cookie,v) | def _cookie_list(self): |
_DQUOTEDTEXT = r'("[ a-zA-Z0-9\n\-\.\,\;\(\)\/\:\/]+")' | _DQUOTEDTEXT = r'("[ a-zA-Z0-9\n\-\.\,\;\(\)\/\:\/\*\']+")' | def doc_strong(self, s, expr = re.compile(r'\s*\*\*([ \na-zA-Z0-9.:/;\-,!\?\'\"]+)\*\*').search ): |
if n != '__call__': | if str(n) != '__call__': | def render(ob): """ Calls the object, possibly a document template, or just returns it if not callable. (From DT_Util.py) """ base = aq_base(ob) if callable(base): try: if getattr(base, 'isDocTemp', 0): ob = ob(ob, ob.REQUEST) else: ob = ob() except AttributeError, n: if n != '__call__': raise return ob |
$Id: Publish.py,v 1.21 1996/10/15 15:45:35 jim Exp $""" | $Id: Publish.py,v 1.22 1996/10/25 19:34:27 jim Exp $""" | def taste(spam): "a favorable reviewer" return spam,'yum yum, I like ' + spam |
__version__='$Revision: 1.21 $'[11:-2] | __version__='$Revision: 1.22 $'[11:-2] | def taste(spam): "a favorable reviewer" return spam,'yum yum, I like ' + spam |
output('<TD%s%s VALIGN="TOP" ALLIGN="LEFT">' % | output('<TD%s%s VALIGN="TOP" ALIGN="LEFT">' % | def tpRenderTABLE(self, id, root_url, url, state, substate, diff, data, colspan, section, md, treeData, level=0, args=None, simple_type={type(''):0, type(1):0, type(1.0):0}.has_key, ): "Render a tree as a table" have_arg=args.has_key exp=0 if level >= 0: urlattr=args['url'] if urlattr and hasattr(self, urlattr): tpUrl=getattr(self, urlattr) if not simple_type(type(tpUrl)): tpUrl=tpUrl() url = (url and ('%s/%s' % (url, tpUrl))) or tpUrl root_url = root_url or tpUrl treeData['tree-item-url']=url treeData['tree-level']=level treeData['tree-item-expanded']=0 idattr=args['id'] output=data.append items=None if (have_arg('assume_children') and args['assume_children'] and substate is not state): # We should not compute children unless we have to. # See if we've been asked to expand our children. for i in range(len(substate)): sub=substate[i] if sub[0]==id: exp=i+1 break if not exp: items=1 if items is None: validate=md.validate if have_arg('branches') and hasattr(self, args['branches']): if validate is None or not hasattr(self, 'aq_acquire'): items=getattr(self, args['branches']) else: items=self.aq_acquire(args['branches'],validate,md) items=items() elif have_arg('branches_expr'): items=args['branches_expr'](md) if not items and have_arg('leaves'): items=1 if items and items != 1: if validate is not None: unauth=[] index=0 for i in items: try: v=validate(items,items,index,i,md) except: v=0 if not v: unauth.append(index) index=index+1 if unauth: if have_arg('skip_unauthorized') and args['skip_unauthorized']: items=list(items) unauth.reverse() for i in unauth: del items[i] else: raise ValidationError, unauth if have_arg('sort'): # Faster/less mem in-place sort if type(items)==type(()): items=list(items) sort=args['sort'] size=range(len(items)) for i in size: v=items[i] k=getattr(v,sort) try: k=k() except: pass items[i]=(k,v) items.sort() for i in size: items[i]=items[i][1] diff.append(id) sub=None if substate is state: output('<TABLE CELLSPACING="0">\n') sub=substate[0] exp=items else: # Add prefix output('<TR>\n') # Add +/- icon if items: if level: if level > 3: output( '<TD COLSPAN="%s"></TD>' % (level-1)) elif level > 1: output('<TD></TD>' * (level-1)) output('<TD WIDTH="16"></TD>\n') output('<TD WIDTH="16" VALIGN="TOP">') for i in range(len(substate)): sub=substate[i] if sub[0]==id: exp=i+1 break #################################### # Mostly inline encode_seq for speed s=compress(str(diff)) if len(s) > 57: s=encode_str(s) else: s=b2a_base64(s)[:-1] l=find(s,'=') if l >= 0: s=s[:l] s=translate(s, tplus) #################################### script=md['SCRIPT_NAME'] if exp: treeData['tree-item-expanded']=1 output('<A NAME="%s">' '<A HREF="%s?tree-c=%s#%s">' '<IMG SRC="%s/p_/mi" BORDER=0></A>' % (id, root_url, s, id, script)) else: output('<A NAME="%s">' '<A HREF="%s?tree-e=%s#%s">' '<IMG SRC="%s/p_/pl" BORDER=0></A>' % (id, root_url, s, id, script)) output('</TD>\n') else: if level > 2: output('<TD COLSPAN="%s"></TD>' % level) elif level > 0: output('<TD></TD>' * level) output('<TD WIDTH="16"></TD>\n') # add item text dataspan=colspan-level output('<TD%s%s VALIGN="TOP" ALLIGN="LEFT">' % ((dataspan > 1 and (' COLSPAN="%s"' % dataspan) or ''), (have_arg('nowrap') and args['nowrap'] and ' NOWRAP' or '')) ) output(render_blocks(section, md)) output('</TD>\n</TR>\n') if exp: level=level+1 dataspan=colspan-level if level > 3: h='<TD COLSPAN="%s"></TD>' % (level-1) elif level > 1: h='<TD></TD>' * (level-1) 
else: h='' if have_arg('header'): doc=args['header'] if md.has_key(doc): doc=md.getitem(doc,0) else: doc=None if doc is not None: output(doc( None, md, standard_html_header=( '<TR>%s<TD WIDTH="16"></TD>' '<TD%s VALIGN="TOP">' % (h, (dataspan > 1 and (' COLSPAN="%s"' % dataspan) or ''))), standard_html_footer='</TD></TR>', )) if items==1: # leaves if have_arg('leaves'): doc=args['leaves'] if md.has_key(doc): doc=md.getitem(doc,0) else: doc=None if doc is not None: treeData['-tree-substate-']=sub treeData['tree-level']=level md._push(treeData) try: output(doc( None,md, standard_html_header=( '<TR>%s<TD WIDTH="16"></TD>' '<TD%s VALIGN="TOP">' % (h, (dataspan > 1 and (' COLSPAN="%s"' % dataspan) or ''))), standard_html_footer='</TD></TR>', )) finally: md._pop(1) elif have_arg('expand'): doc=args['expand'] if md.has_key(doc): doc=md.getitem(doc,0) else: doc=None if doc is not None: treeData['-tree-substate-']=sub treeData['tree-level']=level md._push(treeData) try: output(doc( None,md, standard_html_header=( '<TR>%s<TD WIDTH="16"></TD>' '<TD%s VALIGN="TOP">' % (h, (dataspan > 1 and (' COLSPAN="%s"' % dataspan) or ''))), standard_html_footer='</TD></TR>', )) finally: md._pop(1) else: __traceback_info__=sub, args, state, substate ids={} for item in items: if hasattr(item, idattr): id=getattr(item, idattr) if not simple_type(type(id)): id=id() elif hasattr(item, '_p_oid'): id=item._p_oid else: id=pyid(item) if len(sub)==1: sub.append([]) substate=sub[1] ids[id]=1 md._push(InstanceDict(item,md)) try: data=tpRenderTABLE( item,id,root_url,url,state,substate,diff,data, colspan, section, md, treeData, level, args) finally: md._pop() if not sub[1]: del sub[1] ids=ids.has_key for i in range(len(substate)-1,-1): if not ids(substate[i][0]): del substate[i] if have_arg('footer'): doc=args['footer'] if md.has_key(doc): doc=md.getitem(doc,0) else: doc=None if doc is not None: output(doc( None, md, standard_html_header=( '<TR>%s<TD WIDTH="16"></TD>' '<TD%s VALIGN="TOP">' % (h, (dataspan > 1 and (' COLSPAN="%s"' % dataspan) or ''))), standard_html_footer='</TD></TR>', )) del diff[-1] if not diff: output('</TABLE>\n') return data |
def analyze(files, top, sortf, start=None, end=None, mode='cumulative', resolution=60): beginrequests = {} cumulative = {} | def get_requests(files, start=None, end=None, statsfname=None, writestats=None, readstats=None): | def get_earliest_file_data(files): temp = {} earliest_fromepoch = 0 earliest = None retn = None for file in files: line = file.readline() if not line: continue linelen = len(line) line = string.strip(line) tup = parsebigmlogline(line) if tup is None: print "Could not interpret line: %s" % line continue code, id, timestr, desc = tup timestr = string.strip(timestr) fromepoch = getdate(timestr) temp[file] = linelen if earliest_fromepoch == 0 or fromepoch < earliest_fromepoch: earliest_fromepoch = fromepoch earliest = file retn = [code, id, fromepoch, desc] for file, linelen in temp.items(): if file is not earliest: file.seek(file.tell() - linelen) return retn |
decidelines = {} computed_start = None computed_end = None while 1: tup = get_earliest_file_data(files) if tup is None: break code, id, fromepoch, desc = tup if computed_start is None: computed_start = fromepoch computed_end = fromepoch if start is not None and fromepoch < start: continue if end is not None and fromepoch > end: break if code == 'U': finished.extend(unfinished.values()) unfinished.clear() request = StartupRequest() request.url = desc request.start = int(fromepoch) finished.append(request) continue request = unfinished.get(id) if request is None: if code != "B": continue request = Request() for pending_req in unfinished.values(): pending_req.active = pending_req.active + 1 unfinished[id] = request t = int(fromepoch) try: request.put(code, t, desc) except: print "Unable to handle entry: %s %s %s"%(code, t, desc) if request.isfinished(): del unfinished[id] finished.append(request) finished.extend(unfinished.values()) requests = finished | if readstats: fp = open(statsfname, 'r') u = cPickle.Unpickler(fp) requests = u.load() fp.close() del u del fp else: while 1: tup = get_earliest_file_data(files) if tup is None: break code, id, fromepoch, desc = tup if start is not None and fromepoch < start: continue if end is not None and fromepoch > end: break if code == 'U': finished.extend(unfinished.values()) unfinished.clear() request = StartupRequest() request.url = desc request.start = int(fromepoch) finished.append(request) continue request = unfinished.get(id) if request is None: if code != "B": continue request = Request() for pending_req in unfinished.values(): pending_req.active = pending_req.active + 1 unfinished[id] = request t = int(fromepoch) try: request.put(code, t, desc) except: print "Unable to handle entry: %s %s %s"%(code, t, desc) if request.isfinished(): del unfinished[id] finished.append(request) finished.extend(unfinished.values()) requests = finished if writestats: fp = open(statsfname, 'w') p = cPickle.Pickler(fp) p.dump(requests) fp.close() del p del fp return requests def analyze(requests, top, sortf, start=None, end=None, mode='cumulative', resolution=60, urlfocusurl=None, urlfocustime=60): | def analyze(files, top, sortf, start=None, end=None, mode='cumulative', resolution=60): beginrequests = {} cumulative = {} finished = [] unfinished = {} decidelines = {} # filename to filepos computed_start = None computed_end = None while 1: tup = get_earliest_file_data(files) if tup is None: break code, id, fromepoch, desc = tup if computed_start is None: computed_start = fromepoch computed_end = fromepoch if start is not None and fromepoch < start: continue if end is not None and fromepoch > end: break if code == 'U': finished.extend(unfinished.values()) unfinished.clear() request = StartupRequest() request.url = desc request.start = int(fromepoch) finished.append(request) continue request = unfinished.get(id) if request is None: if code != "B": continue # garbage at beginning of file request = Request() for pending_req in unfinished.values(): pending_req.active = pending_req.active + 1 unfinished[id] = request t = int(fromepoch) try: request.put(code, t, desc) except: print "Unable to handle entry: %s %s %s"%(code, t, desc) if request.isfinished(): del unfinished[id] finished.append(request) finished.extend(unfinished.values()) requests = finished if mode == 'cumulative': for request in requests: url = request.url stats = cumulative.get(url) if stats is None: stats = Cumulative(url) cumulative[url] = stats stats.put(request) cumulative = cumulative.values() if mode == 
'cumulative': dict = cumulative elif mode == 'detailed': dict = requests elif mode == 'timed': dict = requests else: raise "Invalid mode." if mode=='timed': if start and end: timewrite(requests,start,end,resolution) if start and not end: timewrite(requests,start,computed_end,resolution) if end and not start: timewrite(requests,computed_start,end,resolution) if not end and not start: timewrite(requests,computed_start,computed_end,resolution) else: dict.sort(sortf) write(dict, top) |
cumulative = cumulative.values() if mode == 'cumulative': dict = cumulative elif mode == 'detailed': dict = requests elif mode == 'timed': dict = requests else: raise "Invalid mode." if mode=='timed': | requests = cumulative.values() requests.sort(sortf) write(requests, top) elif mode=='timed': computed_start = requests[0].start computed_end = requests[-1].t_end | def analyze(files, top, sortf, start=None, end=None, mode='cumulative', resolution=60): beginrequests = {} cumulative = {} finished = [] unfinished = {} decidelines = {} # filename to filepos computed_start = None computed_end = None while 1: tup = get_earliest_file_data(files) if tup is None: break code, id, fromepoch, desc = tup if computed_start is None: computed_start = fromepoch computed_end = fromepoch if start is not None and fromepoch < start: continue if end is not None and fromepoch > end: break if code == 'U': finished.extend(unfinished.values()) unfinished.clear() request = StartupRequest() request.url = desc request.start = int(fromepoch) finished.append(request) continue request = unfinished.get(id) if request is None: if code != "B": continue # garbage at beginning of file request = Request() for pending_req in unfinished.values(): pending_req.active = pending_req.active + 1 unfinished[id] = request t = int(fromepoch) try: request.put(code, t, desc) except: print "Unable to handle entry: %s %s %s"%(code, t, desc) if request.isfinished(): del unfinished[id] finished.append(request) finished.extend(unfinished.values()) requests = finished if mode == 'cumulative': for request in requests: url = request.url stats = cumulative.get(url) if stats is None: stats = Cumulative(url) cumulative[url] = stats stats.put(request) cumulative = cumulative.values() if mode == 'cumulative': dict = cumulative elif mode == 'detailed': dict = requests elif mode == 'timed': dict = requests else: raise "Invalid mode." if mode=='timed': if start and end: timewrite(requests,start,end,resolution) if start and not end: timewrite(requests,start,computed_end,resolution) if end and not start: timewrite(requests,computed_start,end,resolution) if not end and not start: timewrite(requests,computed_start,computed_end,resolution) else: dict.sort(sortf) write(dict, top) |
dict.sort(sortf) write(dict, top) | requests.sort(sortf) write(requests, top) def urlfocuswrite(requests, url, t): l = [] i = 0 for request in requests: if request.url == url: l.append(i) i = i + 1 before = {} after = {} x = 0 for n in l: x = x + 1 r = requests[n] start = r.start earliest = start - t latest = start + t print 'URLs invoked %s seconds before and after %s ( (t, url, x, r.shortprettystart()) print '---' i = -1 for request in requests: i = i + 1 if request.start < earliest: continue if request.start > latest: break if n == i: print '%3d' % (request.start - start), print '%s' % (request.shortprettystart()), print request.url continue if request.start <= start: if before.get(i): before[i] = before[i] + 1 else: before[i] = 1 if request.start > start: if after.get(i): after[i] = after[i] + 1 else: after[i] = 1 print '%3d' % (request.start - start), print '%s' % (request.shortprettystart()), print request.url print print ('Summary of URLs invoked before (and at the same time as) %s ' '(times, url)' % url) before = before.items() before.sort() for k,v in before: print v, requests[k].url print print 'Summary of URLs invoked after %s (times, url)' % url after = after.items() after.sort() for k,v in after: print v, requests[k].url | def analyze(files, top, sortf, start=None, end=None, mode='cumulative', resolution=60): beginrequests = {} cumulative = {} finished = [] unfinished = {} decidelines = {} # filename to filepos computed_start = None computed_end = None while 1: tup = get_earliest_file_data(files) if tup is None: break code, id, fromepoch, desc = tup if computed_start is None: computed_start = fromepoch computed_end = fromepoch if start is not None and fromepoch < start: continue if end is not None and fromepoch > end: break if code == 'U': finished.extend(unfinished.values()) unfinished.clear() request = StartupRequest() request.url = desc request.start = int(fromepoch) finished.append(request) continue request = unfinished.get(id) if request is None: if code != "B": continue # garbage at beginning of file request = Request() for pending_req in unfinished.values(): pending_req.active = pending_req.active + 1 unfinished[id] = request t = int(fromepoch) try: request.put(code, t, desc) except: print "Unable to handle entry: %s %s %s"%(code, t, desc) if request.isfinished(): del unfinished[id] finished.append(request) finished.extend(unfinished.values()) requests = finished if mode == 'cumulative': for request in requests: url = request.url stats = cumulative.get(url) if stats is None: stats = Cumulative(url) cumulative[url] = stats stats.put(request) cumulative = cumulative.values() if mode == 'cumulative': dict = cumulative elif mode == 'detailed': dict = requests elif mode == 'timed': dict = requests else: raise "Invalid mode." if mode=='timed': if start and end: timewrite(requests,start,end,resolution) if start and not end: timewrite(requests,start,computed_end,resolution) if end and not start: timewrite(requests,computed_start,end,resolution) if not end and not start: timewrite(requests,computed_start,computed_end,resolution) else: dict.sort(sortf) write(dict, top) |
max_requests = 0 | def timewrite(requests, start, end, resolution): max_requests = 0 print "Start: %s End: %s Resolution: %d secs" % \ (tick2str(start), tick2str(end), resolution) print "-" * 78 print print "Date/Time #requests requests/second" d = {} for r in requests: t = r.start slice = getTimeslice(resolution,t) if d.has_key(slice): d[slice] = d[slice] + 1 else: d[slice] = 1 num = 0 hits = 0 avg_requests = None slices = d.keys() slices.sort() for slice in slices: num = d[slice] if num>max_requests: max_requests = num hits = hits + num if avg_requests is None: avg_requests = num else: avg_requests = (avg_requests + num) / 2 s = tick2str(slice) s = s + " %6d %4.2lf" % (num,num*1.0/resolution) print s print '='*78 print " Peak: %6d %4.2lf" % \ (max_requests,max_requests*1.0/resolution) print " Avg: %6d %4.2lf" % \ (avg_requests,avg_requests*1.0/resolution) print "Total: %6d n/a " % (hits) |
|
slices = d.keys() slices.sort() for slice in slices: num = d[slice] | max_requests = 0 for slice in range(min, max, resolution): num = d.get(slice, 0) | def timewrite(requests, start, end, resolution): max_requests = 0 print "Start: %s End: %s Resolution: %d secs" % \ (tick2str(start), tick2str(end), resolution) print "-" * 78 print print "Date/Time #requests requests/second" d = {} for r in requests: t = r.start slice = getTimeslice(resolution,t) if d.has_key(slice): d[slice] = d[slice] + 1 else: d[slice] = 1 num = 0 hits = 0 avg_requests = None slices = d.keys() slices.sort() for slice in slices: num = d[slice] if num>max_requests: max_requests = num hits = hits + num if avg_requests is None: avg_requests = num else: avg_requests = (avg_requests + num) / 2 s = tick2str(slice) s = s + " %6d %4.2lf" % (num,num*1.0/resolution) print s print '='*78 print " Peak: %6d %4.2lf" % \ (max_requests,max_requests*1.0/resolution) print " Avg: %6d %4.2lf" % \ (avg_requests,avg_requests*1.0/resolution) print "Total: %6d n/a " % (hits) |
Reports are of three types: cumulative,detailed or timed. The default is cumulative. Data is taken from the one or more Zope detailed request logs (-M logs). | Reports are of four types: cumulative, detailed, timed, or urlfocus. The default is cumulative. Data is taken from one or more Zope detailed request logs (-M logs, aka 'big M' logs) or from a preprocessed statistics file. | def detailedusage(): details = usage(0) pname = sys.argv[0] details = details + """ |
For timed reports there are no sort specs allowed. | For timed and urlfocus reports, there are no sort specs allowed. | def detailedusage(): details = usage(0) pname = sys.argv[0] details = details + """ |
elapsed time.""" % {'pname':pname} | elapsed time. %(pname)s debug.log debug2.log --writestats='requests.stat' Write stats file for debug.log and debug2.log into 'requests.stat' and show default report. %(pname)s --readstats='requests.stat' --detailed Read from 'requests.stat' stats file (instead of actual -M log files) and show detailed report against this data.""" % {'pname':pname} | def detailedusage(): details = usage(0) pname = sys.argv[0] details = details + """ |
Usage: %s filename1 [filename2 ...] [--cumulative|--detailed|--timed] | Usage: %s filename1 [filename2 ...] [--cumulative | --detailed | [--timed --resolution=seconds]] | def usage(basic=1): usage = ( """ |
[--top==n] | [--top=n] | def usage(basic=1): usage = ( """ |
[--resolution=seconds] | [--writestats=filename | --readstats=filename] [--urlfocus=url] [--urlfocustime=seconds] | def usage(basic=1): usage = ( """ |
resolution=10 | resolution=60 urlfocustime=10 urlfocusurl=None statsfname = None readstats = 0 writestats = 0 | def usage(basic=1): usage = ( """ |
'end=','resolution='] | 'end=','resolution=', 'writestats=', 'readstats=','urlfocus=','urlfocustime='] | def usage(basic=1): usage = ( """ |
| if opt=='--urlfocus': mode='urlfocus' urlfocusurl = val if opt=='--urlfocustime': urlfocustime=int(val) | def usage(basic=1): usage = ( """ |
analyze(files, top, sortf, start, end, mode, resolution) | req=get_requests(files, start, end, statsfname, writestats, readstats) analyze(req, top, sortf, start, end, mode, resolution, urlfocusurl, urlfocustime) | def usage(basic=1): usage = ( """ |
setlist=index.items(lo,hi) | if hi: setlist=index.items(lo,hi) else: setlist=index.items(lo) | def _apply_index(self, request, cid=''): |
if self.REQUEST['REQUEST_METHOD']=='PUT': return PUTer(self,key) | if self.REQUEST['REQUEST_METHOD']=='PUT': return PUTer(self,key).__of__(self) | def __getitem__(self, key): # Hm, getattr didn't work, maybe this is a put: if key[:19]=='manage_draftFolder-': id=key[19:] if hasattr(self, id): return getattr(self, id).manage_supervisor() raise KeyError, key try: if self.REQUEST['REQUEST_METHOD']=='PUT': return PUTer(self,key) except: pass raise KeyError, key |
class PUTer: | class PUTer(Acquisition.Explicit): | def manage_importHack(self, REQUEST=None): "Imports a previously exported object from /var/export.bbe" f=Globals.data_dir+'/export.bbe' o=self._p_jar.import_file(f) id=o.id if hasattr(id,'im_func'): id=id() self._setObject(id,o) return 'OK, I imported %s' % id |
if lower(type)=='text/html': | if lower(type)[:5]=='text/': | def PUT(self, REQUEST, BODY): """Adds a document, image or file to the folder when a PUT request is received.""" name=self._key try: type=REQUEST['CONTENT_TYPE'] except KeyError: type='' if not type: dot=rfind(name, '.') suf=dot > 0 and lower(name[dot+1:]) or '' if suf: try: type=content_type[suf] except KeyError: if find_binary(BODY) >= 0: type='application/x-%s' % suf else: type=text_type(BODY) else: if find_binary(BODY) >= 0: raise 'Bad Request', 'Could not determine file type' else: type=text_type(BODY) __traceback_info__=suf, dot, name, type if lower(type)=='text/html': return self._parent.manage_addDocument(name,'',BODY, REQUEST=REQUEST) if lower(type)[:6]=='image/': self._parent._setObject(name, Image(name, '', BODY, type)) else: self._parent._setObject(name, File(name, '', BODY, type)) return 'OK' |
self._parent._setObject(name, Image(name, '', BODY, type)) | self._parent._setObject(name, Image(name, '', BODY, content_type=type)) | def PUT(self, REQUEST, BODY): """Adds a document, image or file to the folder when a PUT request is received.""" name=self._key try: type=REQUEST['CONTENT_TYPE'] except KeyError: type='' if not type: dot=rfind(name, '.') suf=dot > 0 and lower(name[dot+1:]) or '' if suf: try: type=content_type[suf] except KeyError: if find_binary(BODY) >= 0: type='application/x-%s' % suf else: type=text_type(BODY) else: if find_binary(BODY) >= 0: raise 'Bad Request', 'Could not determine file type' else: type=text_type(BODY) __traceback_info__=suf, dot, name, type if lower(type)=='text/html': return self._parent.manage_addDocument(name,'',BODY, REQUEST=REQUEST) if lower(type)[:6]=='image/': self._parent._setObject(name, Image(name, '', BODY, type)) else: self._parent._setObject(name, File(name, '', BODY, type)) return 'OK' |
self._parent._setObject(name, File(name, '', BODY, type)) | self._parent._setObject(name, File(name, '', BODY, content_type=type)) | def PUT(self, REQUEST, BODY): """Adds a document, image or file to the folder when a PUT request is received.""" name=self._key try: type=REQUEST['CONTENT_TYPE'] except KeyError: type='' if not type: dot=rfind(name, '.') suf=dot > 0 and lower(name[dot+1:]) or '' if suf: try: type=content_type[suf] except KeyError: if find_binary(BODY) >= 0: type='application/x-%s' % suf else: type=text_type(BODY) else: if find_binary(BODY) >= 0: raise 'Bad Request', 'Could not determine file type' else: type=text_type(BODY) __traceback_info__=suf, dot, name, type if lower(type)=='text/html': return self._parent.manage_addDocument(name,'',BODY, REQUEST=REQUEST) if lower(type)[:6]=='image/': self._parent._setObject(name, Image(name, '', BODY, type)) else: self._parent._setObject(name, File(name, '', BODY, type)) return 'OK' |
if REQUEST.has_key(name): if 'w' in prop.get('mode', 'wd'): value=REQUEST.get(name) self._setPropValue(name, value) | if 'w' in prop.get('mode', 'wd'): value=REQUEST.get(name, '') self._setPropValue(name, value) | def manage_editProperties(self, REQUEST): """Edit object properties via the web.""" for prop in self._properties: name=prop['id'] if REQUEST.has_key(name): if 'w' in prop.get('mode', 'wd'): value=REQUEST.get(name) self._setPropValue(name, value) return MessageDialog( title ='Success!', message='Your changes have been saved', action ='manage_propertiesForm') |
if not sys.modules.has_key('logging'): import logging logging.basicConfig() | def _configure_logging(): if not sys.modules.has_key('logging'): import logging logging.basicConfig() | def _exec(cmd): '''Prints the time it takes to execute 'cmd'.''' if os.environ.get('X', None): start = time.time() exec cmd _print('(%.3fs)' % (time.time() - start)) |
import App.config config = App.config.getConfiguration() config.debug_mode = 0 App.config.setConfiguration(config) | def _configure_debug_mode(): import App.config config = App.config.getConfiguration() config.debug_mode = 0 App.config.setConfiguration(config) | def _exec(cmd): '''Prints the time it takes to execute 'cmd'.''' if os.environ.get('X', None): start = time.time() exec cmd _print('(%.3fs)' % (time.time() - start)) |
| def _configure_client_cache(): import App.config config = App.config.getConfiguration() config.zeo_client_name = None App.config.setConfiguration(config) _configure_logging() _configure_debug_mode() _configure_client_cache() | def _exec(cmd): '''Prints the time it takes to execute 'cmd'.''' if os.environ.get('X', None): start = time.time() exec cmd _print('(%.3fs)' % (time.time() - start)) |
def _null_import_products(): pass OFS.Application.import_products = _null_import_products | def _apply_patches(): def null_import_products(): pass OFS.Application.import_products = null_import_products | def _exec(cmd): '''Prints the time it takes to execute 'cmd'.''' if os.environ.get('X', None): start = time.time() exec cmd _print('(%.3fs)' % (time.time() - start)) |
def _null_initialize(app): pass OFS.Application.initialize = _null_initialize | def null_initialize(app): pass OFS.Application.initialize = null_initialize | def _null_import_products(): pass |
def _null_register_topic(self,id,topic): pass App.ProductContext.ProductContext.registerHelpTopic = _null_register_topic def _null_register_title(self,title): pass App.ProductContext.ProductContext.registerHelpTitle = _null_register_title def _null_register_help(self,directory='',clear=1,title_re=None): pass App.ProductContext.ProductContext.registerHelp = _null_register_help | def null_register_topic(self,id,topic): pass App.ProductContext.ProductContext.registerHelpTopic = null_register_topic def null_register_title(self,title): pass App.ProductContext.ProductContext.registerHelpTitle = null_register_title def null_register_help(self,directory='',clear=1,title_re=None): pass App.ProductContext.ProductContext.registerHelp = null_register_help | def _null_initialize(app): pass |
if os.environ.get('ZEO_CLIENT'): del os.environ['ZEO_CLIENT'] | if not Zope2._began_startup: _apply_patches() | def _null_register_help(self,directory='',clear=1,title_re=None): pass |
app = _theApp | def installProduct(name, quiet=0): '''Installs a Zope product.''' start = time.time() app = _theApp meta_types = [] if not _installedProducts.has_key(name): for priority, product_name, index, product_dir in get_products(): if product_name == name: if not quiet: _print('Installing %s ... ' % product_name) # We want to fail immediately if a product throws an exception # during install, so we set the raise_exc flag. install_product(app, product_dir, product_name, meta_types, get_folder_permissions(), raise_exc=1) _installedProducts[product_name] = 1 Products.meta_types = Products.meta_types + tuple(meta_types) Globals.default__class_init__(Folder) if not quiet: _print('done (%.3fs)\n' % (time.time() - start)) break else: if name != 'SomeProduct': # Ignore the skeleton tests :-P if not quiet: _print('Installing %s ... NOT FOUND\n' % name) |
|
install_product(app, product_dir, product_name, meta_types,
|
install_product(_theApp, product_dir, product_name, meta_types,
|
def installProduct(name, quiet=0):
    '''Installs a Zope product.'''
    start = time.time()
    app = _theApp
    meta_types = []
    if not _installedProducts.has_key(name):
        for priority, product_name, index, product_dir in get_products():
            if product_name == name:
                if not quiet: _print('Installing %s ... ' % product_name)
                # We want to fail immediately if a product throws an exception
                # during install, so we set the raise_exc flag.
                install_product(app, product_dir, product_name, meta_types,
                                get_folder_permissions(), raise_exc=1)
                _installedProducts[product_name] = 1
                Products.meta_types = Products.meta_types + tuple(meta_types)
                Globals.default__class_init__(Folder)
                if not quiet: _print('done (%.3fs)\n' % (time.time() - start))
                break
        else:
            if name != 'SomeProduct':   # Ignore the skeleton tests :-P
                if not quiet: _print('Installing %s ... NOT FOUND\n' % name)
_s = time.time(); _max = (_s - _start) / 4
_exec('_theApp.Control_Panel')
_cp = _theApp.Control_Panel
if hasattr(_cp, 'initialize_cache'):
    _cp.initialize_cache()
if (time.time() - _s) > _max:
    _write('.')
|
def _load_control_panel():
    start = time.time()
    max = (start - _start) / 4
    _exec('_theApp.Control_Panel')
    _theApp.Control_Panel
    if (time.time() - start) > max:
        _write('.')
|
def installProduct(name, quiet=0):
    '''Installs a Zope product.'''
    start = time.time()
    app = _theApp
    meta_types = []
    if not _installedProducts.has_key(name):
        for priority, product_name, index, product_dir in get_products():
            if product_name == name:
                if not quiet: _print('Installing %s ... ' % product_name)
                # We want to fail immediately if a product throws an exception
                # during install, so we set the raise_exc flag.
                install_product(app, product_dir, product_name, meta_types,
                                get_folder_permissions(), raise_exc=1)
                _installedProducts[product_name] = 1
                Products.meta_types = Products.meta_types + tuple(meta_types)
                Globals.default__class_init__(Folder)
                if not quiet: _print('done (%.3fs)\n' % (time.time() - start))
                break
        else:
            if name != 'SomeProduct':   # Ignore the skeleton tests :-P
                if not quiet: _print('Installing %s ... NOT FOUND\n' % name)
installProduct('PluginIndexes', 1)
installProduct('OFSP', 1)
|
def _install_products():
    installProduct('PluginIndexes', 1)
    installProduct('OFSP', 1)

_load_control_panel()
_install_products()
|
def installProduct(name, quiet=0):
    '''Installs a Zope product.'''
    start = time.time()
    app = _theApp
    meta_types = []
    if not _installedProducts.has_key(name):
        for priority, product_name, index, product_dir in get_products():
            if product_name == name:
                if not quiet: _print('Installing %s ... ' % product_name)
                # We want to fail immediately if a product throws an exception
                # during install, so we set the raise_exc flag.
                install_product(app, product_dir, product_name, meta_types,
                                get_folder_permissions(), raise_exc=1)
                _installedProducts[product_name] = 1
                Products.meta_types = Products.meta_types + tuple(meta_types)
                Globals.default__class_init__(Folder)
                if not quiet: _print('done (%.3fs)\n' % (time.time() - start))
                break
        else:
            if name != 'SomeProduct':   # Ignore the skeleton tests :-P
                if not quiet: _print('Installing %s ... NOT FOUND\n' % name)
if env.has_key('HTTP_CGI_AUTHORIZATION'):
    dict['HTTP_AUTHORIZATION']=env['HTTP_CGI_AUTHORIZATION']
    try: del env['HTTP_CGI_AUTHORIZATION']
|
if dict.has_key('HTTP_CGI_AUTHORIZATION'):
    dict['HTTP_AUTHORIZATION']=dict['HTTP_CGI_AUTHORIZATION']
    try: del dict['HTTP_CGI_AUTHORIZATION']
|
def sane_environment(env):
    # return an environment mapping which has been cleaned of
    # funny business such as REDIRECT_ prefixes added by Apache
    # or HTTP_CGI_AUTHORIZATION hacks.
    dict={}
    for key, val in env.items():
        while key[:9]=='REDIRECT_': key=key[9:]
        dict[key]=val
    if env.has_key('HTTP_CGI_AUTHORIZATION'):
        dict['HTTP_AUTHORIZATION']=env['HTTP_CGI_AUTHORIZATION']
        try: del env['HTTP_CGI_AUTHORIZATION']
        except: pass
    return dict
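The change above makes sane_environment operate on its local copy (dict) instead of the mapping it was handed: the authorization key is looked up after the REDIRECT_ prefixes have been stripped, and the del no longer mutates the caller's env. A small self-contained illustration of the same cleanup, written as a hedged sketch rather than the real ZPublisher code:

# Hedged sketch of the cleanup; 'env' is a plain dict, not a live CGI environment.
def sane_copy(env):
    clean = {}
    for key, val in env.items():
        while key[:9] == 'REDIRECT_':          # strip Apache's REDIRECT_ prefixes
            key = key[9:]
        clean[key] = val
    if 'HTTP_CGI_AUTHORIZATION' in clean:      # check the cleaned copy, not env
        clean['HTTP_AUTHORIZATION'] = clean['HTTP_CGI_AUTHORIZATION']
        del clean['HTTP_CGI_AUTHORIZATION']    # delete from the copy only
    return clean

env = {'REDIRECT_HTTP_CGI_AUTHORIZATION': 'Basic dXNlcjpwdw=='}
clean = sane_copy(env)
assert 'HTTP_AUTHORIZATION' in clean           # prefixed variant is now picked up
assert env == {'REDIRECT_HTTP_CGI_AUTHORIZATION': 'Basic dXNlcjpwdw=='}   # caller's mapping untouched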
def testNotValidateWithoutRoles(self):
|
def testValidateWithoutRoles(self):
|
def testNotValidateWithoutRoles(self):
    user = self.uf.validate(self.app.REQUEST, self.basic)
    self.assertEqual(user, None)
self.assertEqual(user, None)
|
self.assertEqual(user.getUserName(), 'user1')
|
def testNotValidateWithoutRoles(self):
    user = self.uf.validate(self.app.REQUEST, self.basic)
    self.assertEqual(user, None)
else:
    result=apply(DB__.query, query)
    r=compress(result)
    cache[query]= now, r
    if compressed: return r
|
    return result
result=apply(DB__.query, query)
r=compress(result)
cache[query]= now, r
if compressed: return r
|
def _cached_result(self, DB__, query, compressed=0):
offset=(-localzone/(60*60))
|
offset=(-localzone/(60*60.0))
|
def _findLocalTimeZoneName(isDST):
    if not daylight:
        # Daylight savings does not occur in this time zone.
        isDST = 0
    try:
        # Get the name of the current time zone depending
        # on DST.
        _localzone = _cache._zmap[tzname[isDST].lower()]
    except:
        try:
            # Generate a GMT-offset zone name.
            if isDST:
                localzone = altzone
            else:
                localzone = timezone
            offset=(-localzone/(60*60))
            majorOffset=int(offset)
            if majorOffset != 0 :
                minorOffset=abs(int((offset % majorOffset) * 60.0))
            else:
                minorOffset = 0
            m=majorOffset >= 0 and '+' or ''
            lz='%s%0.02d%0.02d' % (m, majorOffset, minorOffset)
            _localzone = _cache._zmap[('GMT%s' % lz).lower()]
        except:
            _localzone = ''
    return _localzone
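The one-character fix above (60*60 becomes 60*60.0) matters because the surrounding code ran under Python 2, where / between two integers truncates: for a fractional-hour zone such as UTC+5:30 (time.timezone == -19800) the old line computed offset = 5 instead of 5.5, so the generated name came out GMT+0500 rather than GMT+0530 (and, because Python 2's integer division floors toward negative infinity, west-of-UTC fractional zones were rounded in the wrong direction). A small worked check of the two computations, with the Python 2 truncating division written explicitly as //:

# Worked check of the offset arithmetic; -19800 is time.timezone for UTC+5:30
# (seconds west of UTC, hence negative for zones east of Greenwich).
localzone = -19800

old_offset = -localzone // (60 * 60)      # what the removed line computed under Python 2
new_offset = -localzone / (60 * 60.0)     # what the added line computes

def zone_name(offset):
    major = int(offset)
    minor = abs(int((offset % major) * 60.0)) if major != 0 else 0
    sign = '+' if major >= 0 else ''
    return 'GMT%s%02d%02d' % (sign, major, minor)

print(zone_name(old_offset))              # GMT+0500 -- the half hour is lost
print(zone_name(new_offset))              # GMT+0530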
i=Folder()
i.id=id
i.title=title
self._setObject(id,i)
if createUserF:  i.manage_addUserFolder()
if createPublic: i.manage_addDTMLDocument(id='index_html',title='')
if REQUEST is not None:
    return self.manage_main(self,REQUEST,update_menu=1)
|
ob=Folder()
ob.id=id
ob.title=title
self._setObject(id, ob)
try:    user=REQUEST['AUTHENTICATED_USER']
except: user=None
if createUserF:
    if (user is not None) and not (
        user.has_permission('Add User Folders', self)):
        raise 'Unauthorized', (
              'You are not authorized to add User Folders.'
              )
    ob.manage_addUserFolder()
if createPublic:
    if (user is not None) and not (
        user.has_permission('Add Documents, Images, and Files', self)):
        raise 'Unauthorized', (
              'You are not authorized to add DTML Documents.'
              )
    ob.manage_addDTMLDocument(id='index_html', title='')
if REQUEST is not None:
    return self.manage_workspace(self, REQUEST, update_menu=1)
|
def manage_addFolder(self,id,title='',createPublic=0,createUserF=0,
                     REQUEST=None):
    """Add a new Folder object with id *id*.

    If the 'createPublic' and 'createUserF' parameters are set to any true
    value, an 'index_html' and a 'UserFolder' objects are created
    respectively in the new folder.
    """
    i=Folder()
    i.id=id
    i.title=title
    self._setObject(id,i)
    if createUserF:  i.manage_addUserFolder()
    if createPublic: i.manage_addDTMLDocument(id='index_html',title='')
    if REQUEST is not None:
        return self.manage_main(self,REQUEST,update_menu=1)
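The rewritten body above guards the optional sub-object creation behind permission checks: the authenticated user, if any, must hold 'Add User Folders' or 'Add Documents, Images, and Files' before the corresponding object is created, otherwise an Unauthorized error is raised. A self-contained sketch of that guard pattern; the User and Folder classes here are stand-ins, not the real AccessControl/OFS classes:

# Sketch of the permission guard; not the real Zope security machinery.
class User:
    def __init__(self, permissions):
        self.permissions = permissions
    def has_permission(self, permission, obj):
        return permission in self.permissions

class Folder:
    def __init__(self):
        self.objects = {}
    def add_user_folder(self, user=None):
        if user is not None and not user.has_permission('Add User Folders', self):
            raise Exception('You are not authorized to add User Folders.')
        self.objects['acl_users'] = 'UserFolder'

folder = Folder()
folder.add_user_folder(User(['Add User Folders']))    # allowed
try:
    folder.add_user_folder(User([]))                  # refused
except Exception as e:
    print(e)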
tb=sys.exc_traceback
|
tb=sys.exc_info()[2]
|
def __call__(self, *args, **kw):
    """Call an ExternalMethod
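The switch from sys.exc_traceback to sys.exc_info()[2] replaces a deprecated module-level global (which reflects whichever thread most recently handled an exception) with the per-thread triple returned by sys.exc_info(), so the traceback picked up inside ExternalMethod.__call__ is guaranteed to belong to the exception currently being handled. A minimal sketch of the recommended pattern:

import sys

def describe_current_exception():
    # Must be called from inside an except block; sys.exc_info() is per-thread.
    exc_type, exc_value, tb = sys.exc_info()
    while tb.tb_next is not None:      # walk to the frame that actually raised
        tb = tb.tb_next
    return '%s at line %d' % (exc_type.__name__, tb.tb_lineno)

try:
    1 / 0
except ZeroDivisionError:
    print(describe_current_exception())   # prints e.g. 'ZeroDivisionError at line ...'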
script=REQUEST.script
if string.find(path, script) != 0:
    path='%s/%s' % (script, path)
|
while path and path[0]=='/':  path=path[1:]
while path and path[-1]=='/': path=path[:-1]
req=REQUEST.clone()
rsp=req.response
req['PATH_INFO']=path
object=None
try: object=req.traverse(path)
except: pass
if object is not None:
    if hasattr(object, 'id'):
        if callable(object.id):
            name=object.id()
        else:
            name=object.id
    elif hasattr(object, '__name__'):
        name=object.__name__
    else:
        name=''
    if name != os.path.split(path)[-1]:
        result = req.PARENTS[0]
        req.close()
        return result
    req.close()
    return object
req.close()
raise rsp.errmsg, sys.exc_value
|
def resolve_url(self, path, REQUEST):
    """ Attempt to resolve a url into an object in the Zope
    namespace. The url may be absolute or a catalog path style url.
    If no object is found, None is returned.
    No exceptions are raised.
    """
    script=REQUEST.script
    if string.find(path, script) != 0:
        path='%s/%s' % (script, path)
    print "resolving", path
    try:
        return REQUEST.resolve_url(path)
    except:
        print "not found"
        return None
print "resolving", path try: return REQUEST.resolve_url(path) except: print "not found" return None | def resolve_url(self, path, REQUEST): """ Attempt to resolve a url into an object in the Zope namespace. The url may be absolute or a catalog path style url. If no object is found, None is returned. No exceptions are raised. """ script=REQUEST.script if string.find(path, script) != 0: path='%s/%s' % (script, path) print "resolving", path try: return REQUEST.resolve_url(path) except: print "not found" return None |