rem
stringlengths
0
322k
add
stringlengths
0
2.05M
context
stringlengths
8
228k
env[e[0]] = e[1]
env[e[0]]=''
def handler(self, conn): from string import split, join, atoi hdr = conn.recv(10)
try: try: doc=subobject.__doc__ except: doc=getattr(object, entry_name+'__doc__') if not doc: raise AttributeError, entry_name except: if debug_mode: return response.debugError( "Missing doc string at: %s" % URL) else: return response.notFoundError("%s" % URL)
doc = getattr(subobject, '__doc__', None) if doc is None: doc = getattr(object, '%s__doc__' % entry_name, None) if not doc: return response.debugError( "The object at %s has an empty or missing " \ "docstring. Objects must have a docstring to be " \ "published." % URL )
def traverse(self, path, response=None, validated_hook=None): """Traverse the object space
del cache[q]
if int(cache[q][0]) == key: del cache[q]
def _cached_result(self, DB__, query):
module=__import__(module_name)
module=__import__(module_name, globals(), globals(), ('__doc__',))
def get_module_info(module_name, modules={}, acquire=_l.acquire, release=_l.release, ): if modules.has_key(module_name): return modules[module_name] if module_name[-4:]=='.cgi': module_name=module_name[:-4] acquire() tb=None try: try: module=__import__(module_name) realm=module_name # Let the app specify a realm if hasattr(module,'__bobo_realm__'): realm=module.__bobo_realm__ else: realm=module_name # Check whether tracebacks should be hidden: if (hasattr(module,'__bobo_hide_tracebacks__') and not module.__bobo_hide_tracebacks__): CGIResponse._tbopen, CGIResponse._tbclose = '<PRE>', '</PRE>' if hasattr(module,'__bobo_before__'): bobo_before=module.__bobo_before__ else: bobo_before=None if hasattr(module,'__bobo_after__'): bobo_after=module.__bobo_after__ else: bobo_after=None # Get request data from outermost environment: if hasattr(module,'__request_data__'): request_params=module.__request_data__ else: request_params=None # Get initial group data: inherited_groups=[] if hasattr(module,'__allow_groups__'): groups=module.__allow_groups__ inherited_groups.append(groups) else: groups=None web_objects=None roles=UNSPECIFIED_ROLES if hasattr(module,'bobo_application'): object=module.bobo_application if hasattr(object,'__allow_groups__'): groups=object.__allow_groups__ inherited_groups.append(groups) else: groups=None if hasattr(object,'__roles__'): roles=object.__roles__ else: if hasattr(module,'web_objects'): web_objects=module.web_objects object=web_objects else: object=module published=web_objects try: doc=module.__doc__ except: if web_objects is not None: doc=' ' else: doc=None info= (bobo_before, bobo_after, request_params, inherited_groups, groups, roles, object, doc, published, realm, module_name) modules[module_name]=modules[module_name+'.cgi']=info return info except: if hasattr(sys, 'exc_info'): t,v,tb=sys.exc_info() else: t, v, tb = sys.exc_type, sys.exc_value, sys.exc_traceback v=str(v) raise ImportError, (t, v), tb finally: tb=None release()
currentindet = indent
currentindent = indent
def StructuredText(paragraphs, delimiter=re.compile(para_delim)): """ StructuredText accepts paragraphs, which is a list of lines to be parsed. StructuredText creates a structure which mimics the structure of the paragraphs. Structure => [paragraph,[sub-paragraphs]] """ currentlevel = 0 currentindent = 0 levels = {0:0} level = 0 # which header are we under struct = [] # the structure to be returned run = struct paragraphs = expandtabs(paragraphs) paragraphs = '%s%s%s' % ('\n\n', paragraphs, '\n\n') paragraphs = delimiter.split(paragraphs) paragraphs = filter(strip, paragraphs) if not paragraphs: return StructuredTextDocument() ind = [] # structure based on indention levels for paragraph in paragraphs: ind.append([indention(paragraph), paragraph]) currentindent = indention(paragraphs[0]) levels[0] = currentindent ############################################################# # updated # ############################################################# for indent,paragraph in ind : if indent == 0: level = level + 1 currentlevel = 0 currentindent = 0 levels = {0:0} struct.append(StructuredTextParagraph(paragraph, indent=indent, level=currentlevel)) elif indent > currentindent: currentlevel = currentlevel + 1 currentindent = indent levels[currentlevel] = indent run = insert(struct,level,currentlevel) run.append(StructuredTextParagraph(paragraph, indent=indent, level=currentlevel)) elif indent < currentindent: result = findlevel(levels,indent) if result > 0: currentlevel = result currentindent = indent if not level: struct.append(StructuredTextParagraph(paragraph, indent=indent, level=currentlevel)) else: run = insert(struct,level,currentlevel) run.append(StructuredTextParagraph(paragraph, indent=indent, level=currentlevel)) else: if insert(struct,level,currentlevel): run = insert(struct,level,currentlevel) else: run = struct currentindet = indent run.append(StructuredTextParagraph(paragraph, indent=indent, level=currentlevel)) return StructuredTextDocument(struct)
return self._lambdas[ 0 ]( *args, **kw )
from zExceptions import Unauthorized allowed = self._lambdas[ 0 ]( *args, **kw ) if not allowed: raise Unauthorized return 1
def validate( self, *args, **kw ): return self._lambdas[ 0 ]( *args, **kw )
schema = [('id', 'FieldIndex', 's'), ('url', 'FieldIndex', 's'), ('title', 'TextIndex', 's'), ('meta_type', 'FieldIndex', 's'), ('last_modified', 'TextIndex', 'd'), ('subject', 'TextIndex', 's'), ('description', 'TextIndex', 's'), ('date', 'TextIndex', 'd'), ('reviewed', 'FieldIndex', 'i'),
schema = [('id', 'FieldIndex', 's', None), ('url', 'FieldIndex', 's', 1), ('title', 'TextIndex', 's', None), ('meta_type', 'FieldIndex', 's', None), ('last_modified', 'TextIndex', 'd', None), ('subject', 'TextIndex', 's', None), ('description', 'TextIndex', 's', None), ('date', 'TextIndex', 'd', None), ('reviewed', 'FieldIndex', 'i', None),
def __init__(self,id,title=None): self.id=id self.title=title self._ztable=ZTablesCore.ZTable(id)
for name, index, type in schema:
call = [] for name, index, type, ci in schema:
def __init__(self,id,title=None): self.id=id self.title=title self._ztable=ZTablesCore.ZTable(id)
self._ztable.update_database_schema(uindex, utype)
self._ztable.update_database_schema(uindex, utype, call)
def __init__(self,id,title=None): self.id=id self.title=title self._ztable=ZTablesCore.ZTable(id)
'an object from the uid %r.' % (uid))
'an object from the uid %r.' % p)
def reindexIndex(self, name, REQUEST): if isinstance(name, str): name = (name,) for p in self._catalog.uids.keys(): obj = self.resolve_path(p) if not obj: obj = self.resolve_url(p, REQUEST) if obj is None: LOG.error('reindexIndex could not resolve ' 'an object from the uid %r.' % (uid)) else: # don't update metadata when only reindexing a single # index via the UI try: self.catalog_object(obj, p, idxs=name, update_metadata=0) except TypeError: # Fall back to Zope 2.6.2 interface. This is necessary for # products like CMF 1.4.2 and earlier that subclass from # ZCatalog and don't support the update_metadata argument. # May be removed some day. from warnings import warn warn('catalog_object interface of %s not up to date' % self.__class__.__name__, DeprecationWarning) self.catalog_object(obj, p, idxs=name)
self.notify_queue.put((key, bucket[key]))
ob = bucket.get(key, _marker) if ob is _marker: DEBUG and TLOG( 'OOBTree lied about %s keys: %s doesnt exist' % (bucket, key) ) continue self.notify_queue.put((key, ob))
def _getCurrentBucket(self): """ Do housekeeping if necessary, then return the 'current' bucket. """ self.lock.acquire() try: # do in-place upgrade of old "ring-based" instances if # we've just upgraded from Zope 2.5.X if self._data is None: self._upgrade()
value=' '.join(str(value))
value=' '.join(map(str, value))
def dav__allprop(self, propstat=propstat ): # DAV helper method - return one or more propstat elements # indicating property names and values for all properties. result=[] for item in self._propertyMap(): name, type=item['id'], item.get('type','string') value=self.getProperty(name)
value='\n'.join(str(value))
value='\n'.join(map(str, value))
def dav__allprop(self, propstat=propstat ): # DAV helper method - return one or more propstat elements # indicating property names and values for all properties. result=[] for item in self._propertyMap(): name, type=item['id'], item.get('type','string') value=self.getProperty(name)
value=' '.join(str(value))
value=' '.join(map(str, value))
def dav__propstat(self, name, result, propstat=propstat, propdesc=propdesc): # DAV helper method - return a propstat element indicating # property name and value for the requested property. xml_id=self.xml_namespace() propdict=self._propdict() if not propdict.has_key(name): prop='<n:%s xmlns:n="%s"/>\n' % (name, xml_id) code='404 Not Found' if not result.has_key(code): result[code]=[prop] else: result[code].append(prop) return else: item=propdict[name] name, type=item['id'], item.get('type','string') value=self.getProperty(name) if type=='tokens': value=' '.join(str(value)) elif type=='lines': value='\n'.join(str(value)) # allow for xml properties attrs=item.get('meta', {}).get('__xml_attrs__', None) if attrs is not None: attrs=map(lambda n: ' %s="%s"' % n, attrs.items()) attrs=''.join(attrs) else: # quote non-xml items here? attrs='' prop='<n:%s%s xmlns:n="%s">%s</n:%s>\n' % ( name, attrs, xml_id, value, name) code='200 OK' if not result.has_key(code): result[code]=[prop] else: result[code].append(prop) return
value='\n'.join(str(value))
value='\n'.join(map(str, value))
def dav__propstat(self, name, result, propstat=propstat, propdesc=propdesc): # DAV helper method - return a propstat element indicating # property name and value for the requested property. xml_id=self.xml_namespace() propdict=self._propdict() if not propdict.has_key(name): prop='<n:%s xmlns:n="%s"/>\n' % (name, xml_id) code='404 Not Found' if not result.has_key(code): result[code]=[prop] else: result[code].append(prop) return else: item=propdict[name] name, type=item['id'], item.get('type','string') value=self.getProperty(name) if type=='tokens': value=' '.join(str(value)) elif type=='lines': value='\n'.join(str(value)) # allow for xml properties attrs=item.get('meta', {}).get('__xml_attrs__', None) if attrs is not None: attrs=map(lambda n: ' %s="%s"' % n, attrs.items()) attrs=''.join(attrs) else: # quote non-xml items here? attrs='' prop='<n:%s%s xmlns:n="%s">%s</n:%s>\n' % ( name, attrs, xml_id, value, name) code='200 OK' if not result.has_key(code): result[code]=[prop] else: result[code].append(prop) return
zLOG.LOG('Z2', zLOG.BLATHER, "Reopened Z2.log")
zLOG.LOG('Z2', zLOG.BLATHER, "Reopened access log")
def sighandler(signum, frame): signame = zdaemon.Daemon.get_signal_name(signum) zLOG.LOG('Z2', zLOG.INFO , "Caught signal %s" % signame)
expr = re.compile(r'\s*\*([ \na-zA-Z0-9.:/;,\'\"\?\-\_\/\=]+)\*(?!\*|-)').search
expr = re.compile(r'\s*\*([ \na-zA-Z0-9.:/;,\'\"\?\-\_\/\=\-\>\<\(\)]+)\*(?!\*|-)').search
def doc_emphasize( self, s, expr = re.compile(r'\s*\*([ \na-zA-Z0-9.:/;,\'\"\?\-\_\/\=]+)\*(?!\*|-)').search ):
if _looping: apply(cb, (map,)+args, kw or {})
if _looping: apply(callback, (map,)+args, kw or {})
def register_loop_callback(callback, args=(), kw=None): _loop_lock.acquire() try: if _looping: apply(cb, (map,)+args, kw or {}) else: _loop_callbacks.append((callback, args, kw)) finally: _loop_lock.release()
raise smtpError, "Cannot convert line from SMTP: %s" % line if code > 400: raise smtpError, "Recieved error code %s from SMTP: %s"\
raise smtpError, \ "Cannot convert line from SMTP: %s" % line if code > 400: raise smtpError, \ "Recieved error code %s from SMTP: %s"\
def _check(self, lev='250'): line = self.getLine() if not line: return 0 #can't check an empty line, eh? try: code=string.atoi(line[:3]) except: raise smtpError, "Cannot convert line from SMTP: %s" % line
schema = Zope.Startup.getSchema()
schema = self.schema
def load_config_text(self, text): # We have to create a directory of our own since the existence # of the directory is checked. This handles this in a # platform-independent way. schema = Zope.Startup.getSchema() sio = cStringIO.StringIO( text.replace("<<INSTANCE_HOME>>", TEMPNAME)) os.mkdir(TEMPNAME) os.mkdir(TEMPPRODUCTS) try: conf, handler = ZConfig.loadConfigFile(schema, sio) finally: os.rmdir(TEMPPRODUCTS) os.rmdir(TEMPNAME) self.assertEqual(conf.instancehome, TEMPNAME) return conf
schema = Zope.Startup.getSchema()
schema = self.schema
def test_load_config_template(self): schema = Zope.Startup.getSchema() cfg = getConfiguration() fn = os.path.join(cfg.zopehome, "skel", "etc", "zope.conf.in") f = open(fn) text = f.read() f.close() self.load_config_text(text)
z1 = os.environ.get('Z_DEBUG_MODE',None) z2 = os.environ.get('BOBO_DEBUG_MODE',None)
z1 = os.environ.get('Z_DEBUG_MODE','') z2 = os.environ.get('BOBO_DEBUG_MODE','')
def get_module_info(module_name, modules={}, acquire=_l.acquire, release=_l.release, ): if modules.has_key(module_name): return modules[module_name] if module_name[-4:]=='.cgi': module_name=module_name[:-4] acquire() tb=None try: try: module=__import__(module_name, globals(), globals(), ('__doc__',)) realm=module_name # Let the app specify a realm if hasattr(module,'__bobo_realm__'): realm=module.__bobo_realm__ elif os.environ.has_key('Z_REALM'): realm=os.environ['Z_REALM'] elif os.environ.has_key('BOBO_REALM'): realm=os.environ['BOBO_REALM'] else: realm=module_name # Check for debug mode debug_mode=None if hasattr(module,'__bobo_debug_mode__'): debug_mode=not not module.__bobo_debug_mode__ else: z1 = os.environ.get('Z_DEBUG_MODE',None) z2 = os.environ.get('BOBO_DEBUG_MODE',None) if z1.lower() in ('yes','y') or z1.isdigit(): debug_mode = 1 elif z2.lower() in ('yes','y') or z2.isdigit(): debug_mode = 1 if hasattr(module,'__bobo_before__'): bobo_before=module.__bobo_before__ else: bobo_before=None if hasattr(module,'__bobo_after__'): bobo_after=module.__bobo_after__ else: bobo_after=None if hasattr(module,'bobo_application'): object=module.bobo_application elif hasattr(module,'web_objects'): object=module.web_objects else: object=module error_hook=getattr(module,'zpublisher_exception_hook', None) validated_hook=getattr(module,'zpublisher_validated_hook', None) transactions_manager=getattr( module,'zpublisher_transactions_manager', None) if not transactions_manager: try: get_transaction() except: pass else: # Create a default transactions manager for use # by software that uses ZPublisher and ZODB but # not the rest of Zope. transactions_manager = DefaultTransactionsManager() info= (bobo_before, bobo_after, object, realm, debug_mode, error_hook, validated_hook, transactions_manager) modules[module_name]=modules[module_name+'.cgi']=info return info except: t,v,tb=sys.exc_info() v=str(v) raise ImportError, (t, v), tb finally: tb=None release()
if isinstance(file, StringType):
if not isinstance(file, StringType):
def pt_upload(self, REQUEST, file=''): """Replace the document with the text in file.""" if SUPPORTS_WEBDAV_LOCKS and self.wl_isLocked(): raise ResourceLockedError, "File is locked via WebDAV"
' '
def manage_test(self, REQUEST): ' '
src=self(REQUEST, src__=1)
src, result=self(REQUEST, test__=1)
def manage_test(self, REQUEST): ' '
result=self(REQUEST, test__=1)
def manage_test(self, REQUEST): ' '
if not klass._p_changed:
if not getattr(klass,'_p_changed',None):
def __setattr__(self, name, v): klass=self._k setattr(klass, name, v) if not klass._p_changed: get_transaction().register(klass) klass._p_changed=1
if not klass._p_changed:
if not getattr(klass,'_p_changed',None):
def __delattr__(self, name): klass=self._k delattr(klass, name) if not klass._p_changed: get_transaction().register(klass) klass._p_changed=1
'Access contents information',
def manage_setPermissionMapping(self, permission_names=[], class_permissions=[], REQUEST=None): "Change property sheet permissions" ips=self.getClassAttr('propertysheets') ips=getattr(ips, self.id)
_Manage_properties_Permission='Manage properties' _Access_contents_information_Permission='View'
_Manage_properties_Permission='_Manage_properties_Permission' _Access_contents_information_Permission='_View_Permission'
def manage_setPermissionMapping(self, permission_names=[], class_permissions=[], REQUEST=None): "Change property sheet permissions" ips=self.getClassAttr('propertysheets') ips=getattr(ips, self.id)
if klass._p_changed==0:
if not getattr(klass, '_p_changed', 0):
def rclass(klass): if klass._p_changed==0: get_transaction().register(klass) klass._p_changed=1
delattr(pc,id)
try: delattr(pc,id) except: pass
def _delOb(self, id): delattr(self, id) pc=self.aq_inner.aq_parent.aq_parent._zclass_propertysheets_class delattr(pc,id) pc.__propset_attrs__=tuple(map(lambda o: o['id'], self._objects)) rclass(pc)
ob.manage_afterClone(ob)
def manage_pasteObjects(self, cb_copy_data=None, REQUEST=None): """Paste previously copied objects into the current object. If calling manage_pasteObjects from python code, pass the result of a previous call to manage_cutObjects or manage_copyObjects as the first argument.""" cp=None if cb_copy_data is not None: cp=cb_copy_data else: if REQUEST and REQUEST.has_key('__cp'): cp=REQUEST['__cp'] if cp is None: raise CopyError, eNoData try: cp=_cb_decode(cp) except: raise CopyError, eInvalid
if domains and not domainSpecValidate(domains):
if domains and not self.domainSpecValidate(domains):
def _changeUser(self,name,password,confirm,roles,domains,REQUEST=None): if not name: return MessageDialog( title ='Illegal value', message='A username must be specified', action ='manage_main')
sys.stdout.write(os.path.basename(self.__file) + " ")
basename = os.path.basename(self.__file) sys.stdout.write(basename + " ")
def runTest(self): sys.stdout.write(os.path.basename(self.__file) + " ") sys.stdout.flush() sys.argv = ["", "-Q", self.__file] pwd = os.getcwd() try: try: os.chdir(self.__dir) runtest.main() finally: os.chdir(pwd) except SystemExit, what: if what.code: self.fail("output for %s didn't match" % self.__file)
sys.argv = ["", "-Q", self.__file]
if basename[:10] == 'test_metal': sys.argv = ["", "-Q", "-m", self.__file] else: sys.argv = ["", "-Q", self.__file]
def runTest(self): sys.stdout.write(os.path.basename(self.__file) + " ") sys.stdout.flush() sys.argv = ["", "-Q", self.__file] pwd = os.getcwd() try: try: os.chdir(self.__dir) runtest.main() finally: os.chdir(pwd) except SystemExit, what: if what.code: self.fail("output for %s didn't match" % self.__file)
r=_getPath(home, "Products/%s/%s/" % (p,prefix),
r=_getPath(home, "lib/python/Products/%s/%s/" % (p,prefix),
def getPath(prefix, name, checkProduct=1, suffixes=('',)): """Find a file in one of several relative locations Arguments: prefix -- The location, relative to some home, to look for the file name -- The name of the file. This must not be a path. checkProduct -- a flag indicating whether product directories should be used as additional hope ares to be searched. This defaults to a true value. If this is true and the name contains a dot, then the text before the dot is treated as a product name and the product package directory is used as anothe rhome. suffixes -- a sequences of file suffixes to check. By default, the name is used without a suffix. The search takes on multiple homes which are INSTANCE_HOME, the directory containing the directory containing SOFTWARE_HOME, and possibly product areas. """ d,n = path_split(name) if d: raise ValueError, ( 'The file name, %s, should be a simple file name' % name) sw=path_split(path_split(SOFTWARE_HOME)[0])[0] for home in (INSTANCE_HOME, sw): if checkProduct: l=find(name, '.') if l > 0: p=name[:l] n=name[l+1:] r=_getPath(home, "Products/%s/%s/" % (p,prefix), n, suffixes) if r is not None: return r r=_getPath(home, prefix, name, suffixes) if r is not None: return r
rotor.newrotor(d+' shshsh').decrypt(open(p,'rb').read())
rotor.newrotor(prod_id +' shshsh').decrypt(open(p,'rb').read())
def getObject(module, name, reload=0, # The use of a mutable default is intentional here, # because modules is a module cache. modules={} ): # The use of modules here is not thread safe, however, there is # no real harm in a rece condition here. If two threads # update the cache, then one will have simply worked a little # harder than need be. So, in this case, we won't incur # the expense of a lock. if modules.has_key(module): old=modules[module] if old.has_key(name) and not reload: return old[name] else: old=None if module[-3:]=='.py': p=module[:-3] elif module[-4:]=='.pyp': p=module[:-4] else: p=module p=getPath('Extensions', p, suffixes=('','py','pyp')) if p is None: raise "Module Error", ( "The specified module, <em>%s</em>, couldn't be found." % module) __traceback_info__=p, module if p[-4:]=='.pyp': data=zlib.decompress( rotor.newrotor(d+' shshsh').decrypt(open(p,'rb').read()) ) execsrc=compile(data,module,'exec') else: try: execsrc=open(p) except: raise "Module Error", ( "The specified module, <em>%s</em>, couldn't be opened." % module) m={} exec execsrc in m try: r=m[name] except KeyError: raise 'Invalid Object Name', ( "The specified object, <em>%s</em>, was not found in module, " "<em>%s</em>." % (name, module)) if old: for k, v in m.items(): old[k]=v else: modules[module]=m return r
e=tpValuesIds(item, branches)
e=tpValuesIds(item, branches, args)
def tpValuesIds(self, branches, args, simple_type={type(''):0, type(1):0, type(1.0):0}.has_key, ): # This should build the ids of subitems which are # expandable (non-empty). Leaves should never be # in the state - it will screw the colspan counting. r=[] idattr=args['id'] try: try: items=getattr(self, branches)() except AttributeError: items=() for item in items: try: if getattr(item, branches)(): if hasattr(self, idattr): id=getattr(self, idattr) if not simple_type(type(id)): id=id() elif hasattr(item, '_p_oid'): id=item._p_oid else: id=pyid(item) e=tpValuesIds(item, branches) if e: id=[id,e] else: id=[id] r.append(id) except: pass except: pass return r
except: pass
except: if self._p_jar.db is not Globals.Bobobase._jar.db: raise 'Version Error', ( '''You may not change the database cache age while working in a <em>version</em>''') self._cache_age=Globals.Bobobase._jar.cache.cache_age=value
def manage_cache_age(self,value,REQUEST): "set cache age" try: v=self._p_jar.getVersion() except: pass else: if v: self._vcache_age=value self._p_jar.db().setVersionCacheDeactivateAfter(value) else: self._cache_age=value self._p_jar.db().setCacheDeactivateAfter(value) return
return if self._p_jar.db is not Globals.Bobobase._jar.db: raise 'Version Error', ( '''You may not change the database cache age while working in a <em>version</em>''') self._cache_age=Globals.Bobobase._jar.cache.cache_age=value return self.manage_CacheParameters(self,REQUEST)
return self.manage_cacheParameters(self,REQUEST)
def manage_cache_age(self,value,REQUEST): "set cache age" try: v=self._p_jar.getVersion() except: pass else: if v: self._vcache_age=value self._p_jar.db().setVersionCacheDeactivateAfter(value) else: self._cache_age=value self._p_jar.db().setCacheDeactivateAfter(value) return
except: pass
except: if self._p_jar.db is not Globals.Bobobase._jar.db: raise 'Version Error', ( '''You may not change the database cache size while working in a <em>version</em>''') self._cache_size=Globals.Bobobase._jar.cache.cache_size=value
def manage_cache_size(self,value,REQUEST): "set cache size" try: v=self._p_jar.getVersion() except: pass else: if v: self._vcache_size=value self._p_jar.db().setVersionCacheSize(value) else: self._cache_size=value self._p_jar.db().setCacheSize(value) return
return if self._p_jar.db is not Globals.Bobobase._jar.db: raise 'Version Error', ( '''You may not change the database cache size while working in a <em>version</em>''') self._cache_size=Globals.Bobobase._jar.cache.cache_size=value
def manage_cache_size(self,value,REQUEST): "set cache size" try: v=self._p_jar.getVersion() except: pass else: if v: self._vcache_size=value self._p_jar.db().setVersionCacheSize(value) else: self._cache_size=value self._p_jar.db().setCacheSize(value) return
if hasattr(self, 'REQUEST') and hasattr(object, '__ac_local_roles__'):
if hasattr(self, 'REQUEST') and type(self.REQUEST) != type('') and \ hasattr(object, '__ac_local_roles__'):
def _setObject(self,id,object,roles=None,user=None): v=self._checkId(id) if v is not None: id=v try: t=object.meta_type except: t=None self._objects=self._objects+({'id':id,'meta_type':t},) self._setOb(id,object) object=self._getOb(id) object.manage_afterAdd(object, self) # Try to give user the local role "Owner", but only if # no local roles have been set on the object yet. if hasattr(self, 'REQUEST') and hasattr(object, '__ac_local_roles__'): if object.__ac_local_roles__ is None: user=self.REQUEST['AUTHENTICATED_USER'] name=user.getUserName() if name != 'Anonymous User': object.manage_setLocalRoles(name, ['Owner']) return id
self._stop = False
self._stop = self._closed = False
def __init__(self, name, env=None, prefix='zodb_', config=None): """Create a new storage.
self._doclose()
if not self._closed: self._doclose() self._closed = True
def close(self): """Close the storage.
+SimpleItem.Item.manage_options
def locked_in_version(self): return 0
class ApplicationManager(Folder,CacheManager): """System management""" __roles__=('Manager',) isPrincipiaFolderish=1 Database=DatabaseManager() Versions=VersionManager() manage=manage_main=HTMLFile('cpContents', globals()) manage_undoForm=HTMLFile('undo', globals()) def version_txt(self): if not hasattr(self, '_v_version_txt'): self._v_version_txt=version_txt() return self._v_version_txt def sys_version(self): return sys.version def sys_platform(self): return sys.platform _objects=( {'id': 'Database', 'meta_type': Database.meta_type}, {'id': 'Versions', 'meta_type': Versions.meta_type}, {'id': 'Products', 'meta_type': 'Product Management'}, )
class DebugManager(Fake, SimpleItem.Item, Acquisition.Implicit): """Debug and profiling information""" manage=manage_main=HTMLFile('debug', globals()) id ='DebugInfo' name=title='Debug Information' meta_type = name icon='p_/DebugManager_icon'
def locked_in_version(self): return 0
( {'label':'Contents', 'action':'manage_main', 'help':('OFSP','Control-Panel_Contents.dtml')}, ) +Undo.UndoSupport.manage_options ) id ='Control_Panel' name=title='Control Panel' meta_type ='Control Panel' icon='p_/ControlPanel_icon' process_id=os.getpid() process_start=int(time.time()) manage_addObject=None manage_delObjects=None manage_addProperty=None manage_editProperties=None manage_delProperties=None def __init__(self): self.Products=ProductFolder() def _canCopy(self, op=0): return 0 def _init(self): pass def manage_app(self, URL2): """Return to the main management screen""" raise 'Redirect', URL2+'/manage' def process_time(self): s=int(time.time())-self.process_start d=int(s/86400) s=s-(d*86400) h=int(s/3600) s=s-(h*3600) m=int(s/60) s=s-(m*60) d=d and ('%d day%s' % (d, (d != 1 and 's' or ''))) or '' h=h and ('%d hour%s' % (h, (h != 1 and 's' or ''))) or '' m=m and ('%d min' % m) or '' s='%d sec' % s return '%s %s %s %s' % (d, h, m, s) def thread_get_ident(self): return get_ident() def db_name(self): return self._p_jar.db().getName() def db_size(self): if Globals.DatabaseVersion=='2': s=os.stat(self.db_name())[6] else: s=self._p_jar.db().getSize() if s >= 1048576.0: return '%.1fM' % (s/1048576.0) return '%.1fK' % (s/1024.0)
( {'label':'Debugging Info', 'action':'manage_main', 'help':('OFSP','DebugInfo-Debug.dtml')}, {'label':'Profiling', 'action':'manage_profile', 'help':('OFSP','DebugInfo-Profile.dtml')}, ) )
def sys_platform(self): return sys.platform
manage_profile=HTMLFile('profile', globals()) def manage_profile_stats(self, sort='time', limit=200): """Return profile data if available""" stats=getattr(sys, '_ps_', None) if stats is None: return None output=StringIO() stdout=sys.stdout sys.stdout=output stats.strip_dirs().sort_stats(sort).print_stats(limit) sys.stdout.flush() sys.stdout=stdout return output.getvalue()
def manage_discardVersions(self, versions, REQUEST=None): "Discard some versions" db=self._p_jar.db() for v in versions: db.abortVersion(v) if REQUEST is not None: REQUEST['RESPONSE'].redirect(REQUEST['URL1']+'/manage_main')
if hasattr(aq_base(ob), 'isTopLevelPrincipiaApplicationObject'):
if hasattr(aq_base(ob), 'isTopLevelPrincipiaApplicationObject') and \ ob.isTopLevelPrincipiaApplicationObject:
def is_acquired(ob): # Return true if this object is not a direct # subobject of its aq_parent object. if not hasattr(ob, 'aq_parent'): return 0 if hasattr(aq_base(ob.aq_parent), absattr(ob.id)): return 0 if hasattr(aq_base(ob), 'isTopLevelPrincipiaApplicationObject'): return 0 return 1
if err[0] is KeyboardInterrupt:
if error[0] is KeyboardInterrupt:
def addError(self, test, error): TestResult.addError(self,test,error) self.stream.write('E') self.stream.flush() if err[0] is KeyboardInterrupt: self.shouldStop = 1
if rawdata[i-1] != ';':
if rawdata[k-1] != ';':
def goahead(self, end): rawdata = self.rawdata i = 0 n = len(rawdata) while i < n: if self.nomoretags: self.handle_data(rawdata[i:n]) i = self.updatepos(i, n) break match = interesting.search(rawdata, i) if match: j = match.start(0) else: j = n if i < j: self.handle_data(rawdata[i:j]) i = self.updatepos(i, j) if i == n: break if rawdata[i] == '<': if starttagopen.match(rawdata, i): if self.literal: self.handle_data(rawdata[i]) i = self.updatepos(i, i+1) continue k = self.parse_starttag(i) if k < 0: break i = self.updatepos(i, k) continue if endtagopen.match(rawdata, i): k = self.parse_endtag(i) if k < 0: break i = self.updatepos(i, k) self.literal = 0 continue if commentopen.match(rawdata, i): if self.literal: self.handle_data(rawdata[i]) i = self.updatepos(i, i+1) continue k = self.parse_comment(i) if k < 0: break i = self.updatepos(i, i+k) continue if piopen.match(rawdata, i): if self.literal: self.handle_data(rawdata[i]) i = self.updatepos(i, i+1) continue k = self.parse_pi(i) if k < 0: break i = self.updatepos(i, i+k) continue match = special.match(rawdata, i) if match: if self.literal: self.handle_data(rawdata[i]) i = self.updatepos(i, i+1) continue k = match.end(0) i = self.updatepos(i, k) continue elif rawdata[i] == '&': match = charref.match(rawdata, i) if match: name = match.group(1) self.handle_charref(name) k = match.end(0) if rawdata[i-1] != ';': k = k-1 i = self.updatepos(i, k) continue match = entityref.match(rawdata, i) if match: name = match.group(1) self.handle_entityref(name) k = match.end(0) if rawdata[i-1] != ';': k = k-1 i = self.updatepos(i, k) continue else: raise RuntimeError, 'neither < nor & ??' 
# We get here only if incomplete matches but # nothing else match = incomplete.match(rawdata, i) if not match: self.handle_data(rawdata[i]) i = self.updatepos(i, i+1) continue j = match.end(0) if j == n: break # Really incomplete self.handle_data(rawdata[i:j]) i = self.updatepos(self, i, j) # end while if end and i < n: self.handle_data(rawdata[i:n]) i = self.updatepos(i, n) self.rawdata = rawdata[i:] # XXX if end: check for empty stack
sm= getattr(response,'setMessage',None)
try: sm= response.setMessage except: sm= None
def publish(request, module_name, after_list, debug=0,
            # Optimize: bind frequently used globals as defaults.
            call_object=call_object,
            missing_name=missing_name,
            dont_publish_class=dont_publish_class,
            mapply=mapply,
            ):
    """Publish a request: traverse to the target object, call it, and
    fill in the response.

    On any error the transaction is aborted and the error hook (when
    configured) is given a chance to render the failure; a Retry from
    the hook re-publishes a cloned request.
    """
    (bobo_before, bobo_after, object, realm, debug_mode, err_hook,
     validated_hook, transactions_manager)= get_module_info(module_name)
    parents=None
    try:
        request.processInputs()
        request_get=request.get
        response=request.response
        # First check for "cancel" redirect:
        cancel=''
        if request_get('SUBMIT','').strip().lower()=='cancel':
            cancel=request_get('CANCEL_ACTION','')
        if cancel:
            # NOTE: Python 2 string exception used as a control-flow
            # redirect signal, caught by the publisher machinery.
            raise 'Redirect', cancel
        after_list[0]=bobo_after
        if debug_mode:
            response.debug_mode=debug_mode
        # Ask for authentication only when nobody is logged in yet.
        if realm and not request.get('REMOTE_USER',None):
            response.realm=realm
        if bobo_before is not None:
            bobo_before()
        # Get a nice clean path list:
        path=request_get('PATH_INFO').strip()
        request['PARENTS']=parents=[object]
        if transactions_manager:
            transactions_manager.begin()
        object=request.traverse(path, validated_hook=validated_hook)
        if transactions_manager:
            transactions_manager.recordMetaData(object, request)
        result=mapply(object, request.args, request,
                      call_object,1,
                      missing_name,
                      dont_publish_class,
                      request, bind=1)
        if result is not response:
            response.setBody(result)
        if transactions_manager:
            transactions_manager.commit()
        return response
    except:
        if transactions_manager:
            transactions_manager.abort()
        # DM: provide nicer error message for FTP
        sm= getattr(response,'setMessage',None)
        if sm is not None:
            from ZServer.medusa.asyncore import compact_traceback
            cl,val= sys.exc_info()[:2]
            sm('%s: %s %s' % (getattr(cl,'__name__',cl), val,
                              debug_mode and compact_traceback()[-1] or ''))
        if err_hook is not None:
            if parents:
                parents=parents[0]
            try:
                return err_hook(parents, request,
                                sys.exc_info()[0],
                                sys.exc_info()[1],
                                sys.exc_info()[2],
                                )
            except Retry:
                # We need to try again....
                if not request.supports_retry():
                    # Out of retries: render the error as-is.
                    return err_hook(parents, request,
                                    sys.exc_info()[0],
                                    sys.exc_info()[1],
                                    sys.exc_info()[2],
                                    )
                newrequest=request.retry()
                request.close()  # Free resources held by the request.
                try:
                    return publish(newrequest, module_name, after_list,
                                   debug)
                finally:
                    newrequest.close()
        else:
            raise
os.unlink(filename) def checkCloseNoUnlink(self):
assert not os.path.exists(filename) def checkCloseDoesUnlink(self):
def checkCreateWithFileobj(self): filename = 'commit.log' fp = open(filename, 'w+b') try: self.assertRaises(CommitLog.TruncationError, CommitLog.CommitLog, fp) finally: fp.close() os.unlink(filename)
try: assert os.path.exists(filename) finally: os.unlink(filename) assert not os.path.exists(filename)
assert not os.path.exists(filename)
def checkCloseNoUnlink(self): log = CommitLog.CommitLog() filename = log.get_filename() log.close() try: assert os.path.exists(filename) finally: os.unlink(filename) assert not os.path.exists(filename)
try: assert os.path.exists(filename) finally: os.unlink(filename) assert not os.path.exists(filename)
assert not os.path.exists(filename)
def checkDel(self): log = CommitLog.CommitLog() filename = log.get_filename() del log try: assert os.path.exists(filename) finally: os.unlink(filename) assert not os.path.exists(filename)
self._log.close(unlink=1)
try: self._log.close(unlink=1) except OSError, e: if e.errno <> errno.ENOENT: raise
def tearDown(self): self._log.close(unlink=1)
suite.addTest(CreateCommitLogTest('checkCreateNoFile')) suite.addTest(CreateCommitLogTest('checkCreateWithFilename')) suite.addTest(CreateCommitLogTest('checkCreateWithFileobj')) suite.addTest(CreateCommitLogTest('checkCloseNoUnlink')) suite.addTest(CreateCommitLogTest('checkDel')) suite.addTest(CommitLogStateTransitionTest('checkProperStart')) suite.addTest(CommitLogStateTransitionTest('checkAppendSetsOpen')) suite.addTest(CommitLogStateTransitionTest('checkPromiseSetsPromise')) suite.addTest(CommitLogStateTransitionTest('checkBadDoublePromise')) suite.addTest(CommitLogStateTransitionTest('checkFinishSetsStart')) suite.addTest(LowLevelStoreAndLoadTest('checkOneStoreAndLoad')) suite.addTest(LowLevelStoreAndLoadTest('checkTenStoresAndLoads')) suite.addTest(PacklessLogTest('checkOneStoreAndLoad')) suite.addTest(PacklessLogTest('checkTenStoresAndLoads')) suite.addTest(FullLogTest('checkOneStoreAndLoad')) suite.addTest(FullLogTest('checkOtherWriteMethods'))
suite.addTest(unittest.makeSuite(CreateCommitLogTest, 'check')) suite.addTest(unittest.makeSuite(CommitLogStateTransitionTest, 'check')) suite.addTest(unittest.makeSuite(LowLevelStoreAndLoadTest, 'check')) suite.addTest(unittest.makeSuite(PacklessLogTest, 'check')) suite.addTest(unittest.makeSuite(FullLogTest, 'check'))
def suite(): suite = unittest.TestSuite() # Creation and closing suite.addTest(CreateCommitLogTest('checkCreateNoFile')) suite.addTest(CreateCommitLogTest('checkCreateWithFilename')) suite.addTest(CreateCommitLogTest('checkCreateWithFileobj')) suite.addTest(CreateCommitLogTest('checkCloseNoUnlink')) suite.addTest(CreateCommitLogTest('checkDel')) # State transitions suite.addTest(CommitLogStateTransitionTest('checkProperStart')) suite.addTest(CommitLogStateTransitionTest('checkAppendSetsOpen')) suite.addTest(CommitLogStateTransitionTest('checkPromiseSetsPromise')) suite.addTest(CommitLogStateTransitionTest('checkBadDoublePromise')) suite.addTest(CommitLogStateTransitionTest('checkFinishSetsStart')) # Base class for storing and loading suite.addTest(LowLevelStoreAndLoadTest('checkOneStoreAndLoad')) suite.addTest(LowLevelStoreAndLoadTest('checkTenStoresAndLoads')) # PacklessLog API suite.addTest(PacklessLogTest('checkOneStoreAndLoad')) suite.addTest(PacklessLogTest('checkTenStoresAndLoads')) # FullLog API suite.addTest(FullLogTest('checkOneStoreAndLoad')) suite.addTest(FullLogTest('checkOtherWriteMethods')) return suite
self._.migrate_length()
self._migrate_length()
def index_object(self, docid, obj ,threshold=100): """ hook for (Z)Catalog """
self._.migrate_length()
self._migrate_length()
def unindex_object(self, docid): """ hook for (Z)Catalog """
if jar is None: return 1
if jar is None: if hasattr(self, 'aq_parent') and hasattr(self.aq_parent, '_p_jar'): jar=self.aq_parent._p_jar if jar is None: return 0
def modified_in_session(self):
return self.objectIds()
return len(self.objectIds())
def hasChildNodes(self): """Returns true if the node has any children, false if it doesn't. """ return self.objectIds()
if (child.getNodeType==1 and child.getTagName()==tagname or tagname== '*'):
if (child.getNodeType()==ELEMENT_NODE and \ child.getTagName()==tagname or tagname== '*'):
def getElementsByTagName(self, tagname): """ Returns a NodeList of all the Elements with a given tag name in the order in which they would be encountered in a preorder traversal of the Document tree. Parameter: tagname The name of the tag to match (* = all tags). Return Value: A new NodeList object containing all the matched Elements. """ nodeList = [] for child in self.objectValues(): if (child.getNodeType==1 and child.getTagName()==tagname or tagname== '*'): nodeList.append( child ) n1 = child.getElementsByTagName(tagname) nodeList = nodeList + n1._data return NodeList(nodeList)
attributes = self.propdict() list = [] for a in attributes.keys(): attributeNode = Attr(a, attributes[a], self.getOwnerDocument()) list.append(attributeNode) return NamedNodeMap(list)
attribs={} for p in self._properties: if p['type'] == 'string': name=p['id'] attrib=Attr(name, self.getProperty(name,'')).__of__(self) attribs[name]=attrib return NamedNodeMap(attribs)
def getAttributes(self): attributes = self.propdict() list = [] for a in attributes.keys(): attributeNode = Attr(a, attributes[a], self.getOwnerDocument()) list.append(attributeNode) return NamedNodeMap(list)
return str(self.getProperty(name))
if self.getPropertyType(name) == 'string': return self.getProperty(name,'')
def getAttribute(self, name): return str(self.getProperty(name))
attributes = self.propdict() if attributes.has_key(name): node = Attr(name, self.getProperty(name), self.getOwnerDocument()) return node return None
if self.getPropertyType(name) == 'string': return Attr(name, self.getProperty(name,'')).__of__(self) return None class ElementWithTitle(Element): """ Elements that allow DOM access to Zope title property. Note: Don't use this sub-class for PropertyManagers """ def getAttributes(self): if self.getAttribute('title'): return NamedNodeMap({'title' : self.title}) return NamedNodeMap() def getAttribute(self, name): if name=='title' and hasattr(self.aq_base, 'title'): return self.title return '' def getAttributeNode(self, name): value=self.getAttribute(name) if value: return Attr(name, value).__of__(self) return None
def getAttributeNode(self, name): attributes = self.propdict() if attributes.has_key(name): node = Attr(name, self.getProperty(name), self.getOwnerDocument()) return node return None
def __init__(self,list=[]): self._data = list self.length = len(list)
def __init__(self,list=None): self._data = list or []
def __init__(self,list=[]): self._data = list self.length = len(list)
if index >= self.length: return None return self._data[index] item = __getitem__
try: return self._data[index] except IndexError: return None
def __getitem__(self, index): """Returns the index-th item in the collection""" if index >= self.length: return None return self._data[index]
return self.length
return len(self._data) __len__=getLength
def getLength(self): """The length of the NodeList""" return self.length
self.length = len(data)
def __init__(self, data=None): if data is None : data = {} self._data = data self.length = len(data)
return self._data[index]
try: return self._data.values()[index] except IndexError: return None
def __getitem__(self, index): """Returns the index-th item in the map""" return self._data[index]
class Attr(Node): def __init__(self, name, value, ownerDocument): self.nodeName = name
class Attr(Acquisition.Implicit, Node): def __init__(self, name, value):
def getNamedItem(self, name): """Retrieves a node specified by name. Parameters: name Name of a node to retrieve. Return Value A Node (of any type) with the specified name, or null if the specified name did not identify any node in the map. """ if self._data.has_key(name): return self._data[name] return None
self.ownerDocument = ownerDocument
def __init__(self, name, value, ownerDocument): self.nodeName = name # attr attributes self.name = name self.value = value self.specified = 1 # attr nodes are specified because properties # don't exist without having a value self.ownerDocument = ownerDocument
def getOwnerDocument(self):
    # DOM: the Document this attribute node belongs to.
    return self.ownerDocument

def getSpecified(self):
    # DOM: whether the attribute was explicitly given a value.
    return self.specified

def getNodeType(self):
    # DOM node-type constant for attribute nodes.
    return ATTRIBUTE_NODE
tree = self.parseQueryEx(query)
tree = self.parseQuery(query)
def parseQueryEx(self, query): tree = self.parseQueryEx(query) ignored = self.getIgnored() return tree, ignored
l = form[keys] for x in values: if getattr(x, '__class__',0) is record: for k, v in x.__dict__.items(): for y in l: if not hasattr(y, k): setattr(y, k, v) else: if not a in l: l.append(a) form[keys] = l
pass
def __init__(self, stdin, environ, response, clean=0, # "static" variables that we want to be local for speed SEQUENCE=1, DEFAULT=2, RECORD=4, RECORDS=8, REC=12, # RECORD|RECORDS EMPTY=16, CONVERTED=32, hasattr=hasattr, getattr=getattr, setattr=setattr, search_type=regex.compile( ':[a-zA-Z][a-zA-Z0-9_]+$' ).search, rfind=string.rfind, ): # Avoid the overhead of scrubbing the environment in the # case of request cloning for traversal purposes. If the # clean flag is set, we know we can use the passed in # environ dict directly.
'manage_advancedForm', 'PrincipiaSearchSource'
'manage_advancedForm', 'PrincipiaSearchSource', 'document_src'
def validate(self, inst, parent, name, value, md): return getSecurityManager().validate(inst, parent, name, value)
'manage_product_zclass_info')),
'manage_product_zclass_info', 'PUT', 'manage_FTPput')),
def validate(self, inst, parent, name, value, md): return getSecurityManager().validate(inst, parent, name, value)
default_content_type = 'text/plain' def document_src(self, REQUEST=None, RESPONSE=None): """Return unprocessed document source.""" if RESPONSE is not None: RESPONSE.setHeader('Content-Type', 'text/plain') return '<params>%s</params>\n%s' % (self.arguments_src, self.src) def manage_FTPget(self): """Get source for FTP download""" self.REQUEST.RESPONSE.setHeader('Content-Type', 'text/plain') return '<params>%s</params>\n%s' % (self.arguments_src, self.src) def PUT(self, REQUEST, RESPONSE): """Handle put requests""" self.dav__init(REQUEST, RESPONSE) body = REQUEST.get('BODY', '') m = re.match('\s*<params>(.*)</params>\s*\n', body, re.I) if m: self.arguments_src = m.group(1) self._arg=parse(self.arguments_src) body = body[m.end():] template = body self.src = template self.template=t=self.template_class(template) t.cook() self._v_cache={}, Bucket() RESPONSE.setStatus(204) return RESPONSE
def PrincipiaSearchSource(self): """Return content for use by the Find machinery.""" return '%s\n%s' % (self.arguments_src, self.src)
path=urllib.unquote(string.split(URL2, REQUEST.script)[1])
path=string.split(URL2, REQUEST.script) if len(path) > 1: path=path[1] else: path='' path=urllib.unquote(path)
def manage_catalogFoundItems(self, REQUEST, RESPONSE, URL2, URL1, obj_metatypes=None, obj_ids=None, obj_searchterm=None, obj_expr=None, obj_mtime=None, obj_mspec=None, obj_roles=None, obj_permission=None): """ Find object according to search criteria and Catalog them """
if nargs.split()=='':
if nargs.strip()=='':
def __init__(self, blocks): tname, args, section = blocks[0]
t.write(open('input/TeeShop1.html').read()) expect = open('output/TeeShop1.html').read() util.check_html(expect, t(getProducts=self.getProducts))
t.write(open('input/TeeShop2.html').read()) expect = open('output/TeeShop2.html').read() out = t(getProducts=self.getProducts) util.check_html(expect, out)
def check2(self): self.folder.laf.write(open('input/TeeShopLAF.html').read())
t.write(open('input/TeeShop2.html').read()) expect = open('output/TeeShop2.html').read() util.check_html(expect, t(getProducts=self.getProducts))
t.write(open('input/TeeShop1.html').read()) expect = open('output/TeeShop1.html').read() out = t(getProducts=self.getProducts) util.check_html(expect, out)
def check3(self): self.folder.laf.write(open('input/TeeShopLAF.html').read())
LOG('Zope', INFO, 'Checking product: %s' % product.id)
LOG('Zope', INFO, 'Searching in product: %s' % product.id)
def checkGlobalRegistry(self, rebuild=1): """Check the global (product) registry for problems, which can be caused by disk-based products being deleted and other things that Zope cannot know about. If rebuild is true, the global registry will be rebuilt automatically if a problem is found.
LOG('Zope', WARNING, 'Successfully rebuilt global product registry')
try: if hasattr(ob, '_register') and hasattr(ob, '_zclass_'): ob._register() if hasattr(ob, 'objectItems'): m = list(ob.objectItems()) items.extend(m) if hasattr(ob, 'propertysheets'): ps = ob.propertysheets if (hasattr(ps, 'methods') and hasattr(ps.methods, 'objectItems')): m = list(ps.methods.objectItems()) items.extend(m) except: LOG('Zope', WARNING, 'Broken objects exist in product %s.' % product.id) idx = idx + 1 LOG('Zope', INFO, 'Successfully rebuilt global product registry')
def checkGlobalRegistry(self, rebuild=1): """Check the global (product) registry for problems, which can be caused by disk-based products being deleted and other things that Zope cannot know about. If rebuild is true, the global registry will be rebuilt automatically if a problem is found.
'A problem was found in the global product registry but ' \
'A problem was found in the global product registry but '
def initialize(app):
    """Perform one-time application setup at startup.

    Initializes the Control Panel cache, back-fills system objects
    that databases created by older Zope versions may lack (the "b/c"
    blocks), ensures ZGlobals and an initial user exist, installs
    products, and finally sanity-checks the global product registry.
    """
    # Initialize the application

    # Initialize the cache:
    app.Control_Panel.initialize_cache()

    # The following items marked b/c are backward compatibility hacks
    # which make sure that expected system objects are added to the
    # bobobase. This is required because the bobobase in use may pre-
    # date the introduction of certain system objects such as those
    # which provide Lever support.

    # b/c: Ensure that Control Panel exists.
    if not hasattr(app, 'Control_Panel'):
        cpl=ApplicationManager()
        cpl._init()
        app._setObject('Control_Panel', cpl)
        get_transaction().note('Added Control_Panel')
        get_transaction().commit()

    # b/c: Ensure that a ProductFolder exists.
    if not hasattr(app.Control_Panel.aq_base, 'Products'):
        app.Control_Panel.Products=App.Product.ProductFolder()
        get_transaction().note('Added Control_Panel.Products')
        get_transaction().commit()

    # b/c: Ensure that std err msg exists.
    if not hasattr(app, 'standard_error_message'):
        import Document
        Document.manage_addDocument(
            app,
            'standard_error_message',
            'Standard Error Message',
            _standard_error_msg)
        get_transaction().note('Added standard_error_message')
        get_transaction().commit()

    # b/c: Ensure that Owner role exists.
    if hasattr(app, '__ac_roles__') and not ('Owner' in app.__ac_roles__):
        app.__ac_roles__=app.__ac_roles__ + ('Owner',)
        get_transaction().note('Added Owner role')
        get_transaction().commit()

    # Make sure we have Globals
    root=app._p_jar.root()
    if not root.has_key('ZGlobals'):
        import BTree
        app._p_jar.root()['ZGlobals']=BTree.BTree()
        get_transaction().note('Added Globals')
        get_transaction().commit()

    # Install the initial user.
    if hasattr(app, 'acl_users'):
        users = app.acl_users
        if hasattr(users, '_createInitialUser'):
            app.acl_users._createInitialUser()
            get_transaction().note('Created initial user')
            get_transaction().commit()

    install_products(app)

    # Check the global product registry for problems.  Note that if
    # the check finds problems but fails to successfully rebuild the
    # global registry we abort the transaction so that we don't leave
    # it in an indeterminate state.
    try:
        if app.checkGlobalRegistry():
            get_transaction().note('Rebuilt global product registry')
            get_transaction().commit()
    except:
        LOG('Zope', ERROR,
            'A problem was found in the global product registry but ' \
            'the attempt to rebuild the registry failed.',
            error=sys.exc_info())
        get_transaction().abort()
self.refreshCatalog(clear=1)
pgthreshold = self._getProgressThreshold() handler = (pgthreshold > 0) and ZLogHandler(pgthreshold) or None self.refreshCatalog(clear=1, pghandler=handler) print handler
def manage_catalogReindex(self, REQUEST, RESPONSE, URL1): """ clear the catalog, then re-index everything """
def refreshCatalog(self, clear=0):
def refreshCatalog(self, clear=0, pghandler=None):
def refreshCatalog(self, clear=0): """ re-index everything we can find """
LOG('ZCatalog', BLATHER, 'Starting recataloging of ZCatalog at %s' % self.absolute_url(1))
def refreshCatalog(self, clear=0): """ re-index everything we can find """
LOG('ZCatalog', BLATHER, 'Recataloging object %s (%d/%d)' % (p, i, num_objects))
def refreshCatalog(self, clear=0): """ re-index everything we can find """
LOG('ZCatalog', ERROR, 'Recataloging object at %s failed' % p, error=sys.exc_info()) LOG('ZCatalog', BLATHER, 'Recataloging of ZCatalog at %s terminated' % self.absolute_url(1))
LOG.error('Recataloging object at %s failed' % p, exc_info=sys.exc_info()) if pghandler: pghandler.finish()
def refreshCatalog(self, clear=0): """ re-index everything we can find """
if compress: result = ':' + b2a(zlib.compress(result, 9))
if compress and len(result) > 2: zresult = ':' + b2a(zlib.compress(result, 9)) if len(zresult) < len(result): result = zresult
def encodeExpansion(nodes, compress=1): '''Encode the expanded node ids of a tree into a string. Accepts a list of nodes, such as that produced by root.flat(). Marks each expanded node with an expansion_number attribute. Since node ids are encoded, the resulting string is safe for use in cookies and URLs. ''' steps = [] last_depth = -1 n = 0 for node in nodes: if node.state <=0: continue dd = last_depth - node.depth + 1 last_depth = node.depth if dd > 0: steps.append('_' * dd) steps.append(node.id) node.expansion_number = n n = n + 1 result = ':'.join(steps) if compress: result = ':' + b2a(zlib.compress(result, 9)) return result
if tag in EMPTY_HTML_TAGS: return self.handle_startendtag(tag, attrs)
def handle_starttag(self, tag, attrs): if tag in EMPTY_HTML_TAGS: return self.handle_startendtag(tag, attrs) self.close_para_tags(tag) self.tagstack.append(tag) self.scan_xmlns(attrs) attrlist, taldict, metaldict = self.extract_attrs(attrs) self.gen.emitStartElement(tag, attrlist, taldict, metaldict, self.getpos())
if implied > 0: if tag in TIGHTEN_IMPLICIT_CLOSE_TAGS: white = self.gen.unEmitWhitespace() self.gen.emitEndElement(tag, implied=implied) if white: self.gen.emitRawText(white) else: self.gen.emitEndElement(tag, implied=implied)
isend = (implied < 0) if tag in TIGHTEN_IMPLICIT_CLOSE_TAGS: white = self.gen.unEmitWhitespace() else: white = None self.gen.emitEndElement(tag, isend=isend, implied=implied) if white: self.gen.emitRawText(white)
def implied_endtag(self, tag, implied): assert tag == self.tagstack[-1] assert implied in (-1, 1, 2) if implied > 0: if tag in TIGHTEN_IMPLICIT_CLOSE_TAGS: # Pick out trailing whitespace from the program, and # insert the close tag before the whitespace. white = self.gen.unEmitWhitespace() self.gen.emitEndElement(tag, implied=implied) if white: self.gen.emitRawText(white) else: self.gen.emitEndElement(tag, implied=implied) self.tagstack.pop() self.pop_xmlns()
self._catalog.indexes[name].clear()
self._catalog.getIndex(name).clear()
def clearIndex(self, name):
if self.getAttribute('title'): return NamedNodeMap({'title' : self.title})
title = self.getAttributeNode('title') if title is not None: return NamedNodeMap({'title':title})
def getAttributes(self): """Returns a NamedNodeMap containing the attributes of this node (if it is an element) or None otherwise.""" if self.getAttribute('title'): return NamedNodeMap({'title' : self.title}) return NamedNodeMap()
warn("\nZCatalog query using '%s' detected.\nUsing query parameters ending with '_usage' is deprecated.\nConsider using record-style parameters instead (see lib/python/Products/PluggableIndexes/README.txt for details)" % usage_param, DeprecationWarning)
warn("\nZCatalog query using '%s' detected.\nUsing query parameters ending with '_usage' is deprecated.\nConsider using record-style parameters instead (see lib/python/Products/PluginIndexes/README.txt for details)" % usage_param, DeprecationWarning)
def __init__(self, request, iid, options=[]): """ parse a request from the ZPublisher and return a uniform datastructure back to the _apply_index() method of the index
"An integer was expected in the value '%s'" % escape(v)
"An integer was expected in the value %s" % escape(`v`)
def field2int(v): if isinstance(v, (ListType, TupleType)): return map(field2int, v) v = field2string(v) if v: try: return int(v) except ValueError: raise ValueError, ( "An integer was expected in the value '%s'" % escape(v) ) raise ValueError, 'Empty entry when <strong>integer</strong> expected'
"A floating-point number was expected in the value '%s'" % escape(v)
"A floating-point number was expected in the value %s" % escape(`v`)
def field2float(v): if isinstance(v, (ListType, TupleType)): return map(field2float, v) v = field2string(v) if v: try: return float(v) except ValueError: raise ValueError, ( "A floating-point number was expected in the value '%s'" % escape(v) ) raise ValueError, ( 'Empty entry when <strong>floating-point number</strong> expected')