rem (string lengths 0-322k) | add (string lengths 0-2.05M) | context (string lengths 8-228k) |
---|---|---|
order = req.args.get('order', 'name').lower() req.hdf['browser.order'] = order desc = req.args.has_key('desc') req.hdf['browser.desc'] = desc and 1 or 0
|
def _render_directory(self, req, repos, node, rev=None): req.perm.assert_permission('BROWSER_VIEW')
|
|
req.hdf['browser.items'] = info req.hdf['browser.changes'] = changes
|
switch_ordering_hrefs = {} for col in ('name', 'size', 'date'): switch_ordering_hrefs[col] = req.href.browser( node.path, rev=rev, order=col, desc=(col == order and not desc and 1 or None))
|
def browse_order(a): return a['is_dir'] and dir_order or 0, file_order(a)
|
return "%s <%s>" % (name, email)
|
return "\"%s\" <%s>" % (name, email)
|
def format_header(self, name, email=None): from email.Header import Header try: tmp = unicode(name, 'ascii') name = Header(tmp, 'ascii', maxlinelen=self.maxheaderlen) except UnicodeDecodeError: name = Header(name, self._charset, maxlinelen=self.maxheaderlen) if not email: return name else: return "%s <%s>" % (name, email)
|
print "hej"
|
def _create_file(fname, data=None): fd = open(fname, 'w') if data: fd.write(data) fd.close()
|
|
def _get_changes(env, db, repos, revs, full=None, req=None):
|
def _get_changes(env, db, repos, revs, full=None, req=None, raw=False):
|
def _get_changes(env, db, repos, revs, full=None, req=None): changes = {} for rev in filter(lambda x: x in revs, revs): changeset = repos.get_changeset(rev) if changeset.message: message = util.wiki_escape_newline(changeset.message) # FIXME (#48) if not full: message = util.shorten_line(message) message = wiki_to_oneliner(message, env, db) else: message = wiki_to_html(message, req.hdf, env, db) else: message = '--' changes[rev] = { 'date_seconds': changeset.date, 'date': time.strftime('%x %X', time.localtime(changeset.date)), 'age': util.pretty_timedelta(changeset.date), 'author': changeset.author or 'anonymous', 'message': message } return changes
|
if changeset.message:
|
message = changeset.message if raw: files = [c[0] for c in changeset.get_changes()] elif message:
|
def _get_changes(env, db, repos, revs, full=None, req=None): changes = {} for rev in filter(lambda x: x in revs, revs): changeset = repos.get_changeset(rev) if changeset.message: message = util.wiki_escape_newline(changeset.message) # FIXME (#48) if not full: message = util.shorten_line(message) message = wiki_to_oneliner(message, env, db) else: message = wiki_to_html(message, req.hdf, env, db) else: message = '--' changes[rev] = { 'date_seconds': changeset.date, 'date': time.strftime('%x %X', time.localtime(changeset.date)), 'age': util.pretty_timedelta(changeset.date), 'author': changeset.author or 'anonymous', 'message': message } return changes
|
else:
|
if not message:
|
def _get_changes(env, db, repos, revs, full=None, req=None): changes = {} for rev in filter(lambda x: x in revs, revs): changeset = repos.get_changeset(rev) if changeset.message: message = util.wiki_escape_newline(changeset.message) # FIXME (#48) if not full: message = util.shorten_line(message) message = wiki_to_oneliner(message, env, db) else: message = wiki_to_html(message, req.hdf, env, db) else: message = '--' changes[rev] = { 'date_seconds': changeset.date, 'date': time.strftime('%x %X', time.localtime(changeset.date)), 'age': util.pretty_timedelta(changeset.date), 'author': changeset.author or 'anonymous', 'message': message } return changes
|
'message': message
|
'message': message, 'files': files,
|
def _get_changes(env, db, repos, revs, full=None, req=None): changes = {} for rev in filter(lambda x: x in revs, revs): changeset = repos.get_changeset(rev) if changeset.message: message = util.wiki_escape_newline(changeset.message) # FIXME (#48) if not full: message = util.shorten_line(message) message = wiki_to_oneliner(message, env, db) else: message = wiki_to_html(message, req.hdf, env, db) else: message = '--' changes[rev] = { 'date_seconds': changeset.date, 'date': time.strftime('%x %X', time.localtime(changeset.date)), 'age': util.pretty_timedelta(changeset.date), 'author': changeset.author or 'anonymous', 'message': message } return changes
|
req.hdf['log.log_path_history_href'] = self.env.href.log(path, rev=rev, log_mode='path_history')
|
def render(self, req): self.perm.assert_permission(perm.LOG_VIEW)
|
|
def add_page_link(what, page): args = {
|
def make_log_href(**args): args.update({
|
def add_page_link(what, page): args = { 'rev': rev, 'log_mode': log_mode, 'limit': limit, 'page': page, 'pages': urllib.quote_plus(' '.join(pages)), } if full_messages: args['full_messages'] = full_messages add_link(req, what, self.env.href.log(path, **args), 'Revision Log (Page %d)' % (page))
|
'page': page, 'pages': urllib.quote_plus(' '.join(pages)), }
|
})
|
def add_page_link(what, page): args = { 'rev': rev, 'log_mode': log_mode, 'limit': limit, 'page': page, 'pages': urllib.quote_plus(' '.join(pages)), } if full_messages: args['full_messages'] = full_messages add_link(req, what, self.env.href.log(path, **args), 'Revision Log (Page %d)' % (page))
|
add_link(req, what, self.env.href.log(path, **args),
|
return self.env.href.log(path, **args) def add_page_link(what, page): add_link(req, what, make_log_href(page=page, pages=urllib.quote_plus(' '.join(pages))),
|
def add_page_link(what, page): args = { 'rev': rev, 'log_mode': log_mode, 'limit': limit, 'page': page, 'pages': urllib.quote_plus(' '.join(pages)), } if full_messages: args['full_messages'] = full_messages add_link(req, what, self.env.href.log(path, **args), 'Revision Log (Page %d)' % (page))
|
req.hdf['log.changes'] = _get_changes(self.env, self.db, repos, [i['rev'] for i in info], full_messages, req) rss_href = self.env.href.log(path, rev=rev, format='rss') add_link(req, 'alternate', rss_href, 'RSS Feed', 'application/rss+xml', 'rss') if req.args.get('format') == 'rss':
|
changes = _get_changes(self.env, self.db, repos, [i['rev'] for i in info], full_messages, req, format == 'changelog') if format == 'rss': for cs in changes.values(): cs['message'] = util.escape(cs['message']) elif format == 'changelog': for cs in changes.values(): cs['message'] = '\n'.join(['\t' + m for m in cs['message'].split('\n')]) req.hdf['log.changes'] = changes add_link(req, 'alternate', make_log_href(format='rss', stop_rev=stop_rev), 'RSS Feed', 'application/rss+xml', 'rss') add_link(req, 'alternate', make_log_href(format='changelog', stop_rev=stop_rev), 'ChangeLog', 'text/plain') if format == 'rss':
|
def add_page_link(what, page): args = { 'rev': rev, 'log_mode': log_mode, 'limit': limit, 'page': page, 'pages': urllib.quote_plus(' '.join(pages)), } if full_messages: args['full_messages'] = full_messages add_link(req, what, self.env.href.log(path, **args), 'Revision Log (Page %d)' % (page))
|
env.log.debug('Loading plugin %s from %s', name, egg.location)
|
env.log.debug('Loading plugin %s', egg.location)
|
def flatten(dists): for dist in dists: if dist in memo: continue memo.add(dist) try: predecessors = ws.resolve([dist.as_requirement()]) for predecessor in flatten(predecessors): yield predecessor yield dist except pkg_resources.DistributionNotFound, e: env.log.error('Skipping "%s" ("%s" not found)', dist, e) except pkg_resources.VersionConflict, e: env.log.error('Skipping "%s" (version conflict: "%s")', dist, e)
|
sorter = ColumnSorter(colIndex, int(asc)) req.hdf[k] = asc
|
asc = int(asc)
|
def _render_view(self, req, db, id): """ uses a user specified sql query to extract some information from the database and presents it as a html table. """ actions = {'create': 'REPORT_CREATE', 'delete': 'REPORT_DELETE', 'modify': 'REPORT_MODIFY'} for action in [k for k,v in actions.items() if req.perm.has_permission(v)]: req.hdf['report.can_' + action] = True req.hdf['report.href'] = self.env.href.report(id)
|
sorter = ColumnSorter(colIndex) req.hdf[k] = 1 rows.sort(sorter.sort)
|
asc = 1 req.hdf[k] = asc def sortkey(row): val = row[colIndex] if isinstance(val, basestring): val = val.lower() return val rows = sorted(rows, key=sortkey, reverse=(not asc))
|
def _render_view(self, req, db, id): """ uses a user specified sql query to extract some information from the database and presents it as a html table. """ actions = {'create': 'REPORT_CREATE', 'delete': 'REPORT_DELETE', 'modify': 'REPORT_MODIFY'} for action in [k for k,v in actions.items() if req.perm.has_permission(v)]: req.hdf['report.can_' + action] = True req.hdf['report.href'] = self.env.href.report(id)
|
return '<div class="system-message">Macro %s(%s) failed: %s</span' \
|
return '<div class="system-message">Macro %s(%s) failed: %s</div>' \
|
def _macro_formatter(self, match, fullmatch): name = fullmatch.group('macroname') if name in ['br', 'BR']: return '<br />' args = fullmatch.group('macroargs') try: macro = self.load_macro(name) return macro(self.hdf, args, self.env) except Exception, e: return '<div class="system-message">Macro %s(%s) failed: %s</span' \ % (name, args, e)
|
else: author_email = '' for username,name,email in self.env.get_known_users(): if email and username == author: author_email = email
|
elif author in email_map.keys(): author_email = email_map[author]
|
def make_log_href(path, **args): link_rev = rev if rev == str(repos.youngest_rev): link_rev = None params = {'rev': link_rev, 'mode': mode, 'limit': limit} params.update(args) if verbose: params['verbose'] = verbose return self.env.href.log(path, **params)
|
self.failIf(int(mo.group('dm')) not in range(1,31))
|
self.failIf(int(mo.group('dm')) not in range(1,32))
|
def test_date(self): """ Validate date format Date format hould be compliant with RFC822, we do not support 'military' format """ date_str = r"^((?P<day>\w{3}),\s*)*(?P<dm>\d{2})\s+" \ r"(?P<month>\w{3})\s+(?P<year>200\d)\s+" \ r"(?P<hour>\d{2}):(?P<min>[0-5][0-9])" \ r"(:(?P<sec>[0-5][0-9]))*\s" \ r"((?P<tz>\w{2,3})|(?P<offset>[+\-]\d{4}))$" date_re = re.compile(date_str) days = ['Mon','Tue','Wed','Thu','Fri','Sat','Sun'] months = ['Jan','Feb','Mar','Apr','May','Jun', \ 'Jul','Aug','Sep','Oct','Nov','Dec'] tz = ['UT','GMT','EST','EDT','CST','CDT','MST','MDT''PST','PDT'] ticket = Ticket(self.env) ticket['reporter'] = '"Joe User" <[email protected]>' ticket['summary'] = 'This is a summary' ticket.insert() tn = TicketNotifyEmail(self.env) tn.notify(ticket, newticket=True) message = notifysuite.smtpd.get_message() (headers, body) = self.parse_message(message) self.failIf('Date' not in headers) mo = date_re.match(headers['Date']) self.failIf(not mo) if mo.group('day'): self.failIf(mo.group('day') not in days) self.failIf(int(mo.group('dm')) not in range(1,31)) self.failIf(mo.group('month') not in months) self.failIf(int(mo.group('hour')) not in range(0,24)) if mo.group('tz'): self.failIf(mo.group('tz') not in tz)
|
cursor.execute("UPDATE ticket SET %s=%s WHERE id=%s", (fname, self[name], id))
|
cursor.execute("UPDATE ticket SET %s=%%s WHERE id=%%s" % fname, (self[name], id))
|
def save_changes(self, db, author, comment, when = 0): """Store ticket changes in the database. The ticket must already exist in the database.""" assert self.has_key('id') cursor = db.cursor() if not when: when = int(time.time()) id = self['id']
|
self.parser.read(self.filename)
|
self.parser.readfp(open(self.filename))
|
def parse_if_needed(self): if not self.filename: return modtime = os.path.getmtime(self.filename) if modtime > self.__lastmtime: self.parser.read(self.filename) self.__lastmtime = modtime
|
path = re.sub(self.req.path_info + '$', '', self.req.uri)
|
path = re.sub('%s$' % re.escape(self.req.path_info), '', self.req.uri)
|
def init_request(self): core.Request.init_request(self)
|
r"(?P<definition>^\s+((?:%s.*?%s|%s.*?%s|[^%s%s])+?::)(?:\s+|$))" % (INLINE_TOKEN, INLINE_TOKEN, STARTBLOCK_TOKEN, ENDBLOCK_TOKEN, INLINE_TOKEN, STARTBLOCK[0]),
|
r"(?P<definition>^\s+((?:%s[^%s]*%s|%s.*?%s|[^%s%s:]|:[^:])+::)(?:\s+|$))" % (INLINE_TOKEN, INLINE_TOKEN, INLINE_TOKEN, STARTBLOCK_TOKEN, ENDBLOCK_TOKEN, INLINE_TOKEN, STARTBLOCK[0]),
|
def process(self, req, text, in_paragraph=False): if self.error: text = system_message(Markup('Error: Failed to load processor ' '<code>%s</code>', self.name), self.error) else: text = self.processor(req, text) if not text: return '' if in_paragraph: content_for_span = None interrupt_paragraph = False if isinstance(text, Element): tagname = text.tag.lower() if tagname == 'div': class_ = text.attrib.get('class', '') if class_ and 'code' in class_: content_for_span = text.children else: interrupt_paragraph = True elif tagname == 'table': interrupt_paragraph = True else: text = to_unicode(text) match = re.match(self._code_block_re, unicode(text)) if match: if match.group(1) and 'code' in match.group(1): content_for_span = match.group(2) else: interrupt_paragraph = True elif text.startswith('<table'): interrupt_paragraph = True if content_for_span: text = html.SPAN(class_='code-block')(*content_for_span) elif interrupt_paragraph: text = "</p>%s<p>" % to_unicode(text) return text
|
missing = 'missing' in link.attr.get('class', '')
|
missing = 'missing' in link.attr.get('class_', '')
|
def trac_get_reference(rawtext, target, text): link = wiki_to_link(target, self.env, req) uri = None missing = False if isinstance(link, Element): uri = link.attr.get('href', '') missing = 'missing' in link.attr.get('class', '') else: uri = req.href.wiki(target) missing = not WikiSystem(self.env).has_page(target) if uri: reference = nodes.reference(rawtext, text or target) reference['refuri']= uri if missing: reference.set_class('missing') return reference return None
|
self.send_response(200) mtype, enc = mimetypes.guess_type(filename) stat = os.fstat(f.fileno()) content_length = stat[6] last_modified = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stat[8])) self.send_header('Content-Type', mtype) self.send_header('Conten-Length', str(content_length)) self.send_header('Last-Modified', last_modified) self.end_headers() shutil.copyfileobj(f, self.wfile)
|
try: self.send_response(200) mtype, enc = mimetypes.guess_type(filename) stat = os.fstat(f.fileno()) content_length = stat[6] last_modified = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stat[8])) self.send_header('Content-Type', mtype) self.send_header('Content-Length', str(content_length)) self.send_header('Last-Modified', last_modified) self.end_headers() shutil.copyfileobj(f, self.wfile) except socket.error, (code, msg): if code == errno.EPIPE or code == 10053: self.log_message('Lost connection to client: %s', self.address_string()) else: raise
|
def do_htdocs_req(self, path): """This function serves request for static img/css files""" path = urllib.unquote(path) # Make sure the path doesn't contain any dangerous ".."-parts. path = '/'.join(filter(lambda x: x not in ['..', ''], path.split('/'))) filename = os.path.join(siteconfig.__default_htdocs_dir__, os.path.normcase(path)) try: f = open(filename, 'rb') except IOError: self.send_error(404, path) return self.send_response(200) mtype, enc = mimetypes.guess_type(filename) stat = os.fstat(f.fileno()) content_length = stat[6] last_modified = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stat[8])) self.send_header('Content-Type', mtype) self.send_header('Conten-Length', str(content_length)) self.send_header('Last-Modified', last_modified) self.end_headers() shutil.copyfileobj(f, self.wfile)
|
send_pretty_error(e, env, req)
|
try: send_pretty_error(e, env, req) except socket.error, (code, msg): if code == errno.EPIPE or code == 10053: self.log_message('Lost connection to client: %s', self.address_string()) else: raise
|
def do_trac_req(self): m = self.url_re.findall(self.path) if not m: self.send_error(400, 'Bad Request') return project_name, path_info, query_string = m[0] if not self.server.projects.has_key(project_name): self.send_error(404, 'Not Found') return path_info = urllib.unquote(path_info) env = self.server.projects[project_name] self.log = env.log
|
f[i] = space_re.sub(htmlify, line)
|
f[i] = Markup(space_re.sub(htmlify, line))
|
def htmlify(match): div, mod = divmod(len(match.group(0)), 2) return div * ' ' + mod * ' '
|
t[i] = space_re.sub(htmlify, line)
|
t[i] = Markup(space_re.sub(htmlify, line))
|
def htmlify(match): div, mod = divmod(len(match.group(0)), 2) return div * ' ' + mod * ' '
|
blocks[-1]['base.lines'].append(line)
|
blocks[-1]['base.lines'].append(Markup(line))
|
def markup_intraline_changes(opcodes): for tag, i1, i2, j1, j2 in opcodes: if tag == 'replace' and i2 - i1 == j2 - j1: for i in range(i2 - i1): fromline, toline = fromlines[i1 + i], tolines[j1 + i] (start, end) = _get_change_extent(fromline, toline)
|
blocks[-1]['changed.lines'].append(line)
|
blocks[-1]['changed.lines'].append(Markup(line))
|
def markup_intraline_changes(opcodes): for tag, i1, i2, j1, j2 in opcodes: if tag == 'replace' and i2 - i1 == j2 - j1: for i in range(i2 - i1): fromline, toline = fromlines[i1 + i], tolines[j1 + i] (start, end) = _get_change_extent(fromline, toline)
|
blocks[-1]['base.lines'].append(space_re.sub(htmlify, line))
|
blocks[-1]['base.lines'].append(Markup(space_re.sub(htmlify, line)))
|
def markup_intraline_changes(opcodes): for tag, i1, i2, j1, j2 in opcodes: if tag == 'replace' and i2 - i1 == j2 - j1: for i in range(i2 - i1): fromline, toline = fromlines[i1 + i], tolines[j1 + i] (start, end) = _get_change_extent(fromline, toline)
|
blocks[-1]['changed.lines'].append(space_re.sub(htmlify, line))
|
blocks[-1]['changed.lines'].append(Markup(space_re.sub(htmlify, line)))
|
def markup_intraline_changes(opcodes): for tag, i1, i2, j1, j2 in opcodes: if tag == 'replace' and i2 - i1 == j2 - j1: for i in range(i2 - i1): fromline, toline = fromlines[i1 + i], tolines[j1 + i] (start, end) = _get_change_extent(fromline, toline)
|
def my_sum(list): """Python2.2 doesn't have sum()""" tot = 0 for item in list: tot += item return tot
|
try: sum except NameError: def sum(list): """Python2.2 doesn't have sum()""" tot = 0 for item in list: tot += item return tot
|
def my_sum(list): """Python2.2 doesn't have sum()""" tot = 0 for item in list: tot += item return tot
|
def db_open(self):
|
def env_open(self):
|
def db_open(self): try: if not self.__env: self.__env = Environment(self.envname) return self.__env.get_db_cnx() except Exception, e: print 'Failed to open environment.', e sys.exit(1)
|
return self.__env.get_db_cnx()
|
return self.__env except Exception, e: print 'Failed to open environment.', e sys.exit(1) def db_open(self): try: return self.env_open().get_db_cnx()
|
def db_open(self): try: if not self.__env: self.__env = Environment(self.envname) return self.__env.get_db_cnx() except Exception, e: print 'Failed to open environment.', e sys.exit(1)
|
xrange(0, (1 + len(sep)) * cnum + my_sum(colw))])
|
xrange(0, (1 + len(sep)) * cnum + sum(colw))])
|
def print_listing(self, headers, data, sep=' ', decor=True): ldata = list(data) if decor: ldata.insert(0, headers) print colw = [] ncols = len(ldata[0]) # assumes all rows are of equal length for cnum in xrange(0, ncols): mw = 0 for cell in [str(d[cnum]) or '' for d in ldata]: if len(cell) > mw: mw = len(cell) colw.append(mw) for rnum in xrange(len(ldata)): for cnum in xrange(ncols): if decor and rnum == 0: sp = ('%%%ds' % len(sep)) % ' ' # No separator in header else: sp = sep if cnum + 1 == ncols: sp = '' # No separator after last column print ('%%-%ds%s' % (colw[cnum], sp)) \ % (ldata[rnum][cnum] or ''), print if rnum == 0 and decor: print ''.join(['-' for x in xrange(0, (1 + len(sep)) * cnum + my_sum(colw))]) print
|
for c in Component.select(self.__env):
|
for c in Component.select(self.env_open()):
|
def _do_component_list(self): data = [] for c in Component.select(self.__env): data.append((c.name, c.owner)) self.print_listing(['Name', 'Owner'], data)
|
component = Component(self.__env)
|
component = Component(self.env_open())
|
def _do_component_add(self, name, owner): component = Component(self.__env) component.name = name component.owner = owner component.insert()
|
component = Component(self.__env, name)
|
component = Component(self.env_open(), name)
|
def _do_component_rename(self, name, newname): component = Component(self.__env, name) component.name = newname component.update()
|
component = Component(self.__env, name)
|
component = Component(self.env_open(), name)
|
def _do_component_remove(self, name): component = Component(self.__env, name) component.delete()
|
component = Component(self.__env, name)
|
component = Component(self.env_open(), name)
|
def _do_component_set_owner(self, name, owner): component = Component(self.__env, name) component.owner = owner component.update()
|
self.db_open()
|
def do_resync(self, line): self.db_open() # We need to call this function to open the env, really stupid
|
|
cnx = self.__env.get_db_cnx()
|
cnx = self.db_open()
|
def do_resync(self, line): self.db_open() # We need to call this function to open the env, really stupid
|
[(e.name,) for e in enum_cls.select(self.__env)])
|
[(e.name,) for e in enum_cls.select(self.env_open())])
|
def _do_enum_list(self, type): enum_cls = self._enum_map[type] self.print_listing(['Possible Values'], [(e.name,) for e in enum_cls.select(self.__env)])
|
enum = enum_cls(self.__env, name)
|
enum = enum_cls(self.env_open(), name)
|
def _do_enum_change(self, type, name, newname): enum_cls = self._enum_map[type] enum = enum_cls(self.__env, name) enum.name = newname enum.update()
|
enum = enum_cls(self.__env, name)
|
enum = enum_cls(self.env_open(), name)
|
def _do_enum_remove(self, type, name): enum_cls = self._enum_map[type] enum = enum_cls(self.__env, name) enum.delete()
|
for m in Milestone.select(self.__env, include_completed=True):
|
for m in Milestone.select(self.env_open(), include_completed=True):
|
def _do_milestone_list(self): data = [] for m in Milestone.select(self.__env, include_completed=True): data.append((m.name, m.due and self._format_date(m.due), m.completed and self._format_datetime(m.completed)))
|
milestone = Milestone(self.__env, name)
|
milestone = Milestone(self.env_open(), name)
|
def _do_milestone_rename(self, name, newname): milestone = Milestone(self.__env, name) milestone.name = newname milestone.update()
|
milestone = Milestone(self.__env)
|
milestone = Milestone(self.env_open())
|
def _do_milestone_add(self, name): milestone = Milestone(self.__env) milestone.name = name milestone.insert()
|
milestone = Milestone(self.__env, name)
|
milestone = Milestone(self.env_open(), name)
|
def _do_milestone_remove(self, name): milestone = Milestone(self.__env, name) milestone.delete()
|
milestone = Milestone(self.__env, name)
|
milestone = Milestone(self.env_open(), name)
|
def _do_milestone_set_due(self, name, t): milestone = Milestone(self.__env, name) milestone.due = self._parse_date(t) milestone.update()
|
milestone = Milestone(self.__env, name)
|
milestone = Milestone(self.env_open(), name)
|
def _do_milestone_set_completed(self, name, t): milestone = Milestone(self.__env, name) milestone.completed = self._parse_date(t) milestone.update()
|
for v in Version.select(self.__env):
|
for v in Version.select(self.env_open()):
|
def _do_version_list(self): data = [] for v in Version.select(self.__env): data.append((v.name, v.time and self._format_date(v.time))) self.print_listing(['Name', 'Time'], data)
|
version = Version(self.__env, name)
|
version = Version(self.env_open(), name)
|
def _do_version_rename(self, name, newname): version = Version(self.__env, name) version.name = newname version.update()
|
version = Version(self.__env)
|
version = Version(self.env_open())
|
def _do_version_add(self, name): version = Version(self.__env) version.name = name version.insert()
|
version = Version(self.__env, name)
|
version = Version(self.env_open(), name)
|
def _do_version_remove(self, name): version = Version(self.__env, name) version.delete()
|
version = Version(self.__env, name)
|
version = Version(self.env_open(), name)
|
def _do_version_time(self, name, t): version = Version(self.__env, name) version.time = self._parse_date(t) version.update()
|
if item['author'].find('@') != -1: item['author.email'] = item['author']
|
if event['author'].find('@') != -1: event['author.email'] = event['author']
|
def render(self, req): self.perm.assert_permission(perm.TIMELINE_VIEW) self.authzperm = SubversionAuthorizer(self.env, req.authname) # Kludge
|
component_name = cls
|
component_name = cls.lower()
|
def is_component_enabled(self, cls): """Implemented to only allow activation of components that are not disabled in the configuration. This is called by the `ComponentManager` base class when a component is about to be activated. If this method returns false, the component does not get activated.""" if not isinstance(cls, (str, unicode)): component_name = (cls.__module__ + '.' + cls.__name__).lower() else: component_name = cls
|
message = util.escape(info)
|
message = util.Markup(info)
|
def produce((id, t, author, type, summary), status, fields, comment): if status == 'edit': if 'ticket_details' in filters: info = '' if len(fields) > 0: info = ', '.join(['<i>%s</i>' % f for f in \ fields.keys()]) + ' changed<br />' else: return None elif 'ticket' in filters: if status == 'closed' and fields.has_key('resolution'): info = fields['resolution'] if info and comment: info = '%s: ' % info else: info = '' else: return None kind, verb = status_map[status] title = util.Markup('Ticket <em title="%s">#%s</em> (%s) %s by %s', summary, id, type, verb, author) href = rss and self.env.abs_href.ticket(id) \ or self.env.href.ticket(id)
|
Format = WikiProcessor(self.env, language) html = Format.process(hdf, text) raw = nodes.raw('',html, format='html')
|
processor = WikiProcessor(self.env, language) html = processor.process(req.hdf, text) raw = nodes.raw('', html, format='html')
|
def code_formatter(language, text): Format = WikiProcessor(self.env, language) html = Format.process(hdf, text) raw = nodes.raw('',html, format='html') return raw
|
return '<a title="%s" href="%s">%s</a>' % (title,link, text)
|
return '<a title="%s" href="%s">%s</a>' % (title or '',link, text)
|
def _modulehref_formatter(self, match, fullmatch): link, text, missing, title = self._expand_module_link(match) if link and missing: return '<a title="%s" class="missing" href="%s">%s?</a>' % (title,link, text) elif link: return '<a title="%s" href="%s">%s</a>' % (title,link, text) else: return match
|
req.redirect(req.get_header('Referer') or env.href.wiki())
|
req.redirect(referer or env.href.wiki())
|
def dispatch_request(path_info, args, req, env, database=None): import Wiki if not database: database = env.get_db_cnx() # Let the wiki module build a dictionary of all page names database = env.get_db_cnx() Wiki.populate_page_dict(database, env) authenticator = auth.Authenticator(database, req) if path_info == '/logout': authenticator.logout() try: req.redirect(req.get_header('Referer') or env.href.wiki()) except RedirectException: pass elif req.remote_user and authenticator.authname == 'anonymous': auth_cookie = authenticator.login(req) if path_info == '/login': try: req.redirect(req.get_header('Referer') or env.href.wiki()) except RedirectException: pass req.authname = authenticator.authname newsession = args.has_key('newsession') and args['newsession'] req.session = Session.Session(env, req, newsession) add_args_to_hdf(args, req.hdf) try: pool = None # Load the selected module module = module_factory(args, env, database, req) pool = module.pool module.run() finally: # We do this even if the cgi will terminate directly after. A pool # destruction might trigger important clean-up functions. if pool: import svn.core svn.core.svn_pool_destroy(pool)
|
self._send_csv(req, cols, results)
|
self._send_csv(req, cols, results, mimetype='text/csv')
|
def sortkey(row): val = row[idx] if isinstance(val, basestring): val = val.lower() return val
|
self._send_csv(req, cols, results, '\t')
|
self._send_csv(req, cols, results, '\t', mimetype='text/tab-separated-values')
|
def sortkey(row): val = row[idx] if isinstance(val, basestring): val = val.lower() return val
|
def _send_csv(self, req, cols, rows, sep=','):
|
def _send_csv(self, req, cols, rows, sep=',', mimetype='text/plain'):
|
def _send_csv(self, req, cols, rows, sep=','): req.send_response(200) req.send_header('Content-Type', 'text/plain;charset=utf-8') req.end_headers()
|
req.send_header('Content-Type', 'text/plain;charset=utf-8')
|
req.send_header('Content-Type', mimetype + ';charset=utf-8')
|
def _send_csv(self, req, cols, rows, sep=','): req.send_response(200) req.send_header('Content-Type', 'text/plain;charset=utf-8') req.end_headers()
|
msg['Date'] = time.strftime('%a, %d %b %Y %H:%M:%S +0000', time.gmtime());
|
msg['Date'] = formatdate()
|
def send(self, rcpt, mime_headers={}): from email.MIMEText import MIMEText from email.Header import Header body = self.hdf.render(self.template_name) msg = MIMEText(body, 'plain', 'utf-8') msg['X-Mailer'] = 'Trac %s, by Edgewall Software' % __version__ msg['X-Trac-Version'] = __version__ projname = self.config.get('project','name') msg['X-Trac-Project'] = projname msg['X-URL'] = self.config.get('project','url') msg['Subject'] = Header(self.subject, 'utf-8') msg['From'] = '%s <%s>' % (projname, self.from_email) msg['Sender'] = self.from_email msg['Reply-To'] = self.replyto_email msg['To'] = rcpt msg['Date'] = time.strftime('%a, %d %b %Y %H:%M:%S +0000', time.gmtime()); for hdr in mime_headers.keys(): msg[hdr] = mime_headers[hdr] self.env.log.debug("Sending SMTP notification to %s on port %d" % (self.smtp_server, self.smtp_port)) self.server.sendmail(self.from_email, rcpt, msg.as_string())
|
(milestone.encode('utf-8')))
|
(milestone.encode('utf-8'),))
|
def setMilestoneList(self, m, key): """Remove all milestones, set them to `m`""" self.assertNoTickets() c = self.db().cursor() c.execute("DELETE FROM milestone") for ms in m: milestone = ms[key] print " inserting milestone '%s'" % (milestone) c.execute("INSERT INTO milestone (name) VALUES (%s)", (milestone.encode('utf-8'))) self.db().commit()
|
self.assertTrue(page, listener.deleted_version[0])
|
self.assertEqual(page, listener.deleted_version[0])
|
def test_delete_page_version(self): cursor = self.db.cursor() cursor.executemany("INSERT INTO wiki VALUES(%s,%s,%s,%s,%s,%s,%s,%s)", [('TestPage', 1, 42, 'joe', '::1', 'Bla bla', 'Testing', 0), ('TestPage', 2, 43, 'kate', '192.168.0.101', 'Bla', 'Changing', 0)])
|
title = 'Changeset <em>[%s]</em> by %s' % ( util.escape(chgset.rev), util.escape(chgset.author))
|
excerpt = util.shorten_line(chgset.message or '--')
|
def get_timeline_events(self, req, start, stop, filters): if 'changeset' in filters: format = req.args.get('format') show_files = int(self.config.get('timeline', 'changeset_show_files')) db = self.env.get_db_cnx() repos = self.env.get_repository() rev = repos.youngest_rev while rev: chgset = repos.get_changeset(rev) if chgset.date < start: return if chgset.date < stop: title = 'Changeset <em>[%s]</em> by %s' % ( util.escape(chgset.rev), util.escape(chgset.author)) if format == 'rss': href = self.env.abs_href.changeset(chgset.rev) message = wiki_to_html(chgset.message or '--', self.env, db, absurls=True) else: href = self.env.href.changeset(chgset.rev) excerpt = util.shorten_line(chgset.message or '--') message = wiki_to_oneliner(excerpt, self.env, db) if show_files: files = [] for chg in chgset.get_changes(): if show_files > 0 and len(files) >= show_files: files.append('...') break files.append('<span class="%s">%s</span>' % (chg[2], util.escape(chg[0]))) message = '<span class="changes">' + ', '.join(files) +\ '</span>: ' + message yield 'changeset', href, title, chgset.date, chgset.author,\ message rev = repos.previous_rev(rev)
|
excerpt = util.shorten_line(chgset.message or '--')
|
def get_timeline_events(self, req, start, stop, filters): if 'changeset' in filters: format = req.args.get('format') show_files = int(self.config.get('timeline', 'changeset_show_files')) db = self.env.get_db_cnx() repos = self.env.get_repository() rev = repos.youngest_rev while rev: chgset = repos.get_changeset(rev) if chgset.date < start: return if chgset.date < stop: title = 'Changeset <em>[%s]</em> by %s' % ( util.escape(chgset.rev), util.escape(chgset.author)) if format == 'rss': href = self.env.abs_href.changeset(chgset.rev) message = wiki_to_html(chgset.message or '--', self.env, db, absurls=True) else: href = self.env.href.changeset(chgset.rev) excerpt = util.shorten_line(chgset.message or '--') message = wiki_to_oneliner(excerpt, self.env, db) if show_files: files = [] for chg in chgset.get_changes(): if show_files > 0 and len(files) >= show_files: files.append('...') break files.append('<span class="%s">%s</span>' % (chg[2], util.escape(chg[0]))) message = '<span class="changes">' + ', '.join(files) +\ '</span>: ' + message yield 'changeset', href, title, chgset.date, chgset.author,\ message rev = repos.previous_rev(rev)
|
|
yield line
|
yield escape(line)
|
def render(self, req, mimetype, content, filename=None, rev=None): if is_binary(content): self.env.log.debug("Binary data; no preview available") return
|
odata = np.out.splitlines()[1]
|
odata = ''.join(np.out.splitlines()[1:-1])
|
def render(self, req, mimetype, content, filename=None, rev=None): cmdline = self.config.get('mimeviewer', 'php_path') # -n to ignore php.ini so we're using default colors cmdline += ' -sn' self.env.log.debug("PHP command line: %s" % cmdline)
|
self._dormant.append(cnx)
|
if cnx.poolable: self._dormant.append(cnx) else: self._cursize -= 1
|
def _return_cnx(self, cnx): self._available.acquire() try: tid = threading._get_ident() if tid in self._active: num, cnx_ = self._active.get(tid) assert cnx is cnx_ if num > 1: self._active[tid][0] = num - 1 else: del self._active[tid] if cnx not in self._dormant: cnx.rollback() self._dormant.append(cnx) self._available.notify() finally: self._available.release()
|
sqlite.Cursor.execute(self, sql, args or [])
|
return self._rollback_on_error(sqlite.Cursor.execute, sql, args or [])
|
def execute(self, sql, args=None): if args: sql = sql % (('?',) * len(args)) sqlite.Cursor.execute(self, sql, args or [])
|
sqlite.Cursor.executemany(self, sql, args or [])
|
return self._rollback_on_error(sqlite.Cursor.executemany, sql, args or [])
|
def executemany(self, sql, args=None): if args: sql = sql % (('?',) * len(args[0])) sqlite.Cursor.executemany(self, sql, args or [])
|
__slots__ = ['cnx']
|
__slots__ = ['cnx', '_active_cursors'] poolable = False
|
def fetchall(self): rows = sqlite.Cursor.fetchall(self) return rows != None and [self._convert_row(row) for row in rows] or None
|
timeout = int(params.get('timeout', 10000))
|
def __init__(self, path, params={}): assert have_pysqlite > 0 self.cnx = None if path != ':memory:': if not os.access(path, os.F_OK): raise TracError, 'Database "%s" not found.' % path
|
|
check_same_thread=False, timeout=timeout)
|
timeout=timeout)
|
def __init__(self, path, params={}): assert have_pysqlite > 0 self.cnx = None if path != ':memory:': if not os.access(path, os.F_OK): raise TracError, 'Database "%s" not found.' % path
|
return self.cnx.cursor(PyFormatCursor)
|
cursor = self.cnx.cursor(PyFormatCursor) self._active_cursors[cursor] = True cursor.cnx = self return cursor def rollback(self): for cursor in self._active_cursors.keys(): cursor.close() self.cnx.rollback()
|
def cursor(self): return self.cnx.cursor(PyFormatCursor)
|
FROM ticket t, enum p
|
FROM ticket t LEFT JOIN enum p ON p.name = t.priority AND p.type = 'priority'
|
def get_reports(db): owner = db.concat('owner', "' *'") return (
|
AND p.name = t.priority AND p.type = 'priority'
|
def get_reports(db): owner = db.concat('owner', "' *'") return (
|
|
FROM ticket t,enum p WHERE p.name=t.priority AND p.type='priority'
|
FROM ticket t LEFT JOIN enum p ON p.name = t.priority AND p.type = 'priority'
|
def get_reports(db): owner = db.concat('owner', "' *'") return (
|
FROM ticket t, enum p WHERE t.status IN ('new', 'assigned', 'reopened') AND p.name = t.priority AND p.type = 'priority' AND owner = '$USER'
|
FROM ticket t LEFT JOIN enum p ON p.name = t.priority AND p.type = 'priority' WHERE t.status IN ('new', 'assigned', 'reopened') AND owner = '$USER'
|
def get_reports(db): owner = db.concat('owner', "' *'") return (
|
for line in html.split('<br />'): yield line.replace(' ', ' ')
|
return [line.replace(' ', ' ') for line in html.split('<br />')]
|
def render(self, context, mimetype, content, filename=None, rev=None): # -n to ignore php.ini so we're using default colors cmdline = '%s -sn' % self.path self.env.log.debug("PHP command line: %s" % cmdline)
|
for i in range(0, min(len(str), 1000)): if str[i] == '\0': return True return False
|
return '\0' in str[:1000]
|
def is_binary(str): """Detect binary content by checking the first thousand bytes for zeroes.""" if detect_unicode(str): return False for i in range(0, min(len(str), 1000)): if str[i] == '\0': return True return False
|
if mimetype.startswith('text/'): return 1 return 0
|
if mimetype in TREAT_AS_BINARY: return 0 return 1
|
def get_quality_ratio(self, mimetype): if mimetype.startswith('text/'): return 1 return 0
|
if StrictVersion(__version__) < StrictVersion('0.3.3'):
|
if StrictVersion(__version__) < StrictVersion('0.3.9'):
|
def render(self, req, mimetype, content, filename=None, rev=None): try: from docutils import nodes from docutils.core import publish_string from docutils.parsers import rst from docutils import __version__ except ImportError: raise TracError, 'Docutils not found' if StrictVersion(__version__) < StrictVersion('0.3.3'): raise TracError, 'Docutils version >= %s required, %s found' \ % ('0.3.3', __version__)
|
% ('0.3.3', __version__)
|
% ('0.3.9', __version__)
|
def render(self, req, mimetype, content, filename=None, rev=None): try: from docutils import nodes from docutils.core import publish_string from docutils.parsers import rst from docutils import __version__ except ImportError: raise TracError, 'Docutils not found' if StrictVersion(__version__) < StrictVersion('0.3.3'): raise TracError, 'Docutils version >= %s required, %s found' \ % ('0.3.3', __version__)
|
settings_overrides={'halt_level': 6})
|
settings_overrides={'halt_level': 6, 'file_insertion_enabled': 0, 'raw_enabled': 0})
|
def code_block(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine): """ Create a code-block directive for docutils.
|
application_pool = None
|
def history_cb(path, rev, pool): if authz.has_permission(path): history.append((path, rev))
|
|
return [None, path, dir_pool]
|
def add_directory(self, path, parent_baton, copyfrom_path, copyfrom_revision, dir_pool): self.cursor.execute('INSERT INTO node_change (rev, name, change) ' 'VALUES (%s, %s, \'A\')', self.rev, path) return [None, path, dir_pool]
|
|
changelog = ticket.get_changelog(db=db) curr_author = None curr_date = 0
|
def export_rss(self, req, ticket): db = self.env.get_db_cnx() changelog = ticket.get_changelog(db=db) curr_author = None curr_date = 0 changes = [] change_summary = {}
|
|
def update_title(): if not changes: return title = '; '.join(['%s %s' % (', '.join(v), k) for k, v in change_summary.iteritems()]) changes[-1]['title'] = title for date, author, field, old, new in changelog: if date != curr_date or author != curr_author: update_title() change_summary = {} changes.append({ 'date': http_date(date), 'author': author, 'fields': {} }) curr_date = date curr_author = author if field == 'comment':
|
for change in self.grouped_changelog_entries(ticket, db): changes.append(change) change_summary = {} if 'comment' in change: comment = change['comment'] change['comment'] = unicode(wiki_to_html( comment, self.env, req, db, absurls=True))
|
def update_title(): if not changes: return title = '; '.join(['%s %s' % (', '.join(v), k) for k, v in change_summary.iteritems()]) changes[-1]['title'] = title
|
changes[-1]['comment'] = unicode(wiki_to_html(new, self.env, req, db, absurls=True)) elif field == 'description': change_summary.setdefault('changed', []).append(field) changes[-1]['fields'][field] = '' else: change = 'changed' if not old: change = 'set' elif not new: change = 'deleted' change_summary.setdefault(change, []).append(field) changes[-1]['fields'][field] = {'old': old, 'new': new} update_title()
|
for field, values in change['fields'].iteritems(): if field == 'description': change_summary.setdefault('changed', []).append(field) else: chg = 'changed' if not values['old']: chg = 'set' elif not values['new']: chg = 'deleted' change_summary.setdefault(chg, []).append(field) change['title'] = '; '.join(['%s %s' % (', '.join(v), k) for k, v \ in change_summary.iteritems()])
|
def update_title(): if not changes: return title = '; '.join(['%s %s' % (', '.join(v), k) for k, v in change_summary.iteritems()]) changes[-1]['title'] = title
|
changelog = ticket.get_changelog(db=db) autonum = 0
|
def quote_original(author, original, link): if not 'comment' in req.args: # i.e. the comment was not yet edited req.hdf['ticket.comment'] = '\n'.join( ['Replying to [%s %s]:' % (link, author)] + ['> %s' % line for line in original.splitlines()] + [''])
|
|
last_uid = current = None for date, author, field, old, new, permanent in changelog: uid = date, author, permanent if uid != last_uid: last_uid = uid current = { 'date': format_datetime(date), 'author': author, 'fields': {} } changes.append(current) if permanent: autonum += 1 current['cnum'] = autonum if field == 'comment': current['comment'] = wiki_to_html(new, self.env, req, db) if permanent: this_num = str(autonum) if old: if '.' in old: parent_num, this_num = old.split('.', 1) current['replyto'] = parent_num replies.setdefault(parent_num, []).append(this_num) else: this_num = old assert this_num == str(autonum) if replyto == this_num: quote_original(author, new, 'comment:%s' % replyto) elif field == 'description': current['fields'][field] = '' else: current['fields'][field] = {'old': old, 'new': new}
|
cnum = 0 for change in self.grouped_changelog_entries(ticket, db): changes.append(change) comment = '' if 'comment' in change: comment = change['comment'] change['comment'] = wiki_to_html(comment, self.env, req, db) if change['permanent']: cnum = change['cnum'] if 'replyto' in change: replies.setdefault(change['replyto'], []).append(cnum) if replyto == str(cnum): quote_original(change['author'], comment, 'comment:%s' % replyto)
|
def quote_original(author, original, link): if not 'comment' in req.args: # i.e. the comment was not yet edited req.hdf['ticket.comment'] = '\n'.join( ['Replying to [%s %s]:' % (link, author)] + ['> %s' % line for line in original.splitlines()] + [''])
|
'cnum': autonum + 1
|
'cnum': cnum + 1
|
def quote_original(author, original, link): if not 'comment' in req.args: # i.e. the comment was not yet edited req.hdf['ticket.comment'] = '\n'.join( ['Replying to [%s %s]:' % (link, author)] + ['> %s' % line for line in original.splitlines()] + [''])
|
node = get_existing_node(self.env, repos, path, rev) rev = repos.normalize_rev(rev) hidden_properties = [p.strip() for p in self.config.get('browser', 'hide_properties').split(',')]
|
if rev: rev = repos.normalize_rev(rev) rev_or_latest = rev or repos.youngest_rev node = get_existing_node(self.env, repos, path, rev_or_latest) hidden_properties = self.config.getlist('browser', 'hide_properties')
|
def process_request(self, req): path = req.args.get('path', '/') rev = req.args.get('rev')
|