Columns:
  rem      string, lengths 0 to 322k
  add      string, lengths 0 to 2.05M
  context  string, lengths 8 to 228k
print U64(oid), r.version, len(r.data)
print u64(oid), r.version, len(r.data)
def recover(inp, outp, verbose=0, partial=0, force=0, pack=0):
    print 'Recovering', inp, 'into', outp
    if os.path.exists(outp) and not force:
        die("%s exists" % outp)
    file = open(inp, "rb")
    if file.read(4) != ZODB.FileStorage.packed_version:
        die("input is not a file storage")
    file.seek(0, 2)
    file_size = file.tell()
    ofs = ZODB.FileStorage.FileStorage(outp, create=1)
    _ts = None
    ok = 1
    prog1 = 0
    preindex = {}
    undone = 0
    pos = 4
    while pos:
        try:
            npos, transaction = read_transaction_header(file, pos, file_size)
        except EOFError:
            break
        except:
            print "\n%s: %s\n" % sys.exc_info()[:2]
            if not verbose:
                progress(prog1)
            pos = scan(file, pos)
            continue
        if transaction is None:
            undone = undone + npos - pos
            pos = npos
            continue
        else:
            pos = npos
        tid = transaction.tid
        if _ts is None:
            _ts = TimeStamp(tid)
        else:
            t = TimeStamp(tid)
            if t <= _ts:
                if ok:
                    print ('Time stamps out of order %s, %s' % (_ts, t))
                ok = 0
                _ts = t.laterThan(_ts)
                tid = `_ts`
            else:
                _ts = t
                if not ok:
                    print ('Time stamps back in order %s' % (t))
                    ok = 1
        if verbose:
            print 'begin',
            if verbose > 1:
                print
            sys.stdout.flush()
        ofs.tpc_begin(transaction, tid, transaction.status)
        if verbose:
            print 'begin', pos, _ts,
            if verbose > 1:
                print
            sys.stdout.flush()
        nrec = 0
        try:
            for r in transaction:
                oid = r.oid
                if verbose > 1:
                    print U64(oid), r.version, len(r.data)
                pre = preindex.get(oid)
                s = ofs.store(oid, pre, r.data, r.version, transaction)
                preindex[oid] = s
                nrec += 1
        except:
            if partial and nrec:
                ofs._status = 'p'
                ofs.tpc_vote(transaction)
                ofs.tpc_finish(transaction)
                if verbose:
                    print 'partial'
            else:
                ofs.tpc_abort(transaction)
            print "\n%s: %s\n" % sys.exc_info()[:2]
            if not verbose:
                progress(prog1)
            pos = scan(file, pos)
        else:
            ofs.tpc_vote(transaction)
            ofs.tpc_finish(transaction)
            if verbose:
                print 'finish'
                sys.stdout.flush()
        if not verbose:
            prog = pos * 20l / file_size
            while prog > prog1:
                prog1 = prog1 + 1
                iprogress(prog1)
    bad = file_size - undone - ofs._pos
    print "\n%s bytes removed during recovery" % bad
    if undone:
        print "%s bytes of undone transaction data were skipped" % undone
    if pack is not None:
        print "Packing ..."
        from ZODB.referencesf import referencesf
        ofs.pack(pack, referencesf)
    ofs.close()
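The timestamp-repair loop above is the heart of the recovery pass: whenever a transaction id sorts at or below its predecessor, the recovered tid is bumped with laterThan() so the output storage stays strictly ordered. A minimal sketch of the same idea, with plain integers standing in for ZODB TimeStamp objects:

def force_monotonic(tids):
    # stand-in for the _ts / laterThan() bookkeeping in recover()
    out, last = [], None
    for t in tids:
        if last is not None and t <= last:
            t = last + 1   # TimeStamp.laterThan() plays this role above
        out.append(t)
        last = t
    return out

print force_monotonic([1, 5, 3, 3, 9])   # -> [1, 5, 6, 7, 9]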
revid = self._dostore(oid, revid, description='packable%d' % i)
revid = self._x_dostore(oid, revid, description='packable%d' % i)
def load_value(oid, version=''):
    data, revid = self._storage.load(oid, version)
    return zodb_unpickle(data).value
revid1 = self._dostore(oid1, data=MinPO(0), description='create1')
revid2 = self._dostore(oid1, data=MinPO(1), revid=revid1,
revid1 = self._x_dostore(oid1, data=MinPO(0), description='create1')
revid2 = self._x_dostore(oid1, data=MinPO(1), revid=revid1,
revid3 = self._dostore(oid1, data=MinPO(2), revid=revid2,
revid3 = self._x_dostore(oid1, data=MinPO(2), revid=revid2,
self._dostore(description='create2')
self._x_dostore(description='create2')
if h < 0:
    h = t32 + h
v = h * t32 + v
if h < 0:
    h = t32 + h
v = (long(h) << 32) + v
def u64(v, unpack=struct.unpack):
    """Unpack an 8-byte string into a 64-bit (or long) integer"""
    h, v = unpack(">ii", v)
    if v < 0:
        v = t32 + v
    if h:
        if h < 0:
            h = t32 + h
        v = h * t32 + v
    return v
v=h*t32+v
v = (long(h) << 32) + v
def U64(v, unpack=struct.unpack):
    """Same as u64 but always returns a long."""
    h, v = unpack(">II", v)
    if h:
        v = h * t32 + v
    return v
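The u64/U64 pair reassembles a 64-bit value from two big-endian 32-bit halves; wherever the ">Q" format code is available (standard modes since Python 2.2), the shift-and-add must agree with a direct unsigned 64-bit unpack. A quick self-check of that equivalence, assuming Python 2.2+:

import struct

packed = struct.pack(">Q", (1 << 63) + 12345)
h, low = struct.unpack(">II", packed)
assert (long(h) << 32) + low == struct.unpack(">Q", packed)[0]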
self.sql_log_model = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING)
self.sql_log_model = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING)
def __init__(self):
    #~ self.read_query(
log = self.sql_log_model.get(iter, 0, 1)
log = self.sql_log_model.get(iter, 0, 1, 2)
def __getstate__(self):
    hosts = []
    iter = self.connections_model.get_iter_root()
    while iter:
        host = self.connections_model.get_value(iter, 0)
        hosts.append(host)
        iter = self.connections_model.iter_next(iter)
    sql_logs = []
    iter = self.sql_log_model.get_iter_root()
    while iter:
        log = self.sql_log_model.get(iter, 0, 1)
        sql_logs.append(log)
        iter = self.sql_log_model.iter_next(iter)
    return {"hosts": hosts, "queries": self.queries, "sql_logs": sql_logs}
self.clipboard.set_text(row[1])
self.pri_clipboard.set_text(row[1])
self.clipboard.set_text(row[2])
self.pri_clipboard.set_text(row[2])
def on_sql_log_activate(self, *args):
    if len(args) == 1:
        menuitem = args[0]
        if menuitem.name == "clear_all_entries":
            self.sql_log_model.clear()
        path, column = self.sql_log_tv.get_cursor()
        row = self.sql_log_model[path]
        if menuitem.name == "copy_sql_log":
            self.clipboard.set_text(row[1])
            self.pri_clipboard.set_text(row[1])
        elif menuitem.name == "set_as_query_text":
            self.current_query.textview.get_buffer().set_text(row[1])
        if menuitem.name == "delete_sql_log":
            iter = self.sql_log_model.get_iter(path)
            self.sql_log_model.remove(iter)
        return True
    tv, path, tvc = args
    query = tv.get_model()[path][1]
    self.current_query.textview.get_buffer().set_text(query)
    return True
self.current_query.textview.get_buffer().set_text(row[1])
self.current_query.textview.get_buffer().set_text(row[2])
query = tv.get_model()[path][1]
query = tv.get_model()[path][2]
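The rows above all chase one schema change: the log model grows from (timestamp, log) to (timestamp, markup-escaped log, original SQL), so every handler that read the raw query from column 1 must now read column 2. A minimal PyGTK sketch of the new layout, assuming pygtk is importable (gtk.TreeModel.get(iter, *columns) returns the requested columns as a tuple):

import gtk, gobject

# columns: timestamp, markup-escaped text for display, original SQL
model = gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING)
it = model.append(("2007-01-01 12:00", "select 1 where a &lt; b", "select 1 where a < b"))
timestamp, markup, raw = model.get(it, 0, 1, 2)
print raw   # copy/activate handlers now want column 2, the unescaped query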
data = []
data = { "connect_timeout": 6 }
widget_map = { "password": "passwd" }
def on_cw_test(self, *args):
    import _mysql
    data = []
    for n in ["host", "user", "password"]:
        data.append(self.xml.get_widget("cw_%s" % n).get_text())
    try:
        handle = _mysql.connect(*data)
    except:
        self.show_message("test connection",
                          "could not connect to host <b>%s</b> with user <b>%s</b> and password <b>%s</b>:\n<i>%s</i>"
                          % (data[0], data[1], data[2], sys.exc_value[1]))
        return
    self.show_message("test connection",
                      "successfully connected to host <b>%s</b> with user <b>%s</b>!"
                      % (data[0], data[1]))
    handle.close()
data.append(self.xml.get_widget("cw_%s" % n).get_text())
data[widget_map.get(n, n)] = self.xml.get_widget("cw_%s" % n).get_text()
handle = _mysql.connect(*data)
handle = _mysql.connect(**data)
self.show_message("test connection", "could not connect to host <b>%s</b> with user <b>%s</b> and password <b>%s</b>:\n<i>%s</i>" % (data[0], data[1], data[2], sys.exc_value[1])) return self.show_message("test connection", "successfully connected to host <b>%s</b> with user <b>%s</b>!" % (data[0], data[1]))
self.show_message("test connection", "could not connect to host <b>%s</b> with user <b>%s</b> and password <b>%s</b>:\n<i>%s</i>" % ( data["host"], data["user"], data["passwd"], sys.exc_value )) return self.show_message("test connection", "successfully connected to host <b>%s</b> with user <b>%s</b>!" % ( data["host"], data["user"] ))
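This group of rows switches from positional connect(*data), which silently depends on _mysql.connect's parameter order, to keyword arguments built from a dict, with widget_map translating the GUI field name "password" to the passwd keyword _mysql expects. A runnable sketch of just the collection logic, with a plain dict standing in for the glade widgets:

widget_map = {"password": "passwd"}   # GUI name -> _mysql.connect() keyword

def collect_connect_args(read_widget):
    # read_widget: any callable mapping a field name to its text
    data = {"connect_timeout": 6}
    for n in ["host", "user", "password"]:
        data[widget_map.get(n, n)] = read_widget(n)
    return data

fake_widgets = {"host": "db1", "user": "bob", "password": "s3cret"}
print collect_connect_args(fake_widgets.get)
# {'passwd': 's3cret', 'connect_timeout': 6, 'host': 'db1', 'user': 'bob'}
# the real handler then calls _mysql.connect(**data)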
conditions = "`%s` %s '%s'" % (
    self.fc_combobox[0].get_active_text(),
    self.fc_op_combobox[0].get_active_text(),
    self.current_host.escape(self.fc_entry[0].get_text())
def field_operator_value(field, op, value):
    if op == "ISNULL":
        return "isnull(`%s`)" % field
    if op == "NOT ISNULL":
        return "not isnull(`%s`)" % field
    eval_kw = "eval: "
    if value.startswith(eval_kw):
        return "`%s` %s %s" % (field, op, value[len(eval_kw):])
    return "`%s` %s '%s'" % (field, op, self.current_host.escape(value))

conditions = "%s" % (
    field_operator_value(
        self.fc_combobox[0].get_active_text(),
        self.fc_op_combobox[0].get_active_text(),
        self.fc_entry[0].get_text()
    )
def on_template(self, button, t):
    current_table = self.get_selected_table()
    current_fc_table = current_table
    if t.find("$table$") != -1:
        if not current_table:
            show_message("info", "no table selected!\nyou can't execute a template with $table$ in it, if you have no table selected!")
            return
        t = t.replace("$table$", self.escape_fieldname(current_table.name))
    pos = t.find("$primary_key$")
    if pos != -1:
        if not current_table:
            show_message("info", "no table selected!\nyou can't execute a template with $primary_key$ in it, if you have no table selected!")
            return
        if not current_table.fields:
            show_message("info", "sorry, can't execute this template, because table '%s' has no fields!" % current_table.name)
            return
        # is the next token desc or asc?
        result = re.search("(?i)[ \t\r\n]*(de|a)sc", t[pos:])
        order_dir = ""
        if result:
            o = result.group(1).lower()
            if o == "a":
                order_dir = "asc"
            else:
                order_dir = "desc"
        replace = ""
        while 1:
            primary_key = ""
            for name in current_table.field_order:
                props = current_table.fields[name]
                if props[3] != "PRI":
                    continue
                if primary_key:
                    primary_key += " " + order_dir + ", "
                primary_key += "`%s`" % self.escape_fieldname(name)
            if primary_key:
                replace = primary_key
                break
            key = ""
            for name in current_table.field_order:
                props = current_table.fields[name]
                if props[3] != "UNI":
                    continue
                if key:
                    key += " " + order_dir + ", "
                key += "`%s`" % self.escape_fieldname(name)
            if key:
                replace = key
                break
            replace = "`%s`" % self.escape_fieldname(current_table.field_order[0])
            break
        t = t.replace("$primary_key$", replace)
    if t.find("$field_conditions$") != -1:
        if not self.field_conditions_initialized:
            self.field_conditions_initialized = True
            self.fc_count = 4
            self.fc_window = self.xml.get_widget("field_conditions")
            table = self.xml.get_widget("fc_table")
            table.resize(1 + self.fc_count, 4)
            self.fc_entry = []
            self.fc_combobox = []
            self.fc_op_combobox = []
            self.fc_logic_combobox = []
            for i in range(self.fc_count):
                self.fc_entry.append(gtk.Entry())
                self.fc_entry[i].connect("activate", lambda *e: self.fc_window.response(gtk.RESPONSE_OK))
                self.fc_combobox.append(gtk.combo_box_new_text())
                self.fc_op_combobox.append(gtk.combo_box_new_text())
                self.fc_op_combobox[i].append_text("=")
                self.fc_op_combobox[i].append_text("<")
                self.fc_op_combobox[i].append_text(">")
                self.fc_op_combobox[i].append_text("!=")
                self.fc_op_combobox[i].append_text("LIKE")
                if i:
                    self.fc_logic_combobox.append(gtk.combo_box_new_text())
                    self.fc_logic_combobox[i - 1].append_text("disabled")
                    self.fc_logic_combobox[i - 1].append_text("AND")
                    self.fc_logic_combobox[i - 1].append_text("OR")
                    table.attach(self.fc_logic_combobox[i - 1], 0, 1, i + 1, i + 2)
                    self.fc_logic_combobox[i - 1].show()
                table.attach(self.fc_combobox[i], 1, 2, i + 1, i + 2)
                table.attach(self.fc_op_combobox[i], 2, 3, i + 1, i + 2)
                table.attach(self.fc_entry[i], 3, 4, i + 1, i + 2)
                self.fc_combobox[i].show()
                self.fc_op_combobox[i].show()
                self.fc_entry[i].show()
        if not current_table:
            show_message("info", "no table selected!\nyou can't execute a template with $field_conditions$ in it, if you have no table selected!")
            return
        last_field = []
        for i in range(self.fc_count):
            last_field.append(self.fc_combobox[i].get_active_text())
            self.fc_combobox[i].get_model().clear()
            if i:
                self.fc_logic_combobox[i - 1].set_active(0)
        fc = 0
        for field_name in current_table.field_order:
            for k in range(self.fc_count):
                self.fc_combobox[k].append_text(field_name)
                if last_field[k] == field_name:
                    self.fc_combobox[k].set_active(fc)
            fc += 1
        if not self.fc_op_combobox[0].get_active_text():
            self.fc_op_combobox[0].set_active(0)
        if not self.fc_combobox[0].get_active_text():
            self.fc_combobox[0].set_active(0)
        answer = self.fc_window.run()
        self.fc_window.hide()
        if answer != gtk.RESPONSE_OK:
            return
        conditions = "`%s` %s '%s'" % (
            self.fc_combobox[0].get_active_text(),
            self.fc_op_combobox[0].get_active_text(),
            self.current_host.escape(self.fc_entry[0].get_text())
        )
        for i in range(1, self.fc_count):
            if self.fc_logic_combobox[i - 1].get_active_text() == "disabled" or self.fc_combobox[i].get_active_text() == "" or self.fc_op_combobox[i].get_active_text() == "":
                continue
            conditions += " %s `%s` %s '%s'" % (
                self.fc_logic_combobox[i - 1].get_active_text(),
                self.fc_combobox[i].get_active_text(),
                self.fc_op_combobox[i].get_active_text(),
                self.current_host.escape(self.fc_entry[i].get_text())
            )
        t = t.replace("$field_conditions$", conditions)
    try:
        new_order = self.stored_orders[self.current_host.current_db.name][current_table.name]
        print "found stored order", new_order
        query = t
        try:
            r = self.query_order_re
        except:
            r = self.query_order_re = re.compile(re_src_query_order)
        match = re.search(r, query)
        if match:
            before, order, after = match.groups()
            order = ""
            addition = ""
        else:
            match = re.search(re_src_after_order, query)
            if not match:
                before = query
                after = ""
            else:
                before = query[0:match.start()]
                after = match.group()
            addition = "\norder by\n\t"
        order = ""
        for col, o in new_order:
            if order:
                order += ",\n\t"
            order += col
            if not o:
                order += " desc"
        if order:
            new_query = ''.join([before, addition, order, after])
        else:
            new_query = re.sub("(?i)order[ \r\n\t]+by[ \r\n\t]+", "", before + after)
        t = new_query
    except:
        pass
    self.on_execute_query_clicked(None, t)
conditions += " %s `%s` %s '%s'" % (
conditions += " %s %s" % (
self.fc_combobox[i].get_active_text(),
self.fc_op_combobox[i].get_active_text(),
self.current_host.escape(self.fc_entry[i].get_text())
field_operator_value(
    self.fc_combobox[i].get_active_text(),
    self.fc_op_combobox[i].get_active_text(),
    self.fc_entry[i].get_text()
)
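field_operator_value() is the payoff of this hunk: it centralizes quoting, adds ISNULL/NOT ISNULL operators that take no right-hand value, and lets an "eval: " prefix pass raw SQL through unescaped. A standalone sketch, with a trivial escape() standing in for current_host.escape():

def escape(s):
    # stand-in for the host connection's escape(); the real code must use that
    return s.replace("\\", "\\\\").replace("'", "\\'")

def field_operator_value(field, op, value):
    if op == "ISNULL":
        return "isnull(`%s`)" % field
    if op == "NOT ISNULL":
        return "not isnull(`%s`)" % field
    eval_kw = "eval: "
    if value.startswith(eval_kw):
        return "`%s` %s %s" % (field, op, value[len(eval_kw):])
    return "`%s` %s '%s'" % (field, op, escape(value))

print field_operator_value("age", ">", "30")          # `age` > '30'
print field_operator_value("ts", "<", "eval: NOW()")  # `ts` < NOW()
print field_operator_value("email", "ISNULL", "")     # isnull(`email`)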
iter = self.sql_log_model.append((timestamp, log))
log = log.replace("<", "&lt;")
log = log.replace(">", "&gt;")
iter = self.sql_log_model.append((timestamp, log, olog))
def add_sql_log(self, log):
    max_len = int(self.config["query_log_max_entry_length"])
    if len(log) > max_len:
        log = log[0:max_len] + "\n/* query with length of %d bytes truncated. */" % len(log)
    # query = db_to_utf8(log);
    # query = syntax_highlight_markup(query);
    # query = rxx.replace(query, "[\r\n\t ]+", " ", Regexx::global);
    if not log:
        return
    now = time.time()
    now = int((now - int(now)) * 100)
    timestamp = time.strftime("%Y-%m-%d %H:%M:%S")
    if now:
        timestamp = "%s.%02d" % (timestamp, now)
    iter = self.sql_log_model.append((timestamp, log))
    self.sql_log_tv.scroll_to_cell(self.sql_log_model.get_path(iter))
    #self.xml.get_widget("message_notebook").set_current_page(0)
    self.process_events()
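This change keeps two copies of each query: an escaped one for the markup-rendered column and the original (olog) for copy/paste. Worth noting about hand-rolled escaping: if "&" is ever added to the set, it must be replaced first, or already-escaped entities get double-escaped. The rows above only touch "<" and ">":

def markup_escape(s):
    # ampersand first, otherwise "&lt;" would become "&amp;lt;"
    return s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")

print markup_escape("select 1 where a<b and c>d")
# select 1 where a&lt;b and c&gt;d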
def __init__(self):
    swing.JFrame.__init__(self, title="CHAT400 - An AS/400 Instant Messenger", resizable=0)
    self.contentPane.setLayout(awt.GridBagLayout())
    self.addWindowListener(self)
    self.chkFullNames = swing.JCheckBox("Show user's full name", 1)
    self.chkActive = swing.JCheckBox("Show only Active Users", 1)
    try:
        self.srvNam = _session.getHostName()
    except:
        self.srvNam = ""
    self.as400 = acc.AS400(self.srvNam)
    self.rUsrLst = rsc.RUserList(self.as400)
    self.rUsrLst.open()
    self.rUsrLst.waitForComplete()
    self.usrDct = {}
    for idx in range(self.rUsrLst.getListLength()):
        tmp_rUsr = self.rUsrLst.resourceAt(idx)
        key_usr = tmp_rUsr.getAttributeValue(rsc.RUser.USER_PROFILE_NAME)
        tmp_usrText = tmp_rUsr.getAttributeValue(rsc.RUser.TEXT_DESCRIPTION)
        self.usrDct[key_usr] = tmp_usrText
    self.jobLst = rsc.RJobList(self.as400)
    self.jobLst.setSelectionValue(rsc.RJobList.PRIMARY_JOB_STATUSES,
                                  rsc.RJob.JOB_STATUS_ACTIVE)
    self.jobLst.setSelectionValue(rsc.RJobList.JOB_TYPE,
                                  rsc.RJob.JOB_TYPE_INTERACTIVE)
    self.jobLst.setSortValue([rsc.RJob.USER_NAME, rsc.RJob.JOB_NAME])
    self.chatTxt = swing.JTextArea(5, 30, lineWrap=1, wrapStyleWord=1)
    self.rpyTxt = swing.JTextArea(10, 30, lineWrap=1, wrapStyleWord=1)
    self.users = swing.JComboBox(preferredSize=(175, 25), minimumSize=(150, 25))
    self.polchat = Thread(Poller(self))
    self.showGui()

def windowClosed(self, event):
    self.polchat.interrupt()
    self.dispose()

def windowClosing(self, event): None
def windowActivated(self, event): None
def windowDeactivated(self, event): None
def windowOpened(self, event): None
def windowIconified(self, event): None
def windowDeiconified(self, event): None

def rtvIntJobs(self):
    self.jobLst.open()
    self.jobLst.waitForComplete()
    self.intJobs = {}
    for idx in range(self.jobLst.getListLength()):
        tmp_job = self.jobLst.resourceAt(idx)
        key_usr = tmp_job.getAttributeValue(rsc.RJob.USER_NAME)
        if not self.intJobs.has_key(key_usr):
            self.intJobs[key_usr] = tmp_job
    self.jobLst.close()
    if self.chkFullNames.isSelected():
        keys = self.usrDct.keys()
    else:
        keys = self.intJobs.keys()
    keys.sort()
    if self.chkFullNames.isSelected():
        for key_usr in keys:
            menuItem = self.usrDct[key_usr]
            if self.chkActive.isSelected():
                if self.intJobs.has_key(key_usr):
                    self.users.addItem(menuItem)
            else:
                if self.intJobs.has_key(key_usr):
                    menuItem = '* ' + menuItem
                self.users.addItem(menuItem)
    else:
        for key_usr in keys:
            self.users.addItem(key_usr)

def btnActSnd(self, event):
    cmd = acc.CommandCall(self.as400)
    curUsr = self.as400.getUserId()
    try:
        sndUsr = self.intJobs[self.users.getSelectedItem()].getAttributeValue(rsc.RJob.USER_NAME)
    except:
        sndUsr = curUsr
    chatTxt = self.chatTxt.getText()
    dq = acc.KeyedDataQueue(self.as400, CHATQ)
    if not dq.exists():
        dq.create(KEYLEN, 512)
    try:
        dq.write(sndUsr, "%s::%s" % (curUsr, chatTxt))
        if not curUsr == sndUsr:
            self.rpyTxt.append("[%s]>>%s\n" % (curUsr, chatTxt))
            self.rpyTxt.setCaretPosition(len(self.rpyTxt.getText()))
        self.statusTxt.text = 'Message send successfull'
        self.chatTxt.selectAll()
        self.chatTxt.cut()
    except:
        self.statusTxt.text = 'Message send Failed - Contact your system-operator.'
    self.chatTxt.requestFocus()

def btnActRef(self, event):
    self.users.removeAllItems()
    self.rtvIntJobs()

def showGui(self):
    self.rtvIntJobs()
    self.btnRef = swing.JButton("Refresh User-List",
                                actionPerformed=self.btnActRef,
                                minimumSize=(135, 25),
                                preferredSize=(135, 25))
    self.btnRef.setMnemonic('R')
    self.btnSnd = swing.JButton("Send Message", actionPerformed=self.btnActSnd)
    self.btnSnd.setMnemonic('S')
    self.label1 = swing.JLabel("Send To:", minimumSize=(50, 25), preferredSize=(50, 25))
    self.rootPane.setDefaultButton(self.btnSnd)
    self.rpyTxt.setEditable(0)
    self.statusTxt = swing.JTextField(text='Welcome to CHAT400 - An AS/400 Instant Messenger',
                                      editable=0, horizontalAlignment=swing.JTextField.CENTER)
    gbc = awt.GridBagConstraints()
    gbc.insets = awt.Insets(10, 10, 5, 5)
    self.contentPane.add(self.label1, gbc)
    gbc.insets = awt.Insets(10, 5, 5, 5)
    self.contentPane.add(self.users, gbc)
    gbc.insets = awt.Insets(10, 5, 5, 10)
    self.contentPane.add(self.btnRef, gbc)
    gbc.gridx = 0
    gbc.gridwidth = 3
    gbc.insets = awt.Insets(5, 0, 0, 0)
    self.contentPane.add(self.chkFullNames, gbc)
    gbc.gridx = 0
    gbc.gridwidth = 3
    gbc.insets = awt.Insets(0, 0, 5, 0)
    self.contentPane.add(self.chkActive, gbc)
    gbc.gridx = 0
    gbc.gridwidth = 3
    gbc.insets = awt.Insets(5, 0, 5, 0)
    self.contentPane.add(self.btnSnd, gbc)
    scrollPane1 = swing.JScrollPane(self.chatTxt,
                                    swing.JScrollPane.VERTICAL_SCROLLBAR_ALWAYS,
                                    swing.JScrollPane.HORIZONTAL_SCROLLBAR_NEVER)
    scrollPane1.setViewportView(self.chatTxt)
    scrollPane2 = swing.JScrollPane(self.rpyTxt,
                                    swing.JScrollPane.VERTICAL_SCROLLBAR_ALWAYS,
                                    swing.JScrollPane.HORIZONTAL_SCROLLBAR_NEVER)
    scrollPane2.setViewportView(self.rpyTxt)
    splitPane = swing.JSplitPane(swing.JSplitPane.VERTICAL_SPLIT, scrollPane1, scrollPane2)
    gbc.gridx = 0
    gbc.gridwidth = 3
    gbc.fill = awt.GridBagConstraints.HORIZONTAL
    gbc.insets = awt.Insets(5, 10, 10, 10)
    self.contentPane.add(splitPane, gbc)
    gbc.gridx = 0
    gbc.gridwidth = 3
    gbc.fill = awt.GridBagConstraints.HORIZONTAL
    gbc.insets = awt.Insets(0, 10, 10, 10)
    self.contentPane.add(self.statusTxt, gbc)
    self.pack()
    self.polchat.start()
    self.show()

chatter = Chat400()
def __init__(self):
    swing.JFrame.__init__(self, title="CHAT400 - An AS/400 Instant Messenger", resizable=0)
    try:
        tmp_srvNam = _session.getHostName()
    except:
        tmp_srvNam = ""
    self.as400 = acc.AS400(tmp_srvNam)
    rUsrLst = rsc.RUserList(self.as400)
    rUsrLst.open()
    rUsrLst.waitForComplete()
    self.usrDct = {}
    for idx in range(rUsrLst.getListLength()):
        tmp_rUsr = rUsrLst.resourceAt(idx)
        key_usr = tmp_rUsr.getAttributeValue(rsc.RUser.USER_PROFILE_NAME)
        tmp_usrText = tmp_rUsr.getAttributeValue(rsc.RUser.TEXT_DESCRIPTION)
        self.usrDct[key_usr] = tmp_usrText
        print key_usr, tmp_usrText
    rUsrLst.close()
    self.jobLst = rsc.RJobList(self.as400)
    self.jobLst.setSelectionValue(rsc.RJobList.PRIMARY_JOB_STATUSES,
                                  rsc.RJob.JOB_STATUS_ACTIVE)
    self.jobLst.setSelectionValue(rsc.RJobList.JOB_TYPE,
                                  rsc.RJob.JOB_TYPE_INTERACTIVE)
    self.jobLst.setSortValue([rsc.RJob.USER_NAME, rsc.RJob.JOB_NAME])
    self.polchat = Thread(Poller(self))
    self.contentPane.setLayout(awt.GridBagLayout())
    self.addWindowListener(self)
    self.chkFullNames = swing.JCheckBox("Show user's full name", 1)
    self.chkActive = swing.JCheckBox("Show only Active Users", 1)
    self.chatTxt = swing.JTextArea(5, 30, lineWrap=1, wrapStyleWord=1)
    self.rpyTxt = swing.JTextArea(10, 30, lineWrap=1, wrapStyleWord=1)
    self.users = swing.JComboBox(preferredSize=(175, 25), minimumSize=(150, 25))
    self.showGui()

def windowClosed(self, event):
    self.polchat.interrupt()
    self.dispose()

def windowClosing(self, event): None
def windowActivated(self, event): None
def windowDeactivated(self, event): None
def windowOpened(self, event): None
def windowIconified(self, event): None
def windowDeiconified(self, event): None

def rtvIntJobs(self):
    self.jobLst.open()
    self.jobLst.waitForComplete()
    self.jobDct = {}
    for idx in range(self.jobLst.getListLength()):
        tmp_job = self.jobLst.resourceAt(idx)
        key_usr = tmp_job.getAttributeValue(rsc.RJob.USER_NAME)
        if not self.jobDct.has_key(key_usr):
            self.jobDct[key_usr] = tmp_job
    self.jobLst.close()
    keys = self.jobDct.keys()
    keys.sort()
    for key_usr in keys:
        menuItem = key_usr
        sts = ' '
        try:
            fullName = self.usrDct[key_usr]
        except:
            fullName = "*Can't find"
        if self.chkFullNames.isSelected():
            menuItem += ': %s' % (fullName)
        if self.chkActive.isSelected():
            if not self.jobDct.has_key(key_usr):
                continue
            sts = '*'
        menuItem = sts + menuItem
        self.users.addItem(menuItem)

def btnActSnd(self, event):
    cmd = acc.CommandCall(self.as400)
    curUsr = self.as400.getUserId()
    try:
        sndUsr = self.jobDct[self.users.getSelectedItem()].getAttributeValue(rsc.RJob.USER_NAME)
    except:
        sndUsr = curUsr
    chatTxt = self.chatTxt.getText()
    dq = acc.KeyedDataQueue(self.as400, CHATQ)
    if not dq.exists():
        dq.create(KEYLEN, 512)
    try:
        dq.write(sndUsr, "%s::%s" % (curUsr, chatTxt))
        if not curUsr == sndUsr:
            self.rpyTxt.append("[%s]>>%s\n" % (curUsr, chatTxt))
            self.rpyTxt.setCaretPosition(len(self.rpyTxt.getText()))
        self.statusTxt.text = 'Message send successfull'
        self.chatTxt.selectAll()
        self.chatTxt.cut()
    except:
        self.statusTxt.text = 'Message send Failed - Contact your system-operator.'
def __init__(self):
    swing.JFrame.__init__(self, title="CHAT400 - An AS/400 Instant Messenger", resizable=0)
    # We have to activate this when the script
    # is ready for production-environments.
    # self.setDefaultCloseOperation(swing.WindowConstants.DISPOSE_ON_CLOSE)
    self.contentPane.setLayout(awt.GridBagLayout())
    self.addWindowListener(self)
    self.chkFullNames = swing.JCheckBox("Show user's full name", 1)
    self.chkActive = swing.JCheckBox("Show only Active Users", 1)
    try:
        self.srvNam = _session.getHostName()
    except:
        self.srvNam = ""
    self.as400 = acc.AS400(self.srvNam)
if not sndUsr == "AFVAL": self.statusTxt.text='Message Sended.' else: self.rpyTxt.append("MESSAGE IS SENDED TO TRASHCAN !!!") self.rpyTxt.append("\nALWAYS PAY ATTENTION WHO YOU'RE SENDING TO !!!!\n")
if not sndUsr == "ALL TRASH": self.statusTxt.text='Message Sended.' else: self.rpyTxt.append("MESSAGE IS SENDED TO TRASHCAN !!!") self.rpyTxt.append("\nALWAYS PAY ATTENTION WHO YOU'RE SENDING TO !!!!\n")
def btnActSnd(self, event):
    cmd = acc.CommandCall(self.as400)
    #curUsr = self.as400.getUserId()
    selected = self.users.getSelectedItem()
    sndUsr = selected.split(':')[0]
    chatTxt = self.chatTxt.getText()
    dq = acc.KeyedDataQueue(self.as400, CHATQ)
    if not dq.exists():
        dq.create(KEYLEN, 512)
    try:
        if not sndUsr == "ALL TRASH":
            dq.write(sndUsr, "%s:%s" % (curUsr, chatTxt))
        if not curUsr == sndUsr:
            self.rpyTxt.append("%s -->> %s\n%s\n" % (curUsr, sndUsr, chatTxt))
            self.rpyTxt.setCaretPosition(len(self.rpyTxt.getText()))
        if not sndUsr == "AFVAL":
self.rpyTxt.append("%s>>%s\n"%(curUsr, chatTxt))
self.rpyTxt.append("%s -> %s\n%s\n"%(curUsr, sndUsr, chatTxt))
def btnActSnd(self, event):
    cmd = acc.CommandCall(self.as400)
    curUsr = self.as400.getUserId()
    selected = self.users.getSelectedItem()
    sndUsr = selected.split(':')[0]
    chatTxt = self.chatTxt.getText()
    dq = acc.KeyedDataQueue(self.as400, CHATQ)
    if not dq.exists():
        dq.create(KEYLEN, 512)
    try:
        dq.write(sndUsr, "%s::%s" % (curUsr, chatTxt))
        if not curUsr == sndUsr:
            self.rpyTxt.append("%s>>%s\n" % (curUsr, chatTxt))
            self.rpyTxt.setCaretPosition(len(self.rpyTxt.getText()))
        self.statusTxt.text = 'Message send successfull'
        self.chatTxt.selectAll()
        self.chatTxt.cut()
    except:
        self.statusTxt.text = 'Message send Failed - Contact your system-operator.'
    self.chatTxt.requestFocus()
def exit(event):
    lang.System.exit(0)
self.windowClosing = exit
self.windowClosing = self.closing
def __init__(self, title=""):
    JFrame.__init__(self, title)
    self.size = 400, 500
    self.windowClosing = exit
    label = JLabel(text="Class Name:")
    label.horizontalAlignment = JLabel.RIGHT
    tpanel = JPanel(layout=awt.FlowLayout())
    self.text = JTextField(20, actionPerformed=self.entered)
    btn = JButton("Enter", actionPerformed=self.entered)
    tpanel.add(label)
    tpanel.add(self.text)
    tpanel.add(btn)
    bpanel = JPanel()
    self.tree = JTree(default_tree())
    scrollpane = JScrollPane(self.tree)
    scrollpane.setMinimumSize(awt.Dimension(200, 200))
    scrollpane.setPreferredSize(awt.Dimension(350, 400))
    bpanel.add(scrollpane)
    bag = GridBag(self.contentPane)
    bag.addRow(tpanel, fill='HORIZONTAL', weightx=1.0, weighty=0.5)
    bag.addRow(bpanel, fill='BOTH', weightx=0.5, weighty=1.0)
frame = InfoFrame("Java-to-Jython Event/Property/Method Browser")
frame.show()
def main():
    frame = InfoFrame("Java-to-Jython Event/Property/Method Browser")
    frame.show()

if __name__ == 'main' or __name__ == '__main__':
    main()
def setupTree(self, top, pedict, ppdict, pmdict):
    tree_model = SampleModel(top)
    events = tree_model.addNode("Events", ["<<Events of the class and its ancestors>>"])
    props = tree_model.addNode("Properties", ["<<Properties of the class and its ancestors>>"])
    meths = tree_model.addNode("Methods", ["<<Methods of the class and its ancestors>>"])
    for key in pedict.keys():
        tree_model.addNode(key, pedict[key], parent=events)
    for key in ppdict.keys():
        tree_model.addNode(key, ppdict[key], parent=props)
    for key in pmdict.keys():
        tree_model.addNode(key, pmdict[key], parent=meths)
    self.tree.setModel(tree_model)
def test_date_triple_value(self):
def test_date_tuple_value(self):
def test_date_triple_value(self):
    self.assertEqual(self.date.tuple(), (2004, 9, 4, 0, 17, 15.799999977461994))
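The rename from test_date_triple_value to test_date_tuple_value matches the method under test: PyEphem dates expose tuple(). A quick check, assuming the ephem package is installed:

import ephem

d = ephem.Date('2004/09/04 00:17:15.8')
print d.tuple()   # (2004, 9, 4, 0, 17, 15.79999...)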
saturn_attributes = ('earth_tilt', 'sun_tilt')
'rise_time', 'rise_az', 'transit_time', 'transit_alt', 'set_time', 'set_az')),
'circumpolar', 'neverup', 'rise_time', 'rise_az', 'transit_time', 'transit_alt', 'set_time', 'set_az')),
self.o = o = Observer()
o.lat, o.long, o.elev = '33:45:10', '-84:23:37', 320.0
o.date = '1997/2/15'

def predict_attributes(self, body, was_computed, was_given_observer):
    predictions = {}
    for bodytype, needs_observer, attrs in attribute_list:
        for attr in attrs:
            if not isinstance(body, bodytype):
                predictions[attr] = AttributeError
            elif not was_computed:
                predictions[attr] = RuntimeError
            elif needs_observer and not was_given_observer:
                predictions[attr] = RuntimeError
            else:
                predictions[attr] = None
    return predictions
self.date = Date('1955/05/21')
self.obs = obs = Observer()
obs.lat, obs.long, obs.elev = '33:45:10', '-84:23:37', 320.0
obs.date = '1997/2/15'
def setUp(self):
    self.o = o = Observer()
    o.lat, o.long, o.elev = '33:45:10', '-84:23:37', 320.0
    o.date = '1997/2/15'
p = self.predict_attributes(body, was_computed, was_given_observer)
p = predict_attributes(body, was_computed, was_given_observer)
def compare_attributes(self, body, was_computed, was_given_observer):
    p = self.predict_attributes(body, was_computed, was_given_observer)
    t = self.measure_attributes(body)
    for a in Set(p).union(t):
        if p[a] is None and t[a] is None:
            continue
        if p[a] and isinstance(t[a], p[a]):
            continue
        if was_computed:
            if was_given_observer:
                adjective = 'topo'
            else:
                adjective = 'geo'
            adjective += 'centrically computed'
        else:
            adjective = 'uncomputed'
        raise TestError('accessing %s of %s %s '
                        'raised %r "%s" instead of %r'
                        % (a, adjective, body, t[a], t[a].args[0], p[a]))
body.compute(self.o)
body.compute(self.obs)
def run(self, body):
    self.compare_attributes(body, False, False)
    body.compute()
    self.compare_attributes(body, True, False)
    body.compute(self.o)
    self.compare_attributes(body, True, True)
    body.compute()
    self.compare_attributes(body, True, False)
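The run() ladder encodes PyEphem's access rules: attributes raise until compute() is called, and observer-dependent ones (rise/set times, azimuths) additionally need compute(observer). A minimal sketch against the same old PyEphem API these tests target:

import ephem

obs = ephem.Observer()
obs.lat, obs.long, obs.elev = '33:45:10', '-84:23:37', 320.0
obs.date = '1997/2/15'

mars = ephem.Mars()
mars.compute()      # geocentric: mars.ra works, mars.rise_time would raise
mars.compute(obs)   # topocentric: rise/transit/set attributes are now defined
print mars.rise_time, mars.set_time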
def test_Fixed(self):
    fb = FixedBody()
    fb._epoch, fb._ra, fb._dec = '2000', '1:30', '15:00'
    self.run(fb)
def build(self, bodytype, line, attributes):
    bl = readdb(line)
    ba = bodytype()
    for attribute, value in attributes.iteritems():
        try:
            setattr(ba, attribute, value)
        except TypeError:
            raise TestError, ('cannot modify attribute %s of %r: %s'
                              % (attribute, ba, sys.exc_info()[1]))
    if not isinstance(bl, bodytype):
        raise TestError, ('ephem database entry returned type %s'
                          ' rather than type %s' % (type(bl), bodytype))
    self.run(bl), self.run(ba)
    for circumstance in self.date, self.obs:
        is_observer = isinstance(circumstance, Observer)
        bl.compute(circumstance), ba.compute(circumstance)
        attrs = [a for (a, e) in predict_attributes(bl, 1, is_observer).items()
                 if not e]
        for attr in attrs:
            vl, va = getattr(bl, attr), getattr(ba, attr)
            if isinstance(vl, float):
                vl, va = str(float(vl)), str(float(va))
            if vl != va:
                raise TestError, ("%s item from line returns %s for %s"
                                  " but constructed object returns %s"
                                  % (type(bl), vl, attr, va))

def test_FixedBody(self):
    self.build(
        bodytype=FixedBody,
        line='Achernar,f|V|B3,1:37:42.9,-57:14:12,0.46,2000',
        attributes={'name': 'Achernar', '_ra': '1:37:42.9', '_dec': '-57:14:12',
                    'mag': 0.46, '_epoch': '2000'})

def test_EllipticalBody(self):
    self.build(
        bodytype=EllipticalBody,
        line=('C/1995 O1 (Hale-Bopp),e,89.3918,282.4192,130.8382,'
              '186.4302,0.0003872,0.99500880,0.0000,'
              '03/30.4376/1997,2000,g -2.0,4.0'),
        attributes={'name': 'Hale-Bopp', '_inc': 89.3918, '_Om': 282.4192,
                    '_om': 130.8382, '_a': 186.4302, '_e': 0.99500880,
                    '_M': 0.0000, '_cepoch': '1997/03/30.4376',
                    '_epoch': '2000', '_size': 0, '_g': -2.0, '_k': 4.0})
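build() cross-checks two construction paths: readdb() parsing an XEphem database line versus setting attributes by hand, asserting both bodies compute identical values. The readdb half in isolation, assuming ephem:

import ephem

star = ephem.readdb('Achernar,f|V|B3,1:37:42.9,-57:14:12,0.46,2000')
star.compute()
print star.name, star.mag   # Achernar 0.46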
def _esql_preprocess(file):
    return cfile
y2,m2 = divmod(m+self._months,12)
y2,m2 = divmod(m-1+self._months,12)
def __add__(self, other):
    if isinstance(other, IntervalYearToMonth):
        return self.__class__(0, int(self._months + other._months))
    elif isinstance(other, (datetime.datetime, datetime.date)):
        # extract the date from the other operand
        y, m, d = other.timetuple()[0:3]
        otherdate = datetime.date(y, m, d)
        # shift the date by the desired number of months
        y2, m2 = divmod(m + self._months, 12)
        try:
            date2 = datetime.date(y + y2, m2, d)
        except ValueError:
            raise ValueError, "month arithmetic yielded an invalid date."
        # apply the resulting timedelta to the operand
        return other + (date2 - otherdate)
    else:
        return NotImplemented
date2 = datetime.date(y+y2,m2,d)
date2 = datetime.date(y+y2,m2+1,d)
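The one-character fix in this pair is worth spelling out: calendar months are 1-based, so the carry must be computed on a 0-based month and shifted back, which is what divmod(m - 1 + months, 12) followed by m2 + 1 does. The unshifted version breaks exactly when the result lands in December. A sketch:

import datetime

def add_months(d, months):
    # 0-based divmod, then back to 1-based months
    y2, m2 = divmod(d.month - 1 + months, 12)
    return datetime.date(d.year + y2, m2 + 1, d.day)

print add_months(datetime.date(2004, 11, 15), 1)   # 2004-12-15
# with divmod(d.month + months, 12) this case computes month 0 -> ValueError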
if model.type_name in ('state', 'subgraph'):
    item = self.current_iconview().currentItem()
    if item:
        item.update_model()
def edsm_model_updated(self, model):
    if model.type_name in ('state', 'subgraph'):
        item = self.current_iconview().currentItem()
        if item:
            item.update_model()
    self.emit_graph_changed()
name = model.name
iconview_name = model.id
def edsm_delete_state(self, state):
    iconview = state.iconView()
    if iconview:
        iconview.takeItem(state)
    model = state._model
    if isinstance(state, IconViewSubgraphIcon):
        name = model.name
        for iconview in self.iconviews(1):
            if iconview.name() == name:
                iconview.clear()
                self.edsm_tabs.removePage(iconview)
    model.discard()
    self.emit_graph_changed()
if iconview.name() == name:
if iconview.name() == iconview_name:
svntest.main.run_svn(None, 'ps', 'foo', 'bar', os.path.join(wc_dir, 'A', 'D'))
svntest.main.run_svn(None, 'ps', 'foo', 'bar', D_path)
def hudson_part_2(sbox):
    "hudson prob 2.0: prop commit on old dir fails."

    if sbox.build():
        return 1

    wc_dir = sbox.wc_dir

    # Remove gamma from the working copy.
    gamma_path = os.path.join(wc_dir, 'A', 'D', 'gamma')
    svntest.main.run_svn(None, 'rm', gamma_path)

    # Create expected commit output.
    output_list = [[gamma_path, None, {}, {'verb': 'Deleting'}]]
    expected_output_tree = svntest.tree.build_generic_tree(output_list)

    # After committing, status should show no sign of gamma.
    status_list = svntest.actions.get_virginal_status_list(wc_dir, '2')
    for item in status_list:
        item[3]['wc_rev'] = '1'
    status_list.pop(path_index(status_list, gamma_path))
    expected_status_tree = svntest.tree.build_generic_tree(status_list)

    # Commit the deletion of gamma and verify.
    if svntest.actions.run_and_verify_commit(wc_dir,
                                             expected_output_tree,
                                             expected_status_tree,
                                             None, None, None, None, None,
                                             wc_dir):
        return 1

    # Now gamma should be marked as `deleted' under the hood, at
    # revision 2.  Meanwhile, A/D is still lagging at revision 1.

    # Make a propchange on A/D
    svntest.main.run_svn(None, 'ps', 'foo', 'bar',
                         os.path.join(wc_dir, 'A', 'D'))

    # Commit and *expect* a repository Merge failure:
    return svntest.actions.run_and_verify_commit(wc_dir,
                                                 None,
                                                 None,
                                                 "out-of-date",
                                                 None, None, None, None,
                                                 wc_dir)
"out-of-date",
"not up-to-date",
if svntest.actions.run_and_verify_commit(wc_dir,
                                         expected_output_tree,
                                         expected_status_tree,
                                         None, None, None, None, None,
                                         wc_dir):
    return 1
return 0
return svntest.actions.run_and_verify_commit(wc_dir,
                                             expected_output_tree,
                                             expected_status_tree,
                                             None, None, None, None, None,
                                             wc_dir)
def merge_mixed_revisions(sbox):
    "commit mixed-rev wc (no erronous merge error)"

    if sbox.build():
        return 1

    wc_dir = sbox.wc_dir

    # Make some convenient paths.
    iota_path = os.path.join(wc_dir, 'iota')
    H_path = os.path.join(wc_dir, 'A', 'D', 'H')
    chi_path = os.path.join(wc_dir, 'A', 'D', 'H', 'chi')
    omega_path = os.path.join(wc_dir, 'A', 'D', 'H', 'omega')
    psi_path = os.path.join(wc_dir, 'A', 'D', 'H', 'psi')

    # Here's the reproduction formula, in 5 parts.
    # Hoo, what a buildup of state!

    # 1. echo "moo" >> iota; echo "moo" >> A/D/H/chi; svn ci
    svntest.main.file_append(iota_path, "moo")
    svntest.main.file_append(chi_path, "moo")
    output_list = [[iota_path, None, {}, {'verb': 'Sending'}],
                   [chi_path, None, {}, {'verb': 'Sending'}]]
    expected_output_tree = svntest.tree.build_generic_tree(output_list)

    status_list = svntest.actions.get_virginal_status_list(wc_dir, '2')
    for item in status_list:
        if not ((item[0] == iota_path) or (item[0] == chi_path)):
            item[3]['wc_rev'] = '1'
    expected_status_tree = svntest.tree.build_generic_tree(status_list)

    if svntest.actions.run_and_verify_commit(wc_dir,
                                             expected_output_tree,
                                             expected_status_tree,
                                             None, None, None, None, None,
                                             wc_dir):
        return 1

    # 2. svn up A/D/H
    status_list = []
    status_list.append([H_path, None, {},
                        {'status': '_ ', 'wc_rev': '2', 'repos_rev': '2'}])
    status_list.append([chi_path, None, {},
                        {'status': '_ ', 'wc_rev': '2', 'repos_rev': '2'}])
    status_list.append([omega_path, None, {},
                        {'status': '_ ', 'wc_rev': '2', 'repos_rev': '2'}])
    status_list.append([psi_path, None, {},
                        {'status': '_ ', 'wc_rev': '2', 'repos_rev': '2'}])
    expected_status_tree = svntest.tree.build_generic_tree(status_list)

    my_greek_tree = [['omega', "This is the file 'omega'.", {}, {}],
                     ['chi', "This is the file 'chi'.moo", {}, {}],
                     ['psi', "This is the file 'psi'.", {}, {}]]
    expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree)
    expected_output_tree = svntest.tree.build_generic_tree([])

    if svntest.actions.run_and_verify_update(H_path,
                                             expected_output_tree,
                                             expected_disk_tree,
                                             expected_status_tree,
                                             None, None, None, None, 0):
        return 1

    # 3. echo "moo" >> iota; svn ci iota
    svntest.main.file_append(iota_path, "moo2")
    output_list = [[iota_path, None, {}, {'verb': 'Sending'}]]
    expected_output_tree = svntest.tree.build_generic_tree(output_list)

    status_list = svntest.actions.get_virginal_status_list(wc_dir, '3')
    for item in status_list:
        if not (item[0] == iota_path):
            item[3]['wc_rev'] = '1'
        if ((item[0] == H_path) or (item[0] == omega_path)
            or (item[0] == chi_path) or (item[0] == psi_path)):
            item[3]['wc_rev'] = '2'
    expected_status_tree = svntest.tree.build_generic_tree(status_list)

    if svntest.actions.run_and_verify_commit(wc_dir,
                                             expected_output_tree,
                                             expected_status_tree,
                                             None, None, None, None, None,
                                             wc_dir):
        return 1

    # 4. echo "moo" >> A/D/H/chi; svn ci A/D/H/chi
    svntest.main.file_append(chi_path, "moo3")
    output_list = [[chi_path, None, {}, {'verb': 'Sending'}]]
    expected_output_tree = svntest.tree.build_generic_tree(output_list)

    status_list = svntest.actions.get_virginal_status_list(wc_dir, '4')
    for item in status_list:
        if not (item[0] == chi_path):
            item[3]['wc_rev'] = '1'
        if ((item[0] == H_path) or (item[0] == omega_path)
            or (item[0] == psi_path)):
            item[3]['wc_rev'] = '2'
        if item[0] == iota_path:
            item[3]['wc_rev'] = '3'
    expected_status_tree = svntest.tree.build_generic_tree(status_list)

    if svntest.actions.run_and_verify_commit(wc_dir,
                                             expected_output_tree,
                                             expected_status_tree,
                                             None, None, None, None, None,
                                             wc_dir):
        return 1

    # 5. echo "moo" >> iota; svn ci iota
    svntest.main.file_append(iota_path, "moomoo")
    output_list = [[iota_path, None, {}, {'verb': 'Sending'}]]
    expected_output_tree = svntest.tree.build_generic_tree(output_list)

    status_list = svntest.actions.get_virginal_status_list(wc_dir, '5')
    for item in status_list:
        if not (item[0] == iota_path):
            item[3]['wc_rev'] = '1'
        if ((item[0] == H_path) or (item[0] == omega_path)
            or (item[0] == psi_path)):
            item[3]['wc_rev'] = '2'
        if item[0] == chi_path:
            item[3]['wc_rev'] = '4'
    expected_status_tree = svntest.tree.build_generic_tree(status_list)

    if svntest.actions.run_and_verify_commit(wc_dir,
                                             expected_output_tree,
                                             expected_status_tree,
                                             None, None, None, None, None,
                                             wc_dir):
        return 1

    # At this point, here is what our tree should look like:
    # _ 1 ( 5) working_copies/commit_tests-10
    # _ 1 ( 5) working_copies/commit_tests-10/A
    # _ 1 ( 5) working_copies/commit_tests-10/A/B
    # _ 1 ( 5) working_copies/commit_tests-10/A/B/E
    # _ 1 ( 5) working_copies/commit_tests-10/A/B/E/alpha
    # _ 1 ( 5) working_copies/commit_tests-10/A/B/E/beta
    # _ 1 ( 5) working_copies/commit_tests-10/A/B/F
    # _ 1 ( 5) working_copies/commit_tests-10/A/B/lambda
    # _ 1 ( 5) working_copies/commit_tests-10/A/C
    # _ 1 ( 5) working_copies/commit_tests-10/A/D
    # _ 1 ( 5) working_copies/commit_tests-10/A/D/G
    # _ 1 ( 5) working_copies/commit_tests-10/A/D/G/pi
    # _ 1 ( 5) working_copies/commit_tests-10/A/D/G/rho
    # _ 1 ( 5) working_copies/commit_tests-10/A/D/G/tau
    # _ 2 ( 5) working_copies/commit_tests-10/A/D/H
    # _ 4 ( 5) working_copies/commit_tests-10/A/D/H/chi
    # _ 2 ( 5) working_copies/commit_tests-10/A/D/H/omega
    # _ 2 ( 5) working_copies/commit_tests-10/A/D/H/psi
    # _ 1 ( 5) working_copies/commit_tests-10/A/D/gamma
    # _ 1 ( 5) working_copies/commit_tests-10/A/mu
    # _ 5 ( 5) working_copies/commit_tests-10/iota

    # At this point, we're ready to modify omega and iota, and commit
    # from the top.  We should *not* get a conflict!
    svntest.main.file_append(iota_path, "finalmoo")
    svntest.main.file_append(omega_path, "finalmoo")
    output_list = [[iota_path, None, {}, {'verb': 'Sending'}],
                   [omega_path, None, {}, {'verb': 'Sending'}]]
    expected_output_tree = svntest.tree.build_generic_tree(output_list)

    status_list = svntest.actions.get_virginal_status_list(wc_dir, '6')
    for item in status_list:
        if not ((item[0] == iota_path) or (item[0] == omega_path)):
            item[3]['wc_rev'] = '1'
        if ((item[0] == H_path) or (item[0] == psi_path)):
            item[3]['wc_rev'] = '2'
        if item[0] == chi_path:
            item[3]['wc_rev'] = '4'
    expected_status_tree = svntest.tree.build_generic_tree(status_list)

    if svntest.actions.run_and_verify_commit(wc_dir,
                                             expected_output_tree,
                                             expected_status_tree,
                                             None, None, None, None, None,
                                             wc_dir):
        return 1

    return 0
if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 return 0
return svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir)
def commit_uri_unsafe(sbox): "commit files and dirs with URI-unsafe characters" if sbox.build(): return 1 wc_dir = sbox.wc_dir # Make some convenient paths. hash_dir = os.path.join(wc_dir, '#hash#') nasty_dir = os.path.join(wc_dir, '#![]{}()<>%') space_path = os.path.join(wc_dir, 'A', 'D', 'space path') bang_path = os.path.join(wc_dir, 'A', 'D', 'H', 'bang!') bracket_path = os.path.join(wc_dir, 'A', 'D', 'H', 'bra[ket') brace_path = os.path.join(wc_dir, 'A', 'D', 'H', 'bra{e') angle_path = os.path.join(wc_dir, 'A', 'D', 'H', '<angle>') paren_path = os.path.join(wc_dir, 'A', 'D', 'pare)(theses') percent_path = os.path.join(wc_dir, '#hash#', 'percen%') nasty_path = os.path.join(wc_dir, 'A', '#![]{}()<>%') os.mkdir(hash_dir) os.mkdir(nasty_dir) svntest.main.file_append(space_path, "This path has a space in it.") svntest.main.file_append(bang_path, "This path has a bang in it.") svntest.main.file_append(bracket_path, "This path has a bracket in it.") svntest.main.file_append(brace_path, "This path has a brace in it.") svntest.main.file_append(angle_path, "This path has angle brackets in it.") svntest.main.file_append(paren_path, "This path has parentheses in it.") svntest.main.file_append(percent_path, "This path has a percent in it.") svntest.main.file_append(nasty_path, "This path has all sorts of ick in it.") output_list = [] add_list = [hash_dir, nasty_dir, # not xml-safe space_path, bang_path, bracket_path, brace_path, angle_path, # not xml-safe paren_path, percent_path, nasty_path, # not xml-safe ] for item in add_list: svntest.main.run_svn(None, 'add', item) item_list = [item, None, {}, {'verb' : 'Adding'}] output_list.append(item_list) expected_output_tree = svntest.tree.build_generic_tree(output_list) status_list = svntest.actions.get_virginal_status_list(wc_dir, '2') # Items in the status list are all at rev 1 for item in status_list: item[3]['wc_rev'] = '1' # Items in our add list will be at rev 2 for item in add_list: item_list = [item, None, {}, {'wc_rev': '2', 'repos_rev': '2', 'status': '_ '}] status_list.append(item_list) expected_status_tree = svntest.tree.build_generic_tree(status_list) if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 return 0
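# Why the rewrite above is safe: run_and_verify_commit already returns 0 on
# success and 1 on failure, so when it is the last statement in a test, the
# "if helper(...): return 1 / return 0" wrapper collapses to a direct
# return.  A minimal sketch of the equivalence -- verify() is a made-up
# stand-in for the real helper:

def verify(ok):
  "Hypothetical checker using the same 0-success/1-failure convention."
  if ok:
    return 0
  return 1

def test_old(ok):
  if verify(ok):        # long form, as in the removed lines
    return 1
  return 0

def test_new(ok):
  return verify(ok)     # equivalent short form, as in the added line

assert test_old(1) == test_new(1) == 0
assert test_old(0) == test_new(0) == 1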
if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 return 0
return svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir)
def commit_deleted_edited(sbox): "commit files that have been deleted, but also edited" if sbox.build(): return 1 wc_dir = sbox.wc_dir # Make some convenient paths. iota_path = os.path.join(wc_dir, 'iota') mu_path = os.path.join(wc_dir, 'A', 'mu') # Edit the files. svntest.main.file_append(iota_path, "This file has been edited.") svntest.main.file_append(mu_path, "This file has been edited.") # Schedule the files for removal. svntest.main.run_svn(None, 'remove', iota_path) svntest.main.run_svn(None, 'remove', mu_path) # Make our output list output_list = [(iota_path, None, {}, {'verb' : 'Deleting'}), (mu_path, None, {}, {'verb' : 'Deleting'})] expected_output_tree = svntest.tree.build_generic_tree(output_list) # Items in the status list are all at rev 1, except the two things # we changed...but then, they don't exist at all. status_list = svntest.actions.get_virginal_status_list(wc_dir, '2') status_list.pop(path_index(status_list, iota_path)) status_list.pop(path_index(status_list, mu_path)) for item in status_list: item[3]['wc_rev'] = '1' expected_status_tree = svntest.tree.build_generic_tree(status_list) if svntest.actions.run_and_verify_commit (wc_dir, expected_output_tree, expected_status_tree, None, None, None, None, None, wc_dir): return 1 return 0
a_t = util.apr_ansi_time_to_apr_time(self.t_max)[1]
a_t = util.apr_time_ansi_put(self.t_max)[1]
def get_metadata(self, pool): # by definition, the author and log message must be the same for all # items that went into this commit. therefore, just grab any item from # our record of changes/deletes. if self.changes: file, rev = self.changes[0] else: # there better be one... file, rev = self.deletes[0]
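# A note on the renamed call above: the trailing [1] suggests the wrapped
# APR function returns a (status, result) pair, as SWIG-style bindings of C
# functions with output parameters commonly do.  A sketch of that calling
# convention -- fake_time_ansi_put is hypothetical, standing in for the
# real util.apr_time_ansi_put binding:

APR_SUCCESS = 0

def fake_time_ansi_put(ansi_seconds):
  "Return (status, apr_time), like a wrapped C out-parameter function."
  return APR_SUCCESS, ansi_seconds * 1000000L   # apr_time_t is microseconds

status, a_t = fake_time_ansi_put(986409600)
assert status == APR_SUCCESS

# ...or index straight into the result tuple, as the code above does:
a_t = fake_time_ansi_put(986409600)[1]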
digest = sha.new(log + author).hexdigest()
digest = sha.new(log + '\0' + author).hexdigest()
def set_revision_info(self, revision, log, text): timestamp, author, op, old_ts = self.rev_data[revision] digest = sha.new(log + author).hexdigest() if old_ts: # the timestamp on this revision was changed. log it for later # resynchronization of other files' revisions that occurred # for this time and log message. self.resync.write('%08lx %s %08lx\n' % (old_ts, digest, timestamp))
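# Why the added '\0' separator matters: plain concatenation is ambiguous --
# ('ab' + 'c') and ('a' + 'bc') produce the same string, so two different
# (log, author) pairs could hash to the same digest and be treated as one
# commit.  Assuming neither field can contain a NUL byte (RCS log and
# author strings cannot), the separator makes the field boundary explicit:

import sha

assert sha.new('ab' + 'c').hexdigest() == sha.new('a' + 'bc').hexdigest()
assert sha.new('ab' + '\0' + 'c').hexdigest() != \
       sha.new('a' + '\0' + 'bc').hexdigest()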
print 'USAGE: %s [-v] [-p pass] repository-path' % sys.argv[0]
print 'USAGE: %s [-v] [-s svn-repos-path] [-p pass] repository-path' \ % sys.argv[0]
def usage(): print 'USAGE: %s [-v] [-p pass] repository-path' % sys.argv[0] sys.exit(1)
opts, args = getopt.getopt(sys.argv[1:], 'p:v')
opts, args = getopt.getopt(sys.argv[1:], 'p:s:v')
def main(): opts, args = getopt.getopt(sys.argv[1:], 'p:v') if len(args) != 1: usage() verbose = 0 start_pass = 1 for opt, value in opts: if opt == '-p': start_pass = int(value) if start_pass < 1 or start_pass > len(_passes): print 'ERROR: illegal value (%d) for starting pass. ' \ 'must be 1 through %d.' % (start_pass, len(_passes)) sys.exit(1) elif opt == '-v': verbose = 1 util.run_app(convert, args[0], start_pass=start_pass, verbose=verbose)
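# In a getopt spec, a trailing colon means "this option takes a value", so
# the new 'p:s:v' accepts -p <arg> and -s <arg> plus the bare -v flag.  A
# small demonstration (the sample argument values are made up):

import getopt

opts, args = getopt.getopt(['-p', '2', '-s', '/var/svn/repos', '-v',
                            'rcs-tree'], 'p:s:v')
assert opts == [('-p', '2'), ('-s', '/var/svn/repos'), ('-v', '')]
assert args == ['rcs-tree']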
util.run_app(convert, args[0], start_pass=start_pass, verbose=verbose)
elif opt == '-s': target = value util.run_app(convert, args[0], start_pass=start_pass, verbose=verbose, target=target)
def main(): opts, args = getopt.getopt(sys.argv[1:], 'p:v') if len(args) != 1: usage() verbose = 0 start_pass = 1 for opt, value in opts: if opt == '-p': start_pass = int(value) if start_pass < 1 or start_pass > len(_passes): print 'ERROR: illegal value (%d) for starting pass. ' \ 'must be 1 through %d.' % (start_pass, len(_passes)) sys.exit(1) elif opt == '-v': verbose = 1 util.run_app(convert, args[0], start_pass=start_pass, verbose=verbose)
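# One detail of the new option handling worth noting: 'target' is assigned
# only inside the '-s' branch, so if -s is omitted the later run_app call
# would hit an unbound name.  A defensive variant initializes it before the
# loop (the None default is an assumption about what convert() accepts):

target = None
opts = [('-v', '')]        # e.g. a command line where -s was omitted
for opt, value in opts:
  if opt == '-s':
    target = value
assert target is None      # safe to pass along instead of raising NameError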
prnt_n = `n` if n < 10: prnt_n = " " + prnt_n print " ", prnt_n, " ", x.__doc__
print " %2d %s" % (n, x.__doc__)
def run_tests(test_list): "Main routine to run all tests in TEST_LIST." global test_area_url testnum = 0 for arg in sys.argv: if arg == "list": print "Test # Test Description" print "------ ----------------" n = 1 for x in test_list[1:]: prnt_n = `n` if n < 10: prnt_n = " " + prnt_n print " ", prnt_n, " ", x.__doc__ n = n+1 return 0 elif arg == "--url": index = sys.argv.index(arg) test_area_url = sys.argv[index + 1] else: try: testnum = int(arg) except ValueError: pass if testnum: return run_one_test(testnum, test_list) # otherwise, run all tests. got_error = 0 for n in range(len(test_list)): if n: if run_one_test(n, test_list): got_error = 1 return got_error
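# The replacement line leans on a printf-style field width: '%2d'
# right-aligns the test number in two characters, which is exactly what the
# removed code did by hand with backtick-repr and manual space padding:

for n, doc in [(9, "ninth test"), (10, "tenth test")]:
  print " %2d %s" % (n, doc)
# prints:
#   9 ninth test
#  10 tenth test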
dep_path = tlib.path if bldtype == 'lib': if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: libs.append(retreat + os.path.join(dep_path, lib + '.la'))
libs.append(retreat + os.path.join(tlib.path, lib + '.la'))
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 targets = { } install = { } # install area name -> targets test_progs = [ ] test_deps = [ ] fs_test_progs = [ ] fs_test_deps = [ ] file_deps = [ ] target_dirs = { } target_names = _filter_targets(parser.sections()) # PASS 1: collect the targets and some basic info for target in target_names: try: target_ob = Target(target, parser.get(target, 'path'), parser.get(target, 'install'), parser.get(target, 'type')) except GenMakeError, e: print e errors = 1 continue targets[target] = target_ob itype = target_ob.install if install.has_key(itype): install[itype].append(target_ob) else: install[itype] = [ target_ob ] target_dirs[target_ob.path] = None if errors: sys.exit(1) # PASS 2: generate the outputs for target in target_names: target_ob = targets[target] path = target_ob.path bldtype = target_ob.type objext = target_ob.objext tpath = target_ob.output tfile = os.path.basename(tpath) if target_ob.install == 'test' and bldtype == 'exe': test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': test_progs.append(tpath) if target_ob.install == 'fs-test' and bldtype == 'exe': fs_test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': fs_test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] deps = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in target_names: tlib = targets[lib] target_ob.deps.append(tlib) deps.append(tlib.output) dep_path = tlib.path if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') add_deps = parser.get(target, 'add-deps') objnames = string.join(map(os.path.basename, objects)) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, string.join(objects + deps), add_deps, targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, string.join(libs))) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') for g_name, g_targets in install.items(): target_names = [ ] for i in g_targets: target_names.append(i.output) ofile.write('%s: %s\n\n' % (g_name, string.join(target_names))) ofile.write('BUILD_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = [ ] for target in targets.values(): # .la files are handled by the standard 'clean' rule; clean all the # other targets if target.output[-3:] != '.la': cfiles.append(target.output) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(t.output) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in t.deps: bt = dep.output if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(MKDIR) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test' and area != 'fs-test': area_var = string.replace(area, '-', '_') ofile.write('install-%s: %s\n' '\t$(MKDIR) $(%sdir)\n' % (area, string.join(files), area_var)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area_var), file, os.path.join('$(%sdir)' % area_var, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(MKDIR) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % 
(os.path.join('$(top_srcdir)', file), os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for name, target in targets.items(): ofile.write('%s: %s\n' % (name, target.output)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors fs_scripts, fs_errors = _collect_paths(parser.get('fs-test-scripts', 'paths')) errors = errors or fs_errors ofile.write('FS_TEST_DEPS = %s\n\n' % string.join(fs_test_deps + fs_scripts)) ofile.write('FS_TEST_PROGRAMS = %s\n\n' % string.join(fs_test_progs + fs_scripts)) ofile.write('TEST_DEPS = %s\n\n' % string.join(test_deps + scripts)) ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
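# The branch removed above special-cased library targets with raw linker
# flags (-L<dir>/.libs -lsvn_foo) because older libtool could not record a
# shared-lib dependency on another shared lib; the new code always hands
# libtool the .la file.  Both forms built from the same inputs (the paths
# here are illustrative):

import os

retreat = '../../'              # from the target's dir back to the root
dep_path = 'subversion/libsvn_subr'
lib = 'libsvn_subr'

old = '-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib[3:])
new = retreat + os.path.join(dep_path, lib + '.la')

assert old == '-L../../subversion/libsvn_subr/.libs -lsvn_subr'
assert new == '../../subversion/libsvn_subr/libsvn_subr.la'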
base, ext = os.path.splitext(os.path.basename(file))
dirname, fname = os.path.split(file) base, ext = os.path.splitext(fname)
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 targets = { } install = { } # install area name -> targets test_progs = [ ] test_deps = [ ] fs_test_progs = [ ] fs_test_deps = [ ] file_deps = [ ] target_dirs = { } target_names = _filter_targets(parser.sections()) # PASS 1: collect the targets and some basic info for target in target_names: try: target_ob = Target(target, parser.get(target, 'path'), parser.get(target, 'install'), parser.get(target, 'type')) except GenMakeError, e: print e errors = 1 continue targets[target] = target_ob itype = target_ob.install if install.has_key(itype): install[itype].append(target_ob) else: install[itype] = [ target_ob ] target_dirs[target_ob.path] = None if errors: sys.exit(1) # PASS 2: generate the outputs for target in target_names: target_ob = targets[target] path = target_ob.path bldtype = target_ob.type objext = target_ob.objext tpath = target_ob.output tfile = os.path.basename(tpath) if target_ob.install == 'test' and bldtype == 'exe': test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': test_progs.append(tpath) if target_ob.install == 'fs-test' and bldtype == 'exe': fs_test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': fs_test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] deps = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in target_names: tlib = targets[lib] target_ob.deps.append(tlib) deps.append(tlib.output) dep_path = tlib.path if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') add_deps = parser.get(target, 'add-deps') objnames = string.join(map(os.path.basename, objects)) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, string.join(objects + deps), add_deps, targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, string.join(libs))) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') for g_name, g_targets in install.items(): target_names = [ ] for i in g_targets: target_names.append(i.output) ofile.write('%s: %s\n\n' % (g_name, string.join(target_names))) ofile.write('BUILD_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = [ ] for target in targets.values(): # .la files are handled by the standard 'clean' rule; clean all the # other targets if target.output[-3:] != '.la': cfiles.append(target.output) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(t.output) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in t.deps: bt = dep.output if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(MKDIR) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test' and area != 'fs-test': area_var = string.replace(area, '-', '_') ofile.write('install-%s: %s\n' '\t$(MKDIR) $(%sdir)\n' % (area, string.join(files), area_var)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area_var), file, os.path.join('$(%sdir)' % area_var, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(MKDIR) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % 
(os.path.join('$(top_srcdir)', file), os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for name, target in targets.items(): ofile.write('%s: %s\n' % (name, target.output)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors fs_scripts, fs_errors = _collect_paths(parser.get('fs-test-scripts', 'paths')) errors = errors or fs_errors ofile.write('FS_TEST_DEPS = %s\n\n' % string.join(fs_test_deps + fs_scripts)) ofile.write('FS_TEST_PROGRAMS = %s\n\n' % string.join(fs_test_progs + fs_scripts)) ofile.write('TEST_DEPS = %s\n\n' % string.join(test_deps + scripts)) ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
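# The change above switches from basename+splitext to os.path.split because
# the install rules now need the directory part as well as the file name:

import os

file = 'subversion/mod_dav_svn/libmod_dav_svn.la'    # illustrative path

dirname, fname = os.path.split(file)
base, ext = os.path.splitext(fname)

assert (dirname, fname) == ('subversion/mod_dav_svn', 'libmod_dav_svn.la')
assert (base, ext) == ('libmod_dav_svn', '.la')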
ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file))
ofile.write('\tcd %s ; $(INSTALL_MOD_SHARED) -n %s %s\n' % (dirname, name, fname))
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 targets = { } install = { } # install area name -> targets test_progs = [ ] test_deps = [ ] fs_test_progs = [ ] fs_test_deps = [ ] file_deps = [ ] target_dirs = { } target_names = _filter_targets(parser.sections()) # PASS 1: collect the targets and some basic info for target in target_names: try: target_ob = Target(target, parser.get(target, 'path'), parser.get(target, 'install'), parser.get(target, 'type')) except GenMakeError, e: print e errors = 1 continue targets[target] = target_ob itype = target_ob.install if install.has_key(itype): install[itype].append(target_ob) else: install[itype] = [ target_ob ] target_dirs[target_ob.path] = None if errors: sys.exit(1) # PASS 2: generate the outputs for target in target_names: target_ob = targets[target] path = target_ob.path bldtype = target_ob.type objext = target_ob.objext tpath = target_ob.output tfile = os.path.basename(tpath) if target_ob.install == 'test' and bldtype == 'exe': test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': test_progs.append(tpath) if target_ob.install == 'fs-test' and bldtype == 'exe': fs_test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': fs_test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] deps = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in target_names: tlib = targets[lib] target_ob.deps.append(tlib) deps.append(tlib.output) dep_path = tlib.path if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') add_deps = parser.get(target, 'add-deps') objnames = string.join(map(os.path.basename, objects)) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, string.join(objects + deps), add_deps, targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, string.join(libs))) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') for g_name, g_targets in install.items(): target_names = [ ] for i in g_targets: target_names.append(i.output) ofile.write('%s: %s\n\n' % (g_name, string.join(target_names))) ofile.write('BUILD_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = [ ] for target in targets.values(): # .la files are handled by the standard 'clean' rule; clean all the # other targets if target.output[-3:] != '.la': cfiles.append(target.output) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(t.output) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in t.deps: bt = dep.output if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(MKDIR) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test' and area != 'fs-test': area_var = string.replace(area, '-', '_') ofile.write('install-%s: %s\n' '\t$(MKDIR) $(%sdir)\n' % (area, string.join(files), area_var)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area_var), file, os.path.join('$(%sdir)' % area_var, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(MKDIR) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % 
(os.path.join('$(top_srcdir)', file), os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for name, target in targets.items(): ofile.write('%s: %s\n' % (name, target.output)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors fs_scripts, fs_errors = _collect_paths(parser.get('fs-test-scripts', 'paths')) errors = errors or fs_errors ofile.write('FS_TEST_DEPS = %s\n\n' % string.join(fs_test_deps + fs_scripts)) ofile.write('FS_TEST_PROGRAMS = %s\n\n' % string.join(fs_test_progs + fs_scripts)) ofile.write('TEST_DEPS = %s\n\n' % string.join(test_deps + scripts)) ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
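# With dirname and fname split out, the generator can emit a rule that cd's
# into the build directory first, so the install tool sees the .la file by
# its short name.  What the new ofile.write produces for the illustrative
# path above (module name included):

dirname = 'subversion/mod_dav_svn'
fname = 'libmod_dav_svn.la'
name = 'dav_svn'       # base with the 'libmod_' prefix stripped

line = '\tcd %s ; $(INSTALL_MOD_SHARED) -n %s %s\n' % (dirname, name, fname)
assert line == ('\tcd subversion/mod_dav_svn ; '
                '$(INSTALL_MOD_SHARED) -n dav_svn libmod_dav_svn.la\n')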
ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area_var), file, os.path.join('$(%sdir)' % area_var, os.path.basename(file))))
dirname, fname = os.path.split(file) ofile.write('\tcd %s ; $(INSTALL_%s) %s %s\n' % (dirname, string.upper(area_var), fname, os.path.join('$(%sdir)' % area_var, fname)))
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 targets = { } install = { } # install area name -> targets test_progs = [ ] test_deps = [ ] fs_test_progs = [ ] fs_test_deps = [ ] file_deps = [ ] target_dirs = { } target_names = _filter_targets(parser.sections()) # PASS 1: collect the targets and some basic info for target in target_names: try: target_ob = Target(target, parser.get(target, 'path'), parser.get(target, 'install'), parser.get(target, 'type')) except GenMakeError, e: print e errors = 1 continue targets[target] = target_ob itype = target_ob.install if install.has_key(itype): install[itype].append(target_ob) else: install[itype] = [ target_ob ] target_dirs[target_ob.path] = None if errors: sys.exit(1) # PASS 2: generate the outputs for target in target_names: target_ob = targets[target] path = target_ob.path bldtype = target_ob.type objext = target_ob.objext tpath = target_ob.output tfile = os.path.basename(tpath) if target_ob.install == 'test' and bldtype == 'exe': test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': test_progs.append(tpath) if target_ob.install == 'fs-test' and bldtype == 'exe': fs_test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': fs_test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] deps = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in target_names: tlib = targets[lib] target_ob.deps.append(tlib) deps.append(tlib.output) dep_path = tlib.path if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') add_deps = parser.get(target, 'add-deps') objnames = string.join(map(os.path.basename, objects)) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, string.join(objects + deps), add_deps, targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, string.join(libs))) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') for g_name, g_targets in install.items(): target_names = [ ] for i in g_targets: target_names.append(i.output) ofile.write('%s: %s\n\n' % (g_name, string.join(target_names))) ofile.write('BUILD_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = [ ] for target in targets.values(): # .la files are handled by the standard 'clean' rule; clean all the # other targets if target.output[-3:] != '.la': cfiles.append(target.output) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(t.output) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in t.deps: bt = dep.output if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(MKDIR) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test' and area != 'fs-test': area_var = string.replace(area, '-', '_') ofile.write('install-%s: %s\n' '\t$(MKDIR) $(%sdir)\n' % (area, string.join(files), area_var)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area_var), file, os.path.join('$(%sdir)' % area_var, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(MKDIR) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % 
(os.path.join('$(top_srcdir)', file), os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for name, target in targets.items(): ofile.write('%s: %s\n' % (name, target.output)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors fs_scripts, fs_errors = _collect_paths(parser.get('fs-test-scripts', 'paths')) errors = errors or fs_errors ofile.write('FS_TEST_DEPS = %s\n\n' % string.join(fs_test_deps + fs_scripts)) ofile.write('FS_TEST_PROGRAMS = %s\n\n' % string.join(fs_test_progs + fs_scripts)) ofile.write('TEST_DEPS = %s\n\n' % string.join(test_deps + scripts)) ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
return 1
else: return 1
def compare_line_lists(expected_lines, actual_lines, regexp): """Compare two lists of lines (ignoring orderings), and return 0 if they are the same or 1 if they are different. Specifically, matches will be made between each line in EXPECTED_LINES and those in ACTUAL_LINES using REGEXP. If a line has no match, or if one of the lists has 'leftover' lines at the end, then the comparison will return 1. REGEXP should contain some non-zero number of match groups (presumably separated by arbitrary whitespace (\s+)). A 'match' between lines will compare the first pair of match groups, then the second, and so on. If all pairs match, then the lines themselves are said to match.""" remachine = re.compile(regexp) elist = expected_lines[:] # make copies so we can change them alist = actual_lines[:] for eline in elist: for aline in alist: # alist will shrink each time this loop starts if not compare_lines(eline, aline, remachine): alist.remove(aline) # safe to delete aline, because... break # we're killing this aline loop, starting over with new eline. return 1 # failure: we examined all alines, found no match for eline. # if we get here, then every eline had an aline match. # but what if alist has *extra* lines? if len(alist) > 0: return 1 # failure: alist had extra junk else: return 0 # success: we got a 1-to-1 mapping between sets.
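# The one-word fix above hangs 'return 1' on the inner loop's else clause.
# In Python, a for-loop's else runs only when the loop finishes without
# hitting break -- here, exactly "no aline matched this eline".  Without
# the else, the return fires even after a successful break.  A minimal
# demonstration of the construct:

def has_match(eline, alist):
  for aline in alist:
    if aline == eline:
      break              # found a match; the else clause is skipped
  else:
    return 0             # loop exhausted without break: no match
  return 1

assert has_match('x', ['a', 'x']) == 1
assert has_match('x', ['a', 'b']) == 0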
el = ['A /foo/bar', 'M /foo/baz', 'D blee'] al = ['M /foo/baz', 'D blee', 'A /foo/bar',] rm = re.compile(r"^(..)\s+(.+)") print "comparing sets, result is:", compare_line_lists(el, al, rm)
def compare_line_lists(expected_lines, actual_lines, regexp): """Compare two lists of lines (ignoring orderings), and return 0 if they are the same or 1 if they are different. Specifically, matches will be made between each line in EXPECTED_LINES and those in ACTUAL_LINES using REGEXP. If a line has no match, or if one of the lists has 'leftover' lines at the end, then the comparison will return 1. REGEXP should contain some non-zero number of match groups (presumably separated by arbitrary whitespace (\s+)). A 'match' between lines will compare the first pair of match groups, then the second, and so on. If all pairs match, then the lines themselves are said to match.""" remachine = re.compile(regexp) elist = expected_lines[:] # make copies so we can change them alist = actual_lines[:] for eline in elist: for aline in alist: # alist will shrink each time this loop starts if not compare_lines(eline, aline, remachine): alist.remove(aline) # safe to delete aline, because... break # we're killing this aline loop, starting over with new eline. return 1 # failure: we examined all alines, found no match for eline. # if we get here, then every eline had an aline match. # but what if alist has *extra* lines? if len(alist) > 0: return 1 # failure: alist had extra junk else: return 0 # success: we got a 1-to-1 mapping between sets.
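# The lines removed above were module-level scratch code that ran on every
# import.  An alternative to deleting such snippets outright (not what this
# change did) is the standard __main__ guard, so the self-test runs only
# when the file is executed directly; this sketch assumes
# compare_line_lists from above is in scope:

if __name__ == '__main__':
  import re
  el = ['A /foo/bar', 'M /foo/baz', 'D blee']
  al = ['M /foo/baz', 'D blee', 'A /foo/bar']
  rm = re.compile(r"^(..)\s+(.+)")
  print "comparing sets, result is:", compare_line_lists(el, al, rm)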
target_deps = { }
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 groups = { } target_deps = { } build_targets = { } install = { } test_progs = [ ] file_deps = [ ] target_dirs = { } targets = _filter_targets(parser.sections()) for target in targets: path = parser.get(target, 'path') target_dirs[path] = None install_type = parser.get(target, 'install') bldtype = parser.get(target, 'type') if bldtype == 'exe': tfile = target objext = '.o' if not install_type: install_type = 'bin' elif bldtype == 'lib': tfile = target + '.la' objext = '.lo' if not install_type: install_type = 'lib' elif bldtype == 'doc': pass else: print 'ERROR: unknown build type:', bldtype errors = 1 continue tpath = os.path.join(path, tfile) build_targets[target] = tpath if install.has_key(install_type): install[install_type].append(target) else: install[install_type] = [ target ] if install_type == 'test' and bldtype == 'exe' \ and parser.get(target, 'testing') != 'skip': test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] target_deps[target] = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in targets: target_deps[target].append(lib) dep_path = parser.get(lib, 'path') if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') objstr = string.join(objects) objnames = string.join(map(os.path.basename, objects)) libstr = string.join(libs) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, objstr, string.join(target_deps[target]), targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, libstr)) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target) else: groups[group] = [ target ] for group in groups.keys(): group_deps = _sort_deps(groups[group], target_deps) for i in range(len(group_deps)): group_deps[i] = build_targets[group_deps[i]] ofile.write('%s: %s\n\n' % (group, string.join(group_deps))) ofile.write('CLEAN_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = filter(_filter_clean_files, build_targets.values()) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(build_targets[t]) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in target_deps[t]: bt = build_targets[dep] if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(mkinstalldirs) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test': ofile.write('install-%s: %s\n' '\t$(mkinstalldirs) $(%sdir)\n' % (area, string.join(files), area)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area), file, os.path.join('$(%sdir)' % area, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(mkinstalldirs) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (file, 
os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for target, tpath in build_targets.items(): ofile.write('%s: %s\n' % (target, tpath)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
targets = _filter_targets(parser.sections()) for target in targets: path = parser.get(target, 'path') target_dirs[path] = None install_type = parser.get(target, 'install') bldtype = parser.get(target, 'type') if bldtype == 'exe': tfile = target objext = '.o' if not install_type: install_type = 'bin' elif bldtype == 'lib': tfile = target + '.la' objext = '.lo' if not install_type: install_type = 'lib' elif bldtype == 'doc': pass else: print 'ERROR: unknown build type:', bldtype
target_names = _filter_targets(parser.sections()) for target in target_names: try: target_ob = Target(target, parser.get(target, 'path'), parser.get(target, 'install'), parser.get(target, 'type')) except GenMakeError, e: print e
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 groups = { } target_deps = { } build_targets = { } install = { } test_progs = [ ] file_deps = [ ] target_dirs = { } targets = _filter_targets(parser.sections()) for target in targets: path = parser.get(target, 'path') target_dirs[path] = None install_type = parser.get(target, 'install') bldtype = parser.get(target, 'type') if bldtype == 'exe': tfile = target objext = '.o' if not install_type: install_type = 'bin' elif bldtype == 'lib': tfile = target + '.la' objext = '.lo' if not install_type: install_type = 'lib' elif bldtype == 'doc': pass else: print 'ERROR: unknown build type:', bldtype errors = 1 continue tpath = os.path.join(path, tfile) build_targets[target] = tpath if install.has_key(install_type): install[install_type].append(target) else: install[install_type] = [ target ] if install_type == 'test' and bldtype == 'exe' \ and parser.get(target, 'testing') != 'skip': test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] target_deps[target] = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in targets: target_deps[target].append(lib) dep_path = parser.get(lib, 'path') if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') objstr = string.join(objects) objnames = string.join(map(os.path.basename, objects)) libstr = string.join(libs) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, objstr, string.join(target_deps[target]), targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, libstr)) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target) else: groups[group] = [ target ] for group in groups.keys(): group_deps = _sort_deps(groups[group], target_deps) for i in range(len(group_deps)): group_deps[i] = build_targets[group_deps[i]] ofile.write('%s: %s\n\n' % (group, string.join(group_deps))) ofile.write('CLEAN_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = filter(_filter_clean_files, build_targets.values()) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(build_targets[t]) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in target_deps[t]: bt = build_targets[dep] if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(mkinstalldirs) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test': ofile.write('install-%s: %s\n' '\t$(mkinstalldirs) $(%sdir)\n' % (area, string.join(files), area)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area), file, os.path.join('$(%sdir)' % area, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(mkinstalldirs) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (file, 
os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for target, tpath in build_targets.items(): ofile.write('%s: %s\n' % (target, tpath)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
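# The refactor above folds the old per-type if/elif bookkeeping (output
# file name, object extension, default install area) into a constructor.
# Neither Target nor GenMakeError is shown in this hunk, so the following
# is only a guess at their minimal shape:

import os

class GenMakeError(Exception):
  pass

class Target:
  def __init__(self, name, path, install, type):
    self.name = name
    self.path = path
    self.type = type
    self.deps = [ ]           # filled in during pass 2
    if type == 'exe':
      tfile = name
      self.objext = '.o'
      self.install = install or 'bin'
    elif type == 'lib':
      tfile = name + '.la'
      self.objext = '.lo'
      self.install = install or 'lib'
    else:
      raise GenMakeError('ERROR: unknown build type: ' + type)
    self.output = os.path.join(path, tfile)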
tpath = os.path.join(path, tfile)
targets[target] = target_ob group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target_ob.output) else: groups[group] = [ target_ob.output ] if errors: sys.exit(1) for target in target_names: target_ob = targets[target] path = target_ob.path install_type = target_ob.install bldtype = target_ob.type objext = target_ob.objext tpath = target_ob.output tfile = os.path.basename(tpath) target_dirs[path] = None
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 groups = { } target_deps = { } build_targets = { } install = { } test_progs = [ ] file_deps = [ ] target_dirs = { } targets = _filter_targets(parser.sections()) for target in targets: path = parser.get(target, 'path') target_dirs[path] = None install_type = parser.get(target, 'install') bldtype = parser.get(target, 'type') if bldtype == 'exe': tfile = target objext = '.o' if not install_type: install_type = 'bin' elif bldtype == 'lib': tfile = target + '.la' objext = '.lo' if not install_type: install_type = 'lib' elif bldtype == 'doc': pass else: print 'ERROR: unknown build type:', bldtype errors = 1 continue tpath = os.path.join(path, tfile) build_targets[target] = tpath if install.has_key(install_type): install[install_type].append(target) else: install[install_type] = [ target ] if install_type == 'test' and bldtype == 'exe' \ and parser.get(target, 'testing') != 'skip': test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] target_deps[target] = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in targets: target_deps[target].append(lib) dep_path = parser.get(lib, 'path') if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') objstr = string.join(objects) objnames = string.join(map(os.path.basename, objects)) libstr = string.join(libs) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, objstr, string.join(target_deps[target]), targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, libstr)) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target) else: groups[group] = [ target ] for group in groups.keys(): group_deps = _sort_deps(groups[group], target_deps) for i in range(len(group_deps)): group_deps[i] = build_targets[group_deps[i]] ofile.write('%s: %s\n\n' % (group, string.join(group_deps))) ofile.write('CLEAN_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = filter(_filter_clean_files, build_targets.values()) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(build_targets[t]) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in target_deps[t]: bt = build_targets[dep] if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(mkinstalldirs) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test': ofile.write('install-%s: %s\n' '\t$(mkinstalldirs) $(%sdir)\n' % (area, string.join(files), area)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area), file, os.path.join('$(%sdir)' % area, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(mkinstalldirs) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (file, 
os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for target, tpath in build_targets.items(): ofile.write('%s: %s\n' % (target, tpath)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
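The hunk above swaps the parallel dictionaries (build_targets, target_deps, install) for Target objects carrying path, install area, build type, output name, and dependency list. The Target class itself is defined outside this hunk, so the body below is a hypothetical reconstruction; only the attribute names (path, install, type, objext, output, deps) are taken from the diff.

```python
import os

# Hypothetical sketch of the Target class this diff starts using; the
# attribute names come from the hunk, the constructor logic is assumed.
class Target:
    def __init__(self, name, path, install, type):
        self.name = name
        self.path = path
        self.type = type
        if type == 'exe':
            tfile = name
            self.objext = '.o'
            self.install = install or 'bin'
        elif type == 'lib':
            tfile = name + '.la'
            self.objext = '.lo'
            self.install = install or 'lib'
        else:
            raise ValueError('unknown build type: %s' % type)
        self.output = os.path.join(path, tfile)
        self.deps = []          # dependent Target objects, filled in later
```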
target_deps[target] = [ ]
deps = [ ]
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 groups = { } target_deps = { } build_targets = { } install = { } test_progs = [ ] file_deps = [ ] target_dirs = { } targets = _filter_targets(parser.sections()) for target in targets: path = parser.get(target, 'path') target_dirs[path] = None install_type = parser.get(target, 'install') bldtype = parser.get(target, 'type') if bldtype == 'exe': tfile = target objext = '.o' if not install_type: install_type = 'bin' elif bldtype == 'lib': tfile = target + '.la' objext = '.lo' if not install_type: install_type = 'lib' elif bldtype == 'doc': pass else: print 'ERROR: unknown build type:', bldtype errors = 1 continue tpath = os.path.join(path, tfile) build_targets[target] = tpath if install.has_key(install_type): install[install_type].append(target) else: install[install_type] = [ target ] if install_type == 'test' and bldtype == 'exe' \ and parser.get(target, 'testing') != 'skip': test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] target_deps[target] = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in targets: target_deps[target].append(lib) dep_path = parser.get(lib, 'path') if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') objstr = string.join(objects) objnames = string.join(map(os.path.basename, objects)) libstr = string.join(libs) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, objstr, string.join(target_deps[target]), targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, libstr)) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target) else: groups[group] = [ target ] for group in groups.keys(): group_deps = _sort_deps(groups[group], target_deps) for i in range(len(group_deps)): group_deps[i] = build_targets[group_deps[i]] ofile.write('%s: %s\n\n' % (group, string.join(group_deps))) ofile.write('CLEAN_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = filter(_filter_clean_files, build_targets.values()) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(build_targets[t]) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in target_deps[t]: bt = build_targets[dep] if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(mkinstalldirs) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test': ofile.write('install-%s: %s\n' '\t$(mkinstalldirs) $(%sdir)\n' % (area, string.join(files), area)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area), file, os.path.join('$(%sdir)' % area, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(mkinstalldirs) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (file, 
os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for target, tpath in build_targets.items(): ofile.write('%s: %s\n' % (target, tpath)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
if lib in targets: target_deps[target].append(lib) dep_path = parser.get(lib, 'path')
if lib in target_names: tlib = targets[lib] target_ob.deps.append(tlib) deps.append(tlib.output) dep_path = tlib.path
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 groups = { } target_deps = { } build_targets = { } install = { } test_progs = [ ] file_deps = [ ] target_dirs = { } targets = _filter_targets(parser.sections()) for target in targets: path = parser.get(target, 'path') target_dirs[path] = None install_type = parser.get(target, 'install') bldtype = parser.get(target, 'type') if bldtype == 'exe': tfile = target objext = '.o' if not install_type: install_type = 'bin' elif bldtype == 'lib': tfile = target + '.la' objext = '.lo' if not install_type: install_type = 'lib' elif bldtype == 'doc': pass else: print 'ERROR: unknown build type:', bldtype errors = 1 continue tpath = os.path.join(path, tfile) build_targets[target] = tpath if install.has_key(install_type): install[install_type].append(target) else: install[install_type] = [ target ] if install_type == 'test' and bldtype == 'exe' \ and parser.get(target, 'testing') != 'skip': test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] target_deps[target] = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in targets: target_deps[target].append(lib) dep_path = parser.get(lib, 'path') if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') objstr = string.join(objects) objnames = string.join(map(os.path.basename, objects)) libstr = string.join(libs) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, objstr, string.join(target_deps[target]), targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, libstr)) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target) else: groups[group] = [ target ] for group in groups.keys(): group_deps = _sort_deps(groups[group], target_deps) for i in range(len(group_deps)): group_deps[i] = build_targets[group_deps[i]] ofile.write('%s: %s\n\n' % (group, string.join(group_deps))) ofile.write('CLEAN_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = filter(_filter_clean_files, build_targets.values()) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(build_targets[t]) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in target_deps[t]: bt = build_targets[dep] if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(mkinstalldirs) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test': ofile.write('install-%s: %s\n' '\t$(mkinstalldirs) $(%sdir)\n' % (area, string.join(files), area)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area), file, os.path.join('$(%sdir)' % area, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(mkinstalldirs) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (file, 
os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for target, tpath in build_targets.items(): ofile.write('%s: %s\n' % (target, tpath)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
% (targ_varname, objstr, string.join(target_deps[target]),
% (targ_varname, objstr, string.join(deps),
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 groups = { } target_deps = { } build_targets = { } install = { } test_progs = [ ] file_deps = [ ] target_dirs = { } targets = _filter_targets(parser.sections()) for target in targets: path = parser.get(target, 'path') target_dirs[path] = None install_type = parser.get(target, 'install') bldtype = parser.get(target, 'type') if bldtype == 'exe': tfile = target objext = '.o' if not install_type: install_type = 'bin' elif bldtype == 'lib': tfile = target + '.la' objext = '.lo' if not install_type: install_type = 'lib' elif bldtype == 'doc': pass else: print 'ERROR: unknown build type:', bldtype errors = 1 continue tpath = os.path.join(path, tfile) build_targets[target] = tpath if install.has_key(install_type): install[install_type].append(target) else: install[install_type] = [ target ] if install_type == 'test' and bldtype == 'exe' \ and parser.get(target, 'testing') != 'skip': test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] target_deps[target] = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in targets: target_deps[target].append(lib) dep_path = parser.get(lib, 'path') if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') objstr = string.join(objects) objnames = string.join(map(os.path.basename, objects)) libstr = string.join(libs) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, objstr, string.join(target_deps[target]), targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, libstr)) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target) else: groups[group] = [ target ] for group in groups.keys(): group_deps = _sort_deps(groups[group], target_deps) for i in range(len(group_deps)): group_deps[i] = build_targets[group_deps[i]] ofile.write('%s: %s\n\n' % (group, string.join(group_deps))) ofile.write('CLEAN_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = filter(_filter_clean_files, build_targets.values()) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(build_targets[t]) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in target_deps[t]: bt = build_targets[dep] if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(mkinstalldirs) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test': ofile.write('install-%s: %s\n' '\t$(mkinstalldirs) $(%sdir)\n' % (area, string.join(files), area)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area), file, os.path.join('$(%sdir)' % area, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(mkinstalldirs) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (file, 
os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for target, tpath in build_targets.items(): ofile.write('%s: %s\n' % (target, tpath)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target) else: groups[group] = [ target ] for group in groups.keys(): group_deps = _sort_deps(groups[group], target_deps) for i in range(len(group_deps)): group_deps[i] = build_targets[group_deps[i]] ofile.write('%s: %s\n\n' % (group, string.join(group_deps)))
for g_name, g_targets in groups.items(): ofile.write('%s: %s\n\n' % (g_name, string.join(g_targets)))
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 groups = { } target_deps = { } build_targets = { } install = { } test_progs = [ ] file_deps = [ ] target_dirs = { } targets = _filter_targets(parser.sections()) for target in targets: path = parser.get(target, 'path') target_dirs[path] = None install_type = parser.get(target, 'install') bldtype = parser.get(target, 'type') if bldtype == 'exe': tfile = target objext = '.o' if not install_type: install_type = 'bin' elif bldtype == 'lib': tfile = target + '.la' objext = '.lo' if not install_type: install_type = 'lib' elif bldtype == 'doc': pass else: print 'ERROR: unknown build type:', bldtype errors = 1 continue tpath = os.path.join(path, tfile) build_targets[target] = tpath if install.has_key(install_type): install[install_type].append(target) else: install[install_type] = [ target ] if install_type == 'test' and bldtype == 'exe' \ and parser.get(target, 'testing') != 'skip': test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] target_deps[target] = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in targets: target_deps[target].append(lib) dep_path = parser.get(lib, 'path') if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') objstr = string.join(objects) objnames = string.join(map(os.path.basename, objects)) libstr = string.join(libs) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, objstr, string.join(target_deps[target]), targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, libstr)) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target) else: groups[group] = [ target ] for group in groups.keys(): group_deps = _sort_deps(groups[group], target_deps) for i in range(len(group_deps)): group_deps[i] = build_targets[group_deps[i]] ofile.write('%s: %s\n\n' % (group, string.join(group_deps))) ofile.write('CLEAN_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = filter(_filter_clean_files, build_targets.values()) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(build_targets[t]) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in target_deps[t]: bt = build_targets[dep] if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(mkinstalldirs) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test': ofile.write('install-%s: %s\n' '\t$(mkinstalldirs) $(%sdir)\n' % (area, string.join(files), area)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area), file, os.path.join('$(%sdir)' % area, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(mkinstalldirs) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (file, 
os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for target, tpath in build_targets.items(): ofile.write('%s: %s\n' % (target, tpath)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
for dep in target_deps[t]: bt = build_targets[dep]
for dep in targets[t].deps: bt = dep.output
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 groups = { } target_deps = { } build_targets = { } install = { } test_progs = [ ] file_deps = [ ] target_dirs = { } targets = _filter_targets(parser.sections()) for target in targets: path = parser.get(target, 'path') target_dirs[path] = None install_type = parser.get(target, 'install') bldtype = parser.get(target, 'type') if bldtype == 'exe': tfile = target objext = '.o' if not install_type: install_type = 'bin' elif bldtype == 'lib': tfile = target + '.la' objext = '.lo' if not install_type: install_type = 'lib' elif bldtype == 'doc': pass else: print 'ERROR: unknown build type:', bldtype errors = 1 continue tpath = os.path.join(path, tfile) build_targets[target] = tpath if install.has_key(install_type): install[install_type].append(target) else: install[install_type] = [ target ] if install_type == 'test' and bldtype == 'exe' \ and parser.get(target, 'testing') != 'skip': test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] target_deps[target] = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in targets: target_deps[target].append(lib) dep_path = parser.get(lib, 'path') if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') objstr = string.join(objects) objnames = string.join(map(os.path.basename, objects)) libstr = string.join(libs) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, objstr, string.join(target_deps[target]), targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, libstr)) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target) else: groups[group] = [ target ] for group in groups.keys(): group_deps = _sort_deps(groups[group], target_deps) for i in range(len(group_deps)): group_deps[i] = build_targets[group_deps[i]] ofile.write('%s: %s\n\n' % (group, string.join(group_deps))) ofile.write('CLEAN_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = filter(_filter_clean_files, build_targets.values()) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(build_targets[t]) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in target_deps[t]: bt = build_targets[dep] if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(mkinstalldirs) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test': ofile.write('install-%s: %s\n' '\t$(mkinstalldirs) $(%sdir)\n' % (area, string.join(files), area)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area), file, os.path.join('$(%sdir)' % area, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(mkinstalldirs) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (file, 
os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for target, tpath in build_targets.items(): ofile.write('%s: %s\n' % (target, tpath)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
def _sort_deps(targets, deps): "Sort targets based on dependencies specified in deps." order = { } for i in range(len(targets)): order[targets[i]] = i for t in targets: thisval = order[t] for dep in deps[t]: if order.get(dep, -1) > thisval: order[t] = order[dep] order[dep] = thisval thisval = order[t] targets = targets[:] def sortfunc(a, b, order=order): return cmp(order[a], order[b]) targets.sort(sortfunc) return targets
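The _sort_deps helper above, removed by these hunks in favor of emitting group members directly, reorders a target list with an ad-hoc index-swapping pass. The conventional way to order build targets after their dependencies is a depth-first topological sort; this is a generic sketch, not code from the repository.

```python
def topo_sort(targets, deps):
    """Return TARGETS reordered so each item follows its dependencies.
    DEPS maps a target name to a list of prerequisite names. Cycles are
    tolerated rather than detected, matching the forgiving spirit of the
    helper it stands in for."""
    seen = {}
    order = []

    def visit(t):
        if t in seen:
            return
        seen[t] = 1
        for dep in deps.get(t, []):
            if dep in targets:
                visit(dep)
        order.append(t)

    for t in targets:
        visit(t)
    return order

# topo_sort(['app', 'libcore'], {'app': ['libcore']}) -> ['libcore', 'app']
```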
def _filter_clean_files(fname): "Filter files which have a suffix handled by the standard 'clean' rule." # for now, we only look for .la which keeps this code simple. return fname[-3:] != '.la'
"commit a file inside a directory that's already scheduled to be added"
"commit a file inside dir scheduled for addition"
def commit_in_dir_scheduled_for_addition(sbox): "commit a file inside a directory that's already scheduled to be added" if sbox.build(): return 1 wc_dir = sbox.wc_dir A_path = os.path.join(wc_dir, 'A') Z_path = os.path.join(wc_dir, 'Z') mu_path = os.path.join(wc_dir, 'Z', 'mu') svntest.main.run_svn(None, 'move', A_path, Z_path) out, err = svntest.main.run_svn(1, 'commit', '-m', '"logmsg"', mu_path) if len(err) == 0: return 1 return 0
xreadlines(self.log)))
self.log.readlines()))
def run(self, list): 'Run all test programs given in LIST.' self._open_log('w') failed = 0 for prog in list: failed = self._run_test(prog) or failed if failed: print 'At least one test FAILED, checking ' + self.logfile self._open_log('r') map(sys.stdout.write, filter(lambda x: x[:4] == 'FAIL', xreadlines(self.log))) self._close_log()
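The xreadlines() call being removed here was deprecated in Python 2.3 and gone by 3.0; readlines(), as used in the fix, slurps the whole log into memory. Since Python 2.2 the file object is itself a lazy line iterator, so the FAIL filter can stream instead:

```python
import sys

# Streaming equivalent of the filter(...) call in the hunk; 'tests.log'
# is a placeholder standing in for self.logfile.
with open('tests.log') as log:
    for line in log:
        if line.startswith('FAIL'):
            sys.stdout.write(line)
```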
os.dup2(self.log.fileno(), 1) os.dup2(self.log.fileno(), 2) rv = os.spawnv(os.P_WAIT, cmdline[0], cmdline) os.dup2(old_stdout, 1) os.dup2(old_stderr, 2) os.close(old_stdout) os.close(old_stderr) return rv
try: os.dup2(self.log.fileno(), 1) os.dup2(self.log.fileno(), 2) rv = os.spawnv(os.P_WAIT, cmdline[0], cmdline) except: restore_streams(old_stdout, old_stderr) raise else: restore_streams(old_stdout, old_stderr) return rv
def _run_prog(self, cmdline): 'Execute COMMAND, redirecting standard output and error to the log file.' self.log.flush() old_stdout = os.dup(1) old_stderr = os.dup(2) os.dup2(self.log.fileno(), 1) os.dup2(self.log.fileno(), 2) rv = os.spawnv(os.P_WAIT, cmdline[0], cmdline) os.dup2(old_stdout, 1) os.dup2(old_stderr, 2) os.close(old_stdout) os.close(old_stderr) return rv
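The fix wraps the descriptor shuffle in try/except so a failed spawn cannot leave fds 1 and 2 pointing at the log forever. The same guarantee reads more directly with try/finally; a self-contained sketch of the pattern, with subprocess standing in for os.spawnv and a made-up function name:

```python
import os
import sys
import subprocess

def run_logged(cmdline, logfile):
    """Run CMDLINE with fds 1 and 2 redirected into LOGFILE, restoring the
    original descriptors no matter what the spawn does."""
    log = open(logfile, 'a')
    sys.stdout.flush()
    sys.stderr.flush()
    old_stdout = os.dup(1)
    old_stderr = os.dup(2)
    try:
        os.dup2(log.fileno(), 1)
        os.dup2(log.fileno(), 2)
        return subprocess.call(cmdline)   # child inherits the redirected fds
    finally:
        os.dup2(old_stdout, 1)
        os.dup2(old_stderr, 2)
        os.close(old_stdout)
        os.close(old_stderr)
        log.close()
```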
output, errput = main.run_svn(1, "proplist", path)
output, errput = main.run_svn(1, "proplist", path, "--verbose")
def get_props(path): "Return a hash of props for PATH, using the svn client." # It's not kosher to look inside .svn/ and try to read the internal # property storage format. Instead, we use 'svn proplist'. After # all, this is the only way the user can retrieve them, so we're # respecting the black-box paradigm. props = {} output, errput = main.run_svn(1, "proplist", path) for line in output: if line.startswith('Properties on '): continue name, value = line.split(' : ') name = string.strip(name) value = string.strip(value) props[name] = value return props
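Adding --verbose makes proplist print values as well as names, which the parser in get_props depends on. Note that its line.split(' : ') raises if a value itself contains ' : '; splitting once is safer. A small sketch of that fix:

```python
# Safer variant of the split in get_props: only the first ' : ' separates
# the property name from its value.
def parse_prop_line(line):
    name, value = line.split(' : ', 1)
    return name.strip(), value.strip()
```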
"Return a status list reflecting local mods made by next routine."
"Return a status list reflecting the local mods made by make_standard_slew_of_changes()."
def get_standard_status_list(wc_dir): "Return a status list reflecting local mods made by next routine." status_list = svntest.actions.get_virginal_status_list(wc_dir, '1') ### todo: use status-hash below instead. # `.' status_list[0][3]['status'] = '_M' # A/B/lambda, A/D status_list[5][3]['status'] = 'M ' status_list[11][3]['status'] = 'M ' # A/B/E, A/D/H/chi status_list[6][3]['status'] = 'R ' status_list[18][3]['status'] = 'R ' # A/B/E/alpha, A/B/E/beta, A/C, A/D/gamma status_list[7][3]['status'] = 'D ' status_list[8][3]['status'] = 'D ' status_list[10][3]['status'] = 'D ' status_list[12][3]['status'] = 'D ' status_list[15][3]['status'] = 'D ' # A/D/G/pi, A/D/H/omega status_list[14][3]['status'] = '_M' status_list[20][3]['status'] = 'MM' # New things status_list.append([os.path.join(wc_dir, 'Q'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) status_list.append([os.path.join(wc_dir, 'Q', 'floo'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) status_list.append([os.path.join(wc_dir, 'A', 'D', 'H', 'gloo'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) status_list.append([os.path.join(wc_dir, 'A', 'B', 'E', 'bloo'), None, {}, {'status' : 'A ', 'locked' : ' ', 'wc_rev' : '0', 'repos_rev' : '1'}]) return status_list
["svn_delta.i"], libraries=['svn_fs', 'svn_swig_py', 'swigpy'],
["../svn_delta.i"], libraries=['svn_delta', 'svn_swig_py', 'swigpy'],
def swig_sources(self, sources): swig = self.find_swig() swig_cmd = [swig, "-c", "-python"] for dir in self.include_dirs: swig_cmd.append("-I" + dir)
libraries=['svn_ra', 'svn_swig_py', 'swigpy'],
libraries=['svn_repos', 'svn_swig_py', 'swigpy'],
def swig_sources(self, sources): swig = self.find_swig() swig_cmd = [swig, "-c", "-python"] for dir in self.include_dirs: swig_cmd.append("-I" + dir)
libraries=['svn_ra', 'swigpy'],
libraries=['svn_subr', 'swigpy', 'apr'],
def swig_sources(self, sources): swig = self.find_swig() swig_cmd = [swig, "-c", "-python"] for dir in self.include_dirs: swig_cmd.append("-I" + dir)
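These three rows correct the libraries= lists so each SWIG extension links against the C library it actually wraps (svn_delta, svn_repos, svn_subr) rather than a neighbor. For reference, a distutils-era Extension for one such module looks roughly like this; the module and paths are illustrative only:

```python
from distutils.core import setup, Extension

# Sketch of one wrapper module from the corrected configuration; distutils'
# build_ext runs SWIG itself when it sees a .i source.
setup(name='svn-python',
      ext_modules=[
          Extension('_delta',
                    ['../svn_delta.i'],
                    libraries=['svn_delta', 'svn_swig_py', 'swigpy']),
      ])
```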
def commit(self):
def get_metadata(self, pool): if self.changes: file, rev = self.changes[0] else: file, rev = self.deletes[0] rip = RevInfoParser() rip.parse_cvs_file(file) author = rip.authors[rev] log = rip.logs[rev] a_t = util.apr_ansi_time_to_apr_time(self.t_max)[1] date = util.svn_time_to_nts(a_t, pool) return author, log, date def commit(self, t_fs, ctx):
def commit(self): # commit this transaction print 'committing: %s, over %d seconds' % (time.ctime(self.t_min), self.t_max - self.t_min) for f, r in self.changes: print ' changing %s : %s' % (r, f) for f, r in self.deletes: print ' deleting %s : %s' % (r, f)
print ' changing %s : %s' % (r, f)
repos_path = '/' + relative_name(ctx.cvsroot, f[:-2]) print ' changing %s : %s' % (r, repos_path) dirname = os.path.dirname(repos_path) if dirname != '/': parts = string.split(dirname[1:], os.sep) for i in range(1, len(parts) + 1): parent_dir = '/' + string.join(parts[:i], '/') if fs.check_path(root, parent_dir, f_pool) == svn_node_none: print ' making dir:', parent_dir fs.make_dir(root, parent_dir, f_pool) if fs.check_path(root, repos_path, f_pool) == svn_node_none: created_file = 1 fs.make_file(root, repos_path, f_pool) else: created_file = 0 handler, baton = fs.apply_textdelta(root, repos_path, f_pool) try: statcache.stat(f) except os.error: dirname, fname = os.path.split(f) f = os.path.join(dirname, 'Attic', fname) statcache.stat(f) pipe = os.popen('co -q -p%s %s' % (r, f), 'r', 102400) if created_file: _delta.svn_txdelta_send_string(pipe.read(), handler, baton, f_pool) else: stream2 = util.svn_stream_from_stdio(pipe, f_pool) if repos_path == lastcommit[0]: infile2 = os.popen("co -q -p%s %s" % (lastcommit[1], f), "r", 102400) stream1 = util.svn_stream_from_stdio(infile2, f_pool) else: stream1 = fs.file_contents(root, repos_path, f_pool) txstream = _delta.svn_txdelta(stream1, stream2, f_pool) _delta.svn_txdelta_send_txstream(txstream, handler, baton, f_pool) infile2 = None pipe.close() util.svn_pool_clear(f_pool) lastcommit = (repos_path, r)
def commit(self): # commit this transaction print 'committing: %s, over %d seconds' % (time.ctime(self.t_min), self.t_max - self.t_min) for f, r in self.changes: print ' changing %s : %s' % (r, f) for f, r in self.deletes: print ' deleting %s : %s' % (r, f)
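Inside the new commit() body, each changed file first gets its missing parent directories created one path prefix at a time. That walk is easy to get wrong off by one; here is a generic version with the filesystem calls abstracted out, precisely so this sketch does not claim the svn fs binding signatures:

```python
def ensure_parents(path, exists, make_dir):
    """Create every missing ancestor of PATH ('/a/b/c' style), top down.
    EXISTS and MAKE_DIR are caller-supplied callables."""
    parts = path.strip('/').split('/')[:-1]
    for i in range(1, len(parts) + 1):
        parent = '/' + '/'.join(parts[:i])
        if not exists(parent):
            make_dir(parent)

# e.g. seen = set(); ensure_parents('/trunk/sub/file.c',
#                                   seen.__contains__, seen.add)
# leaves seen == {'/trunk', '/trunk/sub'}
```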
print ' deleting %s : %s' % (r, f)
repos_path = '/' + relative_name(ctx.cvsroot, f[:-2]) print ' deleting %s : %s' % (r, repos_path) if r != '1.1': fs.delete(root, repos_path, f_pool) util.svn_pool_clear(f_pool) author, log, date = self.get_metadata(c_pool) fs.change_txn_prop(txn, 'svn:author', author, c_pool) fs.change_txn_prop(txn, 'svn:log', log, c_pool) conflicts, new_rev = fs.commit_txn(txn) fs.change_rev_prop(t_fs, new_rev, 'svn:date', date, c_pool) if conflicts != '\n': print ' CONFLICTS:', `conflicts` print ' new revision:', new_rev util.svn_pool_destroy(c_pool)
def commit(self): # commit this transaction print 'committing: %s, over %d seconds' % (time.ctime(self.t_min), self.t_max - self.t_min) for f, r in self.changes: print ' changing %s : %s' % (r, f) for f, r in self.deletes: print ' deleting %s : %s' % (r, f)
c.commit()
c.commit(t_fs, ctx) count = count + len(process) if commits: process = [ ] for id, c in commits.items(): process.append((c.t_max, c)) process.sort() for t_max, c in process: c.commit(t_fs, ctx)
def pass4(ctx): # process the logfiles, creating the target commits = { } count = 0 for line in fileinput.FileInput(ctx.log_fname_base + SORTED_REVS_SUFFIX): timestamp, id, op, rev, fname = parse_revs_line(line) if commits.has_key(id): c = commits[id] else: c = commits[id] = Commit() c.add(timestamp, op, fname, rev) # scan for commits to process process = [ ] for id, c in commits.items(): if c.t_max + COMMIT_THRESHOLD < timestamp: process.append((c.t_max, c)) del commits[id] process.sort() for t_max, c in process: c.commit() count = count + len(process) if ctx.verbose: print count, 'commits processed.'
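pass4 closes a Commit once COMMIT_THRESHOLD seconds pass without new revisions for it; the fix at the row above adds the missing final flush, so commits still open when the log runs out get committed too. The same shape in miniature, with a made-up threshold and record format:

```python
THRESHOLD = 300   # seconds, standing in for COMMIT_THRESHOLD

def batch_by_window(records):
    """RECORDS: (timestamp, key) pairs sorted by timestamp. Yields
    (key, timestamps) once a key has been quiet for THRESHOLD seconds,
    plus a final flush of whatever is still open at end of input."""
    open_batches = {}
    for ts, key in records:
        open_batches.setdefault(key, []).append(ts)
        for k, stamps in list(open_batches.items()):
            if stamps[-1] + THRESHOLD < ts:
                yield k, open_batches.pop(k)
    for k, stamps in open_batches.items():   # the flush this diff adds
        yield k, stamps
```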
def convert(cvsroot, target=SVNROOT, log_fname_base=DATAFILE, start_pass=1, verbose=0):
def convert(pool, cvsroot, target=SVNROOT, log_fname_base=DATAFILE, start_pass=1, verbose=0):
def convert(cvsroot, target=SVNROOT, log_fname_base=DATAFILE, start_pass=1, verbose=0): "Convert a CVS repository to an SVN repository." # prepare the operation context ctx = _ctx() ctx.cvsroot = cvsroot ctx.target = target ctx.log_fname_base = log_fname_base ctx.verbose = verbose times = [ None ] * len(_passes) for i in range(start_pass - 1, len(_passes)): times[i] = time.time() if verbose: print '----- pass %d -----' % (i + 1) _passes[i](ctx) times.append(time.time()) if verbose: for i in range(start_pass, len(_passes)+1): print 'pass %d: %d seconds' % (i, int(times[i] - times[i-1])) print ' total:', int(times[len(_passes)] - times[start_pass-1]), 'seconds'
print 'USAGE: %s [-p pass] repository-path' % sys.argv[0]
print 'USAGE: %s [-v] [-p pass] repository-path' % sys.argv[0]
def usage(): print 'USAGE: %s [-p pass] repository-path' % sys.argv[0] sys.exit(1)
convert(args[0], start_pass=start_pass, verbose=verbose)
util.run_app(convert, args[0], start_pass=start_pass, verbose=verbose)
def main(): opts, args = getopt.getopt(sys.argv[1:], 'p:v') if len(args) != 1: usage() verbose = 0 start_pass = 1 for opt, value in opts: if opt == '-p': start_pass = int(value) if start_pass < 1 or start_pass > len(_passes): print 'ERROR: illegal value (%d) for starting pass. ' \ 'must be 1 through %d.' % (start_pass, len(_passes)) sys.exit(1) elif opt == '-v': verbose = 1 convert(args[0], start_pass=start_pass, verbose=verbose)
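util.run_app in the SVN bindings allocates a pool, calls the given function with the pool prepended to its arguments, and tears the pool down afterwards, which is why convert() grew a pool parameter two rows up. A self-contained stand-in for that shape; the Pool class here is a dummy, not the APR type:

```python
def run_app(func, *args, **kwargs):
    """Call FUNC(pool, *args, **kwargs), guaranteeing pool cleanup."""
    class Pool(object):          # dummy resource standing in for an APR pool
        def destroy(self):
            pass
    pool = Pool()
    try:
        return func(pool, *args, **kwargs)
    finally:
        pool.destroy()
```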
url = os.path.join(svntest.main.test_area_url, svntest.main.general_repo_dir, sbox.name) print url
url = svntest.main.test_area_url + '/' + svntest.main.current_repo_dir
def diff_pure_repository_update_a_file(sbox): "pure repository diff update a file" if sbox.build(): return 1 wc_dir = sbox.wc_dir was_cwd = os.getcwd() os.chdir(wc_dir) update_a_file() svntest.main.run_svn(None, 'ci') os.chdir(was_cwd) url = os.path.join(svntest.main.test_area_url, svntest.main.general_repo_dir, sbox.name) print url diff_output, err_output = svntest.main.run_svn(None, 'diff', '-r1:2', url) return check_update_a_file(diff_output)
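The removed line built a repository URL with os.path.join, which inserts the platform separator (a backslash on Windows) and silently discards earlier components when a later one looks absolute; the fix concatenates with '/' instead. posixpath gives the same guarantee portably:

```python
import posixpath

def url_join(base, *parts):
    # posixpath always joins with '/', regardless of platform; like the
    # plain concatenation in the fix, callers must not pass absolute parts.
    return posixpath.join(base, *parts)

# url_join('file:///tmp/repos', 'general', 'name')
# -> 'file:///tmp/repos/general/name'
```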
files = [ ] for t in inst_targets: files.append(t.output)
files = _sorted_files(inst_targets)
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 targets = { } install = { } # install area name -> targets test_progs = [ ] test_deps = [ ] fs_test_progs = [ ] fs_test_deps = [ ] file_deps = [ ] target_dirs = { } manpages = [ ] infopages = [ ] target_names = _filter_targets(parser.sections()) # PASS 1: collect the targets and some basic info for target in target_names: try: target_ob = Target(target, parser.get(target, 'path'), parser.get(target, 'install'), parser.get(target, 'type')) except GenMakeError, e: print e errors = 1 continue targets[target] = target_ob itype = target_ob.install if install.has_key(itype): install[itype].append(target_ob) else: install[itype] = [ target_ob ] target_dirs[target_ob.path] = None if errors: sys.exit(1) # PASS 2: generate the outputs for target in target_names: target_ob = targets[target] path = target_ob.path bldtype = target_ob.type objext = target_ob.objext tpath = target_ob.output tfile = os.path.basename(tpath) if target_ob.install == 'test' and bldtype == 'exe': test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': test_progs.append(tpath) if target_ob.install == 'fs-test' and bldtype == 'exe': fs_test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': fs_test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] deps = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in target_names: tlib = targets[lib] target_ob.deps.append(tlib) deps.append(tlib.output) # link in the library by simply referring to the .la file ### hmm. use join() for retreat + ... ? 
libs.append(retreat + os.path.join(tlib.path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) for man in string.split(parser.get(target, 'manpages')): manpages.append(man) for info in string.split(parser.get(target, 'infopages')): infopages.append(info) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') add_deps = parser.get(target, 'add-deps') objnames = string.join(map(os.path.basename, objects)) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, string.join(objects + deps), add_deps, targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, string.join(libs))) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') elif custom == 'swig-py': ofile.write('# build this with -DSWIGPYTHON\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_SWIG_PY)\n' % (src[:-2], objext, src)) ofile.write('\n') for g_name, g_targets in install.items(): target_names = [ ] for i in g_targets: target_names.append(i.output) ofile.write('%s: %s\n\n' % (g_name, string.join(target_names))) cfiles = [ ] for target in targets.values(): # .la files are handled by the standard 'clean' rule; clean all the # other targets if target.output[-3:] != '.la': cfiles.append(target.output) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(t.output) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: # cd to dirname before install to work around libtool 1.4.2 bug. dirname, fname = os.path.split(file) base, ext = os.path.splitext(fname) name = string.replace(base, 'libmod_', '') ofile.write('\tcd %s ; $(INSTALL_MOD_SHARED) -n %s %s\n' % (dirname, name, fname)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in t.deps: bt = dep.output if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors # Construct a .libs directory within the Apache area and populate it # with the appropriate files. Also drop the .la file in the target dir. 
ofile.write('\ninstall-mods-static: %s\n' '\t$(MKDIR) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) # copy the other files to the target dir for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test' and area != 'fs-test': area_var = string.replace(area, '-', '_') ofile.write('install-%s: %s\n' '\t$(MKDIR) $(%sdir)\n' % (area, string.join(files), area_var)) for file in files: # cd to dirname before install to work around libtool 1.4.2 bug. dirname, fname = os.path.split(file) ofile.write('\tcd %s ; $(INSTALL_%s) %s %s\n' % (dirname, string.upper(area_var), fname, os.path.join('$(%sdir)' % area_var, fname))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(MKDIR) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (os.path.join('$(top_srcdir)', file), os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for name, target in targets.items(): ofile.write('%s: %s\n' % (name, target.output)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors script_dirs = [] for script in scripts: script_dirs.append(re.compile("[-a-z0-9A-Z_.]*$").sub("", script)) fs_scripts, fs_errors = _collect_paths(parser.get('fs-test-scripts', 'paths')) errors = errors or fs_errors ofile.write('BUILD_DIRS = %s %s\n' % (string.join(target_dirs.keys()), string.join(script_dirs))) ofile.write('FS_TEST_DEPS = %s\n\n' % string.join(fs_test_deps + fs_scripts)) ofile.write('FS_TEST_PROGRAMS = %s\n\n' % string.join(fs_test_progs + fs_scripts)) ofile.write('TEST_DEPS = %s\n\n' % string.join(test_deps + scripts)) ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) ofile.write('MANPAGES = %s\n\n' % string.join(manpages)) ofile.write('INFOPAGES = %s\n\n' % string.join(infopages)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. 
(the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
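The `_sorted_files` helper introduced in this row is not defined in the hunk. A plausible sketch, assuming it merely collects each install target's output and sorts the result so the generated install rules come out in a deterministic order (the name is real, but this body is a reconstruction, not the actual implementation):

    def _sorted_files(targets):
        # Hypothetical reconstruction: gather build outputs, then sort
        # so the emitted makefile is stable across runs.
        files = [ ]
        for t in targets:
            files.append(t.output)
        files.sort()
        return files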
status_list[6][3]['wc_rev'] = '0'
def get_standard_status_list(wc_dir):
    "Return a status list reflecting local mods made by next routine."
    status_list = svntest.actions.get_virginal_status_list(wc_dir, '1')
    ### todo: use status-hash below instead.
    # `.'
    status_list[0][3]['status'] = '_M'
    # A/B/lambda, A/D
    status_list[5][3]['status'] = 'M '
    status_list[11][3]['status'] = 'M '
    # A/B/E, A/D/H/chi
    status_list[6][3]['status'] = 'R '
    status_list[6][3]['wc_rev'] = '0'
    status_list[18][3]['status'] = 'R '
    status_list[18][3]['wc_rev'] = '0'
    # A/B/E/alpha, A/B/E/beta, A/C, A/D/gamma
    status_list[7][3]['status'] = 'D '
    status_list[8][3]['status'] = 'D '
    status_list[10][3]['status'] = 'D '
    status_list[12][3]['status'] = 'D '
    status_list[15][3]['status'] = 'D '
    # A/D/G/pi, A/D/H/omega
    status_list[14][3]['status'] = '_M'
    status_list[20][3]['status'] = 'MM'
    # New things
    status_list.append([os.path.join(wc_dir, 'Q'), None, {},
                        {'status' : 'A ', 'locked' : ' ',
                         'wc_rev' : '0', 'repos_rev' : '1'}])
    status_list.append([os.path.join(wc_dir, 'Q', 'floo'), None, {},
                        {'status' : 'A ', 'locked' : ' ',
                         'wc_rev' : '0', 'repos_rev' : '1'}])
    status_list.append([os.path.join(wc_dir, 'A', 'D', 'H', 'gloo'), None, {},
                        {'status' : 'A ', 'locked' : ' ',
                         'wc_rev' : '0', 'repos_rev' : '1'}])
    status_list.append([os.path.join(wc_dir, 'A', 'B', 'E', 'bloo'), None, {},
                        {'status' : 'A ', 'locked' : ' ',
                         'wc_rev' : '0', 'repos_rev' : '1'}])
    return status_list
status_list[18][3]['wc_rev'] = '0'
% (targ_varname, string.join(objects), string.join(deps),
% (targ_varname, string.join(objects + deps), add_deps,
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 targets = { } groups = { } # group name -> targets install = { } # install area name -> targets test_progs = [ ] test_deps = [ ] file_deps = [ ] target_dirs = { } target_names = _filter_targets(parser.sections()) # PASS 1: collect the targets and some basic info for target in target_names: try: target_ob = Target(target, parser.get(target, 'path'), parser.get(target, 'install'), parser.get(target, 'type')) except GenMakeError, e: print e errors = 1 continue targets[target] = target_ob group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target_ob.output) else: groups[group] = [ target_ob.output ] itype = target_ob.install if install.has_key(itype): install[itype].append(target_ob) else: install[itype] = [ target_ob ] target_dirs[target_ob.path] = None if errors: sys.exit(1) # PASS 2: generate the outputs for target in target_names: target_ob = targets[target] path = target_ob.path bldtype = target_ob.type objext = target_ob.objext tpath = target_ob.output tfile = os.path.basename(tpath) if target_ob.install == 'test' and bldtype == 'exe': test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] deps = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in target_names: tlib = targets[lib] target_ob.deps.append(tlib) deps.append(tlib.output) dep_path = tlib.path if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') objnames = string.join(map(os.path.basename, objects)) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, string.join(objects), string.join(deps), targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, string.join(libs))) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') for g_name, g_targets in groups.items(): ofile.write('%s: %s\n\n' % (g_name, string.join(g_targets))) ofile.write('CLEAN_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = [ ] for target in targets.values(): # .la files are handled by the standard 'clean' rule; clean all the # other targets if target.output[-3:] != '.la': cfiles.append(target.output) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(t.output) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in t.deps: bt = dep.output if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(mkinstalldirs) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test': ofile.write('install-%s: %s\n' '\t$(mkinstalldirs) $(%sdir)\n' % (area, string.join(files), area)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area), file, os.path.join('$(%sdir)' % area, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(mkinstalldirs) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (file, os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for name, target in targets.items(): ofile.write('%s: %s\n' % (name, 
target.output)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors ofile.write('TEST_DEPS = %s\n\n' % string.join(test_deps + scripts)) ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
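The context above works around a libtool (pre-1.4) limitation called out in its comment: libtool cannot record a dependency of one shared library on another, so when a library links against a sibling library the generator emits raw -L/-l flags rather than referring to the .la file. A condensed sketch of that branch (variable values hypothetical):

    import os

    retreat = '../../'                    # path back up to the tree root
    dep_path = 'subversion/libsvn_fs'
    lib = 'libsvn_fs'

    # Strip the 'lib' prefix so -lsvn_fs matches libsvn_fs.so in .libs.
    if lib[:3] == 'lib':
        lib = lib[3:]
    flag = '-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)
    # flag == '-L../../subversion/libsvn_fs/.libs -lsvn_fs'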
commit_deleted_edited
commit_deleted_edited, commit_in_dir_scheduled_for_addition,
def commit_deleted_edited():
    "commit files that have been deleted, but also edited"
    # Bootstrap: make independent repo and working copy.
    sbox = sandbox(commit_deleted_edited)
    wc_dir = os.path.join(svntest.main.general_wc_dir, sbox)
    if svntest.actions.make_repo_and_wc(sbox):
        return 1
    # Make some convenient paths.
    iota_path = os.path.join(wc_dir, 'iota')
    mu_path = os.path.join(wc_dir, 'A', 'mu')
    # Edit the files.
    svntest.main.file_append(iota_path, "This file has been edited.")
    svntest.main.file_append(mu_path, "This file has been edited.")
    # Schedule the files for removal.
    svntest.main.run_svn(None, 'remove', iota_path)
    svntest.main.run_svn(None, 'remove', mu_path)
    # Make our output list
    output_list = [(iota_path, None, {}, {'verb' : 'Deleting'}),
                   (mu_path, None, {}, {'verb' : 'Deleting'})]
    expected_output_tree = svntest.tree.build_generic_tree(output_list)
    # Items in the status list are all at rev 1, except the two things
    # we changed...but then, they don't exist at all.
    status_list = svntest.actions.get_virginal_status_list(wc_dir, '2')
    status_list.pop(path_index(status_list, iota_path))
    status_list.pop(path_index(status_list, mu_path))
    for item in status_list:
        item[3]['wc_rev'] = '1'
    expected_status_tree = svntest.tree.build_generic_tree(status_list)
    if svntest.actions.run_and_verify_commit (wc_dir,
                                              expected_output_tree,
                                              expected_status_tree,
                                              None, None, None, None, None,
                                              wc_dir):
        return 1
    return 0
script_dirs = []
for script in scripts:
    script_dirs.append(re.compile("[-a-z0-9A-Z_.]*$").sub("", script))
def write(self):
    errors = 0
    for target in self.target_names:
        target_ob = self.targets[target]
self.ofile.write('BUILD_DIRS = %s %s\n'
                 % (string.join(self.target_dirs.keys()),
                    string.join(script_dirs)))
script_dirs = map(os.path.dirname, scripts + fs_scripts)
build_dirs = self.target_dirs.copy()
for d in script_dirs:
    build_dirs[d] = None
self.ofile.write('BUILD_DIRS = %s\n' % string.join(build_dirs.keys()))
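The rewrite above swaps the filename-stripping regex for os.path.dirname and dedupes the directories through a dictionary, the standard set idiom before Python grew a set type. A small demonstration:

    import os

    scripts = ['subversion/tests/t1.py', 'subversion/tests/t2.py']
    build_dirs = { }
    for d in map(os.path.dirname, scripts):
        build_dirs[d] = None          # dict keys act as a set
    assert build_dirs.keys() == ['subversion/tests']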
commit_multiple_targets
commit_multiple_targets, commit_multiple_targets_2
def commit_multiple_targets():
    "commit multiple targets"
    wc_dir = os.path.join (general_wc_dir, 'commit_multiple_targets')
    if make_repo_and_wc('commit_multiple_targets'):
        return 1
    # This test will commit three targets: psi, B, and pi. In that order.
    # Make local mods to many files.
    AB_path = os.path.join(wc_dir, 'A', 'B')
    lambda_path = os.path.join(wc_dir, 'A', 'B', 'lambda')
    rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho')
    pi_path = os.path.join(wc_dir, 'A', 'D', 'G', 'pi')
    omega_path = os.path.join(wc_dir, 'A', 'D', 'H', 'omega')
    psi_path = os.path.join(wc_dir, 'A', 'D', 'H', 'psi')
    svn_test_main.file_append (lambda_path, 'new appended text for lambda')
    svn_test_main.file_append (rho_path, 'new appended text for rho')
    svn_test_main.file_append (pi_path, 'new appended text for pi')
    svn_test_main.file_append (omega_path, 'new appended text for omega')
    svn_test_main.file_append (psi_path, 'new appended text for psi')
    # Just for kicks, add a property to A/D/G as well. We'll make sure
    # that it *doesn't* get committed.
    ADG_path = os.path.join(wc_dir, 'A', 'D', 'G')
    svn_test_main.run_svn('propset', 'foo', 'bar', ADG_path)
    # Created expected output tree for 'svn ci'. We should see changes
    # only on these three targets, no others.
    output_list = [ [psi_path, None, {'verb' : 'Changing' }],
                    [lambda_path, None, {'verb' : 'Changing' }],
                    [pi_path, None, {'verb' : 'Changing' }] ]
    expected_output_tree = svn_tree.build_generic_tree(output_list)
    # Create expected status tree; all local revisions should be at 1,
    # but our three targets should be at 2.
    status_list = get_virginal_status_list(wc_dir, '2')
    for item in status_list:
        if ((item[0] != psi_path) and (item[0] != lambda_path)
            and (item[0] != pi_path)):
            item[2]['wc_rev'] = '1'
        # rho and omega should still display as locally modified:
        if ((item[0] == rho_path) or (item[0] == omega_path)):
            item[2]['status'] = 'M '
        # A/D/G should still have a local property set, too.
        if (item[0] == ADG_path):
            item[2]['status'] = '_M'
    expected_status_tree = svn_tree.build_generic_tree(status_list)
    return run_and_verify_commit (wc_dir,
                                  expected_output_tree,
                                  expected_status_tree,
                                  psi_path, AB_path, pi_path)
"ensure update is not reporting additions"
"ensure update is not munging additions or replacements"
def update_ignores_added():
    "ensure update is not reporting additions"
    sbox = sandbox(update_ignores_added)
    wc_dir = os.path.join (svntest.main.general_wc_dir, sbox)
    if svntest.actions.make_repo_and_wc(sbox):
        return 1
    # Create a new file, 'zeta', and schedule it for addition.
    zeta_path = os.path.join(wc_dir, 'A', 'B', 'zeta')
    svntest.main.file_append(zeta_path, "This is the file 'zeta'.")
    svntest.main.run_svn(None, 'add', zeta_path)
    # Now update. "zeta at revision 0" should *not* be reported.
    # Create expected output tree for an update of the wc_backup.
    output_list = []
    expected_output_tree = svntest.tree.build_generic_tree(output_list)
    # Create expected disk tree for the update.
    my_greek_tree = svntest.main.copy_greek_tree()
    my_greek_tree.append(['A/B/zeta', "This is the file 'zeta'.", {}, {}])
    expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree)
    # Create expected status tree for the update.
    status_list = svntest.actions.get_virginal_status_list(wc_dir, '1')
    status_list.append([zeta_path, None, {},
                        {'status' : 'A ', 'wc_rev' : '0',
                         'repos_rev' : '1'}])
    expected_status_tree = svntest.tree.build_generic_tree(status_list)
    # Do the update and check the results in three ways.
    return svntest.actions.run_and_verify_update(wc_dir,
                                                 expected_output_tree,
                                                 expected_disk_tree,
                                                 expected_status_tree)
gamma_path = os.path.join(wc_dir, 'A', 'D', 'gamma')
svntest.main.run_svn(None, 'delete', gamma_path)
svntest.main.file_append(gamma_path, "\nThis is a new 'gamma' now.")
svntest.main.run_svn(None, 'add', gamma_path)
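The hunk just above schedules a *replacement*: deleting a versioned file and adding a new one at the same path makes 'svn status' report 'R ' rather than separate delete and add rows. A sketch of the corresponding expected-status entry, using the [path, contents, props, attribute-hash] shape these lists use throughout (the values are illustrative):

    replaced = [os.path.join(wc_dir, 'A', 'D', 'gamma'), None, {},
                {'status' : 'R ',    # deleted and re-added in place
                 'wc_rev' : '1',     # the old revision is still checked out
                 'repos_rev' : '2'}]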
status_list = svntest.actions.get_virginal_status_list(wc_dir, '1')
status_list = svntest.actions.get_virginal_status_list(wc_dir, '2')
for item in status_list:
    if item[0] == gamma_path:
        item[3]['wc_rev'] = '1'
        item[3]['status'] = 'R '
'repos_rev' : '1'}])
'repos_rev' : '2'}])
out, err = svntest.main.run_svn(1, 'commit', '-m', '"logmsg"', mu_path)
if len(err) == 0:
    return 1
return 0
if svntest.actions.run_and_verify_commit (wc_dir, None, None,
                                          "unversioned",
                                          None, None, None, None,
                                          mu_path):
    return 1

Q_path = os.path.join(wc_dir, 'Q')
bloo_path = os.path.join(Q_path, 'bloo')
os.mkdir(Q_path)
svntest.main.file_append(bloo_path, "New contents.")
svntest.main.run_svn(None, 'add', '--recursive', Q_path)

return svntest.actions.run_and_verify_commit (wc_dir, None, None,
                                              "unversioned",
                                              None, None, None, None,
                                              bloo_path)
def commit_in_dir_scheduled_for_addition(sbox):
    "commit a file inside dir scheduled for addition"
    if sbox.build():
        return 1
    wc_dir = sbox.wc_dir
    A_path = os.path.join(wc_dir, 'A')
    Z_path = os.path.join(wc_dir, 'Z')
    mu_path = os.path.join(wc_dir, 'Z', 'mu')
    svntest.main.run_svn(None, 'move', A_path, Z_path)
    out, err = svntest.main.run_svn(1, 'commit', '-m', '"logmsg"', mu_path)
    ### FIXME:
    #
    # In commit 1275, sussman fixed subversion/libsvn_client/copy.c, and
    # said:
    #
    #    This was causing commit_test #15 to fail, but this test was
    #    written only to expect generic failure, so it still passing, so
    #    it looked as though 'make check' was passing. If you ran
    #    commit_tests.py by hand, though, you'd see the extra stderr
    #    output. The moral of the story is that commit_test #15 should
    #    be using run_and_verify_commit() to look for a *specific*
    #    expected errorstring. Anyone wanna fix it?
    #
    # This is the test that needs to be fixed, right?
    if len(err) == 0:
        return 1
    return 0
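The FIXME above asks the test to match a *specific* error string instead of treating any stderr at all as success. A hedged sketch of the tighter check (the helper name and the message text are illustrative, not the real client error):

    import string

    def expect_specific_error(err, substring):
        # Pass only if the expected complaint actually appeared; an
        # unrelated error (or none at all) must keep failing the test.
        for line in err:
            if string.find(line, substring) != -1:
                return 0
        return 1

It would be called as, say, expect_specific_error(err, 'not under version control'), with the substring taken from the real error message.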
"Return a status list reflecting the local mods made by make_standard_slew_of_changes()."
def get_standard_status_list(wc_dir):
    "Return a status list reflecting the local mods made by make_standard_slew_of_changes()."
    status_list = svntest.actions.get_virginal_status_list(wc_dir, '1')
    ### todo: use status-hash below instead.
    # `.'
    status_list[0][3]['status'] = '_M'
    # A/B/lambda, A/D
    status_list[5][3]['status'] = 'M '
    status_list[11][3]['status'] = 'M '
    # A/B/E, A/D/H/chi
    status_list[6][3]['status'] = 'R '
    status_list[18][3]['status'] = 'R '
    # A/B/E/alpha, A/B/E/beta, A/C, A/D/gamma
    status_list[7][3]['status'] = 'D '
    status_list[8][3]['status'] = 'D '
    status_list[10][3]['status'] = 'D '
    status_list[12][3]['status'] = 'D '
    status_list[15][3]['status'] = 'D '
    # A/D/G/pi, A/D/H/omega
    status_list[14][3]['status'] = '_M'
    status_list[20][3]['status'] = 'MM'
    # New things
    status_list.append([os.path.join(wc_dir, 'Q'), None, {},
                        {'status' : 'A ', 'locked' : ' ',
                         'wc_rev' : '0', 'repos_rev' : '1'}])
    status_list.append([os.path.join(wc_dir, 'Q', 'floo'), None, {},
                        {'status' : 'A ', 'locked' : ' ',
                         'wc_rev' : '0', 'repos_rev' : '1'}])
    status_list.append([os.path.join(wc_dir, 'A', 'D', 'H', 'gloo'), None, {},
                        {'status' : 'A ', 'locked' : ' ',
                         'wc_rev' : '0', 'repos_rev' : '1'}])
    status_list.append([os.path.join(wc_dir, 'A', 'B', 'E', 'bloo'), None, {},
                        {'status' : 'A ', 'locked' : ' ',
                         'wc_rev' : '0', 'repos_rev' : '1'}])
    return status_list
objects.append(objname)
file_deps.append((src, objname))
elif src[-5:] == '.texi':
    objname = src[:-5] + objext
def main(fname, oname=None, skip_depends=0): parser = ConfigParser.ConfigParser(_cfg_defaults) parser.read(fname) if oname is None: oname = os.path.splitext(os.path.basename(fname))[0] + '-outputs.mk' ofile = open(oname, 'w') ofile.write('# DO NOT EDIT -- AUTOMATICALLY GENERATED\n\n') errors = 0 targets = { } groups = { } # group name -> targets install = { } # install area name -> targets test_progs = [ ] test_deps = [ ] file_deps = [ ] target_dirs = { } target_names = _filter_targets(parser.sections()) # PASS 1: collect the targets and some basic info for target in target_names: try: target_ob = Target(target, parser.get(target, 'path'), parser.get(target, 'install'), parser.get(target, 'type')) except GenMakeError, e: print e errors = 1 continue targets[target] = target_ob group = parser.get(target, 'group') if groups.has_key(group): groups[group].append(target_ob.output) else: groups[group] = [ target_ob.output ] itype = target_ob.install if install.has_key(itype): install[itype].append(target_ob) else: install[itype] = [ target_ob ] target_dirs[target_ob.path] = None if errors: sys.exit(1) # PASS 2: generate the outputs for target in target_names: target_ob = targets[target] path = target_ob.path bldtype = target_ob.type objext = target_ob.objext tpath = target_ob.output tfile = os.path.basename(tpath) if target_ob.install == 'test' and bldtype == 'exe': test_deps.append(tpath) if parser.get(target, 'testing') != 'skip': test_progs.append(tpath) pats = parser.get(target, 'sources') if not pats: pats = _default_sources[bldtype] sources, s_errors = _collect_paths(pats, path) errors = errors or s_errors objects = [ ] for src in sources: if src[-2:] == '.c': objname = src[:-2] + objext objects.append(objname) file_deps.append((src, objname)) elif src[-5:] == '.texi': objname = src[:-5] + objext objects.append(objname) file_deps.append((src, objname)) else: print 'ERROR: unknown file extension on', src errors = 1 retreat = _retreat_dots(path) libs = [ ] deps = [ ] for lib in string.split(parser.get(target, 'libs')): if lib in target_names: tlib = targets[lib] target_ob.deps.append(tlib) deps.append(tlib.output) dep_path = tlib.path if bldtype == 'lib': # we need to hack around a libtool problem: it cannot record a # dependency of one shared lib on another shared lib. ### fix this by upgrading to the new libtool 1.4 release... 
# strip "lib" from the front so we have -lsvn_foo if lib[:3] == 'lib': lib = lib[3:] libs.append('-L%s -l%s' % (retreat + os.path.join(dep_path, '.libs'), lib)) else: # linking executables can refer to .la files libs.append(retreat + os.path.join(dep_path, lib + '.la')) else: # something we don't know, so just include it directly libs.append(lib) targ_varname = string.replace(target, '-', '_') ldflags = parser.get(target, 'link-flags') add_deps = parser.get(target, 'add-deps') objnames = string.join(map(os.path.basename, objects)) ofile.write('%s_DEPS = %s %s\n' '%s_OBJECTS = %s\n' '%s: $(%s_DEPS)\n' '\tcd %s && $(LINK) -o %s %s $(%s_OBJECTS) %s $(LIBS)\n\n' % (targ_varname, string.join(objects + deps), add_deps, targ_varname, objnames, tpath, targ_varname, path, tfile, ldflags, targ_varname, string.join(libs))) custom = parser.get(target, 'custom') if custom == 'apache-mod': # special build, needing Apache includes ofile.write('# build these special -- use APACHE_INCLUDES\n') for src in sources: if src[-2:] == '.c': ofile.write('%s%s: %s\n\t$(COMPILE_APACHE_MOD)\n' % (src[:-2], objext, src)) ofile.write('\n') for g_name, g_targets in groups.items(): ofile.write('%s: %s\n\n' % (g_name, string.join(g_targets))) ofile.write('BUILD_DIRS = %s\n' % string.join(target_dirs.keys())) cfiles = [ ] for target in targets.values(): # .la files are handled by the standard 'clean' rule; clean all the # other targets if target.output[-3:] != '.la': cfiles.append(target.output) ofile.write('CLEAN_FILES = %s\n\n' % string.join(cfiles)) for area, inst_targets in install.items(): files = [ ] for t in inst_targets: files.append(t.output) if area == 'apache-mod': ofile.write('install-mods-shared: %s\n' % (string.join(files),)) la_tweaked = { } for file in files: base, ext = os.path.splitext(os.path.basename(file)) name = string.replace(base, 'libmod_', '') ofile.write('\t$(INSTALL_MOD_SHARED) -n %s %s\n' % (name, file)) if ext == '.la': la_tweaked[file + '-a'] = None for t in inst_targets: for dep in t.deps: bt = dep.output if bt[-3:] == '.la': la_tweaked[bt + '-a'] = None la_tweaked = la_tweaked.keys() s_files, s_errors = _collect_paths(parser.get('static-apache', 'paths')) errors = errors or s_errors ofile.write('\ninstall-mods-static: %s\n' '\t$(MKDIR) %s\n' % (string.join(la_tweaked + s_files), os.path.join('$(APACHE_TARGET)', '.libs'))) for file in la_tweaked: dirname, fname = os.path.split(file) base = os.path.splitext(fname)[0] ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' '\t$(INSTALL_MOD_STATIC) %s %s\n' % (os.path.join(dirname, '.libs', base + '.a'), os.path.join('$(APACHE_TARGET)', '.libs', base + '.a'), file, os.path.join('$(APACHE_TARGET)', base + '.la'))) for file in s_files: ofile.write('\t$(INSTALL_MOD_STATIC) %s %s\n' % (file, os.path.join('$(APACHE_TARGET)', os.path.basename(file)))) ofile.write('\n') elif area != 'test': ofile.write('install-%s: %s\n' '\t$(MKDIR) $(%sdir)\n' % (area, string.join(files), area)) for file in files: ofile.write('\t$(INSTALL_%s) %s %s\n' % (string.upper(area), file, os.path.join('$(%sdir)' % area, os.path.basename(file)))) ofile.write('\n') includes, i_errors = _collect_paths(parser.get('includes', 'paths')) errors = errors or i_errors ofile.write('install-include: %s\n' '\t$(MKDIR) $(includedir)\n' % (string.join(includes),)) for file in includes: ofile.write('\t$(INSTALL_INCLUDE) %s %s\n' % (os.path.join('$(top_srcdir)', file), os.path.join('$(includedir)', os.path.basename(file)))) ofile.write('\n# handy shortcut targets\n') for name, target in 
targets.items(): ofile.write('%s: %s\n' % (name, target.output)) ofile.write('\n') scripts, s_errors = _collect_paths(parser.get('test-scripts', 'paths')) errors = errors or s_errors ofile.write('TEST_DEPS = %s\n\n' % string.join(test_deps + scripts)) ofile.write('TEST_PROGRAMS = %s\n\n' % string.join(test_progs + scripts)) if not skip_depends: # # Find all the available headers and what they depend upon. the # include_deps is a dictionary mapping a short header name to a tuple # of the full path to the header and a dictionary of dependent header # names (short) mapping to None. # # Example: # { 'short.h' : ('/path/to/short.h', # { 'other.h' : None, 'foo.h' : None }) } # # Note that this structure does not allow for similarly named headers # in per-project directories. SVN doesn't have this at this time, so # this structure works quite fine. (the alternative would be to use # the full pathname for the key, but that is actually a bit harder to # work with since we only see short names when scanning, and keeping # a second variable around for mapping the short to long names is more # than I cared to do right now) # include_deps = _create_include_deps(includes) for d in target_dirs.keys(): hdrs = glob.glob(os.path.join(d, '*.h')) if hdrs: more_deps = _create_include_deps(hdrs, include_deps) include_deps.update(more_deps) for src, objname in file_deps: hdrs = [ ] for short in _find_includes(src, include_deps): hdrs.append(include_deps[short][0]) ofile.write('%s: %s %s\n' % (objname, src, string.join(hdrs))) if errors: sys.exit(1)
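The added branch in this row special-cases '.texi' sources alongside '.c'. If more source types accumulate, a small table keeps the extension dispatch in one place; a sketch (the helper name and extension list are hypothetical):

    _src_exts = ['.c', '.texi']

    def _object_name(src, objext):
        # Return the object file name for SRC, or None when the extension
        # is unknown (the caller reports the error).
        for ext in _src_exts:
            if src[-len(ext):] == ext:
                return src[:-len(ext)] + objext
        return None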
tfile = name
self.objext = '.info'
def __init__(self, name, path, install, type):
    self.name = name
    self.deps = [ ]    # dependencies (list of other Target objects)
    self.path = path
    self.type = type
got_error = run_one_test(n, test_list)
if run_one_test(n, test_list): got_error = 1
def run_tests(test_list):
    "Main routine to run all tests in TEST_LIST."
    testnum = 0
    # Parse commandline arg, list tests or run one test
    if (len(sys.argv) > 1):
        if (sys.argv[1] == 'list'):
            print "Test #  Test Description"
            print "------  ----------------"
            n = 1
            for x in test_list[1:]:
                print " ", n, " ", x.__doc__
                n = n+1
            return 0
        else:
            try:
                testnum = int(sys.argv[1])
                return run_one_test(testnum, test_list)
            except ValueError:
                print "warning: ignoring bogus argument"
    # run all the tests.
    got_error = 0
    for n in range(len(test_list)):
        if n:
            got_error = run_one_test(n, test_list)
    return got_error
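The one-line fix in this row cures a classic accumulator bug: assigning run_one_test's result straight into got_error lets a later passing test erase an earlier failure. The corrected loop only ever raises the flag; a standalone sketch (the wrapper name is hypothetical):

    def run_all(test_list, run_one_test):
        got_error = 0
        for n in range(len(test_list)):
            if n:                            # slot 0 is a placeholder
                if run_one_test(n, test_list):
                    got_error = 1            # sticky: cannot be cleared
        return got_error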
status_list = svntest.actions.get_virginal_status_list(wc_dir, 1)
status_list = svntest.actions.get_virginal_status_list(wc_dir, '1')
def commit_props():
    "commit properties"
    # Bootstrap
    sbox = sandbox(commit_props)
    wc_dir = os.path.join (svntest.main.general_wc_dir, sbox)
    if svntest.actions.make_repo_and_wc(sbox):
        return 1
    # Add a property to a file and a directory
    mu_path = os.path.join(wc_dir, 'A', 'mu')
    H_path = os.path.join(wc_dir, 'A', 'D', 'H')
    svntest.main.run_svn('propset', 'blue', 'azul', mu_path)
    svntest.main.run_svn('propset', 'red', 'rojo', H_path)
    # Create expected output tree.
    output_list = [ [mu_path, None, {}, {'verb' : 'Changing'}],
                    [ H_path, None, {}, {'verb' : 'Changing'}] ]
    expected_output_tree = svntest.tree.build_generic_tree(output_list)
    # Created expected status tree.
    status_list = svntest.actions.get_virginal_status_list(wc_dir, 1)
    for item in status_list:
        item[3]['repos_rev'] = '2'    # post-commit status
        if (item[0] == mu_path) or (item[0] == H_path):
            item[3]['wc_rev'] = '2'
            item[3]['status'] = '__'
    expected_status_tree = svntest.tree.build_generic_tree(status_list)
    # Commit the one file.
    return svntest.actions.run_and_verify_commit (wc_dir,
                                                  expected_output_tree,
                                                  expected_status_tree,
                                                  None, None, None, None, None,
                                                  wc_dir)
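The change in this row, repeated in the next, turns get_virginal_status_list's revision argument from the integer 1 into the string '1'. The status hashes store revisions as strings, so a comparison against an integer can never match and the expected tree is silently wrong:

    item = {'wc_rev' : '1'}
    assert item['wc_rev'] == '1'    # string compares equal
    assert item['wc_rev'] != 1      # an int never equals the stored string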
status_list = svntest.actions.get_virginal_status_list(wc_dir, 1)
status_list = svntest.actions.get_virginal_status_list(wc_dir, '1')
def update_props():
    "receive properties via update"
    # Bootstrap
    sbox = sandbox(update_props)
    wc_dir = os.path.join (svntest.main.general_wc_dir, sbox)
    if svntest.actions.make_repo_and_wc(sbox):
        return 1
    # Make a backup copy of the working copy
    wc_backup = wc_dir + 'backup'
    svntest.actions.duplicate_dir(wc_dir, wc_backup)
    # Add a property to a file and a directory
    mu_path = os.path.join(wc_dir, 'A', 'mu')
    H_path = os.path.join(wc_dir, 'A', 'D', 'H')
    svntest.main.run_svn('propset', 'blue', 'azul', mu_path)
    svntest.main.run_svn('propset', 'red', 'rojo', H_path)
    # Create expected output tree.
    output_list = [ [mu_path, None, {}, {'verb' : 'Changing'}],
                    [ H_path, None, {}, {'verb' : 'Changing'}] ]
    expected_output_tree = svntest.tree.build_generic_tree(output_list)
    # Created expected status tree.
    status_list = svntest.actions.get_virginal_status_list(wc_dir, 1)
    for item in status_list:
        item[3]['repos_rev'] = '2'    # post-commit status
        if (item[0] == mu_path) or (item[0] == H_path):
            item[3]['wc_rev'] = '2'
            item[3]['status'] = '__'
    expected_status_tree = svntest.tree.build_generic_tree(status_list)
    # Commit the one file.
    if svntest.actions.run_and_verify_commit (wc_dir,
                                              expected_output_tree,
                                              expected_status_tree,
                                              None, None, None, None, None,
                                              wc_dir):
        return 1
    # Create expected output tree for an update of the wc_backup.
    output_list = [ [os.path.join(wc_backup, mu_path), None, {},
                     {'status' : '_U'}],
                    [os.path.join(wc_backup, H_path), None, {},
                     {'status' : '_U'}] ]
    expected_output_tree = svntest.tree.build_generic_tree(output_list)
    # Create expected disk tree for the update.
    my_greek_tree = svntest.main.copy_greek_tree()
    my_greek_tree[2][2]['blue'] = 'azul'    # A/mu
    my_greek_tree[16][2]['red'] = 'rojo'    # A/D/H
    expected_disk_tree = svntest.tree.build_generic_tree(my_greek_tree)
    # Create expected status tree for the update.
    status_list = svntest.actions.get_virginal_status_list(wc_backup, '2')
    expected_status_tree = svntest.tree.build_generic_tree(status_list)
    # Do the update and check the results in three ways.
    return svntest.actions.run_and_verify_update(wc_backup,
                                                 expected_output_tree,
                                                 expected_disk_tree,
                                                 expected_status_tree)