| rem (string, lengths 0–322k) | add (string, lengths 0–2.05M) | context (string, lengths 8–228k) |
---|---|---|
print "Status string: %s, players: %s" % (status_string, self.players)
|
def update_players_list(self):
    try:
        self.connection.request("GET", "/status?game_id=%s" % self.game_id)
        status_string = self.connection.getresponse().read()
        # TODO: Parse status report and put it into a GameStatus object
        status, players, size = status_string.split('|')[:3]
        self.players = players.split(",")
        print "Status string: %s, players: %s" % (status_string, self.players)
        self.game_size = int(size)
    except (httplib.CannotSendRequest, ValueError), err:
        print ("Status fetching for game %s failed. (%s)" % (self.game_id, err))
|
|
self.title_fnt = self.loader.load_font("KLEPTOMA.TTF", 60)
|
self.title_fnt = self.loader.load_font("KLEPTOMA.TTF", 50)
|
def __init__(self):
    self.loader = Loader()
    self.desk = self.loader.load_image("back.png")
    self.back = self.desk.copy()
|
t = ["Empate!","Ganaste!","Perdiste!"]
|
t = ["Empate!","Jugador Uno!","Jugador Dos!"] if t[result] != t[0]: self._title('Bien',y - 80,x)
|
def render_gameover(self,result):
    self.back.blit(self.desk,(0,0))
    y = 275
    x = 300
    t = ["Empate!","Ganaste!","Perdiste!"]
    self._title(t[result],y,x); y+=50
    self._text("Pulsa el raton para volver a jugar!",y,x)
|
elif event.key == K_F11: pygame.display.toggle_fullscreen()
|
def main_loop(self):
    clock = pygame.time.Clock()
|
|
self.mixer_internal = PREFS["mixer_internal"]
|
self.mixer_internal = bool(int(PREFS["mixer_internal"]))
|
def __init__(self): """ Constructor """ gtk.StatusIcon.__init__(self)
|
self.last_id = self.notify.Notify('audiovolume', self.last_id, icon, '', body, [], hints, duration * 1000)
|
self.last_id = self.notify.Notify('volume', self.last_id, icon, self.title, body, [], hints, duration * 1000)
|
def show(self, icon, message, duration, volume):
    """ Show the notification """
    body = self.format(message, volume)
    hints = {"urgency": dbus.Byte(0), "desktop-entry": dbus.String("volti")}
    if self.main.notify_position and self.server_capable:
        hints["x"], hints["y"] = self.get_position()
    self.last_id = self.notify.Notify('audiovolume', self.last_id, icon, '',
                                      body, [], hints, duration * 1000)
|
if term == "linux":
|
if term == "linux" or "":
|
def find_term(self):
    term = os.getenv("TERM")
    if term == "linux":
        if which("gconftool-2"):
            term = Popen(["gconftool-2", "-g",
                          "/desktop/gnome/applications/terminal/exec"],
                         stdout=PIPE).communicate()[0].strip()
        else:
            term = 'xterm'
    else:
        if term == "rxvt" and not which(term):
            term = "urxvt"
    return term
|
x1,y1 = apply_context_transforms(x1,y1)
x2,y2 = apply_context_transforms(x2,y2)
|
x1,y1 = self.apply_context_transforms(x1,y1)
x2,y2 = self.apply_context_transforms(x2,y2)
|
def line_message(self, x1,y1,x2,y2):
    x1+=random()*self.MaxNoise-self.MaxNoise/2
    y1+=random()*self.MaxNoise-self.MaxNoise/2
    x2+=random()*self.MaxNoise-self.MaxNoise/2
    y2+=random()*self.MaxNoise-self.MaxNoise/2
    x1,y1 = apply_context_transforms(x1,y1)
    x2,y2 = apply_context_transforms(x2,y2)
|
self.ReplayInitLog()
|
def __init__(self, config=None):
    self.localDevice = True
    self.remoteDevice = None
    if config:
        if "server" in config:
            if not "port" in config:
                config["port"]=50000
|
|
buf = ""
|
buf2 = ""
|
def ReplayInitLog(self):
    # find our device
    self.usbdev=None
    for bus in usb.busses():
        for dev in bus.devices:
            if dev.idVendor == 0x3333:
                self.usbdev = dev
    # was it found?
    if self.usbdev is None:
        raise ValueError('Device (3333:5555) not found')
|
buf+=chr(int(byte,16))
handle.controlMsg(reqType,req,buf,value,index)
|
buf2+=chr(int(byte,16))
handle.controlMsg(reqType,req,buf2,value,index)
|
def ReplayInitLog(self):
    # find our device
    self.usbdev=None
    for bus in usb.busses():
        for dev in bus.devices:
            if dev.idVendor == 0x3333:
                self.usbdev = dev
    # was it found?
    if self.usbdev is None:
        raise ValueError('Device (3333:5555) not found')
|
content = serialize('json', object)
|
content = serialize('json', data)
|
def __init__(self, data, *args, **kwargs):
    content = None
    if isinstance(data, QuerySet):
        content = serialize('json', object)
    else:
        content = json.dumps(data, indent=2, cls=json.DjangoJSONEdncoder,
                             ensure_ascii=False)
    super(JsonResponse, self).__init__(content, *args,
                                       content_type='application/json', **kwargs)
|
content = json.dumps(data, indent=2, cls=json.DjangoJSONEdncoder,
|
content = simplejson.dumps(data, indent=2, cls=json.DjangoJSONEncoder,
|
def __init__(self, data, *args, **kwargs):
    content = None
    if isinstance(data, QuerySet):
        content = serialize('json', data)
    else:
        content = json.dumps(data, indent=2, cls=json.DjangoJSONEdncoder,
                             ensure_ascii=False)
    kwargs['content_type'] = 'application/json'
    super(JsonResponse, self).__init__(content, *args, **kwargs)
|
url = "%s/?all
|
url = "%s?all
|
def edit_transaction(request, transaction=None): load_from_database = True if transaction == None: transaction = Transaction(date=datetime.date.today(), auto_generated=False) load_from_database = False else: transaction = get_object_or_404(Transaction, id=int(transaction)) splits = [] commit = True # Is it safe to commit this transaction? # If the user clicked the "Update" button, don't commit yet. if request.POST.has_key("_update"): commit = False # If POST data was submitted, we're in the middle of editing a transaction. # Pull the transaction data out of the POST data. Otherwise, we need to # load the initial data from the database. try: transaction.date = parse_date(request.POST['date']) transaction.description = request.POST['desc'] transaction.auto_generated = request.POST.has_key('auto_generated') n = 0 while True: n = str(int(n) + 1) note = request.POST['note.' + n] account = request.POST['account.' + n] if account == "": account = None else: account = Account.objects.get(id=int(account)) amount = Decimal("0.00") if request.POST['debit.' + n] != "": amount += Decimal(request.POST['debit.' + n]) if request.POST['credit.' + n] != "": amount -= Decimal(request.POST['credit.' + n]) load_from_database = False if account: splits.append({'memo': note, 'account': account, 'amount': amount}) except KeyError: # Assume we hit the end of the inputs pass if len(splits) == 0: commit = False # Load initial splits from the database, if needed if load_from_database: for s in transaction.split_set.all().order_by('-amount'): splits.append({'id': s.id, 'memo': s.memo, 'account': s.account, 'amount': s.amount}) commit = False # Check if the transaction is balanced. If not, add a balancing split to # be filled in by the user. total = Decimal("0.00") for s in splits: total += s['amount'] if total != Decimal("0.00"): splits.append({'memo': "", 'account': None, 'amount': -total}) commit = False # Has the transaction been fully filled-in, with no problems found? If so, # commit to the database. if commit: transaction.save() Split.objects.filter(transaction=transaction).delete() for s in splits: split = Split(transaction=transaction, account=s['account'], memo=s['memo'], amount=s['amount']) split.save() if transaction.auto_generated: url = "%s/?all#t%d" % (reverse("chezbob.finance.views.ledger"), transaction.id,) else: url = "%s/#t%d" % (reverse("chezbob.finance.views.ledger"), transaction.id,) return HttpResponseRedirect(url) # Include a few blank splits at the end of the transaction for entering # additional data. for i in range(4): splits.append({'memo': "", 'account': None, 'amount': Decimal("0.00")}) # Convert splits to a separated debit/credit format for s in splits: s['debit'] = Decimal("0.00") s['credit'] = Decimal("0.00") if s['amount'] > 0: s['debit'] = s['amount'] if s['amount'] < 0: s['credit'] = -s['amount'] return render_to_response('finance/transaction_update.html', {'user': request.user, 'accounts': Account.objects.order_by('name'), 'transaction': transaction, 'splits': splits})
|
url = "%s/
|
url = "%s
|
def edit_transaction(request, transaction=None): load_from_database = True if transaction == None: transaction = Transaction(date=datetime.date.today(), auto_generated=False) load_from_database = False else: transaction = get_object_or_404(Transaction, id=int(transaction)) splits = [] commit = True # Is it safe to commit this transaction? # If the user clicked the "Update" button, don't commit yet. if request.POST.has_key("_update"): commit = False # If POST data was submitted, we're in the middle of editing a transaction. # Pull the transaction data out of the POST data. Otherwise, we need to # load the initial data from the database. try: transaction.date = parse_date(request.POST['date']) transaction.description = request.POST['desc'] transaction.auto_generated = request.POST.has_key('auto_generated') n = 0 while True: n = str(int(n) + 1) note = request.POST['note.' + n] account = request.POST['account.' + n] if account == "": account = None else: account = Account.objects.get(id=int(account)) amount = Decimal("0.00") if request.POST['debit.' + n] != "": amount += Decimal(request.POST['debit.' + n]) if request.POST['credit.' + n] != "": amount -= Decimal(request.POST['credit.' + n]) load_from_database = False if account: splits.append({'memo': note, 'account': account, 'amount': amount}) except KeyError: # Assume we hit the end of the inputs pass if len(splits) == 0: commit = False # Load initial splits from the database, if needed if load_from_database: for s in transaction.split_set.all().order_by('-amount'): splits.append({'id': s.id, 'memo': s.memo, 'account': s.account, 'amount': s.amount}) commit = False # Check if the transaction is balanced. If not, add a balancing split to # be filled in by the user. total = Decimal("0.00") for s in splits: total += s['amount'] if total != Decimal("0.00"): splits.append({'memo': "", 'account': None, 'amount': -total}) commit = False # Has the transaction been fully filled-in, with no problems found? If so, # commit to the database. if commit: transaction.save() Split.objects.filter(transaction=transaction).delete() for s in splits: split = Split(transaction=transaction, account=s['account'], memo=s['memo'], amount=s['amount']) split.save() if transaction.auto_generated: url = "%s/?all#t%d" % (reverse("chezbob.finance.views.ledger"), transaction.id,) else: url = "%s/#t%d" % (reverse("chezbob.finance.views.ledger"), transaction.id,) return HttpResponseRedirect(url) # Include a few blank splits at the end of the transaction for entering # additional data. for i in range(4): splits.append({'memo': "", 'account': None, 'amount': Decimal("0.00")}) # Convert splits to a separated debit/credit format for s in splits: s['debit'] = Decimal("0.00") s['credit'] = Decimal("0.00") if s['amount'] > 0: s['debit'] = s['amount'] if s['amount'] < 0: s['credit'] = -s['amount'] return render_to_response('finance/transaction_update.html', {'user': request.user, 'accounts': Account.objects.order_by('name'), 'transaction': transaction, 'splits': splits})
|
if account is None and not request.GET.has_key('all'):
    include_auto = False
|
if account is None:
    include_auto = False
    if request.GET.has_key('all'):
        if request.GET['all'] != '0':
            include_auto = True
        else:
            include_auto = False
|
def ledger(request, account=None): if account: account = Account.objects.get(id=account) title = account.name else: title = "General Ledger" transactions = [] transaction_filter = {} include_auto = True if account is None and not request.GET.has_key('all'): include_auto = False if not include_auto: transaction_filter['auto_generated'] = False if account is not None: transaction_filter['split__account'] = account count_per_page = 25 all_transactions = Transaction.objects.filter(**transaction_filter)\ .order_by('date', 'id')\ .distinct() transaction_count = all_transactions.count() paginator = Paginator(range(0, transaction_count), count_per_page) default_pagenum = paginator.num_pages try: pagenum = int(request.GET.get('page', default_pagenum)) except: pagenum = default_pagenum try: page = paginator.page(pagenum) except (EmptyPage, InvalidPage): page = paginator.page(paginator.num_pages) # Slice if len(page.object_list) > 2: page_transactions = \ all_transactions[page.object_list[0]:page.object_list[-1]+1] else: page_transactions = all_transactions if account is not None and len(page_transactions) > 0: balance = Transaction.balance_before(page_transactions[0], account) else: balance = Decimal("0.00") for t in page_transactions: split_list = [] for s in Split.objects.filter(transaction=t): split = {'memo': s.memo, 'account': s.account, 'debit': "", 'credit': ""} if s.amount >= 0: split['debit'] = s.amount else: split['credit'] = -s.amount split_list.append(split) if account is not None and s.account.id == account.id: balance += s.amount if account is None: transactions.append({'info': t, 'splits': split_list}) else: transactions.append({'info': t, 'splits': split_list, 'balance': balance}) if account: for t in transactions: if account.is_reversed(): t['balance'] *= -1 if include_auto: extra_page_params = "all=1&" else: extra_page_params = "" return render_to_response('finance/transactions.html', {'title': title, 'transactions': transactions, 'balances': account is not None, 'page': page, 'extra_page_params': extra_page_params})
|
return HttpResponseRedirect('/finance/accounts/')
|
return HttpResponseRedirect(reverse('chezbob.finance.views.account_list'))
|
def redirect(request):
    return HttpResponseRedirect('/finance/accounts/')
|
url = "/finance/ledger/?all
|
url = "%s/?all
|
def edit_transaction(request, transaction=None): load_from_database = True if transaction == None: transaction = Transaction(date=datetime.date.today(), auto_generated=False) load_from_database = False else: transaction = get_object_or_404(Transaction, id=int(transaction)) splits = [] commit = True # Is it safe to commit this transaction? # If the user clicked the "Update" button, don't commit yet. if request.POST.has_key("_update"): commit = False # If POST data was submitted, we're in the middle of editing a transaction. # Pull the transaction data out of the POST data. Otherwise, we need to # load the initial data from the database. try: transaction.date = parse_date(request.POST['date']) transaction.description = request.POST['desc'] transaction.auto_generated = request.POST.has_key('auto_generated') n = 0 while True: n = str(int(n) + 1) note = request.POST['note.' + n] account = request.POST['account.' + n] if account == "": account = None else: account = Account.objects.get(id=int(account)) amount = Decimal("0.00") if request.POST['debit.' + n] != "": amount += Decimal(request.POST['debit.' + n]) if request.POST['credit.' + n] != "": amount -= Decimal(request.POST['credit.' + n]) load_from_database = False if account: splits.append({'memo': note, 'account': account, 'amount': amount}) except KeyError: # Assume we hit the end of the inputs pass if len(splits) == 0: commit = False # Load initial splits from the database, if needed if load_from_database: for s in transaction.split_set.all().order_by('-amount'): splits.append({'id': s.id, 'memo': s.memo, 'account': s.account, 'amount': s.amount}) commit = False # Check if the transaction is balanced. If not, add a balancing split to # be filled in by the user. total = Decimal("0.00") for s in splits: total += s['amount'] if total != Decimal("0.00"): splits.append({'memo': "", 'account': None, 'amount': -total}) commit = False # Has the transaction been fully filled-in, with no problems found? If so, # commit to the database. if commit: transaction.save() Split.objects.filter(transaction=transaction).delete() for s in splits: split = Split(transaction=transaction, account=s['account'], memo=s['memo'], amount=s['amount']) split.save() if transaction.auto_generated: url = "/finance/ledger/?all#t%d" % (transaction.id,) else: url = "/finance/ledger/#t%d" % (transaction.id,) return HttpResponseRedirect(url) # Include a few blank splits at the end of the transaction for entering # additional data. for i in range(4): splits.append({'memo': "", 'account': None, 'amount': Decimal("0.00")}) # Convert splits to a separated debit/credit format for s in splits: s['debit'] = Decimal("0.00") s['credit'] = Decimal("0.00") if s['amount'] > 0: s['debit'] = s['amount'] if s['amount'] < 0: s['credit'] = -s['amount'] return render_to_response('finance/transaction_update.html', {'user': request.user, 'accounts': Account.objects.order_by('name'), 'transaction': transaction, 'splits': splits})
|
url = "/finance/ledger/
|
url = "%s/
|
def edit_transaction(request, transaction=None): load_from_database = True if transaction == None: transaction = Transaction(date=datetime.date.today(), auto_generated=False) load_from_database = False else: transaction = get_object_or_404(Transaction, id=int(transaction)) splits = [] commit = True # Is it safe to commit this transaction? # If the user clicked the "Update" button, don't commit yet. if request.POST.has_key("_update"): commit = False # If POST data was submitted, we're in the middle of editing a transaction. # Pull the transaction data out of the POST data. Otherwise, we need to # load the initial data from the database. try: transaction.date = parse_date(request.POST['date']) transaction.description = request.POST['desc'] transaction.auto_generated = request.POST.has_key('auto_generated') n = 0 while True: n = str(int(n) + 1) note = request.POST['note.' + n] account = request.POST['account.' + n] if account == "": account = None else: account = Account.objects.get(id=int(account)) amount = Decimal("0.00") if request.POST['debit.' + n] != "": amount += Decimal(request.POST['debit.' + n]) if request.POST['credit.' + n] != "": amount -= Decimal(request.POST['credit.' + n]) load_from_database = False if account: splits.append({'memo': note, 'account': account, 'amount': amount}) except KeyError: # Assume we hit the end of the inputs pass if len(splits) == 0: commit = False # Load initial splits from the database, if needed if load_from_database: for s in transaction.split_set.all().order_by('-amount'): splits.append({'id': s.id, 'memo': s.memo, 'account': s.account, 'amount': s.amount}) commit = False # Check if the transaction is balanced. If not, add a balancing split to # be filled in by the user. total = Decimal("0.00") for s in splits: total += s['amount'] if total != Decimal("0.00"): splits.append({'memo': "", 'account': None, 'amount': -total}) commit = False # Has the transaction been fully filled-in, with no problems found? If so, # commit to the database. if commit: transaction.save() Split.objects.filter(transaction=transaction).delete() for s in splits: split = Split(transaction=transaction, account=s['account'], memo=s['memo'], amount=s['amount']) split.save() if transaction.auto_generated: url = "/finance/ledger/?all#t%d" % (transaction.id,) else: url = "/finance/ledger/#t%d" % (transaction.id,) return HttpResponseRedirect(url) # Include a few blank splits at the end of the transaction for entering # additional data. for i in range(4): splits.append({'memo': "", 'account': None, 'amount': Decimal("0.00")}) # Convert splits to a separated debit/credit format for s in splits: s['debit'] = Decimal("0.00") s['credit'] = Decimal("0.00") if s['amount'] > 0: s['debit'] = s['amount'] if s['amount'] < 0: s['credit'] = -s['amount'] return render_to_response('finance/transaction_update.html', {'user': request.user, 'accounts': Account.objects.order_by('name'), 'transaction': transaction, 'splits': splits})
|
_errors = []
_warnings = []
_notes = []
|
def __init__(self, data, *args, **kwargs):
    content = None
    if isinstance(data, QuerySet):
        content = serialize('json', object)
    else:
        content = json.dumps(data, indent=2, cls=json.DjangoJSONEdncoder,
                             ensure_ascii=False)
    super(JsonResponse, self).__init__(content, *args,
                                       content_type='application/json', **kwargs)
|
|
return render_to_response('chezbob/bob_message.html', m)
|
return render_to_response('chezbob/base.html', m)
|
def error(m):
    return render_to_response('chezbob/bob_message.html', m)
|
inventory = inventory_summary[item.bulkid]
|
if item.bulkid in inventory_summary:
    inventory = inventory_summary[item.bulkid]
else:
    inventory = {'activity': False, 'date': None, 'old_count': 0,
                 'purchases': 0, 'sales': 0}
|
def take_inventory(request, date): date = parse_date(date) show_all= request.GET.has_key('all') # If a POST request was submitted, apply any updates to the inventory data # in the database before rendering the response. if request.method == 'POST': if request.POST.get('session_key') != get_session_key(request): raise PermissionDenied try: n = 0 while True: n = str(int(n) + 1) bulkid = int(request.POST['id.' + n]) multiplier = int(request.POST['multiplier.' + n]) cases = None loose = None count = 0 modified = False try: cases = float(request.POST['cases.' + n]) count += int(round(cases * multiplier)) if request.POST['cases.' + n] != request.POST['old_cases.' + n]: modified = True except: pass try: loose = int(request.POST['items.' + n]) count += loose if request.POST['items.' + n] != request.POST['old_items.' + n]: modified = True except: pass # If both inventory fields are clear, then we want to delete # the inventory record entirely. if cases is None and loose is None: count = None # Only write the inventory record to the database if it # appeared to have been changed. This should provide limited # protection against concurrent inventory updates as long as # the same item is edited concurrently. try: old_count = request.POST['old_count.' + n] if old_count == "": old_count = None else: old_count = int(old_count) if count != old_count: modified = True except: pass if modified: Inventory.set_inventory(date, bulkid, count, cases, loose, multiplier) except KeyError: # Assume we hit the end of the POST inputs pass counts = Inventory.get_inventory(date) inventory_summary = Inventory.get_inventory_summary(date-datetime.timedelta(days=1), include_latest=False) locations = [] location = { 'name': 'Unknown', 'items': [] } locations.append(location); location = { 'name': 'Shelves', 'items': [] } locations.append(location); location = { 'name': 'Refrigerator', 'items': [] } locations.append(location); location = { 'name': 'Freezer', 'items': [] } locations.append(location); location = { 'name': 'Soda Machine', 'items': [] } locations.append(location); location = { 'name': 'Terminal', 'items': [] } locations.append(location); counter = 1 for item in BulkItem.objects.order_by('description'): #summary should contain an entry for every bulkid inventory = inventory_summary[item.bulkid] if item.bulkid in counts: (count, cases, loose, case_size) = counts[item.bulkid] if cases == None and loose == None: #if this is the old style database entry then compute the values cases = count // case_size loose = count % case_size elif not cases and not loose: loose = 0 cases = "" elif not cases: cases = "" elif not loose: loose = "" else: (count, cases, loose, case_size) = ("", "", "", item.quantity) # active is set to True if the count for this item is non-zero, # if the bulkidem is anntated 'active' in the database, or if # there have been any purchases or sales since the last inventory. 
active = inventory['activity'] or item.active or (count > 0 and count != "") if not active and not show_all: continue #no reason to inventory item estimate = inventory['old_count'] + inventory['purchases'] - inventory['sales'] info = {'type': item, 'prev_date': inventory['date'], 'prev_count': inventory['old_count'], 'est_add': inventory['purchases'], 'est_sub': inventory['sales'], 'estimate': estimate, 'active': active, 'count': count, 'count_cases': cases, 'count_items': loose, 'multiplier': case_size, 'counter': counter } counter += 1 locations[item.floor_location.id]['items'].append(info) return render_to_response('bobdb/take_inventory.html', {'user': request.user, 'title': "Take Inventory", 'date': date, 'locations': locations, 'session_key': get_session_key(request)})
|
tags.update({'source':'lukr', 'source:date':'2010-09-18'})
|
tags.update({'source':'lukr', 'source:date':'2010-09-17'})
|
def translateAttributes(attrs):
    if not attrs:
        return
    tags = {}
    # tags on all paths
    tags.update({'source':'lukr', 'source:date':'2010-09-18'})
    tags.update({'lukr:raw':str(attrs).replace("&apos", "")}) #all lukr original tags, including the unique ObjectId.
    tags.update({'lukr:highway':'footway'}) #remove the lukr: prefix when the path has been reviewed.
    tags.update({'bicycle':'yes'}) #bicycles are allowed on all roads
    #add width if it's not zero
    if attrs['BREIDD'] != ' 0.00':
        tags = {'width':attrs['BREIDD'].lstrip()}
|
tags = {'width':attrs['BREIDD'].lstrip()}
|
tags.update({'width':attrs['BREIDD'].lstrip()})
|
def translateAttributes(attrs):
    if not attrs:
        return
    tags = {}
    # tags on all paths
    tags.update({'source':'lukr', 'source:date':'2010-09-18'})
    tags.update({'lukr:raw':str(attrs).replace("&apos", "")}) #all lukr original tags, including the unique ObjectId.
    tags.update({'lukr:highway':'footway'}) #remove the lukr: prefix when the path has been reviewed.
    tags.update({'bicycle':'yes'}) #bicycles are allowed on all roads
    #add width if it's not zero
    if attrs['BREIDD'] != ' 0.00':
        tags = {'width':attrs['BREIDD'].lstrip()}
|
None)
|
inverted_index[i])
|
def train_aux_classifiers(self, ds, auxtasks, classifier_trainer,
                          inverted_index=None):
    dim = ds.dim
    w_data = []
    row = []
    col = []
    original_instances = ds.instances[ds._idx]
    print "Run joblib.Parallel"
    res = Parallel(n_jobs=-1, verbose=1)(
        delayed(_train_aux_classifier)(i, auxtask, original_instances, dim,
                                       classifier_trainer, None)
        for i, auxtask in enumerate(auxtasks))
|
classifier_trainer, inverted_index=None):
|
classifier_trainer, occurances=None):
|
def _train_aux_classifier(i, auxtask, original_instances, dim,
                          classifier_trainer, inverted_index=None):
    """Trains a single auxiliary classifier.

    Parameters
    ----------
    i : int
        The index of the auxiliary task.
    auxtask : tuple of ints
        The auxiliary task.
    original_instances : array, dtype=bolt.sparsedtype
        The unlabeled instances.
    dim : int
        The dimensionality of the feature space.
    classifier_trainer : AuxTrainer
        The concrete trainer for the auxiliary classifiers.
    inverted_index : dict
        The inverted index - if any.

    Returns
    -------
    i : int
        The index of the auxtask.
    sparse_w : (array, array)
        The sparse representation of the weight vector; the first array
        holds the indizes of the non zero features and the second array
        holds the values.
    """
    instances = original_instances
    if inverted_index is None:
        util.mask(instances, auxtask)
        labels = util.autolabel(instances, auxtask)
    else:
        occurances = inverted_index[j]
        util.mask(instances[occurances], auxtask)
        labels = np.ones((instances.shape[0],), dtype=np.float32)
        labels *= -1.0
        labels[occurances] = 1.0
    dataset = bolt.io.MemoryDataset(dim, instances, labels)
    w = classifier_trainer.train_classifier(dataset)
    return i, (w.nonzero()[0], w[w.nonzero()[0]])
|
if inverted_index is None:
|
if occurances is None:
|
def _train_aux_classifier(i, auxtask, original_instances, dim,
                          classifier_trainer, inverted_index=None):
    """Trains a single auxiliary classifier.

    Parameters
    ----------
    i : int
        The index of the auxiliary task.
    auxtask : tuple of ints
        The auxiliary task.
    original_instances : array, dtype=bolt.sparsedtype
        The unlabeled instances.
    dim : int
        The dimensionality of the feature space.
    classifier_trainer : AuxTrainer
        The concrete trainer for the auxiliary classifiers.
    inverted_index : dict
        The inverted index - if any.

    Returns
    -------
    i : int
        The index of the auxtask.
    sparse_w : (array, array)
        The sparse representation of the weight vector; the first array
        holds the indizes of the non zero features and the second array
        holds the values.
    """
    instances = original_instances
    if inverted_index is None:
        util.mask(instances, auxtask)
        labels = util.autolabel(instances, auxtask)
    else:
        occurances = inverted_index[j]
        util.mask(instances[occurances], auxtask)
        labels = np.ones((instances.shape[0],), dtype=np.float32)
        labels *= -1.0
        labels[occurances] = 1.0
    dataset = bolt.io.MemoryDataset(dim, instances, labels)
    w = classifier_trainer.train_classifier(dataset)
    return i, (w.nonzero()[0], w[w.nonzero()[0]])
|
occurances = inverted_index[j]
|
def _train_aux_classifier(i, auxtask, original_instances, dim,
                          classifier_trainer, inverted_index=None):
    """Trains a single auxiliary classifier.

    Parameters
    ----------
    i : int
        The index of the auxiliary task.
    auxtask : tuple of ints
        The auxiliary task.
    original_instances : array, dtype=bolt.sparsedtype
        The unlabeled instances.
    dim : int
        The dimensionality of the feature space.
    classifier_trainer : AuxTrainer
        The concrete trainer for the auxiliary classifiers.
    inverted_index : dict
        The inverted index - if any.

    Returns
    -------
    i : int
        The index of the auxtask.
    sparse_w : (array, array)
        The sparse representation of the weight vector; the first array
        holds the indizes of the non zero features and the second array
        holds the values.
    """
    instances = original_instances
    if inverted_index is None:
        util.mask(instances, auxtask)
        labels = util.autolabel(instances, auxtask)
    else:
        occurances = inverted_index[j]
        util.mask(instances[occurances], auxtask)
        labels = np.ones((instances.shape[0],), dtype=np.float32)
        labels *= -1.0
        labels[occurances] = 1.0
    dataset = bolt.io.MemoryDataset(dim, instances, labels)
    w = classifier_trainer.train_classifier(dataset)
    return i, (w.nonzero()[0], w[w.nonzero()[0]])
|
|
trainer = structlearn.ElasticNetTrainer(0.00001, 0.85, 10**6.0),
strategy = structlearn.HadoopTrainingStrategy()
|
trainer = auxtrainer.ElasticNetTrainer(0.00001, 0.85, 10**6.0),
strategy = auxstrategy.HadoopTrainingStrategy()
|
def train():
    maxlines = 50000
    argv = sys.argv[1:]
    slang = argv[0]
    tlang = argv[1]
    fname_s_train = argv[2]
    fname_s_unlabeled = argv[3]
    fname_t_unlabeled = argv[4]
    fname_dict = argv[5]
    s_voc = vocabulary(fname_s_train, fname_s_unlabeled, mindf = 2, maxlines = maxlines)
    t_voc = vocabulary(fname_t_unlabeled, mindf = 2, maxlines = maxlines)
    s_voc, t_voc, dim = disjoint_voc(s_voc,t_voc)
    s_ivoc = dict([(idx,term) for term, idx in s_voc.items()])
    print("|V_S| = %d\n|V_T| = %d" % (len(s_voc), len(t_voc)))
    print("|V| = %d" % dim)
    s_train = load(fname_s_train, s_voc, dim)
    s_unlabeled = load(fname_s_unlabeled, s_voc, dim, maxlines = maxlines)
    t_unlabeled = load(fname_t_unlabeled, t_voc, dim, maxlines = maxlines)
    print("|s_train| = %d" % s_train.n)
    print("|s_unlabeled| = %d" % s_unlabeled.n)
    print("|t_unlabeled| = %d" % t_unlabeled.n)
    translator = DictTranslator.load(fname_dict, s_ivoc, t_voc)
    pivotselector = pivotselection.MISelector()
    trainer = structlearn.ElasticNetTrainer(0.00001, 0.85, 10**6.0),
    strategy = structlearn.HadoopTrainingStrategy()#SerialTrainingStrategy()
    clscl_trainer = CLSCLTrainer(s_train, s_unlabeled, t_unlabeled,
                                 pivotselector, translator, trainer, strategy)
    model = clscl_trainer.train(450, 30, 100)
    model.s_voc = s_voc
    model.t_voc = t_voc
    f = open(argv[6], "wb+")
    pickle.dump(model, f)
    f.close()
|
terms = [(self.s_ivoc[ws],self.t_ivoc[wt]) for ws,wt in pivots]
for term in terms[:50]:
    print term
|
def select_pivots(self, m, phi):
|
|
trainer = auxtrainer.ElasticNetTrainer(0.00001, 0.85, 10**6.0),
|
trainer = auxtrainer.ElasticNetTrainer(0.00001, 0.85, 10**6.0)
|
def train():
    maxlines = 50000
    argv = sys.argv[1:]
    slang = argv[0]
    tlang = argv[1]
    fname_s_train = argv[2]
    fname_s_unlabeled = argv[3]
    fname_t_unlabeled = argv[4]
    fname_dict = argv[5]
    s_voc = vocabulary(fname_s_train, fname_s_unlabeled, mindf = 2, maxlines = maxlines)
    t_voc = vocabulary(fname_t_unlabeled, mindf = 2, maxlines = maxlines)
    s_voc, t_voc, dim = disjoint_voc(s_voc,t_voc)
    s_ivoc = dict([(idx,term) for term, idx in s_voc.items()])
    t_ivoc = dict([(idx,term) for term, idx in t_voc.items()])
    print("|V_S| = %d\n|V_T| = %d" % (len(s_voc), len(t_voc)))
    print(" |V| = %d" % dim)
    s_train = load(fname_s_train, s_voc, dim)
    s_unlabeled = load(fname_s_unlabeled, s_voc, dim, maxlines = maxlines)
    t_unlabeled = load(fname_t_unlabeled, t_voc, dim, maxlines = maxlines)
    print(" |s_train| = %d" % s_train.n)
    print("|s_unlabeled| = %d" % s_unlabeled.n)
    print("|t_unlabeled| = %d" % t_unlabeled.n)
    translator = DictTranslator.load(fname_dict, s_ivoc, t_voc)
    pivotselector = pivotselection.MISelector()
    trainer = auxtrainer.ElasticNetTrainer(0.00001, 0.85, 10**6.0),
    strategy = auxstrategy.SerialTrainingStrategy()#SerialTrainingStrategy()
    clscl_trainer = CLSCLTrainer(s_train, s_unlabeled, t_unlabeled,
                                 pivotselector, translator, trainer, strategy)
    clscl_trainer.s_ivoc = s_ivoc
    clscl_trainer.t_ivoc = t_ivoc
    model = clscl_trainer.train(450, 30, 100)
    model.s_voc = s_voc
    model.t_voc = t_voc
    f = open(argv[6], "wb+")
    pickle.dump(model, f)
    f.close()
|
t = unichr(n)
|
t = chr(n)
|
def test_bmp(self):
    for n in range(0,0x10000):
        # Just check that it doesn't throw an exception
        t = unichr(n)
        unidecode(t)
|
''),
|
'A'), ('\U0001d5c4\U0001d5c6/\U0001d5c1', 'km/h'),
|
def test_specific(self):
|
for input, output in TESTS:
    self.failUnlessEqual(unidecode(input), output)
|
for instr, output in TESTS:
    self.failUnlessEqual(unidecode(instr), output)
|
def test_specific(self):
|
scipy.optimize.fmin_l_bfgs_b(self.error_func,
|
scipy.optimize.fmin_l_bfgs_b(error_func,
|
def _solve(self, error_func, job_server=None):
    """ Optimize the parameters of the function. """
|
if polished_error < self.population_errors[best_ind]:
|
if polished_error < self.best_error:
|
def _solve(self, error_func, job_server=None):
    """ Optimize the parameters of the function. """
|
tmp = {row.index: { 'query': getCurrentUsername(context),
|
mt = getToolByName(context, 'portal_membership')
user = mt.getAuthenticatedMember()
username = 'Anonymous User'
if user:
    username = user.getUserName()
return {row.index: { 'query': username,
|
def _currentUser(context, row):
    tmp = {row.index: {
              'query': getCurrentUsername(context),
              },
          }
    return tmp
|
return tmp
|
def _currentUser(context, row):
    tmp = {row.index: {
              'query': getCurrentUsername(context),
              },
          }
    return tmp
|
|
row.values = my_date
|
row = Row(index=row.index, operator=row.operator, values=my_date)
|
def _lessThanRelativeDate(context, row):
    values = int(row.values)
    now = DateTime()
    my_date = now + values
    my_date = my_date.earliestTime()
    row.values = my_date
    return _lessThan(context, row)
|
row.values = my_date
|
row = Row(index=row.index, operator=row.operator, values=my_date)
|
def _moreThanRelativeDate(context, row):
    values = int(row.values)
    now = DateTime()
    my_date = now + values
    my_date = my_date.latestTime()
    row.values = my_date
    return _largerThan(context, row)
|
if obj and hasattr(obj, 'getPhysicalPath'):
    row.values = '/'.join(obj.getPhysicalPath())
    return _path(context, row)
|
row = Row(index=row.index, operator=row.operator, values='/'.join(obj.getPhysicalPath()))
|
def _relativePath(context, row):
    t = len([x for x in row.values.split('/') if x])
    obj = context
    for x in xrange(t):
        obj = aq_parent(obj)
        if obj and hasattr(obj, 'getPhysicalPath'):
            row.values = '/'.join(obj.getPhysicalPath())
            return _path(context, row)
    row.values = '/'.join(obj.getPhysicalPath())
    return _path(context, row)
|
row.values = '/'.join(obj.getPhysicalPath())
|
def _relativePath(context, row):
    t = len([x for x in row.values.split('/') if x])
    obj = context
    for x in xrange(t):
        obj = aq_parent(obj)
        if obj and hasattr(obj, 'getPhysicalPath'):
            row.values = '/'.join(obj.getPhysicalPath())
            return _path(context, row)
    row.values = '/'.join(obj.getPhysicalPath())
    return _path(context, row)
|
|
def getCurrentUsername(context):
    mt = getToolByName(context, 'portal_membership')
    user = mt.getAuthenticatedMember()
    if user:
        return user.getUserName()
    return ''
|
def _relativePath(context, row):
    t = len([x for x in row.values.split('/') if x])
    obj = context
    for x in xrange(t):
        obj = aq_parent(obj)
        if obj and hasattr(obj, 'getPhysicalPath'):
            row.values = '/'.join(obj.getPhysicalPath())
            return _path(context, row)
    row.values = '/'.join(obj.getPhysicalPath())
    return _path(context, row)
|
|
if 'form.button.addcriteria' or 'removecriteria' in form:
|
if 'form.button.addcriteria' in form or 'removecriteria' in form:
|
def process_form(self, instance, field, form, empty_marker=None,
                 emptyReturnsMarker=False, validating=True):
    """A custom implementation for the widget form processing."""
    value = form.get(field.getName())
    # check if form.button.addcriteria is in request,
    # this only happends when javascript is disabled
    if 'form.button.addcriteria' or 'removecriteria' in form:
        return {}, {}
    if value:
        return value, {}
|
data = { 'index': 'modified', 'values': ['2009/08/12', '2009/08/14'], }
|
data = Row(index='modified', operator='_between', values=['2009/08/12', '2009/08/14'])
|
def test__between(self):
    data = {
        'index': 'modified',
        'values': ['2009/08/12', '2009/08/14'],
    }
    parsed = queryparser._between(None, data)
    expected = {'modified': {'query': ['2009/08/12', '2009/08/14'],
                             'range': 'minmax'}}
    self.assertEqual(parsed, expected)
|
import pdb; pdb.set_trace( )
|
def test_string_equality(self):
    registry = self.portal.portal_registry
    prefix = "plone.app.collection.operation.string.is"
    import pdb; pdb.set_trace( )
    assert prefix+'.title' in registry
    self.assertEqual(registry[prefix+".title"], "equals")
    self.assertEqual(registry[prefix+".description"],
                     'Tip: you can use * to autocomplete.')
    self.assertEqual(registry[prefix+".operation"], 'is')
|
|
sortables = result.get('plone.app.collection.field')
|
sortables = result['sortable']
|
def test_sortable_indexes(self):
    registry = self.createRegistry(td.minimal_missing_operator_xml)
    reader = ICollectionRegistryReader(registry)
    result = reader.parseRegistry()
    result = reader.mapOperations(result)
    result = reader.mapSortableIndexes(result)
    sortables = result.get('plone.app.collection.field')
|
assert len(sortables)
|
assert len(sortables) > 0
|
def test_sortable_indexes(self):
    registry = self.createRegistry(td.minimal_missing_operator_xml)
    reader = ICollectionRegistryReader(registry)
    result = reader.parseRegistry()
    result = reader.mapOperations(result)
    result = reader.mapSortableIndexes(result)
    sortables = result.get('plone.app.collection.field')
|
assert not field['sortable']
|
assert field['sortable'] == True
|
def test_sortable_indexes(self):
    registry = self.createRegistry(td.minimal_missing_operator_xml)
    reader = ICollectionRegistryReader(registry)
    result = reader.parseRegistry()
    result = reader.mapOperations(result)
    result = reader.mapSortableIndexes(result)
    sortables = result.get('plone.app.collection.field')
|
self.parser = queryparser.QueryParser(None, None)
|
self.parser = queryparser.QueryParser(MockSite(), None)
|
def setUp(self):
    super(TestQueryParserBase, self).setUp()
|
print 'value = %s' % value
|
def process_form(self, instance, field, form, empty_marker=None,
                 emptyReturnsMarker=False, validating=True):
    """A custom implementation for the widget form processing."""
    value = form.get(field.getName())
    print 'value = %s' % value
    self.value = value
    #if 'form.button.addcriteria' in form:
    if value:
        return value, {}
|
|
return np.exp(-1.0/sigma2 * projection_frobenius_norm_matrix)
|
return np.exp(-1.0/sigma2 * projection_frobenius_norm_matrix*projection_frobenius_norm_matrix)
|
def compute_tensorial_kernel(X, Y=None, sigma2=1.0):
    """Compute tensorial RBF kernel between two datasets. """
    if Y is None:
        projection_frobenius_norm_matrix = compute_projection_frobenius_norm_train(X)
    else:
        projection_frobenius_norm_matrix = compute_projection_frobenius_norm_test(X, Y)
    return np.exp(-1.0/sigma2 * projection_frobenius_norm_matrix)
|
W_Y = Vh_X.T
W_X = Vh_Y.T
|
W_X = Vh_X.T
W_Y = Vh_Y.T
|
def compute_projection_frobenius_norm(U_s_Vh_list_X, U_s_Vh_list_Y):
    """Compute the projection Frobenius norm between two tensors. """
    projection_frobenius_norm = 0.0
    for i in range(len(U_s_Vh_list_X)):
        U_X, s_X, Vh_X = U_s_Vh_list_X[i]
        U_Y, s_Y, Vh_Y = U_s_Vh_list_Y[i]
        if U_X.shape[0] > Vh_X.shape[1]:
            W_X = U_X
            W_Y = W_X
        else:
            W_Y = Vh_X.T
            W_X = Vh_Y.T
        projection_frobenius_norm += np.linalg.norm(np.dot(W_X,W_X.T)-np.dot(W_Y,W_Y.T) , ord='fro')
    return projection_frobenius_norm
|
def ndim_meshgrid(*arrs):
    """n-dimensional analogue to numpy.meshgrid"""
    arrs = tuple(reversed(arrs))
    lens = map(len, arrs)
    dim = len(arrs)
    sz = 1
    for s in lens:
        sz*=s
    ans = []
    for i, arr in enumerate(arrs):
        slc = [1]*dim
        slc[i] = lens[i]
        arr2 = asarray(arr).reshape(slc)
        for j, sz in enumerate(lens):
            if j!=i:
                arr2 = arr2.repeat(sz, axis=j)
        ans.append(arr2)
    return tuple(ans)
|
def ndim_meshgrid(*arrs):
    """n-dimensional analogue to numpy.meshgrid"""
    arrs = tuple(reversed(arrs)) #edit
    lens = map(len, arrs)
    dim = len(arrs)
    sz = 1
    for s in lens:
        sz*=s
    ans = []
    for i, arr in enumerate(arrs):
        slc = [1]*dim
        slc[i] = lens[i]
        arr2 = asarray(arr).reshape(slc)
        for j, sz in enumerate(lens):
            if j!=i:
                arr2 = arr2.repeat(sz, axis=j)
        ans.append(arr2)
    return tuple(ans)
|
|
takes a list of lists of equal length q = [[1,2],[3,4]]
|
takes a list of lists of arbitrary length q = [[1,2],[3,4]]
|
def gridpts(q): """
|
q = list(reversed(q))
w = ndim_meshgrid(*q)
for i in range(len(q)):
    q[i] = list( w[i].reshape(w[i].size) )
q = zip(*q)
return [list(i) for i in q]
|
w = [[] for i in range(len(q[-1]))]
for j in range(len(q)-1,-1,-1):
    for k in range(len(q[j])):
        for l in range(k*len(w)/len(q[j]), (k+1)*len(w)/len(q[j])):
            w[l].append(q[j][k])
    if j:
        w += [i[:] for i in w[:]*(len(q[j-1])-1)]
return [list(reversed(w[i])) for i in range(len(w))]
|
def gridpts(q): """
|
if len(cand)%2:
    return cand[len(cand)/2], cand[len(cand)/2]
return cand[len(cand)/2], cand[len(cand)/2 - 1]
|
best = [cand[len(cand)/2], n/cand[len(cand)/2]]
best.sort(reverse=True)
return tuple(best)
|
def best_dimensions(n):
    "get the 'best' dimensions (n x m) for arranging plots"
    allfactors = list(factor(n))
    from numpy import product
    cand = [1] + [product(allfactors[:i+1]) for i in range(len(allfactors))]
    if len(cand)%2:
        return cand[len(cand)/2], cand[len(cand)/2]
    return cand[len(cand)/2], cand[len(cand)/2 - 1]
|
def derivative(self,coeffs):
    """evaluates n-dimensional Rosenbrock derivative for a list of coeffs

    minimum is f'(x)=[0.0]*n at x=[1.0]*n; x must have len >= 2"""
    l = len(coeffs)
    x = [0]*l
    x[:l]=coeffs
    x = asarray(x)
    xm = x[1:-1]
    xm_m1 = x[:-2]
    xm_p1 = x[2:]
    der = zeros_like(x)
    der[1:-1] = 200*(xm-xm_m1**2) - 400*(xm_p1 - xm**2)*xm - 2*(1-xm)
    der[0] = -400*x[0]*(x[1]-x[0]**2) - 2*(1-x[0])
    der[-1] = 200*(x[-1]-x[-2]**2)
    return list(der)

def hessian(self, coeffs):
    """evaluates n-dimensional Rosenbrock hessian for the given coeffs

    coeffs must have len >= 2"""
    x = atleast_1d(coeffs)
    H = diag(-400*x[:-1],1) - diag(400*x[:-1],-1)
    diagonal = zeros(len(x), dtype=x.dtype)
    diagonal[0] = 1200*x[0]-400*x[1]+2
    diagonal[-1] = 200
    diagonal[1:-1] = 202 + 1200*x[1:-1]**2 - 400*x[2:]
    H = H + diag(diagonal)
    return H

def hessian_product(self, coeffs, p):
    """evaluates n-dimensional Rosenbrock hessian product for the given coeffs

    both p and coeffs must have len >= 2"""
    p = atleast_1d(p)
    x = atleast_1d(coeffs)
    Hp = zeros(len(x), dtype=x.dtype)
    Hp[0] = (1200*x[0]**2 - 400*x[1] + 2)*p[0] - 400*x[0]*p[1]
    Hp[1:-1] = -400*x[:-2]*p[:-2]+(202+1200*x[1:-1]**2-400*x[2:])*p[1:-1] \
               -400*x[1:-1]*p[2:]
    Hp[-1] = -400*x[-2]*p[-2] + 200*p[-1]
    return Hp
|
# def forward(self,pts):
|
|
reload(south.signals)
|
def load_post_syncdb_signals():
    """
    This is a duplicate from Django syncdb management command.

    This code imports any module named 'management' in INSTALLED_APPS.
    The 'management' module is the preferred way of listening to
    post_syncdb signals.
    """
    unload_post_syncdb_signals()
    # If south is available, we should reload it's post_migrate signal.
    try:
        import south.signals
        south.signals.post_migrate.receivers = []
        reload(south.signals)
    except ImportError:
        pass
    for app_name in settings.INSTALLED_APPS:
        try:
            module = app_name + '.management'
            # As we first unload signals, we need to reload module
            # if present in modules cache. That will reload signals.
            if sys.modules.get(module):
                reload(sys.modules[module])
            else:
                __import__(module, {}, {}, [''])
        except ImportError, exc:
            msg = exc.args[0]
            if not msg.startswith('No module named') or 'management' not in msg:
                raise
|
|
padded = "\x02%s\x00%s" % ("\xFF" * (128 - (len(toEncrypt)) -2), toEncrypt)
|
padded = "\x00\x02%s\x00%s" % ("\xFF" * (128 - (len(toEncrypt)) -3), toEncrypt)
|
def usage():
    print "Usage:", sys.argv[0],
    print "[-p pin][--pin=pin]",
    print "[-c lib][--lib=lib]",
    print "[-S][--sign]",
    print "[-d][--decrypt]",
    print "[-h][--help]",
|
def initToken(self, pin, label):
    """ C_InitToken """
    rv = self.lib.C_InitToken(self.session, pin, label)
    if rv != CKR_OK:
        raise PyKCS11Error(rv)
|
def logout(self):
    """ C_Logout """
    rv = self.lib.C_Logout(self.session)
    if rv != CKR_OK:
        raise PyKCS11Error(rv)
|
|
waml.append_waml(blip.insert_inline_blip(match.end()-2),
|
waml.append_waml(blip.insert_inline_blip(match.end()),
|
def OnBlipSubmitted(event, wavelet):
    game = WaveGame.all().filter('waveid =', wavelet.wave_id).get()
    if not game:
        return
    blip = event.blip
    com = Commander(game, blip.creator)
    for match in COMMAND_RE.finditer(blip.text):
        result = com.command(match.group('commands'))
        if result is not False:
            # Swap brackets for parens and italicize to mark the command read
            blip.at(match.start()).replace('(')
            blip.at(match.end() - 1).replace(')')
            blipmatch = blip.range(match.start(), match.end())
            blipmatch.annotate('style/fontStyle', 'italic')
        if any(sen['verb'] == 'act' or sen['verb'] == 'contest'
               for sen in com.commanded):
            waml.append_waml(blip, 'wave/roll.yaml', {'roll': game.lastroll})
        if com.errors or com.warnings:
            waml.append_waml(blip.insert_inline_blip(match.end()-2),
                             'wave/errors.yaml',
                             {'errors': com.errors, 'warnings': com.warnings},
                             )
    if com.tickselapsed:
        ticks = ''
        if com.tickselapsed <= 3:
            ticks = ' '.join(['Tick.'] * com.tickselapsed)
        else:
            ticks = '%s Ticks.' % com.tickselapsed
        waml.append_waml(wavelet.reply(), 'wave/ticks.yaml',
                         {'ticks': ticks, 'atready': com.atready,
                          'maxpoise': max(max_influence(char, 'poise')
                                          for char in com.atready)},
                         )
    com.commit()
|
self.xorg_conf.makeSection('Device', identifier='Configured Video Device')
self.xorg_conf.addOption('Device', 'Driver', 'vboxvideo', position=0)
|
def customiseConfig(self):
    # set DefaultDepth to 24; X.org does not work otherwise
    self.xorg_conf.makeSection('Screen', identifier='Default Screen')
    self.xorg_conf.addOption('Screen', 'DefaultDepth', '24', position=0, prefix='')
|
|
self.xorg_conf.addOption('Screen', 'DefaultDepth', '24', position=0, prefix='')
|
self.xorg_conf.addOption('Screen', 'Device', 'Configured Video Device', position=0)
|
def customiseConfig(self):
    # set DefaultDepth to 24; X.org does not work otherwise
    self.xorg_conf.makeSection('Screen', identifier='Default Screen')
    self.xorg_conf.addOption('Screen', 'DefaultDepth', '24', position=0, prefix='')
|
self.xorg_conf.makeSubSection('Screen', 'Display', position=0)
self.xorg_conf.addSubOption('Screen', 'Display', 'Depth', value='24', position=0)
self.xorg_conf.addSubOption('Screen', 'Display', 'Modes', value='"1024x600"', position=0)
|
self.xorg_conf.makeSection('ServerLayout', identifier='Default Layout')
self.xorg_conf.addOption('ServerLayout', 'Screen', 'Default Screen', position=0)
|
def customiseConfig(self):
    # set DefaultDepth to 24; X.org does not work otherwise
    self.xorg_conf.makeSection('Screen', identifier='Default Screen')
    self.xorg_conf.addOption('Screen', 'DefaultDepth', '24', position=0, prefix='')
|
r".*\.js", r".*\.py", r".*\.json", r".*\.sh", r".*\.rb",
|
r".*\.js", r".*\.py", r".*\.sh", r".*\.rb", r".*\.pl", r".*\.pm",
|
def __init__(self, *args, **kwargs):
    raise NotImplementedException()  # TODO(joi) Implement.
|
r".*\.java", r".*\.mk", r".*\.am", r".*\.txt",
|
r".*\.java", r".*\.mk", r".*\.am",
|
def __init__(self, *args, **kwargs):
    raise NotImplementedException()  # TODO(joi) Implement.
|
return RunShellWithReturnCode(['svn', 'diff'] + files + args, print_output=True)[1]
|
root = GetRepositoryRoot()
cmd = ['svn', 'diff']
cmd.extend([os.path.join(root, x) for x in files])
cmd.extend(args)
return RunShellWithReturnCode(cmd, print_output=True)[1]
|
def CMDdiff(args):
    """Diffs all files in the changelist or all files that aren't in a CL."""
    files = None
    if args:
        change_info = ChangeInfo.Load(args.pop(0), GetRepositoryRoot(),
                                      True, True)
        files = change_info.GetFileNames()
    else:
        files = GetFilesNotInCL()
    return RunShellWithReturnCode(['svn', 'diff'] + files + args,
                                  print_output=True)[1]
|
try:
    def WebKitRevision(options, opt, value, parser):
        if not hasattr(options, 'sub_rep'):
            options.sub_rep = []
        if parser.rargs and not parser.rargs[0].startswith('-'):
            options.sub_rep.append('third_party/WebKit@%s' % parser.rargs.pop(0))
        else:
            options.sub_rep.append('third_party/WebKit')
    group.add_option("-W", "--webkit", action="callback", callback=WebKitRevision,
                     metavar="BRANCH",
                     help="Shorthand for -s third_party/WebKit@BRANCH. "
                          "BRANCH is optional and is the branch the current "
                          "checkout will be diff'ed against.")
except optparse.OptionError:
    pass
|
def TryChange(argv, file_list, swallow_exception, prog=None, extra_epilog=None): """ Args: argv: Arguments and options. file_list: Default value to pass to --file. swallow_exception: Whether we raise or swallow exceptions. """ # Parse argv parser = optparse.OptionParser(usage=USAGE, version=__version__, prog=prog) epilog = EPILOG % { 'prog': prog } if extra_epilog: epilog += extra_epilog parser.epilog = epilog # Remove epilog formatting parser.format_epilog = lambda x: parser.epilog parser.add_option("-v", "--verbose", action="count", default=0, help="Prints debugging infos") group = optparse.OptionGroup(parser, "Result and status") group.add_option("-u", "--user", default=getpass.getuser(), help="Owner user name [default: %default]") group.add_option("-e", "--email", default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS', os.environ.get('EMAIL_ADDRESS')), help="Email address where to send the results. Use either " "the TRYBOT_RESULTS_EMAIL_ADDRESS environment " "variable or EMAIL_ADDRESS to set the email address " "the try bots report results to [default: %default]") group.add_option("-n", "--name", help="Descriptive name of the try job") group.add_option("--issue", type='int', help="Update rietveld issue try job status") group.add_option("--patchset", type='int', help="Update rietveld issue try job status. This is " "optional if --issue is used, In that case, the " "latest patchset will be used.") group.add_option("--dry_run", action='store_true', help="Just prints the diff and quits") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Try job options") group.add_option("-b", "--bot", action="append", help="Only use specifics build slaves, ex: '--bot win' to " "run the try job only on the 'win' slave; see the try " "server waterfall for the slave's name") group.add_option("-r", "--revision", help="Revision to use for the try job; default: the " "revision will be determined by the try server; see " "its waterfall for more info") group.add_option("-c", "--clobber", action="store_true", help="Force a clobber before building; e.g. don't do an " "incremental build") # TODO(maruel): help="Select a specific configuration, usually 'debug' or " # "'release'" group.add_option("--target", help=optparse.SUPPRESS_HELP) group.add_option("--project", help="Override which project to use. Projects are defined " "server-side to define what default bot set to use") group.add_option("-t", "--testfilter", action="append", help="Add a gtest_filter to a test. Use multiple times to " "specify filters for different tests. (i.e. " "--testfilter base_unittests:ThreadTest.* " "--testfilter ui_tests) If you specify any testfilters " "the test results will not be reported in rietveld and " "only tests with filters will run.") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Patch to run") group.add_option("-f", "--file", default=file_list, dest="files", metavar="FILE", action="append", help="Use many times to list the files to include in the " "try, relative to the repository root") group.add_option("--diff", help="File containing the diff to try") group.add_option("--url", help="Url where to grab a patch, e.g. 
" "http://example.com/x.diff") group.add_option("-R", "--rietveld_url", default="codereview.appspot.com", metavar="URL", help="Has 2 usages, both refer to the rietveld instance: " "Specify which code review patch to use as the try job " "or rietveld instance to update the try job results " "Default:%default") group.add_option("--root", help="Root to use for the patch; base subdirectory for " "patch created in a subdirectory") group.add_option("-p", "--patchlevel", type='int', metavar="LEVEL", help="Used as -pN parameter to patch") group.add_option("-s", "--sub_rep", action="append", default=[], help="Subcheckout to use in addition. This is mainly " "useful for gclient-style checkouts. Use @rev or " "@branch or @branch1..branch2 to specify the " "revision/branch to diff against.") # Mostly chromium-specific try: def WebKitRevision(options, opt, value, parser): if not hasattr(options, 'sub_rep'): options.sub_rep = [] if parser.rargs and not parser.rargs[0].startswith('-'): options.sub_rep.append('third_party/WebKit@%s' % parser.rargs.pop(0)) else: options.sub_rep.append('third_party/WebKit') group.add_option("-W", "--webkit", action="callback", callback=WebKitRevision, metavar="BRANCH", help="Shorthand for -s third_party/WebKit@BRANCH. " "BRANCH is optional and is the branch the current " "checkout will be diff'ed against.") except optparse.OptionError: # append_const is not supported on 2.4. Too bad. pass group.add_option("--no_gclient", action="store_true", help="Disable automatic search for gclient checkout.") group.add_option("-E", "--exclude", action="append", default=['ChangeLog'], metavar='REGEXP', help="Regexp patterns to exclude files. Default: %default") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server by HTTP") group.add_option("--use_http", action="store_const", const=_SendChangeHTTP, dest="send_patch", help="Use HTTP to talk to the try server [default]") group.add_option("-H", "--host", help="Host address") group.add_option("-P", "--port", help="HTTP port") group.add_option("--proxy", help="HTTP proxy") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server with SVN") group.add_option("--use_svn", action="store_const", const=_SendChangeSVN, dest="send_patch", help="Use SVN to talk to the try server") group.add_option("-S", "--svn_repo", metavar="SVN_URL", help="SVN url to use to write the changes in; --use_svn is " "implied when using --svn_repo") parser.add_option_group(group) options, args = parser.parse_args(argv) # Note that the args array includes the script name, so # a single argument results in len(args) == 2. # If they've asked for help, give it to them if len(args) == 2 and args[1] == 'help': parser.print_help() return 0 # If they've said something confusing, don't spawn a try job until you # understand what they want. if len(args) > 1: plural = "" if len(args) > 2: plural = "s" print "Argument%s \"%s\" not understood" % (plural, " ".join(args[1:])) parser.print_help() return 1 LOG_FORMAT = '%(levelname)s %(filename)s(%(lineno)d): %(message)s' if not swallow_exception: if options.verbose == 0: logging.basicConfig(level=logging.WARNING, format=LOG_FORMAT) elif options.verbose == 1: logging.basicConfig(level=logging.INFO, format=LOG_FORMAT) elif options.verbose > 1: logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT) logging.debug(argv) # Strip off any @ in the user, otherwise svn gets confused. 
options.user = options.user.split('@', 1)[0] if options.rietveld_url: # Try to extract the review number if possible and fix the protocol. if not '://' in options.rietveld_url: options.rietveld_url = 'http://' + options.rietveld_url match = re.match(r'^(.*)/(\d+)$', options.rietveld_url) if match: if options.issue or options.patchset: parser.error('Cannot use both --issue and use a review number url') options.issue = int(match.group(2)) options.rietveld_url = match.group(1) try: # Always include os.getcwd() in the checkout settings. checkouts = [] checkouts.append(GuessVCS(options, os.getcwd())) checkouts[0].AutomagicalSettings() for item in options.sub_rep: checkout = GuessVCS(options, os.path.join(checkouts[0].checkout_root, item)) if checkout.checkout_root in [c.checkout_root for c in checkouts]: parser.error('Specified the root %s two times.' % checkout.checkout_root) checkouts.append(checkout) can_http = options.port and options.host can_svn = options.svn_repo # If there was no transport selected yet, now we must have enough data to # select one. if not options.send_patch and not (can_http or can_svn): parser.error('Please specify an access method.') # Convert options.diff into the content of the diff. if options.url: if options.files: parser.error('You cannot specify files and --url at the same time.') options.diff = urllib.urlopen(options.url).read() elif options.diff: if options.files: parser.error('You cannot specify files and --diff at the same time.') options.diff = gclient_utils.FileRead(options.diff, 'rb') elif options.issue and options.patchset is None: # Retrieve the patch from rietveld when the diff is not specified. # When patchset is specified, it's because it's done by gcl/git-try. if json is None: parser.error('json or simplejson library is missing, please install.') api_url = '%s/api/%d' % (options.rietveld_url, options.issue) logging.debug(api_url) contents = json.loads(urllib.urlopen(api_url).read()) options.patchset = contents['patchsets'][-1] diff_url = ('%s/download/issue%d_%d.diff' % (options.rietveld_url, options.issue, options.patchset)) diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines()) options.diff = ''.join(diff) else: # Use this as the base. root = checkouts[0].checkout_root diffs = [] for checkout in checkouts: diff = checkout.GenerateDiff().splitlines(True) path_diff = gclient_utils.PathDifference(root, checkout.checkout_root) # Munge it. diffs.extend(GetMungedDiff(path_diff, diff)) options.diff = ''.join(diffs) if not options.bot: # Get try slaves from PRESUBMIT.py files if not specified. # Even if the diff comes from options.url, use the local checkout for bot # selection. try: import presubmit_support root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py') options.bot = presubmit_support.DoGetTrySlaves( checkouts[0].GetFileNames(), checkouts[0].checkout_root, root_presubmit, False, sys.stdout) except ImportError: pass # If no bot is specified, either the default pool will be selected or the # try server will refuse the job. Either case we don't need to interfere. if options.name is None: if options.issue: options.name = 'Issue %s' % options.issue else: options.name = 'Unnamed' print('Note: use --name NAME to change the try job name.') if not options.email: parser.error('Using an anonymous checkout. Please use --email or set ' 'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.') else: print('Results will be emailed to: ' + options.email) # Prevent rietveld updates if we aren't running all the tests. 
    if options.testfilter is not None:
      options.issue = None
      options.patchset = None

    # Send the patch.
    if options.send_patch:
      # If forced.
      options.send_patch(options)
      PrintSuccess(options)
      return 0
    try:
      if can_http:
        _SendChangeHTTP(options)
        PrintSuccess(options)
        return 0
    except NoTryServerAccess:
      if not can_svn:
        raise
    _SendChangeSVN(options)
    PrintSuccess(options)
    return 0
  except (InvalidScript, NoTryServerAccess), e:
    if swallow_exception:
      return 1
    print e
    return 1
  return 0
|
|
assert re.match(r'^[a-z]+://[a-z0-9\.-_]+[a-z](|:[0-9]+)$', self.host), (
|
assert re.match(r'^[a-z]+://[a-z0-9\.-_]+(|:[0-9]+)$', self.host), (
|
def __init__(self, host, auth_function, host_override=None,
             extra_headers={}, save_cookies=False,
             account_type=AUTH_ACCOUNT_TYPE):
  """Creates a new HttpRpcServer.
|
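Note on this hunk: the only change is dropping the trailing [a-z] from the host pattern, which had required the host name to end with a letter and so rejected numeric hosts. A minimal sketch of the difference (demo code, not part of the dataset):

import re

OLD = r'^[a-z]+://[a-z0-9\.-_]+[a-z](|:[0-9]+)$'
NEW = r'^[a-z]+://[a-z0-9\.-_]+(|:[0-9]+)$'

for host in ('http://codereview.appspot.com', 'http://10.0.0.1'):
  # The old pattern fails on hosts whose last character is a digit.
  print('%s old=%s new=%s' % (host,
                              bool(re.match(OLD, host)),
                              bool(re.match(NEW, host))))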
try:
  import simplejson
except ImportError:
  parser.error('simplejson library is missing, please install.')
|
if json is None:
  parser.error('json or simplejson library is missing, please install.')
|
def WebKitRevision(options, opt, value, parser):
  if not hasattr(options, 'sub_rep'):
    options.sub_rep = []
  if parser.rargs and not parser.rargs[0].startswith('-'):
    options.sub_rep.append('third_party/WebKit@%s' % parser.rargs.pop(0))
  else:
    options.sub_rep.append('third_party/WebKit')
|
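Note on this hunk: the inline try/import of simplejson is replaced by a check against a module-level json binding. The exact import fallback is not shown in these cells; a common pattern consistent with the 'json is None' check would be (an assumption, not quoted from trychange.py):

try:
  import json
except ImportError:
  try:
    import simplejson as json
  except ImportError:
    json = None  # Checked at use sites with 'if json is None: ...'.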
contents = simplejson.loads(urllib.urlopen(api_url).read())
|
contents = json.loads(urllib.urlopen(api_url).read())
|
def WebKitRevision(options, opt, value, parser):
  if not hasattr(options, 'sub_rep'):
    options.sub_rep = []
  if parser.rargs and not parser.rargs[0].startswith('-'):
    options.sub_rep.append('third_party/WebKit@%s' % parser.rargs.pop(0))
  else:
    options.sub_rep.append('third_party/WebKit')
|
return subprocess.Popen(args, **kwargs)
|
try:
  return subprocess.Popen(args, **kwargs)
except OSError, e:
  if e.errno == errno.EAGAIN and sys.platform == 'cygwin':
    raise Error(
        'Visit '
        'http://code.google.com/p/chromium/wiki/CygwinDllRemappingFailure to '
        'learn how to fix this error; you need to rebase your cygwin dlls')
  raise
|
def Popen(args, **kwargs):
  """Calls subprocess.Popen() with hacks to work around certain behaviors.

  Ensure English output for svn and make it work reliably on Windows.
  """
  logging.debug(u'%s, cwd=%s' % (u' '.join(args), kwargs.get('cwd', '')))
  if not 'env' in kwargs:
    # It's easier to parse the stdout if it is always in English.
    kwargs['env'] = os.environ.copy()
    kwargs['env']['LANGUAGE'] = 'en'
  if not 'shell' in kwargs:
    # *Sigh*: Windows needs shell=True, or else it won't search %PATH% for
    # the executable, but shell=True makes subprocess on Linux fail when it's
    # called with a list because it only tries to execute the first item in
    # the list.
    kwargs['shell'] = (sys.platform == 'win32')
  return subprocess.Popen(args, **kwargs)
|
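Note on this hunk: the added try/except turns cygwin's intermittent fork() EAGAIN failures into an actionable message instead of a bare OSError. A self-contained sketch of the same guard (safe_popen and the RuntimeError text are mine, not depot_tools names):

import errno
import subprocess
import sys

def safe_popen(args, **kwargs):
  # Re-raise anything that is not the cygwin fork() failure; for that one
  # case, point the user at the documented fix instead.
  try:
    return subprocess.Popen(args, **kwargs)
  except OSError as e:
    if e.errno == errno.EAGAIN and sys.platform == 'cygwin':
      raise RuntimeError('cygwin fork() failure; rebase your cygwin dlls')
    raise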
file_list = None
|
file_list = []
|
def TryChange(change_info, args, swallow_exception):
  """Create a diff file of change_info and send it to the try server."""
  try:
    import trychange
  except ImportError:
    if swallow_exception:
      return 1
    ErrorExit("You need to install trychange.py to use the try server.")

  trychange_args = []
  if change_info:
    trychange_args.extend(['--name', change_info.name])
    if change_info.issue:
      trychange_args.extend(["--issue", str(change_info.issue)])
    if change_info.patchset:
      trychange_args.extend(["--patchset", str(change_info.patchset)])
    trychange_args.extend(args)
    file_list = change_info.GetFileNames()
  else:
    trychange_args.extend(args)
    file_list = None
  return trychange.TryChange(
      trychange_args,
      file_list=file_list,
      swallow_exception=swallow_exception,
      prog='gcl try',
      extra_epilog='\n'
          'When called from gcl, use the format gcl try <change_name>.\n')
|
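Note on this hunk: switching the no-change-info default from None to [] matters because the callee eventually iterates over file_list; an empty list keeps "no files" well defined while None blows up. Tiny illustration (hypothetical callee, not depot_tools code):

def consume(file_list):
  # Iterating None raises TypeError; an empty list is simply a no-op.
  for name in file_list:
    print(name)

consume([])      # fine: nothing to do
# consume(None)  # TypeError: 'NoneType' object is not iterable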
content = ""
|
def GetCachedFile(filename, max_age=60*60*24*3, use_root=False):
  """Retrieves a file from the repository and caches it in GetCacheDir() for
  max_age seconds.

  use_root: If False, look up the arborescence for the first match, otherwise
  go directly to the root repository.

  Note: The cache will be inconsistent if the same file is retrieved with
  both use_root=True and use_root=False. Don't be stupid.
  """
  if filename not in FILES_CACHE:
    # Don't try to look up twice.
    FILES_CACHE[filename] = None
    # First we check if we have a cached version.
    try:
      cached_file = os.path.join(GetCacheDir(), filename)
    except gclient_utils.Error:
      return None
    if (not os.path.exists(cached_file) or
        (time.time() - os.stat(cached_file).st_mtime) > max_age):
      dir_info = SVN.CaptureInfo(".")
      repo_root = dir_info["Repository Root"]
      if use_root:
        url_path = repo_root
      else:
        url_path = dir_info["URL"]
      content = ""
      while True:
        # Look in the repository at the current level for the file.
        svn_path = url_path + "/" + filename
        content, rc = RunShellWithReturnCode(["svn", "cat", svn_path])
        if not rc:
          # Exit the loop if the file was found. Override content.
          break
        # Make sure to mark settings as empty if not found.
        content = ""
        if url_path == repo_root:
          # Reached the root. Abandoning search.
          break
        # Go up one level to try again.
        url_path = os.path.dirname(url_path)
      # Write a cached version even if there isn't a file, so we don't try to
      # fetch it each time.
      gclient_utils.FileWrite(cached_file, content)
    else:
      content = gclient_utils.FileRead(cached_file, 'r')
    # Keep the content cached in memory.
    FILES_CACHE[filename] = content
  return FILES_CACHE[filename]
|
|
svn_path = url_path + "/" + filename
content, rc = RunShellWithReturnCode(["svn", "cat", svn_path])
if not rc:
|
for _ in range(5):
  content = ""
  try:
    content_array = []
    svn_path = url_path + "/" + filename
    SVN.RunAndFilterOutput(['cat', svn_path, '--non-interactive'], '.',
                           False, False, content_array.append)
    content = '\n'.join(content_array)
    break
  except gclient_utils.Error, e:
    if content_array[0].startswith(
        'svn: Can\'t get username or password'):
      ErrorExit('Your svn credentials expired. Please run svn update '
                'to fix the cached credentials')
    if not content_array[0].startswith('svn: File not found:'):
      continue
if content:
|
def GetCachedFile(filename, max_age=60*60*24*3, use_root=False):
  """Retrieves a file from the repository and caches it in GetCacheDir() for
  max_age seconds.

  use_root: If False, look up the arborescence for the first match, otherwise
  go directly to the root repository.

  Note: The cache will be inconsistent if the same file is retrieved with
  both use_root=True and use_root=False. Don't be stupid.
  """
  if filename not in FILES_CACHE:
    # Don't try to look up twice.
    FILES_CACHE[filename] = None
    # First we check if we have a cached version.
    try:
      cached_file = os.path.join(GetCacheDir(), filename)
    except gclient_utils.Error:
      return None
    if (not os.path.exists(cached_file) or
        (time.time() - os.stat(cached_file).st_mtime) > max_age):
      dir_info = SVN.CaptureInfo(".")
      repo_root = dir_info["Repository Root"]
      if use_root:
        url_path = repo_root
      else:
        url_path = dir_info["URL"]
      content = ""
      while True:
        # Look in the repository at the current level for the file.
        svn_path = url_path + "/" + filename
        content, rc = RunShellWithReturnCode(["svn", "cat", svn_path])
        if not rc:
          # Exit the loop if the file was found. Override content.
          break
        # Make sure to mark settings as empty if not found.
        content = ""
        if url_path == repo_root:
          # Reached the root. Abandoning search.
          break
        # Go up one level to try again.
        url_path = os.path.dirname(url_path)
      # Write a cached version even if there isn't a file, so we don't try to
      # fetch it each time.
      gclient_utils.FileWrite(cached_file, content)
    else:
      content = gclient_utils.FileRead(cached_file, 'r')
    # Keep the content cached in memory.
    FILES_CACHE[filename] = content
  return FILES_CACHE[filename]
|
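Note on this hunk: the replacement wraps the single 'svn cat' in a bounded retry loop. Transient server errors are retried up to five times, expired credentials abort with a message, and a permanent 'File not found' stops retrying and leaves the content empty. The control flow, distilled (fetch is a placeholder for the svn call; my own sketch):

def fetch_with_retries(fetch, max_tries=5):
  for _ in range(max_tries):
    try:
      return fetch()
    except IOError as e:
      if 'File not found' in str(e):
        break  # Permanent: the file is simply absent; stop retrying.
  return ''    # Mirrors content = "" when every attempt failed.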
stderr=subprocess.STDOUT, shell=use_shell,
|
stderr=subprocess.STDOUT, shell=use_shell, env=env,
|
def RunShellWithReturnCode(command, print_output=False):
  """Executes a command and returns the output and the return code."""
  # Use a shell for subcommands on Windows to get a PATH search, and because
  # svn may be a batch file.
  use_shell = sys.platform.startswith("win")
  p = subprocess.Popen(command, stdout=subprocess.PIPE,
                       stderr=subprocess.STDOUT, shell=use_shell,
                       universal_newlines=True)
  if print_output:
    output_array = []
    while True:
      line = p.stdout.readline()
      if not line:
        break
      if print_output:
        print line.strip('\n')
      output_array.append(line)
    output = "".join(output_array)
  else:
    output = p.stdout.read()
  p.wait()
  p.stdout.close()
  return output, p.returncode
|
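Note on this hunk: threading an env argument into Popen lets the caller pin LANGUAGE='en' so svn output stays parseable regardless of locale. A standalone sketch of the same idea (assumes svn is on PATH; demo code, not from the dataset):

import os
import subprocess
import sys

env = os.environ.copy()
env['LANGUAGE'] = 'en'  # Force English output so parsing stays reliable.
p = subprocess.Popen(['svn', '--version'],
                     stdout=subprocess.PIPE,
                     stderr=subprocess.STDOUT,
                     shell=sys.platform.startswith('win'),
                     env=env,
                     universal_newlines=True)
print(p.communicate()[0])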
execfile("drover.properties")
|
f = open("drover.properties") exec(f) f.close()
|
def drover(options, args):
  revision = options.revert or options.merge

  # Initialize some variables used below. They can be overwritten by
  # the drover.properties file.
  BASE_URL = "svn://svn.chromium.org/chrome"
  TRUNK_URL = BASE_URL + "/trunk/src"
  BRANCH_URL = BASE_URL + "/branches/$branch/src"
  SKIP_CHECK_WORKING = True
  PROMPT_FOR_AUTHOR = False

  DEFAULT_WORKING = "drover_" + str(revision)
  if options.branch:
    DEFAULT_WORKING += ("_" + options.branch)

  if not isMinimumSVNVersion(1, 5):
    print "You need to use at least SVN version 1.5.x"
    return 1

  # Override the default properties if there is a drover.properties file.
  global file_pattern_
  if os.path.exists("drover.properties"):
    FILE_PATTERN = file_pattern_
    execfile("drover.properties")
    if FILE_PATTERN:
      file_pattern_ = FILE_PATTERN

  if options.revert and options.branch:
    url = BRANCH_URL.replace("$branch", options.branch)
  elif options.merge and options.sbranch:
    url = BRANCH_URL.replace("$branch", options.sbranch)
  else:
    url = TRUNK_URL

  working = options.workdir or DEFAULT_WORKING

  if options.local:
    working = os.getcwd()
    if not inCheckoutRoot(working):
      print "'%s' appears not to be the root of a working copy" % working
      return 1
    if (isSVNDirty() and not
        prompt("Working copy contains uncommitted files. Continue?")):
      return 1

  command = 'svn log ' + url + " -r " + str(revision) + " -v"
  os.system(command)

  if not (options.revertbot or prompt("Is this the correct revision?")):
    return 0

  if (os.path.exists(working)) and not options.local:
    if not (options.revertbot or SKIP_CHECK_WORKING or
        prompt("Working directory: '%s' already exists, clobber?" % working)):
      return 0
    gclient_utils.RemoveDirectory(working)

  if not options.local:
    os.makedirs(working)
    os.chdir(working)

  if options.merge:
    action = "Merge"
    if not options.local:
      branch_url = BRANCH_URL.replace("$branch", options.branch)
      # Checkout everything but stuff that got added into a new dir
      checkoutRevision(url, revision, branch_url)
    # Merge everything that changed
    mergeRevision(url, revision)
    # "Export" files that were added from the source and add them to branch
    exportRevision(url, revision)
    # Delete directories that were deleted (file deletes are handled in the
    # merge).
    deleteRevision(url, revision)
  elif options.revert:
    action = "Revert"
    if options.branch:
      url = BRANCH_URL.replace("$branch", options.branch)
    checkoutRevision(url, revision, url, True)
    revertRevision(url, revision)
    revertExportRevision(url, revision)

  # Check the base url so we actually find the author who made the change
  if options.auditor:
    author = options.auditor
  else:
    author = getAuthor(url, revision)
    if not author:
      author = getAuthor(TRUNK_URL, revision)

  filename = str(revision) + ".txt"
  out = open(filename, "w")
  out.write(action + " " + str(revision) + " - ")
  out.write(getRevisionLog(url, revision))
  if (author):
    out.write("\nTBR=" + author)
  out.close()

  change_cmd = 'change ' + str(revision) + " " + filename
  if options.revertbot:
    change_cmd += ' --silent'
  runGcl(change_cmd)
  os.unlink(filename)

  if options.local:
    return 0

  print author
  print revision
  print ("gcl upload " + str(revision) +
         " --send_mail --no_presubmit --reviewers=" + author)

  if options.revertbot or prompt("Would you like to upload?"):
    if PROMPT_FOR_AUTHOR:
      author = text_prompt("Enter new author or press enter to accept default",
                           author)
    if options.revertbot and options.revertbot_reviewers:
      author += ","
      author += options.revertbot_reviewers
    gclUpload(revision, author)
  else:
    print "Deleting the changelist."
    print "gcl delete " + str(revision)
    runGcl("delete " + str(revision))
    return 0

  # We commit if the revertbot is set to commit automatically, or if this is
  # not the revertbot and the user agrees.
  if options.revertbot_commit or (not options.revertbot and
                                  prompt("Would you like to commit?")):
    print "gcl commit " + str(revision) + " --no_presubmit --force"
    return runGcl("commit " + str(revision) + " --no_presubmit --force")
  else:
    return 0
|
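Note on this hunk: execfile() compiles and runs a file's source directly in the caller's namespace; the replacement passes the open file object to exec, which Python 2 accepts. A portable spelling that behaves the same way in Python 2 and 3, using an explicit namespace instead of the caller's (my own variant; assumes a drover.properties file exists):

settings = {}
f = open('drover.properties')
exec(f.read(), settings)  # exec(f) only works on file objects in Python 2.
f.close()
print(settings.get('FILE_PATTERN'))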
allows the capture of a overall "revision" for the source tree that can
|
allows the capture of an overall "revision" for the source tree that can
|
def PrintRevInfo(self):
  """Output revision info mapping for the client and its dependencies. This
  allows the capture of a overall "revision" for the source tree that can
  be used to reproduce the same tree in the future. The actual output
  contains enough information (source paths, svn server urls and revisions)
  that it can be used either to generate external svn commands (without
  gclient) or as input to gclient's --rev option (with some massaging of
  the data).
|
NOTE: Unlike RunOnDeps this does not require a local checkout and is run on the Pulse master. It MUST NOT execute hooks.
|
def PrintRevInfo(self):
  """Output revision info mapping for the client and its dependencies. This
  allows the capture of a overall "revision" for the source tree that can
  be used to reproduce the same tree in the future. The actual output
  contains enough information (source paths, svn server urls and revisions)
  that it can be used either to generate external svn commands (without
  gclient) or as input to gclient's --rev option (with some massaging of
  the data).
|
|
entries_deps_content[name] = gclient_scm.scm.SVN.Capture(
    ["cat", "%s/%s@%s" % (url, self._options.deps_file, rev)],
    os.getcwd())
|
deps_file = solution.get("deps_file", self._options.deps_file)
if '/' in deps_file or '\\' in deps_file:
  raise gclient_utils.Error('deps_file name must not be a path, just a '
                            'filename.')
try:
  deps_content = gclient_utils.FileRead(
      os.path.join(self._root_dir, name, deps_file))
except IOError, e:
  if e.errno != errno.ENOENT:
    raise
  deps_content = ""
entries_deps_content[name] = deps_content
|
def GetURLAndRev(name, original_url):
  url, revision = gclient_utils.SplitUrlRevision(original_url)
  if not revision:
    if revision_overrides.has_key(name):
      return (url, revision_overrides[name])
    else:
      scm = gclient_scm.CreateSCM(solution["url"], self._root_dir, name)
      return (url, scm.revinfo(self._options, [], None))
  else:
    if revision_overrides.has_key(name):
      return (url, revision_overrides[name])
    else:
      return (url, revision)
|
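Note on this hunk: GetURLAndRev leans on gclient_utils.SplitUrlRevision to peel an optional '@rev' suffix off a URL. A rough re-implementation for illustration only (the real helper lives in gclient_utils and may differ):

def split_url_revision(url):
  # Only treat '@' in the last path component as a revision separator.
  if '@' in url.rsplit('/', 1)[-1]:
    return tuple(url.rsplit('@', 1))
  return (url, None)

print(split_url_revision('svn://server/trunk/src@1234'))
# ('svn://server/trunk/src', '1234')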
print(";\n\n".join(["%s: %s" % (x, entries[x])
|
print(";\n".join(["%s: %s" % (x, entries[x])
|
def GetURLAndRev(name, original_url):
  url, revision = gclient_utils.SplitUrlRevision(original_url)
  if not revision:
    if revision_overrides.has_key(name):
      return (url, revision_overrides[name])
    else:
      scm = gclient_scm.CreateSCM(solution["url"], self._root_dir, name)
      return (url, scm.revinfo(self._options, [], None))
  else:
    if revision_overrides.has_key(name):
      return (url, revision_overrides[name])
    else:
      return (url, revision)
|
'breakpad', 'datetime', 'gclient_utils', 'getpass', 'logging',
|
'breakpad', 'datetime', 'errno', 'gclient_utils', 'getpass', 'logging',
|
def testMembersChanged(self):
  members = [
      'EscapeDot', 'GIT', 'GuessVCS', 'HELP_STRING', 'InvalidScript',
      'NoTryServerAccess', 'PrintSuccess', 'SCM', 'SVN', 'TryChange',
      'USAGE', 'breakpad', 'datetime', 'gclient_utils', 'getpass',
      'logging', 'optparse', 'os', 'posixpath', 'scm', 'shutil', 'sys',
      'tempfile', 'urllib',
  ]
  # If this test fails, you should add the relevant test.
  self.compareMembers(trychange, members)
|
'GenerateDiff', 'GetFileNames', 'GetLocalRoot',
|
'GenerateDiff', 'GetFileNames',
|
def testMembersChanged(self):
  members = [
      'AutomagicalSettings', 'GclStyleSettings', 'GclientStyleSettings',
      'GetCodeReviewSetting', 'ReadRootFile',
      'GenerateDiff', 'GetFileNames', 'GetLocalRoot',
  ]
  # If this test fails, you should add the relevant test.
  self.compareMembers(trychange.SVN, members)
|
self.assertEqual(svn.GetLocalRoot(), self.fake_root)
|
self.assertEqual(svn.checkout_root, self.fake_root)
|
def testBasic(self):
  trychange.scm.SVN.GetCheckoutRoot(self.fake_root).AndReturn(self.fake_root)
  trychange.scm.SVN.GenerateDiff(['foo.txt', 'bar.txt'],
                                 self.fake_root,
                                 full_move=True,
                                 revision=None).AndReturn('A diff')
  trychange.scm.SVN.GetEmail(self.fake_root).AndReturn('[email protected]')
  self.mox.ReplayAll()
  svn = trychange.SVN(self.options, self.fake_root)
  self.assertEqual(svn.GetFileNames(), self.expected_files)
  self.assertEqual(svn.GetLocalRoot(), self.fake_root)
  self.assertEqual(svn.GenerateDiff(), 'A diff')
|
'GenerateDiff', 'GetFileNames', 'GetLocalRoot',
|
'GenerateDiff', 'GetFileNames',
|
def testMembersChanged(self):
  members = [
      'AutomagicalSettings', 'GclStyleSettings', 'GclientStyleSettings',
      'GetCodeReviewSetting', 'ReadRootFile',
      'GenerateDiff', 'GetFileNames', 'GetLocalRoot',
  ]
  # If this test fails, you should add the relevant test.
  self.compareMembers(trychange.GIT, members)
|
self.assertEqual(git.GetLocalRoot(), self.fake_root)
|
self.assertEqual(git.checkout_root, self.fake_root)
|
def testBasic(self):
  trychange.scm.GIT.GetCheckoutRoot(self.fake_root).AndReturn(self.fake_root)
  trychange.scm.GIT.GenerateDiff(self.fake_root,
                                 full_move=True,
                                 branch=None).AndReturn('A diff')
  trychange.scm.GIT.GetPatchName(self.fake_root).AndReturn('bleh-1233')
  trychange.scm.GIT.GetEmail(self.fake_root).AndReturn('[email protected]')
  self.mox.ReplayAll()
  git = trychange.GIT(self.options, self.fake_root)
  self.assertEqual(git.GetFileNames(), self.expected_files)
  self.assertEqual(git.GetLocalRoot(), self.fake_root)
  self.assertEqual(git.GenerateDiff(), 'A diff')
|
if sys.stdout.isatty():
|
if 'CHROME_HEADLESS' not in os.environ:
|
def Main(argv):
  """Doesn't parse the arguments here, just find the right subcommand to
  execute."""
  try:
    # Do it late so all commands are listed.
    CMDhelp.usage = ('\n\nCommands are:\n' + '\n'.join([
        '  %-10s %s' % (fn[3:], Command(fn[3:]).__doc__.split('\n')[0].strip())
        for fn in dir(sys.modules[__name__]) if fn.startswith('CMD')]))
    parser = optparse.OptionParser(version='%prog ' + __version__)
    # TODO(maruel): Temporary workaround to disable parallel checkout on
    # buildbots until they can correctly parse its output. Uses the fact that
    # stdout is redirected as a signal.
    if sys.stdout.isatty():
      jobs = 8
    else:
      jobs = 1
    parser.add_option('-j', '--jobs', default=jobs, type='int',
                      help='Specify how many SCM commands can run in '
                           'parallel; default=%default')
    parser.add_option('-v', '--verbose', action='count', default=0,
                      help='Produces additional output for diagnostics. Can '
                           'be used up to three times for more logging info.')
    parser.add_option('--gclientfile', dest='config_filename',
                      default=os.environ.get('GCLIENT_FILE', '.gclient'),
                      help='Specify an alternate %default file')
    # Integrate standard options processing.
    old_parser = parser.parse_args
    def Parse(args):
      (options, args) = old_parser(args)
      level = None
      if options.verbose == 2:
        level = logging.INFO
      elif options.verbose > 2:
        level = logging.DEBUG
      logging.basicConfig(level=level,
          format='%(module)s(%(lineno)d) %(funcName)s:%(message)s')
      options.entries_filename = options.config_filename + '_entries'
      if options.jobs < 1:
        parser.error('--jobs must be 1 or higher')
      # Always autoflush so buildbot doesn't kill us during lengthy
      # operations.
      options.stdout = gclient_utils.StdoutAutoFlush(sys.stdout)
      # These hacks need to die.
      if not hasattr(options, 'revisions'):
        # GClient.RunOnDeps expects it even if not applicable.
        options.revisions = []
      if not hasattr(options, 'head'):
        options.head = None
      if not hasattr(options, 'nohooks'):
        options.nohooks = True
      if not hasattr(options, 'deps_os'):
        options.deps_os = None
      if not hasattr(options, 'manually_grab_svn_rev'):
        options.manually_grab_svn_rev = None
      if not hasattr(options, 'force'):
        options.force = None
      return (options, args)
    parser.parse_args = Parse
    # We don't want wordwrapping in epilog (usually examples)
    parser.format_epilog = lambda _: parser.epilog or ''
    if argv:
      command = Command(argv[0])
      if command:
        # 'fix' the usage and the description now that we know the subcommand.
        GenUsage(parser, argv[0])
        return command(parser, argv[1:])
    # Not a known command. Default to help.
    GenUsage(parser, 'help')
    return CMDhelp(parser, argv)
  except gclient_utils.Error, e:
    print >> sys.stderr, 'Error: %s' % str(e)
    return 1
|
if last_tb:
|
if last_tb and sys.last_type is not KeyboardInterrupt:
|
def CheckForException():
  last_tb = getattr(sys, 'last_traceback', None)
  if last_tb:
    SendStack(''.join(traceback.format_tb(last_tb)))
|
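Note on this hunk: the added guard keeps a Ctrl-C from being reported as a crash. sys.last_type and sys.last_traceback are populated when the interpreter prints an unhandled exception, and KeyboardInterrupt is the one kind not worth reporting. Condensed form of the check (send_stack is a stand-in for the real reporter):

import sys
import traceback

def check_for_exception(send_stack):
  last_tb = getattr(sys, 'last_traceback', None)
  if last_tb and sys.last_type is not KeyboardInterrupt:
    send_stack(''.join(traceback.format_tb(last_tb)))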
logging.warning(str(e))
|
logging.info(str(e))
|
def _SendChangeHTTP(options):
  """Send a change to the try server using the HTTP protocol."""
  if not options.host:
    raise NoTryServerAccess('Please use the --host option to specify the try '
                            'server host to connect to.')
  if not options.port:
    raise NoTryServerAccess('Please use the --port option to specify the try '
                            'server port to connect to.')

  values = _ParseSendChangeOptions(options)
  description = ''.join("%s=%s\n" % (k, v) for (k, v) in values.iteritems())
  values['patch'] = options.diff

  url = 'http://%s:%s/send_try_patch' % (options.host, options.port)
  proxies = None
  if options.proxy:
    if options.proxy.lower() == 'none':
      # Effectively disable HTTP_PROXY or Internet settings proxy setup.
      proxies = {}
    else:
      proxies = {'http': options.proxy, 'https': options.proxy}

  logging.info('Sending by HTTP')
  logging.info(description)
  logging.info(url)
  if options.dry_run:
    print options.diff
    return
  logging.info(options.diff)

  try:
    connection = urllib.urlopen(url, urllib.urlencode(values), proxies=proxies)
  except IOError, e:
    logging.warning(str(e))
    if (values.get('bot') and len(e.args) > 2 and
        e.args[2] == 'got a bad status line'):
      raise NoTryServerAccess('%s is unaccessible. Bad --bot argument?' % url)
    else:
      raise NoTryServerAccess('%s is unaccessible. Reason: %s' %
                              (url, str(e.args)))
  if not connection:
    raise NoTryServerAccess('%s is unaccessible.' % url)
  response = connection.read()
  if response != 'OK':
    raise NoTryServerAccess('%s is unaccessible. Got:\n%s' % (url, response))
|
logging.warn('Unexpected error code: %s' % e.returncode)
|
logging.warning('Unexpected error code: %s' % e.returncode)
|
def GuessVCS(options, path):
  """Helper to guess the version control system.

  NOTE: Very similar to upload.GuessVCS. Doesn't look for hg since we don't
  support it yet.

  This examines the path directory, guesses which SCM we're using, and
  returns an instance of the appropriate class.  Exit with an error if we
  can't figure it out.

  Returns:
    A SCM instance. Exits if the SCM can't be guessed.
  """
  __pychecker__ = 'no-returnvalues'
  real_path = path.split('@')[0]
  logging.info("GuessVCS(%s)" % path)
  # Subversion has a .svn in all working directories.
  if os.path.isdir(os.path.join(real_path, '.svn')):
    return SVN(options, path)

  # Git has a command to test if you're in a git tree.
  # Try running it, but don't die if we don't have git installed.
  try:
    gclient_utils.CheckCall(['git', 'rev-parse', '--is-inside-work-tree'],
                            cwd=real_path)
    return GIT(options, path)
  except gclient_utils.CheckCallError, e:
    if e.returncode != errno.ENOENT and e.returncode != 128:
      # ENOENT == 2 = they don't have git installed.
      # 128 = git error code when not in a repo.
      logging.warn('Unexpected error code: %s' % e.returncode)
      raise
  raise NoTryServerAccess("Could not guess version control system. "
                          "Are you in a working copy directory?")
|
for x in failure:
  if ('502 Bad Gateway' in x or
      'svn: REPORT of \'/svn/!svn/vcc/default\': 200 OK' in x):
    if os.path.isdir(args[2]):
      gclient_utils.RemoveDirectory(args[2])
    break
else:
|
if not IsKnownFailure():
|
def CaptureMatchingLines(line):
  match = compiled_pattern.search(line)
  if match:
    file_list.append(match.group(1))
  if line.startswith('svn: '):
    failure.append(line)
|
if len(file_list) == previous_list_len:
  for x in failure:
    if ('502 Bad Gateway' in x or
        'svn: REPORT of \'/svn/!svn/vcc/default\': 200 OK' in x):
      break
  else:
    raise
else:
  pass
|
if len(file_list) == previous_list_len and not IsKnownFailure():
  raise
|
def CaptureMatchingLines(line):
  match = compiled_pattern.search(line)
  if match:
    file_list.append(match.group(1))
  if line.startswith('svn: '):
    failure.append(line)
|
if len(argv) > 2:
|
if argv and len(argv) > 2:
|
def CMDhelp(argv=None):
  """Prints this help or help for the given command."""
  if len(argv) > 2:
    if argv[2] == 'try':
      TryChange(None, ['--help'], swallow_exception=False)
      return 0
    if argv[2] == 'upload':
      upload.RealMain(['upload.py', '--help'])
      return 0
  print (
|
log = Backquote(['git', 'show', '--name-only',
                 '--pretty=format:%H%n%s%n%n%b'])
m = re.match(r'^(\w+)\n(.*)$', log, re.MULTILINE|re.DOTALL)
if not m:
  raise Exception("Could not parse log message: %s" % log)
name = m.group(1)
|
name = Backquote(['git', 'rev-parse', 'HEAD'])
|
def __init__(self, commit=None, upstream_branch=None):
  self.commit = commit
  self.verbose = None
  self.default_presubmit = None
  self.may_prompt = None
|
description = m.group(2)
|
description = Backquote(['git', 'log', '--pretty=format:%s%n%n%b',
                         '%s...' % (upstream_branch)])
|
def __init__(self, commit=None, upstream_branch=None):
  self.commit = commit
  self.verbose = None
  self.default_presubmit = None
  self.may_prompt = None
|
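Note on these two hunks: they stop regex-parsing 'git show' output. The commit hash comes straight from 'git rev-parse HEAD' and the description from 'git log' over the upstream range. Equivalent standalone commands (run inside a git checkout; 'origin/master' is an assumed upstream, not taken from the dataset):

import subprocess

def backquote(cmd):
  return subprocess.check_output(cmd).strip()

name = backquote(['git', 'rev-parse', 'HEAD'])
description = backquote(['git', 'log', '--pretty=format:%s%n%n%b',
                         'origin/master...'])
print(name)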
for s in self.dependencies:
  if not s.safesync_url:
    continue
  handle = urllib.urlopen(s.safesync_url)
  rev = handle.read().strip()
  handle.close()
  if len(rev):
    self._options.revisions.append('%s@%s' % (s.name, rev))
|
if not self._options.revisions:
  for s in self.dependencies:
    if not s.safesync_url:
      continue
    handle = urllib.urlopen(s.safesync_url)
    rev = handle.read().strip()
    handle.close()
    if len(rev):
      self._options.revisions.append('%s@%s' % (s.name, rev))
|
def _EnforceRevisions(self):
  """Checks for revision overrides."""
  revision_overrides = {}
  if self._options.head:
    return revision_overrides
  for s in self.dependencies:
    if not s.safesync_url:
      continue
    handle = urllib.urlopen(s.safesync_url)
    rev = handle.read().strip()
    handle.close()
    if len(rev):
      self._options.revisions.append('%s@%s' % (s.name, rev))
  if not self._options.revisions:
    return revision_overrides
  # --revision will take over safesync_url.
  solutions_names = [s.name for s in self.dependencies]
  index = 0
  for revision in self._options.revisions:
    if not '@' in revision:
      # Support for --revision 123
      revision = '%s@%s' % (solutions_names[index], revision)
    sol, rev = revision.split('@', 1)
    if not sol in solutions_names:
      #raise gclient_utils.Error('%s is not a valid solution.' % sol)
      print >> sys.stderr, ('Please fix your script, having invalid '
                            '--revision flags will soon considered an error.')
    else:
      revision_overrides[sol] = rev
    index += 1
  return revision_overrides
|
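Note on this hunk: moving the safesync_url fetch behind 'if not self._options.revisions' makes an explicit --revision override the safesync URL rather than append to it. The override bookkeeping itself works like this (made-up inputs, demo code):

revisions = ['src@123', '456']
solutions_names = ['src', 'third_party/foo']
revision_overrides = {}
for index, revision in enumerate(revisions):
  if '@' not in revision:
    # Bare '456' is matched positionally to the solution list.
    revision = '%s@%s' % (solutions_names[index], revision)
  sol, rev = revision.split('@', 1)
  revision_overrides[sol] = rev
print(revision_overrides)  # {'src': '123', 'third_party/foo': '456'}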
'if the src@ part is skipped.')
|
'if the src@ part is skipped. Note that specifying '
'--revision means your safesync_url gets ignored.'
|
def CMDsync(parser, args):
  """Checkout/update all modules."""
  parser.add_option('-f', '--force', action='store_true',
                    help='force update even for unchanged modules')
  parser.add_option('-n', '--nohooks', action='store_true',
                    help='don\'t run hooks after the update is complete')
  parser.add_option('-r', '--revision', action='append',
                    dest='revisions', metavar='REV', default=[],
                    help='Enforces revision/hash for the solutions with the '
                         'format src@rev. The src@ part is optional and can '
                         'be skipped. -r can be used multiple times when '
                         '.gclient has multiple solutions configured and '
                         'will work even '
                         'if the src@ part is skipped.')
  parser.add_option('-H', '--head', action='store_true',
                    help='skips any safesync_urls specified in '
                         'configured solutions and sync to head instead')
  parser.add_option('-D', '--delete_unversioned_trees', action='store_true',
                    help='delete any unexpected unversioned trees '
                         'that are in the checkout')
  parser.add_option('-R', '--reset', action='store_true',
                    help='resets any local changes before updating (git only)')
  parser.add_option('--deps', dest='deps_os', metavar='OS_LIST',
                    help='override deps for the specified (comma-separated) '
                         'platform(s); \'all\' will process all deps_os '
                         'references')
  parser.add_option('-m', '--manually_grab_svn_rev', action='store_true',
                    help='Skip svn up whenever possible by requesting '
                         'actual HEAD revision from the repository')
  (options, args) = parser.parse_args(args)
  client = GClient.LoadCurrentConfig(options)

  if not client:
    raise gclient_utils.Error('client not configured; see \'gclient config\'')

  if options.revisions and options.head:
    # TODO(maruel): Make it a parser.error if it doesn't break any builder.
    print('Warning: you cannot use both --head and --revision')

  if options.verbose:
    # Print out the .gclient file.  This is longer than if we just printed
    # the client dict, but more legible, and it might contain helpful
    # comments.
    print(client.config_content)
  return client.RunOnDeps('update', args)
|
('running', self.root_dir + '/src/file/other'),
|
('running', os.path.join(self.root_dir, 'src', 'file', 'other')),
|
def testSync(self):
  # TODO(maruel): safesync.
  if not self.enabled:
    return
  self.gclient(['config', self.svn_base + 'trunk/src/'])
  # Test unversioned checkout.
  self.parseGclient(['sync', '--deps', 'mac', '--jobs', '1'],
                    ['running', 'running',
                     # This is due to the way svn update is called for a
                     # single file when File() is used in a DEPS file.
                     ('running', self.root_dir + '/src/file/other'),
                     'running', 'running', 'running', 'running'])
  tree = self.mangle_svn_tree(
      ('trunk/src@2', 'src'),
      ('trunk/third_party/foo@1', 'src/third_party/foo'),
      ('trunk/other@2', 'src/other'))
  tree['src/file/other/DEPS'] = (
      self.FAKE_REPOS.svn_revs[2]['trunk/other/DEPS'])
  tree['src/svn_hooked1'] = 'svn_hooked1'
  self.assertTree(tree)
|
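Note on these expected-output hunks: they swap a hard-coded '/' join for os.path.join because checkout paths on Windows use backslashes. Quick demonstration of why the literal comparison breaks (paths are illustrative):

import os

root_dir = r'C:\b\build' if os.name == 'nt' else '/b/build'
print(root_dir + '/src/file/other')                    # mixed separators on Windows
print(os.path.join(root_dir, 'src', 'file', 'other'))  # native separators everywhere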
('running', self.root_dir + '/src/file/other'),
|
('running', os.path.join(self.root_dir, 'src', 'file', 'other')),
|
def testSyncIgnoredSolutionName(self):
  """TODO(maruel): This will become an error soon."""
  if not self.enabled:
    return
  self.gclient(['config', self.svn_base + 'trunk/src/'])
  results = self.gclient(
      ['sync', '--deps', 'mac', '-r', 'invalid@1', '--jobs', '1'])
  self.checkBlock(results[0], [
      'running', 'running',
      # This is due to the way svn update is called for a single file when
      # File() is used in a DEPS file.
      ('running', self.root_dir + '/src/file/other'),
      'running', 'running', 'running', 'running'])
  self.checkString('Please fix your script, having invalid --revision flags '
                   'will soon considered an error.\n', results[1])
  self.assertEquals(0, results[2])
  tree = self.mangle_svn_tree(
      ('trunk/src@2', 'src'),
      ('trunk/third_party/foo@1', 'src/third_party/foo'),
      ('trunk/other@2', 'src/other'))
  tree['src/file/other/DEPS'] = (
      self.FAKE_REPOS.svn_revs[2]['trunk/other/DEPS'])
  tree['src/svn_hooked1'] = 'svn_hooked1'
  self.assertTree(tree)
|