rem (stringlengths 0–322k) | add (stringlengths 0–2.05M) | context (stringlengths 8–228k) |
---|---|---|
self.SendToRietveld('/%d/close' % self.issue, body, ctype)
|
self.SendToRietveld('/%d/close' % self.issue, payload=body, content_type=ctype)
|
def CloseIssue(self): """Closes the Rietveld issue for this changelist.""" data = [("description", self.description),] ctype, body = upload.EncodeMultipartFormData(data, []) self.SendToRietveld('/%d/close' % self.issue, body, ctype)
|
self.SendToRietveld('/%d/description' % self.issue, body, ctype)
|
self.SendToRietveld('/%d/description' % self.issue, payload=body, content_type=ctype)
|
def UpdateRietveldDescription(self): """Sets the description for an issue on Rietveld.""" data = [("description", self.description),] ctype, body = upload.EncodeMultipartFormData(data, []) self.SendToRietveld('/%d/description' % self.issue, body, ctype)
|
def SendToRietveld(self, request_path, payload=None, content_type="application/octet-stream", timeout=None):
|
def SendToRietveld(self, request_path, timeout=None, **kwargs):
|
def SendToRietveld(self, request_path, payload=None, content_type="application/octet-stream", timeout=None): """Send a POST/GET to Rietveld. Returns the response body.""" if not self.rietveld: ErrorExit(CODEREVIEW_SETTINGS_FILE_NOT_FOUND) def GetUserCredentials(): """Prompts the user for a username and password.""" email = upload.GetEmail('Email (login for uploading to %s)' % self.rietveld) password = getpass.getpass('Password for %s: ' % email) return email, password rpc_server = upload.HttpRpcServer(self.rietveld, GetUserCredentials, save_cookies=True) try: return rpc_server.Send(request_path, payload, content_type, timeout) except urllib2.URLError: if timeout is None: ErrorExit('Error accessing url %s' % request_path) else: return None
|
return rpc_server.Send(request_path, payload, content_type, timeout)
|
return rpc_server.Send(request_path, timeout=timeout, **kwargs)
|
def GetUserCredentials(): """Prompts the user for a username and password.""" email = upload.GetEmail('Email (login for uploading to %s)' % self.rietveld) password = getpass.getpass('Password for %s: ' % email) return email, password
|
match = re.search(r'revision (\d+).', out)
|
last_line = out.splitlines()[-1] match = re.search(r'(\d+)', out)
|
def commit_svn(repo): """Commits the changes and returns the new revision number.""" to_add = [] to_remove = [] for status, filepath in scm.SVN.CaptureStatus(repo): if status[0] == '?': to_add.append(filepath) elif status[0] == '!': to_remove.append(filepath) if to_add: check_call(['svn', 'add', '--no-auto-props', '-q'] + to_add, cwd=repo) if to_remove: check_call(['svn', 'remove', '-q'] + to_remove, cwd=repo) proc = Popen(['svn', 'commit', repo, '-m', 'foo', '--non-interactive', '--no-auth-cache', '--username', 'user1', '--password', 'foo'], cwd=repo) out, err = proc.communicate() match = re.search(r'revision (\d+).', out) if not match: raise Exception('Commit failed', out, err, proc.returncode) rev = match.group(1) st = Popen(['svn', 'status'], cwd=repo).communicate()[0] assert len(st) == 0, st logging.debug('At revision %s' % rev) return rev
|
description = m.group(2)
|
def __init__(self, commit=None, upstream_branch=None): self.commit = commit self.verbose = None self.default_presubmit = None self.may_prompt = None
|
|
elif options.issue:
|
elif options.issue and options.patchset is None:
|
def TryChange(argv, file_list, swallow_exception, prog=None): """ Args: argv: Arguments and options. file_list: Default value to pass to --file. swallow_exception: Whether we raise or swallow exceptions. """ # Parse argv parser = optparse.OptionParser(usage=USAGE, version=__version__, prog=prog) parser.add_option("-v", "--verbose", action="count", default=0, help="Prints debugging infos") group = optparse.OptionGroup(parser, "Result and status") group.add_option("-u", "--user", default=getpass.getuser(), help="Owner user name [default: %default]") group.add_option("-e", "--email", default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS', os.environ.get('EMAIL_ADDRESS')), help="Email address where to send the results. Use either " "the TRYBOT_RESULTS_EMAIL_ADDRESS environment " "variable or EMAIL_ADDRESS to set the email address " "the try bots report results to [default: %default]") group.add_option("-n", "--name", help="Descriptive name of the try job") group.add_option("--issue", type='int', help="Update rietveld issue try job status") group.add_option("--patchset", type='int', help="Update rietveld issue try job status") group.add_option("--dry_run", action='store_true', help="Just prints the diff and quits") group.add_option("--rietveld_url", help="The code review url.") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Try job options") group.add_option("-b", "--bot", action="append", help="Only use specifics build slaves, ex: '--bot win' to " "run the try job only on the 'win' slave; see the try " "server waterfall for the slave's name") group.add_option("-r", "--revision", help="Revision to use for the try job; default: the " "revision will be determined by the try server; see " "its waterfall for more info") group.add_option("-c", "--clobber", action="store_true", help="Force a clobber before building; e.g. don't do an " "incremental build") # TODO(maruel): help="Select a specific configuration, usually 'debug' or " # "'release'" group.add_option("--target", help=optparse.SUPPRESS_HELP) group.add_option("--project", help="Override which project to use. Projects are defined " "server-side to define what default bot set to use") # Override the list of tests to run, use multiple times to list many tests # (or comma separated) group.add_option("-t", "--tests", action="append", help=optparse.SUPPRESS_HELP) parser.add_option_group(group) group = optparse.OptionGroup(parser, "Patch to run") group.add_option("-f", "--file", default=file_list, dest="files", metavar="FILE", action="append", help="Use many times to list the files to include in the " "try, relative to the repository root") group.add_option("--diff", help="File containing the diff to try") group.add_option("--url", help="Url where to grab a patch") group.add_option("--root", help="Root to use for the patch; base subdirectory for " "patch created in a subdirectory") group.add_option("-p", "--patchlevel", type='int', metavar="LEVEL", help="Used as -pN parameter to patch") group.add_option("-s", "--sub_rep", action="append", default=[], help="Subcheckout to use in addition. This is mainly " "useful for gclient-style checkouts. Use @rev or " "@branch or @branch1..branch2 to specify the " "revision/branch to diff against.") # Mostly chromium-specific try: group.add_option("--webkit", action="append_const", const="third_party/WebKit", dest="sub_rep", help="Shorthand for -s third_party/WebKit") except optparse.OptionError: # append_const is not supported on 2.4. Too bad. 
pass group.add_option("--no_gclient", action="store_true", help="Disable automatic search for gclient checkout.") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server by HTTP") group.add_option("--use_http", action="store_const", const=_SendChangeHTTP, dest="send_patch", help="Use HTTP to talk to the try server [default]") group.add_option("-H", "--host", help="Host address") group.add_option("-P", "--port", help="HTTP port") group.add_option("--proxy", help="HTTP proxy") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server with SVN") group.add_option("--use_svn", action="store_const", const=_SendChangeSVN, dest="send_patch", help="Use SVN to talk to the try server") group.add_option("-S", "--svn_repo", metavar="SVN_URL", help="SVN url to use to write the changes in; --use_svn is " "implied when using --svn_repo") parser.add_option_group(group) options, args = parser.parse_args(argv) if len(args) == 1 and args[0] == 'help': parser.print_help() if not swallow_exception: if options.verbose == 0: logging.basicConfig(level=logging.ERROR) elif options.verbose == 1: logging.basicConfig(level=logging.WARNING) elif options.verbose == 2: logging.basicConfig(level=logging.INFO) elif options.verbose > 2: logging.basicConfig(level=logging.DEBUG) logging.debug(argv) try: # Always include os.getcwd() in the checkout settings. checkouts = [] checkouts.append(GuessVCS(options, os.getcwd())) checkouts[0].AutomagicalSettings() for item in options.sub_rep: checkout = GuessVCS(options, os.path.join(checkouts[0].checkout_root, item)) if checkout.checkout_root in [c.checkout_root for c in checkouts]: parser.error('Specified the root %s two times.' % checkout.checkout_root) checkouts.append(checkout) can_http = options.port and options.host can_svn = options.svn_repo # If there was no transport selected yet, now we must have enough data to # select one. if not options.send_patch and not (can_http or can_svn): parser.error('Please specify an access method.') # Convert options.diff into the content of the diff. if options.url: if options.files: parser.error('You cannot specify files and --url at the same time.') options.diff = urllib.urlopen(options.url).read() elif options.diff: if options.files: parser.error('You cannot specify files and --diff at the same time.') options.diff = gclient_utils.FileRead(options.diff, 'rb') elif options.issue: # Retrieve the patch from rietveld when the diff is not specified. try: import simplejson except ImportError: parser.error('simplejson library is missing, please install.') api_url = 'http://%s/api/%d' % (options.rietveld_url, options.issue) contents = simplejson.loads(urllib.urlopen(api_url).read()) diff_url = ('http://%s/download/issue%d_%d.diff' % (options.rietveld_url, options.issue, contents['patchsets'][-1])) diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines()) options.diff = ''.join(diff) else: # Use this as the base. root = checkouts[0].checkout_root diffs = [] for checkout in checkouts: diff = checkout.GenerateDiff().splitlines(True) path_diff = gclient_utils.PathDifference(root, checkout.checkout_root) # Munge it. diffs.extend(GetMungedDiff(path_diff, diff)) options.diff = ''.join(diffs) if not options.bot: # Get try slaves from PRESUBMIT.py files if not specified. # Even if the diff comes from options.url, use the local checkout for bot # selection. 
try: import presubmit_support root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py') options.bot = presubmit_support.DoGetTrySlaves( checkouts[0].GetFileNames(), checkouts[0].checkout_root, root_presubmit, False, sys.stdout) except ImportError: pass # If no bot is specified, either the default pool will be selected or the # try server will refuse the job. Either case we don't need to interfere. if options.name is None: if options.issue: options.name = 'Issue %s' % options.issue else: options.name = 'Unnamed' print('Note: use --name NAME to change the try job name.') if not options.email: parser.error('Using an anonymous checkout. Please use --email or set ' 'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.') else: print('Results will be emailed to: ' + options.email) # Send the patch. if options.send_patch: # If forced. options.send_patch(options) PrintSuccess(options) return 0 try: if can_http: _SendChangeHTTP(options) PrintSuccess(options) return 0 except NoTryServerAccess: if not can_svn: raise _SendChangeSVN(options) PrintSuccess(options) return 0 except (InvalidScript, NoTryServerAccess), e: if swallow_exception: return 1 print e return 1 return 0
|
if len(args) == 1 and args[0] == 'help':
|
if len(args) == 2 and args[1] == 'help':
|
def WebKitRevision(options, opt, value, parser): if not hasattr(options, 'sub_rep'): options.sub_rep = [] if parser.rargs and not parser.rargs[0].startswith('-'): options.sub_rep.append('third_party/WebKit@%s' % parser.rargs.pop(0)) else: options.sub_rep.append('third_party/WebKit')
|
if not len(ref) == 1: raise Exception('Failed to find one reference to %s. %s' % ( url.module_name, ref))
|
if not ref: raise gclient_utils.Error('Failed to find one reference to %s. %s' % ( url.module_name, ref))
|
def LateOverride(self, url): """Resolves the parsed url from url.
|
raise Exception('Couldn\'t find %s in %s, referenced by %s' % (
|
raise gclient_utils.Error( 'Couldn\'t find %s in %s, referenced by %s' % (
|
def LateOverride(self, url): """Resolves the parsed url from url.
|
'deps_hooks', '_file_list'):
|
'deps_hooks', '_file_list', 'processed', 'hooks_ran'):
|
def __str__(self): out = [] for i in ('name', 'url', 'safesync_url', 'custom_deps', 'custom_vars', 'deps_hooks', '_file_list'): # 'deps_file' if self.__dict__[i]: out.append('%s: %s' % (i, self.__dict__[i]))
|
['config', 'branch.%s.merge' % branch], error_ok=True)[0].strip()
|
['config', 'branch.%s.merge' % branch], in_directory=cwd, error_ok=True)[0].strip()
|
def FetchUpstreamTuple(cwd): """Returns a tuple containg remote and remote ref, e.g. 'origin', 'refs/heads/master' """ remote = '.' branch = GIT.GetBranch(cwd) upstream_branch = None upstream_branch = GIT.Capture( ['config', 'branch.%s.merge' % branch], error_ok=True)[0].strip() if upstream_branch: remote = GIT.Capture( ['config', 'branch.%s.remote' % branch], error_ok=True)[0].strip() else: # Fall back on trying a git-svn upstream branch. if GIT.IsGitSvn(cwd): upstream_branch = GIT.GetSVNBranch(cwd) # Fall back on origin/master if it exits. if not upstream_branch: GIT.Capture(['branch', '-r'])[0].split().count('origin/master') remote = 'origin' upstream_branch = 'refs/heads/master' return remote, upstream_branch
|
error_ok=True)[0].strip()
|
in_directory=cwd, error_ok=True)[0].strip()
|
def FetchUpstreamTuple(cwd): """Returns a tuple containg remote and remote ref, e.g. 'origin', 'refs/heads/master' """ remote = '.' branch = GIT.GetBranch(cwd) upstream_branch = None upstream_branch = GIT.Capture( ['config', 'branch.%s.merge' % branch], error_ok=True)[0].strip() if upstream_branch: remote = GIT.Capture( ['config', 'branch.%s.remote' % branch], error_ok=True)[0].strip() else: # Fall back on trying a git-svn upstream branch. if GIT.IsGitSvn(cwd): upstream_branch = GIT.GetSVNBranch(cwd) # Fall back on origin/master if it exits. if not upstream_branch: GIT.Capture(['branch', '-r'])[0].split().count('origin/master') remote = 'origin' upstream_branch = 'refs/heads/master' return remote, upstream_branch
|
if not upstream_branch: GIT.Capture(['branch', '-r'])[0].split().count('origin/master') remote = 'origin' upstream_branch = 'refs/heads/master'
|
def FetchUpstreamTuple(cwd): """Returns a tuple containg remote and remote ref, e.g. 'origin', 'refs/heads/master' """ remote = '.' branch = GIT.GetBranch(cwd) upstream_branch = None upstream_branch = GIT.Capture( ['config', 'branch.%s.merge' % branch], error_ok=True)[0].strip() if upstream_branch: remote = GIT.Capture( ['config', 'branch.%s.remote' % branch], error_ok=True)[0].strip() else: # Fall back on trying a git-svn upstream branch. if GIT.IsGitSvn(cwd): upstream_branch = GIT.GetSVNBranch(cwd) # Fall back on origin/master if it exits. if not upstream_branch: GIT.Capture(['branch', '-r'])[0].split().count('origin/master') remote = 'origin' upstream_branch = 'refs/heads/master' return remote, upstream_branch
|
|
def __init__(self, test_case, verbose=False, revision=None):
|
def __init__(self, verbose=False, revision=None):
|
def __init__(self, test_case, verbose=False, revision=None): self.verbose = verbose self.revision = revision self.manually_grab_svn_rev = True self.deps_os = None self.force = False self.reset = False self.nohooks = False self.stdout = gclient_scm.sys.stdout
|
return self.OptionsObject(self, *args, **kwargs)
|
return self.OptionsObject(*args, **kwargs)
|
def Options(self, *args, **kwargs): return self.OptionsObject(self, *args, **kwargs)
|
logging.warning('Sending by HTTP')
|
logging.info('Sending by HTTP')
|
def _SendChangeHTTP(options): """Send a change to the try server using the HTTP protocol.""" if not options.host: raise NoTryServerAccess('Please use the --host option to specify the try ' 'server host to connect to.') if not options.port: raise NoTryServerAccess('Please use the --port option to specify the try ' 'server port to connect to.') values = _ParseSendChangeOptions(options) description = ''.join("%s=%s\n" % (k,v) for (k,v) in values.iteritems()) values['patch'] = options.diff url = 'http://%s:%s/send_try_patch' % (options.host, options.port) proxies = None if options.proxy: if options.proxy.lower() == 'none': # Effectively disable HTTP_PROXY or Internet settings proxy setup. proxies = {} else: proxies = {'http': options.proxy, 'https': options.proxy} logging.warning('Sending by HTTP') logging.info(description) logging.info(url) logging.info(options.diff) if options.dry_run: return try: connection = urllib.urlopen(url, urllib.urlencode(values), proxies=proxies) except IOError, e: logging.warning(str(e)) if (values.get('bot') and len(e.args) > 2 and e.args[2] == 'got a bad status line'): raise NoTryServerAccess('%s is unaccessible. Bad --bot argument?' % url) else: raise NoTryServerAccess('%s is unaccessible. Reason: %s' % (url, str(e.args))) if not connection: raise NoTryServerAccess('%s is unaccessible.' % url) response = connection.read() if response != 'OK': raise NoTryServerAccess('%s is unaccessible. Got:\n%s' % (url, response))
|
logging.warning('Sending by SVN')
|
logging.info('Sending by SVN')
|
def _SendChangeSVN(options): """Send a change to the try server by committing a diff file on a subversion server.""" if not options.svn_repo: raise NoTryServerAccess('Please use the --svn_repo option to specify the' ' try server svn repository to connect to.') values = _ParseSendChangeOptions(options) description = ''.join("%s=%s\n" % (k,v) for (k,v) in values.iteritems()) logging.warning('Sending by SVN') logging.info(description) logging.info(options.svn_repo) logging.info(options.diff) if options.dry_run: return # Do an empty checkout. temp_dir = tempfile.mkdtemp() temp_file = tempfile.NamedTemporaryFile() try: try: command = ['svn', 'checkout', '--depth', 'empty', '-q', options.svn_repo, temp_dir] if options.email: command.extend(['--username', options.email]) gclient_utils.CheckCall(command) # TODO(maruel): Use a subdirectory per user? current_time = str(datetime.datetime.now()).replace(':', '.') file_name = (EscapeDot(options.user) + '.' + EscapeDot(options.name) + '.%s.diff' % current_time) full_path = os.path.join(temp_dir, file_name) full_url = options.svn_repo + '/' + file_name file_found = False try: gclient_utils.CheckCall(['svn', 'ls', full_url], print_error=False) file_found = True except gclient_utils.CheckCallError: pass if file_found: # The file already exists in the repo. Note that commiting a file is a # no-op if the file's content (the diff) is not modified. This is why # the file name contains the date and time. gclient_utils.CheckCall(['svn', 'update', full_path], print_error=False) gclient_utils.FileWrite(full_path, options.diff, 'wb') else: # Add the file to the repo. gclient_utils.FileWrite(full_path, options.diff, 'wb') gclient_utils.CheckCall(["svn", "add", full_path], print_error=False) temp_file.write(description) temp_file.flush() gclient_utils.CheckCall(["svn", "commit", full_path, '--file', temp_file.name], print_error=False) except gclient_utils.CheckCallError, e: raise NoTryServerAccess(' '.join(e.command) + '\nOuput:\n' + e.stdout) finally: temp_file.close() shutil.rmtree(temp_dir, True)
|
log = "" pos = 0 for line in svn_log: if (pos > 2): log += line.replace('-','').replace('\r','') else: pos = pos + 1 return log
|
return ''.join([l.replace('\r','') for l in svn_log[3:-1]])
|
def getRevisionLog(url, revision): """Takes an svn url and gets the associated revision.""" command = 'svn log ' + url + " -r"+str(revision) svn_log = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).stdout.readlines() log = "" pos = 0 for line in svn_log: if (pos > 2): log += line.replace('-','').replace('\r','') else: pos = pos + 1 return log
|
out.write("TBR=" + author)
|
out.write("\nTBR=" + author)
|
def main(options, args): revision = options.revert or options.merge # Initialize some variables used below. They can be overwritten by # the drover.properties file. BASE_URL = "svn://svn.chromium.org/chrome" TRUNK_URL = BASE_URL + "/trunk/src" BRANCH_URL = BASE_URL + "/branches/$branch/src" SKIP_CHECK_WORKING = True PROMPT_FOR_AUTHOR = False DEFAULT_WORKING = "drover_" + str(revision) if options.branch: DEFAULT_WORKING += ("_" + options.branch) if not isMinimumSVNVersion(1,5): print "You need to use at least SVN version 1.5.x" sys.exit(1) # Override the default properties if there is a drover.properties file. global file_pattern_ if os.path.exists("drover.properties"): file = open("drover.properties") exec(file) file.close() if FILE_PATTERN: file_pattern_ = FILE_PATTERN if options.revert and options.branch: url = BRANCH_URL.replace("$branch", options.branch) elif options.merge and options.sbranch: url = BRANCH_URL.replace("$branch", options.sbranch) else: url = TRUNK_URL working = options.workdir or DEFAULT_WORKING if options.local: working = os.getcwd() if not inCheckoutRoot(working): print "'%s' appears not to be the root of a working copy" % working sys.exit(1) if isSVNDirty(): print "Working copy contains uncommitted files" sys.exit(1) command = 'svn log ' + url + " -r "+str(revision) + " -v" os.system(command) if not (options.revertbot or prompt("Is this the correct revision?")): sys.exit(0) if (os.path.exists(working)) and not options.local: if not (options.revertbot or SKIP_CHECK_WORKING or prompt("Working directory: '%s' already exists, clobber?" % working)): sys.exit(0) deltree(working) if not options.local: os.makedirs(working) os.chdir(working) if options.merge: action = "Merge" if not options.local: branch_url = BRANCH_URL.replace("$branch", options.branch) # Checkout everything but stuff that got added into a new dir checkoutRevision(url, revision, branch_url) # Merge everything that changed mergeRevision(url, revision) # "Export" files that were added from the source and add them to branch exportRevision(url, revision) # Delete directories that were deleted (file deletes are handled in the # merge). deleteRevision(url, revision) elif options.revert: action = "Revert" if options.branch: url = BRANCH_URL.replace("$branch", options.branch) checkoutRevision(url, revision, url, True) revertRevision(url, revision) revertExportRevision(url, revision) # Check the base url so we actually find the author who made the change if options.auditor: author = options.auditor else: author = getAuthor(url, revision) if not author: author = getAuthor(TRUNK_URL, revision) filename = str(revision)+".txt" out = open(filename,"w") out.write(action +" " + str(revision) + " - ") out.write(getRevisionLog(url, revision)) if (author): out.write("TBR=" + author) out.close() change_cmd = 'change ' + str(revision) + " " + filename if options.revertbot: change_cmd += ' --silent' runGcl(change_cmd) os.unlink(filename) if options.local: sys.exit(0) print author print revision print ("gcl upload " + str(revision) + " --send_mail --no_try --no_presubmit --reviewers=" + author) if options.revertbot or prompt("Would you like to upload?"): if PROMPT_FOR_AUTHOR: author = text_prompt("Enter new author or press enter to accept default", author) if options.revertbot and options.revertbot_reviewers: author += "," author += options.revertbot_reviewers gclUpload(revision, author) else: print "Deleting the changelist." 
print "gcl delete " + str(revision) runGcl("delete " + str(revision)) sys.exit(0) # We commit if the reverbot is set to commit automatically, or if this is # not the revertbot and the user agrees. if options.revertbot_commit or (not options.revertbot and prompt("Would you like to commit?")): print "gcl commit " + str(revision) + " --no_presubmit --force" runGcl("commit " + str(revision) + " --no_presubmit --force") else: sys.exit(0)
|
parser.add_option('-j', '--jobs', default=8, type='int',
|
parser.add_option('-j', '--jobs', default=1, type='int',
|
def Main(argv): """Doesn't parse the arguments here, just find the right subcommand to execute.""" try: # Do it late so all commands are listed. CMDhelp.usage = ('\n\nCommands are:\n' + '\n'.join([ ' %-10s %s' % (fn[3:], Command(fn[3:]).__doc__.split('\n')[0].strip()) for fn in dir(sys.modules[__name__]) if fn.startswith('CMD')])) parser = optparse.OptionParser(version='%prog ' + __version__) parser.add_option('-j', '--jobs', default=8, type='int', help='Specify how many SCM commands can run in parallel; ' 'default=%default') parser.add_option('-v', '--verbose', action='count', default=0, help='Produces additional output for diagnostics. Can be ' 'used up to three times for more logging info.') parser.add_option('--gclientfile', dest='config_filename', default=os.environ.get('GCLIENT_FILE', '.gclient'), help='Specify an alternate %default file') # Integrate standard options processing. old_parser = parser.parse_args def Parse(args): (options, args) = old_parser(args) level = None if options.verbose == 2: level = logging.INFO elif options.verbose > 2: level = logging.DEBUG logging.basicConfig(level=level, format='%(module)s(%(lineno)d) %(funcName)s:%(message)s') options.entries_filename = options.config_filename + '_entries' if options.jobs < 1: parser.error('--jobs must be 1 or higher') # Always autoflush so buildbot doesn't kill us during lengthy operations. options.stdout = gclient_utils.StdoutAutoFlush(sys.stdout) # These hacks need to die. if not hasattr(options, 'revisions'): # GClient.RunOnDeps expects it even if not applicable. options.revisions = [] if not hasattr(options, 'head'): options.head = None if not hasattr(options, 'nohooks'): options.nohooks = True if not hasattr(options, 'deps_os'): options.deps_os = None if not hasattr(options, 'manually_grab_svn_rev'): options.manually_grab_svn_rev = None if not hasattr(options, 'force'): options.force = None return (options, args) parser.parse_args = Parse # We don't want wordwrapping in epilog (usually examples) parser.format_epilog = lambda _: parser.epilog or '' if argv: command = Command(argv[0]) if command: # 'fix' the usage and the description now that we know the subcommand. GenUsage(parser, argv[0]) return command(parser, argv[1:]) # Not a known command. Default to help. GenUsage(parser, 'help') return CMDhelp(parser, argv) except gclient_utils.Error, e: print >> sys.stderr, 'Error: %s' % str(e) return 1
|
for filename in os.listdir(unicode(GetInfoDir())): file_path = os.path.join(unicode(GetInfoDir()), filename) if os.path.isfile(file_path) and filename != CODEREVIEW_SETTINGS_FILE: shutil.move(file_path, GetChangesDir())
|
def main(argv=None): if argv is None: argv = sys.argv if len(argv) == 1: Help() return 0; try: # Create the directories where we store information about changelists if it # doesn't exist. if not os.path.exists(GetInfoDir()): os.mkdir(GetInfoDir()) if not os.path.exists(GetChangesDir()): os.mkdir(GetChangesDir()) # For smooth upgrade support, move the files in GetInfoDir() to # GetChangesDir(). # TODO(maruel): Remove this code in August 2009. for filename in os.listdir(unicode(GetInfoDir())): file_path = os.path.join(unicode(GetInfoDir()), filename) if os.path.isfile(file_path) and filename != CODEREVIEW_SETTINGS_FILE: shutil.move(file_path, GetChangesDir()) if not os.path.exists(GetCacheDir()): os.mkdir(GetCacheDir()) except gclient_utils.Error: # Will throw an exception if not run in a svn checkout. pass # Commands that don't require an argument. command = argv[1] if command == "opened" or command == "status": Opened(command == "status") return 0 if command == "nothave": __pychecker__ = 'no-returnvalues' for filename in UnknownFiles(argv[2:]): print "? " + "".join(filename) return 0 if command == "changes": Changes() return 0 if command == "help": Help(argv[2:]) return 0 if command == "diff" and len(argv) == 2: files = GetFilesNotInCL() print GenerateDiff([x[1] for x in files]) return 0 if command == "settings": # Force load settings GetCodeReviewSetting("UNKNOWN"); del CODEREVIEW_SETTINGS['__just_initialized'] print '\n'.join(("%s: %s" % (str(k), str(v)) for (k,v) in CODEREVIEW_SETTINGS.iteritems())) return 0 if command == "deleteempties": DeleteEmptyChangeLists() return 0 if command == "change": if len(argv) == 2: # Generate a random changelist name. changename = GenerateChangeName() elif argv[2] == '--force': changename = GenerateChangeName() # argv[3:] is passed to Change() as |args| later. Change() should receive # |args| which includes '--force'. argv.insert(2, changename) else: changename = argv[2] elif len(argv) == 2: ErrorExit("Need a changelist name.") else: changename = argv[2] # When the command is 'try' and --patchset is used, the patch to try # is on the Rietveld server. 'change' creates a change so it's fine if the # change didn't exist. All other commands require an existing change. fail_on_not_found = command != "try" and command != "change" if command == "try" and changename.find(',') != -1: change_info = LoadChangelistInfoForMultiple(changename, GetRepositoryRoot(), True, True) else: change_info = ChangeInfo.Load(changename, GetRepositoryRoot(), fail_on_not_found, True) if command == "change": Change(change_info, argv[3:]) elif command == "lint": Lint(change_info, argv[3:]) elif command == "upload": UploadCL(change_info, argv[3:]) elif command == "presubmit": PresubmitCL(change_info) elif command in ("commit", "submit"): Commit(change_info, argv[3:]) elif command == "delete": change_info.Delete() elif command == "try": # When the change contains no file, send the "changename" positional # argument to trychange.py. if change_info.GetFiles(): args = argv[3:] else: change_info = None args = argv[2:] TryChange(change_info, args, swallow_exception=False) else: # Everything else that is passed into gcl we redirect to svn, after adding # the files. This allows commands such as 'gcl diff xxx' to work. if command == "diff" and not change_info.GetFileNames(): return 0 args =["svn", command] root = GetRepositoryRoot() args.extend([os.path.join(root, x) for x in change_info.GetFileNames()]) RunShell(args, True) return 0
|
|
help="Update rietveld issue try job status")
|
help="Update rietveld issue try job status. This is " "optional if --issue is used, In that case, the " "latest patchset will be used.")
|
def TryChange(argv, file_list, swallow_exception, prog=None): """ Args: argv: Arguments and options. file_list: Default value to pass to --file. swallow_exception: Whether we raise or swallow exceptions. """ # Parse argv parser = optparse.OptionParser(usage=USAGE, version=__version__, prog=prog) parser.add_option("-v", "--verbose", action="count", default=0, help="Prints debugging infos") group = optparse.OptionGroup(parser, "Result and status") group.add_option("-u", "--user", default=getpass.getuser(), help="Owner user name [default: %default]") group.add_option("-e", "--email", default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS', os.environ.get('EMAIL_ADDRESS')), help="Email address where to send the results. Use either " "the TRYBOT_RESULTS_EMAIL_ADDRESS environment " "variable or EMAIL_ADDRESS to set the email address " "the try bots report results to [default: %default]") group.add_option("-n", "--name", help="Descriptive name of the try job") group.add_option("--issue", type='int', help="Update rietveld issue try job status") group.add_option("--patchset", type='int', help="Update rietveld issue try job status") group.add_option("--dry_run", action='store_true', help="Just prints the diff and quits") group.add_option("--rietveld_url", help="The code review url.") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Try job options") group.add_option("-b", "--bot", action="append", help="Only use specifics build slaves, ex: '--bot win' to " "run the try job only on the 'win' slave; see the try " "server waterfall for the slave's name") group.add_option("-r", "--revision", help="Revision to use for the try job; default: the " "revision will be determined by the try server; see " "its waterfall for more info") group.add_option("-c", "--clobber", action="store_true", help="Force a clobber before building; e.g. don't do an " "incremental build") # TODO(maruel): help="Select a specific configuration, usually 'debug' or " # "'release'" group.add_option("--target", help=optparse.SUPPRESS_HELP) group.add_option("--project", help="Override which project to use. Projects are defined " "server-side to define what default bot set to use") # Override the list of tests to run, use multiple times to list many tests # (or comma separated) group.add_option("-t", "--tests", action="append", help=optparse.SUPPRESS_HELP) parser.add_option_group(group) group = optparse.OptionGroup(parser, "Patch to run") group.add_option("-f", "--file", default=file_list, dest="files", metavar="FILE", action="append", help="Use many times to list the files to include in the " "try, relative to the repository root") group.add_option("--diff", help="File containing the diff to try") group.add_option("--url", help="Url where to grab a patch") group.add_option("--root", help="Root to use for the patch; base subdirectory for " "patch created in a subdirectory") group.add_option("-p", "--patchlevel", type='int', metavar="LEVEL", help="Used as -pN parameter to patch") group.add_option("-s", "--sub_rep", action="append", default=[], help="Subcheckout to use in addition. This is mainly " "useful for gclient-style checkouts. Use @rev or " "@branch or @branch1..branch2 to specify the " "revision/branch to diff against.") # Mostly chromium-specific try: group.add_option("--webkit", action="append_const", const="third_party/WebKit", dest="PATH", help="Shorthand for -s third_party/WebKit") except optparse.OptionError: # append_const is not supported on 2.4. Too bad. 
pass group.add_option("--no_gclient", action="store_true", help="Disable automatic search for gclient checkout.") group.add_option("-E", "--exclude", action="append", default=['ChangeLog'], metavar='REGEXP', help="Regexp patterns to exclude files. Default: %default") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server by HTTP") group.add_option("--use_http", action="store_const", const=_SendChangeHTTP, dest="send_patch", help="Use HTTP to talk to the try server [default]") group.add_option("-H", "--host", help="Host address") group.add_option("-P", "--port", help="HTTP port") group.add_option("--proxy", help="HTTP proxy") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server with SVN") group.add_option("--use_svn", action="store_const", const=_SendChangeSVN, dest="send_patch", help="Use SVN to talk to the try server") group.add_option("-S", "--svn_repo", metavar="SVN_URL", help="SVN url to use to write the changes in; --use_svn is " "implied when using --svn_repo") parser.add_option_group(group) options, args = parser.parse_args(argv) if len(args) == 1 and args[0] == 'help': parser.print_help() if not swallow_exception: if options.verbose == 0: logging.basicConfig(level=logging.ERROR) elif options.verbose == 1: logging.basicConfig(level=logging.WARNING) elif options.verbose == 2: logging.basicConfig(level=logging.INFO) elif options.verbose > 2: logging.basicConfig(level=logging.DEBUG) logging.debug(argv) try: # Always include os.getcwd() in the checkout settings. checkouts = [] checkouts.append(GuessVCS(options, os.getcwd())) checkouts[0].AutomagicalSettings() for item in options.sub_rep: checkout = GuessVCS(options, os.path.join(checkouts[0].checkout_root, item)) if checkout.checkout_root in [c.checkout_root for c in checkouts]: parser.error('Specified the root %s two times.' % checkout.checkout_root) checkouts.append(checkout) can_http = options.port and options.host can_svn = options.svn_repo # If there was no transport selected yet, now we must have enough data to # select one. if not options.send_patch and not (can_http or can_svn): parser.error('Please specify an access method.') # Convert options.diff into the content of the diff. if options.url: if options.files: parser.error('You cannot specify files and --url at the same time.') options.diff = urllib.urlopen(options.url).read() elif options.diff: if options.files: parser.error('You cannot specify files and --diff at the same time.') options.diff = gclient_utils.FileRead(options.diff, 'rb') elif options.issue and options.patchset is None: # Retrieve the patch from rietveld when the diff is not specified. try: import simplejson except ImportError: parser.error('simplejson library is missing, please install.') api_url = 'http://%s/api/%d' % (options.rietveld_url, options.issue) contents = simplejson.loads(urllib.urlopen(api_url).read()) diff_url = ('http://%s/download/issue%d_%d.diff' % (options.rietveld_url, options.issue, contents['patchsets'][-1])) diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines()) options.diff = ''.join(diff) else: # Use this as the base. root = checkouts[0].checkout_root diffs = [] for checkout in checkouts: diff = checkout.GenerateDiff().splitlines(True) path_diff = gclient_utils.PathDifference(root, checkout.checkout_root) # Munge it. diffs.extend(GetMungedDiff(path_diff, diff)) options.diff = ''.join(diffs) if not options.bot: # Get try slaves from PRESUBMIT.py files if not specified. 
# Even if the diff comes from options.url, use the local checkout for bot # selection. try: import presubmit_support root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py') options.bot = presubmit_support.DoGetTrySlaves( checkouts[0].GetFileNames(), checkouts[0].checkout_root, root_presubmit, False, sys.stdout) except ImportError: pass # If no bot is specified, either the default pool will be selected or the # try server will refuse the job. Either case we don't need to interfere. if options.name is None: if options.issue: options.name = 'Issue %s' % options.issue else: options.name = 'Unnamed' print('Note: use --name NAME to change the try job name.') if not options.email: parser.error('Using an anonymous checkout. Please use --email or set ' 'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.') else: print('Results will be emailed to: ' + options.email) # Send the patch. if options.send_patch: # If forced. options.send_patch(options) PrintSuccess(options) return 0 try: if can_http: _SendChangeHTTP(options) PrintSuccess(options) return 0 except NoTryServerAccess: if not can_svn: raise _SendChangeSVN(options) PrintSuccess(options) return 0 except (InvalidScript, NoTryServerAccess), e: if swallow_exception: return 1 print e return 1 return 0
|
group.add_option("--rietveld_url", help="The code review url.")
|
group.add_option("-R", "--rietveld_url", default="codereview.appspot.com", metavar="URL", help="The root code review url. Default:%default")
|
def TryChange(argv, file_list, swallow_exception, prog=None): """ Args: argv: Arguments and options. file_list: Default value to pass to --file. swallow_exception: Whether we raise or swallow exceptions. """ # Parse argv parser = optparse.OptionParser(usage=USAGE, version=__version__, prog=prog) parser.add_option("-v", "--verbose", action="count", default=0, help="Prints debugging infos") group = optparse.OptionGroup(parser, "Result and status") group.add_option("-u", "--user", default=getpass.getuser(), help="Owner user name [default: %default]") group.add_option("-e", "--email", default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS', os.environ.get('EMAIL_ADDRESS')), help="Email address where to send the results. Use either " "the TRYBOT_RESULTS_EMAIL_ADDRESS environment " "variable or EMAIL_ADDRESS to set the email address " "the try bots report results to [default: %default]") group.add_option("-n", "--name", help="Descriptive name of the try job") group.add_option("--issue", type='int', help="Update rietveld issue try job status") group.add_option("--patchset", type='int', help="Update rietveld issue try job status") group.add_option("--dry_run", action='store_true', help="Just prints the diff and quits") group.add_option("--rietveld_url", help="The code review url.") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Try job options") group.add_option("-b", "--bot", action="append", help="Only use specifics build slaves, ex: '--bot win' to " "run the try job only on the 'win' slave; see the try " "server waterfall for the slave's name") group.add_option("-r", "--revision", help="Revision to use for the try job; default: the " "revision will be determined by the try server; see " "its waterfall for more info") group.add_option("-c", "--clobber", action="store_true", help="Force a clobber before building; e.g. don't do an " "incremental build") # TODO(maruel): help="Select a specific configuration, usually 'debug' or " # "'release'" group.add_option("--target", help=optparse.SUPPRESS_HELP) group.add_option("--project", help="Override which project to use. Projects are defined " "server-side to define what default bot set to use") # Override the list of tests to run, use multiple times to list many tests # (or comma separated) group.add_option("-t", "--tests", action="append", help=optparse.SUPPRESS_HELP) parser.add_option_group(group) group = optparse.OptionGroup(parser, "Patch to run") group.add_option("-f", "--file", default=file_list, dest="files", metavar="FILE", action="append", help="Use many times to list the files to include in the " "try, relative to the repository root") group.add_option("--diff", help="File containing the diff to try") group.add_option("--url", help="Url where to grab a patch") group.add_option("--root", help="Root to use for the patch; base subdirectory for " "patch created in a subdirectory") group.add_option("-p", "--patchlevel", type='int', metavar="LEVEL", help="Used as -pN parameter to patch") group.add_option("-s", "--sub_rep", action="append", default=[], help="Subcheckout to use in addition. This is mainly " "useful for gclient-style checkouts. Use @rev or " "@branch or @branch1..branch2 to specify the " "revision/branch to diff against.") # Mostly chromium-specific try: group.add_option("--webkit", action="append_const", const="third_party/WebKit", dest="PATH", help="Shorthand for -s third_party/WebKit") except optparse.OptionError: # append_const is not supported on 2.4. Too bad. 
pass group.add_option("--no_gclient", action="store_true", help="Disable automatic search for gclient checkout.") group.add_option("-E", "--exclude", action="append", default=['ChangeLog'], metavar='REGEXP', help="Regexp patterns to exclude files. Default: %default") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server by HTTP") group.add_option("--use_http", action="store_const", const=_SendChangeHTTP, dest="send_patch", help="Use HTTP to talk to the try server [default]") group.add_option("-H", "--host", help="Host address") group.add_option("-P", "--port", help="HTTP port") group.add_option("--proxy", help="HTTP proxy") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server with SVN") group.add_option("--use_svn", action="store_const", const=_SendChangeSVN, dest="send_patch", help="Use SVN to talk to the try server") group.add_option("-S", "--svn_repo", metavar="SVN_URL", help="SVN url to use to write the changes in; --use_svn is " "implied when using --svn_repo") parser.add_option_group(group) options, args = parser.parse_args(argv) if len(args) == 1 and args[0] == 'help': parser.print_help() if not swallow_exception: if options.verbose == 0: logging.basicConfig(level=logging.ERROR) elif options.verbose == 1: logging.basicConfig(level=logging.WARNING) elif options.verbose == 2: logging.basicConfig(level=logging.INFO) elif options.verbose > 2: logging.basicConfig(level=logging.DEBUG) logging.debug(argv) try: # Always include os.getcwd() in the checkout settings. checkouts = [] checkouts.append(GuessVCS(options, os.getcwd())) checkouts[0].AutomagicalSettings() for item in options.sub_rep: checkout = GuessVCS(options, os.path.join(checkouts[0].checkout_root, item)) if checkout.checkout_root in [c.checkout_root for c in checkouts]: parser.error('Specified the root %s two times.' % checkout.checkout_root) checkouts.append(checkout) can_http = options.port and options.host can_svn = options.svn_repo # If there was no transport selected yet, now we must have enough data to # select one. if not options.send_patch and not (can_http or can_svn): parser.error('Please specify an access method.') # Convert options.diff into the content of the diff. if options.url: if options.files: parser.error('You cannot specify files and --url at the same time.') options.diff = urllib.urlopen(options.url).read() elif options.diff: if options.files: parser.error('You cannot specify files and --diff at the same time.') options.diff = gclient_utils.FileRead(options.diff, 'rb') elif options.issue and options.patchset is None: # Retrieve the patch from rietveld when the diff is not specified. try: import simplejson except ImportError: parser.error('simplejson library is missing, please install.') api_url = 'http://%s/api/%d' % (options.rietveld_url, options.issue) contents = simplejson.loads(urllib.urlopen(api_url).read()) diff_url = ('http://%s/download/issue%d_%d.diff' % (options.rietveld_url, options.issue, contents['patchsets'][-1])) diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines()) options.diff = ''.join(diff) else: # Use this as the base. root = checkouts[0].checkout_root diffs = [] for checkout in checkouts: diff = checkout.GenerateDiff().splitlines(True) path_diff = gclient_utils.PathDifference(root, checkout.checkout_root) # Munge it. diffs.extend(GetMungedDiff(path_diff, diff)) options.diff = ''.join(diffs) if not options.bot: # Get try slaves from PRESUBMIT.py files if not specified. 
# Even if the diff comes from options.url, use the local checkout for bot # selection. try: import presubmit_support root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py') options.bot = presubmit_support.DoGetTrySlaves( checkouts[0].GetFileNames(), checkouts[0].checkout_root, root_presubmit, False, sys.stdout) except ImportError: pass # If no bot is specified, either the default pool will be selected or the # try server will refuse the job. Either case we don't need to interfere. if options.name is None: if options.issue: options.name = 'Issue %s' % options.issue else: options.name = 'Unnamed' print('Note: use --name NAME to change the try job name.') if not options.email: parser.error('Using an anonymous checkout. Please use --email or set ' 'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.') else: print('Results will be emailed to: ' + options.email) # Send the patch. if options.send_patch: # If forced. options.send_patch(options) PrintSuccess(options) return 0 try: if can_http: _SendChangeHTTP(options) PrintSuccess(options) return 0 except NoTryServerAccess: if not can_svn: raise _SendChangeSVN(options) PrintSuccess(options) return 0 except (InvalidScript, NoTryServerAccess), e: if swallow_exception: return 1 print e return 1 return 0
|
api_url = 'http://%s/api/%d' % (options.rietveld_url, options.issue)
|
api_url = '%s/api/%d' % (options.rietveld_url, options.issue) logging.debug(api_url)
|
def TryChange(argv, file_list, swallow_exception, prog=None): """ Args: argv: Arguments and options. file_list: Default value to pass to --file. swallow_exception: Whether we raise or swallow exceptions. """ # Parse argv parser = optparse.OptionParser(usage=USAGE, version=__version__, prog=prog) parser.add_option("-v", "--verbose", action="count", default=0, help="Prints debugging infos") group = optparse.OptionGroup(parser, "Result and status") group.add_option("-u", "--user", default=getpass.getuser(), help="Owner user name [default: %default]") group.add_option("-e", "--email", default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS', os.environ.get('EMAIL_ADDRESS')), help="Email address where to send the results. Use either " "the TRYBOT_RESULTS_EMAIL_ADDRESS environment " "variable or EMAIL_ADDRESS to set the email address " "the try bots report results to [default: %default]") group.add_option("-n", "--name", help="Descriptive name of the try job") group.add_option("--issue", type='int', help="Update rietveld issue try job status") group.add_option("--patchset", type='int', help="Update rietveld issue try job status") group.add_option("--dry_run", action='store_true', help="Just prints the diff and quits") group.add_option("--rietveld_url", help="The code review url.") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Try job options") group.add_option("-b", "--bot", action="append", help="Only use specifics build slaves, ex: '--bot win' to " "run the try job only on the 'win' slave; see the try " "server waterfall for the slave's name") group.add_option("-r", "--revision", help="Revision to use for the try job; default: the " "revision will be determined by the try server; see " "its waterfall for more info") group.add_option("-c", "--clobber", action="store_true", help="Force a clobber before building; e.g. don't do an " "incremental build") # TODO(maruel): help="Select a specific configuration, usually 'debug' or " # "'release'" group.add_option("--target", help=optparse.SUPPRESS_HELP) group.add_option("--project", help="Override which project to use. Projects are defined " "server-side to define what default bot set to use") # Override the list of tests to run, use multiple times to list many tests # (or comma separated) group.add_option("-t", "--tests", action="append", help=optparse.SUPPRESS_HELP) parser.add_option_group(group) group = optparse.OptionGroup(parser, "Patch to run") group.add_option("-f", "--file", default=file_list, dest="files", metavar="FILE", action="append", help="Use many times to list the files to include in the " "try, relative to the repository root") group.add_option("--diff", help="File containing the diff to try") group.add_option("--url", help="Url where to grab a patch") group.add_option("--root", help="Root to use for the patch; base subdirectory for " "patch created in a subdirectory") group.add_option("-p", "--patchlevel", type='int', metavar="LEVEL", help="Used as -pN parameter to patch") group.add_option("-s", "--sub_rep", action="append", default=[], help="Subcheckout to use in addition. This is mainly " "useful for gclient-style checkouts. Use @rev or " "@branch or @branch1..branch2 to specify the " "revision/branch to diff against.") # Mostly chromium-specific try: group.add_option("--webkit", action="append_const", const="third_party/WebKit", dest="PATH", help="Shorthand for -s third_party/WebKit") except optparse.OptionError: # append_const is not supported on 2.4. Too bad. 
pass group.add_option("--no_gclient", action="store_true", help="Disable automatic search for gclient checkout.") group.add_option("-E", "--exclude", action="append", default=['ChangeLog'], metavar='REGEXP', help="Regexp patterns to exclude files. Default: %default") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server by HTTP") group.add_option("--use_http", action="store_const", const=_SendChangeHTTP, dest="send_patch", help="Use HTTP to talk to the try server [default]") group.add_option("-H", "--host", help="Host address") group.add_option("-P", "--port", help="HTTP port") group.add_option("--proxy", help="HTTP proxy") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server with SVN") group.add_option("--use_svn", action="store_const", const=_SendChangeSVN, dest="send_patch", help="Use SVN to talk to the try server") group.add_option("-S", "--svn_repo", metavar="SVN_URL", help="SVN url to use to write the changes in; --use_svn is " "implied when using --svn_repo") parser.add_option_group(group) options, args = parser.parse_args(argv) if len(args) == 1 and args[0] == 'help': parser.print_help() if not swallow_exception: if options.verbose == 0: logging.basicConfig(level=logging.ERROR) elif options.verbose == 1: logging.basicConfig(level=logging.WARNING) elif options.verbose == 2: logging.basicConfig(level=logging.INFO) elif options.verbose > 2: logging.basicConfig(level=logging.DEBUG) logging.debug(argv) try: # Always include os.getcwd() in the checkout settings. checkouts = [] checkouts.append(GuessVCS(options, os.getcwd())) checkouts[0].AutomagicalSettings() for item in options.sub_rep: checkout = GuessVCS(options, os.path.join(checkouts[0].checkout_root, item)) if checkout.checkout_root in [c.checkout_root for c in checkouts]: parser.error('Specified the root %s two times.' % checkout.checkout_root) checkouts.append(checkout) can_http = options.port and options.host can_svn = options.svn_repo # If there was no transport selected yet, now we must have enough data to # select one. if not options.send_patch and not (can_http or can_svn): parser.error('Please specify an access method.') # Convert options.diff into the content of the diff. if options.url: if options.files: parser.error('You cannot specify files and --url at the same time.') options.diff = urllib.urlopen(options.url).read() elif options.diff: if options.files: parser.error('You cannot specify files and --diff at the same time.') options.diff = gclient_utils.FileRead(options.diff, 'rb') elif options.issue and options.patchset is None: # Retrieve the patch from rietveld when the diff is not specified. try: import simplejson except ImportError: parser.error('simplejson library is missing, please install.') api_url = 'http://%s/api/%d' % (options.rietveld_url, options.issue) contents = simplejson.loads(urllib.urlopen(api_url).read()) diff_url = ('http://%s/download/issue%d_%d.diff' % (options.rietveld_url, options.issue, contents['patchsets'][-1])) diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines()) options.diff = ''.join(diff) else: # Use this as the base. root = checkouts[0].checkout_root diffs = [] for checkout in checkouts: diff = checkout.GenerateDiff().splitlines(True) path_diff = gclient_utils.PathDifference(root, checkout.checkout_root) # Munge it. diffs.extend(GetMungedDiff(path_diff, diff)) options.diff = ''.join(diffs) if not options.bot: # Get try slaves from PRESUBMIT.py files if not specified. 
# Even if the diff comes from options.url, use the local checkout for bot # selection. try: import presubmit_support root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py') options.bot = presubmit_support.DoGetTrySlaves( checkouts[0].GetFileNames(), checkouts[0].checkout_root, root_presubmit, False, sys.stdout) except ImportError: pass # If no bot is specified, either the default pool will be selected or the # try server will refuse the job. Either case we don't need to interfere. if options.name is None: if options.issue: options.name = 'Issue %s' % options.issue else: options.name = 'Unnamed' print('Note: use --name NAME to change the try job name.') if not options.email: parser.error('Using an anonymous checkout. Please use --email or set ' 'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.') else: print('Results will be emailed to: ' + options.email) # Send the patch. if options.send_patch: # If forced. options.send_patch(options) PrintSuccess(options) return 0 try: if can_http: _SendChangeHTTP(options) PrintSuccess(options) return 0 except NoTryServerAccess: if not can_svn: raise _SendChangeSVN(options) PrintSuccess(options) return 0 except (InvalidScript, NoTryServerAccess), e: if swallow_exception: return 1 print e return 1 return 0
|
diff_url = ('http://%s/download/issue%d_%d.diff' % (options.rietveld_url, options.issue, contents['patchsets'][-1]))
|
options.patchset = contents['patchsets'][-1] diff_url = ('%s/download/issue%d_%d.diff' % (options.rietveld_url, options.issue, options.patchset))
|
def TryChange(argv, file_list, swallow_exception, prog=None): """ Args: argv: Arguments and options. file_list: Default value to pass to --file. swallow_exception: Whether we raise or swallow exceptions. """ # Parse argv parser = optparse.OptionParser(usage=USAGE, version=__version__, prog=prog) parser.add_option("-v", "--verbose", action="count", default=0, help="Prints debugging infos") group = optparse.OptionGroup(parser, "Result and status") group.add_option("-u", "--user", default=getpass.getuser(), help="Owner user name [default: %default]") group.add_option("-e", "--email", default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS', os.environ.get('EMAIL_ADDRESS')), help="Email address where to send the results. Use either " "the TRYBOT_RESULTS_EMAIL_ADDRESS environment " "variable or EMAIL_ADDRESS to set the email address " "the try bots report results to [default: %default]") group.add_option("-n", "--name", help="Descriptive name of the try job") group.add_option("--issue", type='int', help="Update rietveld issue try job status") group.add_option("--patchset", type='int', help="Update rietveld issue try job status") group.add_option("--dry_run", action='store_true', help="Just prints the diff and quits") group.add_option("--rietveld_url", help="The code review url.") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Try job options") group.add_option("-b", "--bot", action="append", help="Only use specifics build slaves, ex: '--bot win' to " "run the try job only on the 'win' slave; see the try " "server waterfall for the slave's name") group.add_option("-r", "--revision", help="Revision to use for the try job; default: the " "revision will be determined by the try server; see " "its waterfall for more info") group.add_option("-c", "--clobber", action="store_true", help="Force a clobber before building; e.g. don't do an " "incremental build") # TODO(maruel): help="Select a specific configuration, usually 'debug' or " # "'release'" group.add_option("--target", help=optparse.SUPPRESS_HELP) group.add_option("--project", help="Override which project to use. Projects are defined " "server-side to define what default bot set to use") # Override the list of tests to run, use multiple times to list many tests # (or comma separated) group.add_option("-t", "--tests", action="append", help=optparse.SUPPRESS_HELP) parser.add_option_group(group) group = optparse.OptionGroup(parser, "Patch to run") group.add_option("-f", "--file", default=file_list, dest="files", metavar="FILE", action="append", help="Use many times to list the files to include in the " "try, relative to the repository root") group.add_option("--diff", help="File containing the diff to try") group.add_option("--url", help="Url where to grab a patch") group.add_option("--root", help="Root to use for the patch; base subdirectory for " "patch created in a subdirectory") group.add_option("-p", "--patchlevel", type='int', metavar="LEVEL", help="Used as -pN parameter to patch") group.add_option("-s", "--sub_rep", action="append", default=[], help="Subcheckout to use in addition. This is mainly " "useful for gclient-style checkouts. Use @rev or " "@branch or @branch1..branch2 to specify the " "revision/branch to diff against.") # Mostly chromium-specific try: group.add_option("--webkit", action="append_const", const="third_party/WebKit", dest="PATH", help="Shorthand for -s third_party/WebKit") except optparse.OptionError: # append_const is not supported on 2.4. Too bad. 
pass group.add_option("--no_gclient", action="store_true", help="Disable automatic search for gclient checkout.") group.add_option("-E", "--exclude", action="append", default=['ChangeLog'], metavar='REGEXP', help="Regexp patterns to exclude files. Default: %default") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server by HTTP") group.add_option("--use_http", action="store_const", const=_SendChangeHTTP, dest="send_patch", help="Use HTTP to talk to the try server [default]") group.add_option("-H", "--host", help="Host address") group.add_option("-P", "--port", help="HTTP port") group.add_option("--proxy", help="HTTP proxy") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server with SVN") group.add_option("--use_svn", action="store_const", const=_SendChangeSVN, dest="send_patch", help="Use SVN to talk to the try server") group.add_option("-S", "--svn_repo", metavar="SVN_URL", help="SVN url to use to write the changes in; --use_svn is " "implied when using --svn_repo") parser.add_option_group(group) options, args = parser.parse_args(argv) if len(args) == 1 and args[0] == 'help': parser.print_help() if not swallow_exception: if options.verbose == 0: logging.basicConfig(level=logging.ERROR) elif options.verbose == 1: logging.basicConfig(level=logging.WARNING) elif options.verbose == 2: logging.basicConfig(level=logging.INFO) elif options.verbose > 2: logging.basicConfig(level=logging.DEBUG) logging.debug(argv) try: # Always include os.getcwd() in the checkout settings. checkouts = [] checkouts.append(GuessVCS(options, os.getcwd())) checkouts[0].AutomagicalSettings() for item in options.sub_rep: checkout = GuessVCS(options, os.path.join(checkouts[0].checkout_root, item)) if checkout.checkout_root in [c.checkout_root for c in checkouts]: parser.error('Specified the root %s two times.' % checkout.checkout_root) checkouts.append(checkout) can_http = options.port and options.host can_svn = options.svn_repo # If there was no transport selected yet, now we must have enough data to # select one. if not options.send_patch and not (can_http or can_svn): parser.error('Please specify an access method.') # Convert options.diff into the content of the diff. if options.url: if options.files: parser.error('You cannot specify files and --url at the same time.') options.diff = urllib.urlopen(options.url).read() elif options.diff: if options.files: parser.error('You cannot specify files and --diff at the same time.') options.diff = gclient_utils.FileRead(options.diff, 'rb') elif options.issue and options.patchset is None: # Retrieve the patch from rietveld when the diff is not specified. try: import simplejson except ImportError: parser.error('simplejson library is missing, please install.') api_url = 'http://%s/api/%d' % (options.rietveld_url, options.issue) contents = simplejson.loads(urllib.urlopen(api_url).read()) diff_url = ('http://%s/download/issue%d_%d.diff' % (options.rietveld_url, options.issue, contents['patchsets'][-1])) diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines()) options.diff = ''.join(diff) else: # Use this as the base. root = checkouts[0].checkout_root diffs = [] for checkout in checkouts: diff = checkout.GenerateDiff().splitlines(True) path_diff = gclient_utils.PathDifference(root, checkout.checkout_root) # Munge it. diffs.extend(GetMungedDiff(path_diff, diff)) options.diff = ''.join(diffs) if not options.bot: # Get try slaves from PRESUBMIT.py files if not specified. 
# Even if the diff comes from options.url, use the local checkout for bot # selection. try: import presubmit_support root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py') options.bot = presubmit_support.DoGetTrySlaves( checkouts[0].GetFileNames(), checkouts[0].checkout_root, root_presubmit, False, sys.stdout) except ImportError: pass # If no bot is specified, either the default pool will be selected or the # try server will refuse the job. Either case we don't need to interfere. if options.name is None: if options.issue: options.name = 'Issue %s' % options.issue else: options.name = 'Unnamed' print('Note: use --name NAME to change the try job name.') if not options.email: parser.error('Using an anonymous checkout. Please use --email or set ' 'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.') else: print('Results will be emailed to: ' + options.email) # Send the patch. if options.send_patch: # If forced. options.send_patch(options) PrintSuccess(options) return 0 try: if can_http: _SendChangeHTTP(options) PrintSuccess(options) return 0 except NoTryServerAccess: if not can_svn: raise _SendChangeSVN(options) PrintSuccess(options) return 0 except (InvalidScript, NoTryServerAccess), e: if swallow_exception: return 1 print e return 1 return 0
|
if options.tests: values['tests'] = ','.join(options.tests)
|
if options.testfilter: values['testfilter'] = ','.join(options.testfilter)
|
def _ParseSendChangeOptions(options): """Parse common options passed to _SendChangeHTTP and _SendChangeSVN.""" values = {} if options.email: values['email'] = options.email values['user'] = options.user values['name'] = options.name if options.bot: values['bot'] = ','.join(options.bot) if options.revision: values['revision'] = options.revision if options.clobber: values['clobber'] = 'true' if options.tests: values['tests'] = ','.join(options.tests) if options.root: values['root'] = options.root if options.patchlevel: values['patchlevel'] = options.patchlevel if options.issue: values['issue'] = options.issue if options.patchset: values['patchset'] = options.patchset if options.target: values['target'] = options.target if options.project: values['project'] = options.project return values
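Illustrative only (the filter list is made up): _ParseSendChangeOptions() above folds repeated --testfilter flags into one comma-separated entry of the try-job request:

# Hypothetical input, as if --testfilter were passed twice on the command line.
testfilters = ['base_unittests:ThreadTest.*', 'ui_tests']
values = {}
if testfilters:
  values['testfilter'] = ','.join(testfilters)
assert values['testfilter'] == 'base_unittests:ThreadTest.*,ui_tests'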
|
group.add_option("-t", "--tests", action="append", help=optparse.SUPPRESS_HELP)
|
group.add_option("-t", "--testfilter", action="append", help="Add a gtest_filter to a test. Use multiple times to " "specify filters for different tests. (i.e. " "--testfilter base_unittests:ThreadTest.* " "--testfilter ui_tests) If you specify any testfilters " "the test results will not be reported in rietveld and " "only tests with filters will run.")
|
def TryChange(argv, file_list, swallow_exception, prog=None): """ Args: argv: Arguments and options. file_list: Default value to pass to --file. swallow_exception: Whether we raise or swallow exceptions. """ # Parse argv parser = optparse.OptionParser(usage=USAGE, version=__version__, prog=prog) parser.add_option("-v", "--verbose", action="count", default=0, help="Prints debugging infos") group = optparse.OptionGroup(parser, "Result and status") group.add_option("-u", "--user", default=getpass.getuser(), help="Owner user name [default: %default]") group.add_option("-e", "--email", default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS', os.environ.get('EMAIL_ADDRESS')), help="Email address where to send the results. Use either " "the TRYBOT_RESULTS_EMAIL_ADDRESS environment " "variable or EMAIL_ADDRESS to set the email address " "the try bots report results to [default: %default]") group.add_option("-n", "--name", help="Descriptive name of the try job") group.add_option("--issue", type='int', help="Update rietveld issue try job status") group.add_option("--patchset", type='int', help="Update rietveld issue try job status. This is " "optional if --issue is used, In that case, the " "latest patchset will be used.") group.add_option("--dry_run", action='store_true', help="Just prints the diff and quits") group.add_option("-R", "--rietveld_url", default="codereview.appspot.com", metavar="URL", help="The root code review url. Default:%default") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Try job options") group.add_option("-b", "--bot", action="append", help="Only use specifics build slaves, ex: '--bot win' to " "run the try job only on the 'win' slave; see the try " "server waterfall for the slave's name") group.add_option("-r", "--revision", help="Revision to use for the try job; default: the " "revision will be determined by the try server; see " "its waterfall for more info") group.add_option("-c", "--clobber", action="store_true", help="Force a clobber before building; e.g. don't do an " "incremental build") # TODO(maruel): help="Select a specific configuration, usually 'debug' or " # "'release'" group.add_option("--target", help=optparse.SUPPRESS_HELP) group.add_option("--project", help="Override which project to use. Projects are defined " "server-side to define what default bot set to use") # Override the list of tests to run, use multiple times to list many tests # (or comma separated) group.add_option("-t", "--tests", action="append", help=optparse.SUPPRESS_HELP) parser.add_option_group(group) group = optparse.OptionGroup(parser, "Patch to run") group.add_option("-f", "--file", default=file_list, dest="files", metavar="FILE", action="append", help="Use many times to list the files to include in the " "try, relative to the repository root") group.add_option("--diff", help="File containing the diff to try") group.add_option("--url", help="Url where to grab a patch") group.add_option("--root", help="Root to use for the patch; base subdirectory for " "patch created in a subdirectory") group.add_option("-p", "--patchlevel", type='int', metavar="LEVEL", help="Used as -pN parameter to patch") group.add_option("-s", "--sub_rep", action="append", default=[], help="Subcheckout to use in addition. This is mainly " "useful for gclient-style checkouts. 
Use @rev or " "@branch or @branch1..branch2 to specify the " "revision/branch to diff against.") # Mostly chromium-specific try: def WebKitRevision(options, opt, value, parser): if not hasattr(options, 'sub_rep'): options.sub_rep = [] if parser.rargs and not parser.rargs[0].startswith('-'): options.sub_rep.append('third_party/WebKit@%s' % parser.rargs.pop(0)) else: options.sub_rep.append('third_party/WebKit') group.add_option("-W", "--webkit", action="callback", callback=WebKitRevision, metavar="BRANCH", help="Shorthand for -s third_party/WebKit@BRANCH. " "BRANCH is optional and is the branch the current " "checkout will be diff'ed against.") except optparse.OptionError: # append_const is not supported on 2.4. Too bad. pass group.add_option("--no_gclient", action="store_true", help="Disable automatic search for gclient checkout.") group.add_option("-E", "--exclude", action="append", default=['ChangeLog'], metavar='REGEXP', help="Regexp patterns to exclude files. Default: %default") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server by HTTP") group.add_option("--use_http", action="store_const", const=_SendChangeHTTP, dest="send_patch", help="Use HTTP to talk to the try server [default]") group.add_option("-H", "--host", help="Host address") group.add_option("-P", "--port", help="HTTP port") group.add_option("--proxy", help="HTTP proxy") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server with SVN") group.add_option("--use_svn", action="store_const", const=_SendChangeSVN, dest="send_patch", help="Use SVN to talk to the try server") group.add_option("-S", "--svn_repo", metavar="SVN_URL", help="SVN url to use to write the changes in; --use_svn is " "implied when using --svn_repo") parser.add_option_group(group) options, args = parser.parse_args(argv) if len(args) == 1 and args[0] == 'help': parser.print_help() if not swallow_exception: if options.verbose == 0: logging.basicConfig(level=logging.ERROR) elif options.verbose == 1: logging.basicConfig(level=logging.WARNING) elif options.verbose == 2: logging.basicConfig(level=logging.INFO) elif options.verbose > 2: logging.basicConfig(level=logging.DEBUG) logging.debug(argv) if options.rietveld_url: # Try to extract the review number if possible and fix the protocol. if not '://' in options.rietveld_url: options.rietveld_url = 'http://' + options.rietveld_url match = re.match(r'^(.*)/(\d+)$', options.rietveld_url) if match: if options.issue or options.patchset: parser.error('Cannot use both --issue and use a review number url') options.issue = int(match.group(2)) options.rietveld_url = match.group(1) try: # Always include os.getcwd() in the checkout settings. checkouts = [] checkouts.append(GuessVCS(options, os.getcwd())) checkouts[0].AutomagicalSettings() for item in options.sub_rep: checkout = GuessVCS(options, os.path.join(checkouts[0].checkout_root, item)) if checkout.checkout_root in [c.checkout_root for c in checkouts]: parser.error('Specified the root %s two times.' % checkout.checkout_root) checkouts.append(checkout) can_http = options.port and options.host can_svn = options.svn_repo # If there was no transport selected yet, now we must have enough data to # select one. if not options.send_patch and not (can_http or can_svn): parser.error('Please specify an access method.') # Convert options.diff into the content of the diff. 
if options.url: if options.files: parser.error('You cannot specify files and --url at the same time.') options.diff = urllib.urlopen(options.url).read() elif options.diff: if options.files: parser.error('You cannot specify files and --diff at the same time.') options.diff = gclient_utils.FileRead(options.diff, 'rb') elif options.issue and options.patchset is None: # Retrieve the patch from rietveld when the diff is not specified. # When patchset is specified, it's because it's done by gcl/git-try. if json is None: parser.error('json or simplejson library is missing, please install.') api_url = '%s/api/%d' % (options.rietveld_url, options.issue) logging.debug(api_url) contents = json.loads(urllib.urlopen(api_url).read()) options.patchset = contents['patchsets'][-1] diff_url = ('%s/download/issue%d_%d.diff' % (options.rietveld_url, options.issue, options.patchset)) diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines()) options.diff = ''.join(diff) else: # Use this as the base. root = checkouts[0].checkout_root diffs = [] for checkout in checkouts: diff = checkout.GenerateDiff().splitlines(True) path_diff = gclient_utils.PathDifference(root, checkout.checkout_root) # Munge it. diffs.extend(GetMungedDiff(path_diff, diff)) options.diff = ''.join(diffs) if not options.bot: # Get try slaves from PRESUBMIT.py files if not specified. # Even if the diff comes from options.url, use the local checkout for bot # selection. try: import presubmit_support root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py') options.bot = presubmit_support.DoGetTrySlaves( checkouts[0].GetFileNames(), checkouts[0].checkout_root, root_presubmit, False, sys.stdout) except ImportError: pass # If no bot is specified, either the default pool will be selected or the # try server will refuse the job. Either case we don't need to interfere. if options.name is None: if options.issue: options.name = 'Issue %s' % options.issue else: options.name = 'Unnamed' print('Note: use --name NAME to change the try job name.') if not options.email: parser.error('Using an anonymous checkout. Please use --email or set ' 'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.') else: print('Results will be emailed to: ' + options.email) # Send the patch. if options.send_patch: # If forced. options.send_patch(options) PrintSuccess(options) return 0 try: if can_http: _SendChangeHTTP(options) PrintSuccess(options) return 0 except NoTryServerAccess: if not can_svn: raise _SendChangeSVN(options) PrintSuccess(options) return 0 except (InvalidScript, NoTryServerAccess), e: if swallow_exception: return 1 print e return 1 return 0
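A minimal sketch of the review-number extraction done in TryChange() above; the URL is a made-up example:

import re

url = 'http://codereview.chromium.org/1234567'  # hypothetical review url
match = re.match(r'^(.*)/(\d+)$', url)
if match:
  rietveld_url = match.group(1)  # 'http://codereview.chromium.org'
  issue = int(match.group(2))    # 1234567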
|
def _GetAuthToken(self, email, password):
|
def _GetAuthToken(self, host, email, password):
|
def _GetAuthToken(self, email, password): """Uses ClientLogin to authenticate the user, returning an auth token.
|
if self.host.endswith(".google.com"):
|
if host.endswith(".google.com"):
|
def _GetAuthToken(self, email, password): """Uses ClientLogin to authenticate the user, returning an auth token.
|
auth_token = self._GetAuthToken(credentials[0], credentials[1])
|
auth_token = self._GetAuthToken(host, credentials[0], credentials[1])
|
def _Authenticate(self, host): """Authenticates the user.
|
args = ['cat', svn_path]
|
args = ['svn', 'cat', svn_path]
|
def GetCachedFile(filename, max_age=60*60*24*3, use_root=False): """Retrieves a file from the repository and caches it in GetCacheDir() for max_age seconds. use_root: If False, look up the arborescence for the first match, otherwise go directory to the root repository. Note: The cache will be inconsistent if the same file is retrieved with both use_root=True and use_root=False. Don't be stupid. """ if filename not in FILES_CACHE: # Don't try to look up twice. FILES_CACHE[filename] = None # First we check if we have a cached version. try: cached_file = os.path.join(GetCacheDir(), filename) except gclient_utils.Error: return None if (not os.path.exists(cached_file) or (time.time() - os.stat(cached_file).st_mtime) > max_age): dir_info = SVN.CaptureInfo('.') repo_root = dir_info['Repository Root'] if use_root: url_path = repo_root else: url_path = dir_info['URL'] while True: # Look in the repository at the current level for the file. for _ in range(5): content = None try: # Take advantage of the fact that svn won't output to stderr in case # of success but will do in case of failure so don't mind putting # stderr into content_array. content_array = [] svn_path = url_path + '/' + filename args = ['cat', svn_path] if sys.platform != 'darwin': # MacOSX 10.5.2 has a bug with svn 1.4.4 that will trigger the # 'Can\'t get username or password' and can be fixed easily. # The fix doesn't work if the user upgraded to svn 1.6.x. Bleh. # I don't have time to fix their broken stuff. args.append('--non-interactive') gclient_utils.CheckCallAndFilter( args, cwd='.', filter_fn=content_array.append) # Exit the loop if the file was found. Override content. content = '\n'.join(content_array) break except gclient_utils.Error: if content_array[0].startswith( 'svn: Can\'t get username or password'): ErrorExit('Your svn credentials expired. Please run svn update ' 'to fix the cached credentials') if content_array[0].startswith('svn: Can\'t get password'): ErrorExit('If are using a Mac and svn --version shows 1.4.x, ' 'please hack gcl.py to remove --non-interactive usage, it\'s' 'a bug on your installed copy') if not content_array[0].startswith('svn: File not found:'): # Try again. continue if content: break if url_path == repo_root: # Reached the root. Abandoning search. break # Go up one level to try again. url_path = os.path.dirname(url_path) if content is not None or filename != CODEREVIEW_SETTINGS_FILE: # Write a cached version even if there isn't a file, so we don't try to # fetch it each time. codereview.settings must always be present so do # not cache negative. gclient_utils.FileWrite(cached_file, content or '') else: content = gclient_utils.FileRead(cached_file, 'r') # Keep the content cached in memory. FILES_CACHE[filename] = content return FILES_CACHE[filename]
|
def BackquoteAsInteger(cmd, cwd=None): """Like Backquote, but returns either an int or None."""
|
def ConvertToInteger(input):
  """Converts a string to an integer; returns an int or None."""
|
def BackquoteAsInteger(cmd, cwd=None):
  """Like Backquote, but returns either an int or None."""
  try:
    return int(Backquote(cmd, cwd))
  except ValueError:
    return None
|
return int(Backquote(cmd, cwd)) except ValueError:
|
return int(input)
except (TypeError, ValueError):
|
|
issue = BackquoteAsInteger(['git', 'cl', 'status', '--field=id'])
patchset = BackquoteAsInteger(['git', 'cl', 'status', '--field=patch'])
|
cl = git_cl.Changelist()
issue = ConvertToInteger(cl.GetIssue())
patchset = ConvertToInteger(cl.GetPatchset())
|
def __init__(self, commit=None, upstream_branch=None):
  self.commit = commit
  self.verbose = None
  self.default_presubmit = None
  self.may_prompt = None
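A self-contained check of the ConvertToInteger() helper introduced above (restated here with a different parameter name so the snippet runs on its own):

def ConvertToInteger(value):
  """Converts a string to an integer; returns an int or None."""
  try:
    return int(value)
  except (TypeError, ValueError):
    return None

# git-cl returns a numeric string when an issue is attached, None otherwise.
assert ConvertToInteger('12345') == 12345
assert ConvertToInteger(None) is None
assert ConvertToInteger('') is None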
|
if (os.path.exists(working)):
|
if (os.path.exists(working)) and not options.local:
|
def main(options, args): revision = options.revert or options.merge # Initialize some variables used below. They can be overwritten by # the drover.properties file. BASE_URL = "svn://svn.chromium.org/chrome" TRUNK_URL = BASE_URL + "/trunk/src" BRANCH_URL = BASE_URL + "/branches/$branch/src" SKIP_CHECK_WORKING = True PROMPT_FOR_AUTHOR = False DEFAULT_WORKING = "drover_" + str(revision) if options.branch: DEFAULT_WORKING += ("_" + options.branch) if not isMinimumSVNVersion(1,5): print "You need to use at least SVN version 1.5.x" sys.exit(1) # Override the default properties if there is a drover.properties file. global file_pattern_ if os.path.exists("drover.properties"): file = open("drover.properties") exec(file) file.close() if FILE_PATTERN: file_pattern_ = FILE_PATTERN if options.revert and options.branch: url = BRANCH_URL.replace("$branch", options.branch) elif options.merge and options.sbranch: url = BRANCH_URL.replace("$branch", options.sbranch) else: url = TRUNK_URL working = options.workdir or DEFAULT_WORKING command = 'svn log ' + url + " -r "+str(revision) + " -v" os.system(command) if not (options.revertbot or prompt("Is this the correct revision?")): sys.exit(0) if (os.path.exists(working)): if not (options.revertbot or SKIP_CHECK_WORKING or prompt("Working directory: '%s' already exists, clobber?" % working)): sys.exit(0) deltree(working) os.makedirs(working) os.chdir(working) if options.merge: action = "Merge" branch_url = BRANCH_URL.replace("$branch", options.branch) # Checkout everything but stuff that got added into a new dir checkoutRevision(url, revision, branch_url) # Merge everything that changed mergeRevision(url, revision) # "Export" files that were added from the source and add them to branch exportRevision(url, revision) # Delete directories that were deleted (file deletes are handled in the # merge). deleteRevision(url, revision) elif options.revert: action = "Revert" if options.branch: url = BRANCH_URL.replace("$branch", options.branch) checkoutRevision(url, revision, url, True) revertRevision(url, revision) revertExportRevision(url, revision) # Check the base url so we actually find the author who made the change if options.auditor: author = options.auditor else: author = getAuthor(url, revision) if not author: author = getAuthor(TRUNK_URL, revision) filename = str(revision)+".txt" out = open(filename,"w") out.write(action +" " + str(revision) + " - ") out.write(getRevisionLog(url, revision)) if (author): out.write("TBR=" + author) out.close() change_cmd = 'change ' + str(revision) + " " + filename if options.revertbot: change_cmd += ' --silent' runGcl(change_cmd) os.unlink(filename) print author print revision print ("gcl upload " + str(revision) + " --send_mail --no_try --no_presubmit --reviewers=" + author) if options.revertbot or prompt("Would you like to upload?"): if PROMPT_FOR_AUTHOR: author = text_prompt("Enter new author or press enter to accept default", author) if options.revertbot and options.revertbot_reviewers: author += "," author += options.revertbot_reviewers gclUpload(revision, author) else: print "Deleting the changelist." print "gcl delete " + str(revision) runGcl("delete " + str(revision)) sys.exit(0) # We commit if the reverbot is set to commit automatically, or if this is # not the revertbot and the user agrees. 
if options.revertbot_commit or (not options.revertbot and prompt("Would you like to commit?")): print "gcl commit " + str(revision) + " --no_presubmit --force" runGcl("commit " + str(revision) + " --no_presubmit --force") else: sys.exit(0)
|
os.makedirs(working)
os.chdir(working)
|
if not options.local:
  os.makedirs(working)
  os.chdir(working)
|
|
branch_url = BRANCH_URL.replace("$branch", options.branch)
checkoutRevision(url, revision, branch_url)
|
if not options.local:
  branch_url = BRANCH_URL.replace("$branch", options.branch)
  checkoutRevision(url, revision, branch_url)
|
|
if options.merge and not options.branch: option_parser.error("--merge requires a --branch")
|
if options.merge and not options.branch and not options.local: option_parser.error("--merge requires either --branch or --local")
|
|
class FakeRepos(object):
|
class FakeReposBase(object):
|
def commit_git(repo):
  """Commits the changes and returns the new hash."""
  check_call(['git', 'add', '-A', '-f'], cwd=repo)
  check_call(['git', 'commit', '-q', '--message', 'foo'], cwd=repo)
  rev = Popen(['git', 'show-ref', '--head', 'HEAD'],
              cwd=repo).communicate()[0].split(' ', 1)[0]
  logging.debug('At revision %s' % rev)
  return rev
|
And types of dependencies: Relative urls, Full urls, both svn and git."""
|
And types of dependencies: Relative urls, Full urls, both svn and git. populateSvn() and populateGit() need to be implemented by the subclass. """
|
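A hypothetical subclass sketch (repository names and file contents are invented, and the path-to-content dict format is an assumption) showing how populateSvn()/populateGit() would use the _commit_svn()/_commit_git() helpers defined further down:

class FakeRepos(FakeReposBase):
  """Sketch only: fills the fake repositories with minimal content."""
  def populateSvn(self):
    # Assumed tree format: relative path -> file content.
    self._commit_svn({'trunk/src/DEPS': 'deps = {}\n'})

  def populateGit(self):
    self._commit_git('repo_1', {'DEPS': 'deps = {}\n'})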
|
def setUpGIT(self):
  """Creates git repositories and starts the servers."""
  if self.gitdaemon:
    return True
  self.setUp()
  if sys.platform == 'win32':
    return False
  for repo in ['repo_%d' % r for r in range(1, 5)]:
    check_call(['git', 'init', '-q', join(self.git_root, repo)])
    self.git_hashes[repo] = [None]
|
def populateGit(self):
|
|
cmd = ['git', 'daemon', '--export-all', '--base-path=' + self.repos_dir] if self.HOST == '127.0.0.1': cmd.append('--listen=127.0.0.1') logging.debug(cmd) self.gitdaemon = Popen(cmd, cwd=self.repos_dir) return True def _commit_svn(self, tree): self._genTree(self.svn_root, tree) commit_svn(self.svn_root) if self.svn_revs and self.svn_revs[-1]: new_tree = self.svn_revs[-1].copy() new_tree.update(tree) else: new_tree = tree.copy() self.svn_revs.append(new_tree) def _commit_git(self, repo, tree): repo_root = join(self.git_root, repo) self._genTree(repo_root, tree) commit_hash = commit_git(repo_root) if self.git_hashes[repo][-1]: new_tree = self.git_hashes[repo][-1][1].copy() new_tree.update(tree) else: new_tree = tree.copy() self.git_hashes[repo].append((commit_hash, new_tree))
|
|
|
FakeReposTestBase.FAKE_REPOS = FakeRepos()
|
FakeReposTestBase.FAKE_REPOS = self.FAKE_REPOS_CLASS()
|
def __init__(self, *args, **kwargs):
  unittest.TestCase.__init__(self, *args, **kwargs)
  if not FakeReposTestBase.FAKE_REPOS:
    FakeReposTestBase.FAKE_REPOS = FakeRepos()
|
def CMDcommit(change_list, args):
|
def CMDcommit(change_info, args):
|
def CMDcommit(change_list, args): """Commits the changelist to the repository.""" if not change_info.GetFiles(): print "Nothing to commit, changelist is empty." return 1 if not OptionallyDoPresubmitChecks(change_info, True, args): return 1 # We face a problem with svn here: Let's say change 'bleh' modifies # svn:ignore on dir1\. but another unrelated change 'pouet' modifies # dir1\foo.cc. When the user `gcl commit bleh`, foo.cc is *also committed*. # The only fix is to use --non-recursive but that has its issues too: # Let's say if dir1 is deleted, --non-recursive must *not* be used otherwise # you'll get "svn: Cannot non-recursively commit a directory deletion of a # directory with child nodes". Yay... commit_cmd = ["svn", "commit"] if change_info.issue: # Get the latest description from Rietveld. change_info.description = GetIssueDescription(change_info.issue) commit_message = change_info.description.replace('\r\n', '\n') if change_info.issue: server = GetCodeReviewSetting("CODE_REVIEW_SERVER") if not server.startswith("http://") and not server.startswith("https://"): server = "http://" + server commit_message += ('\nReview URL: %s/%d' % (server, change_info.issue)) handle, commit_filename = tempfile.mkstemp(text=True) os.write(handle, commit_message) os.close(handle) handle, targets_filename = tempfile.mkstemp(text=True) os.write(handle, "\n".join(change_info.GetFileNames())) os.close(handle) commit_cmd += ['--file=' + commit_filename] commit_cmd += ['--targets=' + targets_filename] # Change the current working directory before calling commit. previous_cwd = os.getcwd() os.chdir(change_info.GetLocalRoot()) output = RunShell(commit_cmd, True) os.remove(commit_filename) os.remove(targets_filename) if output.find("Committed revision") != -1: change_info.Delete() if change_info.issue: revision = re.compile(".*?\nCommitted revision (\d+)", re.DOTALL).match(output).group(1) viewvc_url = GetCodeReviewSetting("VIEW_VC") change_info.description = change_info.description + '\n' if viewvc_url: change_info.description += "\nCommitted: " + viewvc_url + revision change_info.CloseIssue() os.chdir(previous_cwd) return 0
|
option_parser.add_option("", "--verbose", action="store_true", default=False,
|
option_parser.add_option("-v", "--verbose", action="count", default=0,
|
def Main(argv): """Parse command line arguments and dispatch command.""" option_parser = optparse.OptionParser(usage=DEFAULT_USAGE_TEXT, version=__version__) option_parser.disable_interspersed_args() option_parser.add_option("", "--force", action="store_true", default=False, help=("(update/sync only) force update even " "for modules which haven't changed")) option_parser.add_option("", "--nohooks", action="store_true", default=False, help=("(update/sync/revert only) prevent the hooks from " "running")) option_parser.add_option("", "--revision", action="append", dest="revisions", metavar="REV", default=[], help=("(update/sync only) sync to a specific " "revision, can be used multiple times for " "each solution, e.g. --revision=src@123, " "--revision=internal@32")) option_parser.add_option("", "--deps", default=None, dest="deps_os", metavar="OS_LIST", help=("(update/sync only) sync deps for the " "specified (comma-separated) platform(s); " "'all' will sync all platforms")) option_parser.add_option("", "--spec", default=None, help=("(config only) create a gclient file " "containing the provided string")) option_parser.add_option("", "--verbose", action="store_true", default=False, help="produce additional output for diagnostics") option_parser.add_option("", "--manually_grab_svn_rev", action="store_true", default=False, help="Skip svn up whenever possible by requesting " "actual HEAD revision from the repository") option_parser.add_option("", "--head", action="store_true", default=False, help=("skips any safesync_urls specified in " "configured solutions")) option_parser.add_option("", "--delete_unversioned_trees", action="store_true", default=False, help=("on update, delete any unexpected " "unversioned trees that are in the checkout")) if len(argv) < 2: # Users don't need to be told to use the 'help' command. option_parser.print_help() return 1 # Add manual support for --version as first argument. if argv[1] == '--version': option_parser.print_version() return 0 # Add manual support for --help as first argument. if argv[1] == '--help': argv[1] = 'help' command = argv[1] options, args = option_parser.parse_args(argv[2:]) if len(argv) < 3 and command == "help": option_parser.print_help() return 0 if options.verbose: logging.basicConfig(level=logging.DEBUG) # Files used for configuration and state saving. options.config_filename = os.environ.get("GCLIENT_FILE", ".gclient") options.entries_filename = ".gclient_entries" options.deps_file = "DEPS" options.platform = sys.platform return DispatchCommand(command, options, args)
|
if options.verbose:
|
if options.verbose > 1:
|
|
command = ['diff', '-p', '--no-prefix', branch + "..." + branch_head]
|
command = ['diff', '-p', '--no-prefix', '--no-ext-diff', branch + "..." + branch_head]
|
def GenerateDiff(cwd, branch=None, branch_head='HEAD', full_move=False, files=None): """Diffs against the upstream branch or optionally another branch.
|
def Popen(*args, **kwargs):
|
def Popen(args, **kwargs):
|
def Popen(*args, **kwargs): """Calls subprocess.Popen() with hacks to work around certain behaviors. Ensure English outpout for svn and make it work reliably on Windows. """ copied = False if not 'env' in kwargs: copied = True kwargs = kwargs.copy() # It's easier to parse the stdout if it is always in English. kwargs['env'] = os.environ.copy() kwargs['env']['LANGUAGE'] = 'en' if not 'shell' in kwargs: if not copied: kwargs = kwargs.copy() # *Sigh*: Windows needs shell=True, or else it won't search %PATH% for the # executable, but shell=True makes subprocess on Linux fail when it's called # with a list because it only tries to execute the first item in the list. kwargs['shell'] = (sys.platform=='win32') return subprocess.Popen(*args, **kwargs)
|
copied = False
|
logging.debug(u'%s, cwd=%s' % (u' '.join(args), kwargs.get('cwd', '')))
|
|
copied = True kwargs = kwargs.copy()
|
|
|
if not copied: kwargs = kwargs.copy()
|
|
|
return subprocess.Popen(*args, **kwargs)
|
return subprocess.Popen(args, **kwargs)
|
|
logging.debug('%s, cwd=%s' % (str(command), str(cwd)))
|
def CheckCall(command, cwd=None, print_error=True): """Similar subprocess.check_call() but redirects stdout and returns (stdout, stderr). Works on python 2.4 """ logging.debug('%s, cwd=%s' % (str(command), str(cwd))) try: stderr = None if not print_error: stderr = subprocess.PIPE process = Popen(command, cwd=cwd, stdout=subprocess.PIPE, stderr=stderr) std_out, std_err = process.communicate() except OSError, e: raise CheckCallError(command, cwd, e.errno, None) if process.returncode: raise CheckCallError(command, cwd, process.returncode, std_out, std_err) return std_out, std_err
|
|
logging.debug(args)
|
def CheckCallAndFilter(args, stdout=None, filter_fn=None, print_stdout=None, call_filter_on_first_line=False, **kwargs): """Runs a command and calls back a filter function if needed. Accepts all subprocess.Popen() parameters plus: print_stdout: If True, the command's stdout is forwarded to stdout. filter_fn: A function taking a single string argument called with each line of the subprocess's output. Each line has the trailing newline character trimmed. stdout: Can be any bufferable output. stderr is always redirected to stdout. """ assert print_stdout or filter_fn stdout = stdout or sys.stdout filter_fn = filter_fn or (lambda x: None) assert not 'stderr' in kwargs logging.debug(args) kid = Popen(args, bufsize=0, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs) # Do a flush of stdout before we begin reading from the subprocess's stdout last_flushed_at = time.time() stdout.flush() # Also, we need to forward stdout to prevent weird re-ordering of output. # This has to be done on a per byte basis to make sure it is not buffered: # normally buffering is done for each line, but if svn requests input, no # end-of-line character is output after the prompt and it would not show up. in_byte = kid.stdout.read(1) if in_byte: if call_filter_on_first_line: filter_fn(None) in_line = '' while in_byte: if in_byte != '\r': if print_stdout: stdout.write(in_byte) if in_byte != '\n': in_line += in_byte else: filter_fn(in_line) in_line = '' # Flush at least 10 seconds between line writes. We wait at least 10 # seconds to avoid overloading the reader that called us with output, # which can slow busy readers down. if (time.time() - last_flushed_at) > 10: last_flushed_at = time.time() stdout.flush() in_byte = kid.stdout.read(1) # Flush the rest of buffered output. This is only an issue with # stdout/stderr not ending with a \n. if len(in_line): filter_fn(in_line) rv = kid.wait() if rv: raise Error('failed to run command: %s' % ' '.join(args)) return 0
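An illustrative call, mirroring the GetCachedFile() snippet earlier in this document (the repository URL is hypothetical): each line of output is captured through filter_fn while failures still raise gclient_utils.Error:

import gclient_utils

content_lines = []
gclient_utils.CheckCallAndFilter(
    ['svn', 'cat', 'svn://example.org/repo/codereview.settings'],
    cwd='.', filter_fn=content_lines.append)
content = '\n'.join(content_lines)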
|
|
help="Print info level logs (default).")
|
help="Print info level logs.")
|
def _GetOpener(self): """Returns an OpenerDirector that supports cookies and ignores redirects.
|
if len(out) > 1:
|
status, _ = out[0].split(' ', 1)
if len(out) > 1 and status == "A":
|
def GetBaseFile(self, filename): # "hg status" and "hg cat" both take a path relative to the current subdir # rather than to the repo root, but "hg diff" has given us the full path # to the repo root. base_content = "" new_content = None is_binary = False oldrelpath = relpath = self._GetRelPath(filename) # "hg status -C" returns two lines for moved/copied files, one otherwise out = RunShell(["hg", "status", "-C", "--rev", self.base_rev, relpath]) out = out.splitlines() # HACK: strip error message about missing file/directory if it isn't in # the working copy if out[0].startswith('%s: ' % relpath): out = out[1:] if len(out) > 1: # Moved/copied => considered as modified, use old filename to # retrieve base contents oldrelpath = out[1].strip() status = "M" else: status, _ = out[0].split(' ', 1) if ":" in self.base_rev: base_rev = self.base_rev.split(":", 1)[0] else: base_rev = self.base_rev if status != "A": base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath], silent_ok=True) is_binary = "\0" in base_content # Mercurial's heuristic if status != "R": new_content = open(relpath, "rb").read() is_binary = is_binary or "\0" in new_content if is_binary and base_content: # Fetch again without converting newlines base_content = RunShell(["hg", "cat", "-r", base_rev, oldrelpath], silent_ok=True, universal_newlines=False) if not is_binary or not self.IsImage(relpath): new_content = None return base_content, new_content, is_binary, status
|
else: status, _ = out[0].split(' ', 1)
|
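A small, self-contained illustration of the new status handling above; the sample 'hg status -C' output is assumed (a copied/moved file is reported on two lines, the second naming the source):

# Assumed output of `hg status -C` for a file added as a copy.
out = 'A new_name.cc\n  old_name.cc'.splitlines()

status, _ = out[0].split(' ', 1)
if len(out) > 1 and status == "A":
  # Copied/moved: diff against the old path and treat the file as modified.
  oldrelpath = out[1].strip()
  status = "M"
assert (status, oldrelpath) == ("M", "old_name.cc")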
|
|
subversion_config = os.path.expanduser("~/.subversion/config")
|
if os.name == 'nt':
  subversion_config = os.environ.get("APPDATA") + "\\Subversion\\config"
else:
  subversion_config = os.path.expanduser("~/.subversion/config")
|
def LoadSubversionAutoProperties(): """Returns the content of [auto-props] section of Subversion's config file as a dictionary. Returns: A dictionary whose key-value pair corresponds the [auto-props] section's key-value pair. In following cases, returns empty dictionary: - config file doesn't exist, or - 'enable-auto-props' is not set to 'true-like-value' in [miscellany]. """ # Todo(hayato): Windows users might use different path for configuration file. subversion_config = os.path.expanduser("~/.subversion/config") if not os.path.exists(subversion_config): return {} config = ConfigParser.ConfigParser() config.read(subversion_config) if (config.has_section("miscellany") and config.has_option("miscellany", "enable-auto-props") and config.getboolean("miscellany", "enable-auto-props") and config.has_section("auto-props")): props = {} for file_pattern in config.options("auto-props"): props[file_pattern] = ParseSubversionPropertyValues( config.get("auto-props", file_pattern)) return props else: return {}
|
r".*\.c", r".*\.cc", r".*\.cpp", r".*\.h", r".*\.m", r".*\.mm", r".*\.inl", r".*\.asm", r".*\.hxx", r".*\.hpp",
|
r".*\.c$", r".*\.cc$", r".*\.cpp$", r".*\.h$", r".*\.m$", r".*\.mm$", r".*\.inl$", r".*\.asm$", r".*\.hxx$", r".*\.hpp$",
|
def __init__(self, *args, **kwargs): raise NotImplementedException() # TODO(joi) Implement.
|
r".*\.js", r".*\.py", r".*\.sh", r".*\.rb", r".*\.pl", r".*\.pm",
|
r".*\.js$", r".*\.py$", r".*\.sh$", r".*\.rb$", r".*\.pl$", r".*\.pm$",
|
def __init__(self, *args, **kwargs): raise NotImplementedException() # TODO(joi) Implement.
|
r"(^|.*[\\\/])[^.]+$",
|
r"(^|.*?[\\\/])[^.]+$",
|
def __init__(self, *args, **kwargs): raise NotImplementedException() # TODO(joi) Implement.
|
r".*\.java", r".*\.mk", r".*\.am",
|
r".*\.java$", r".*\.mk$", r".*\.am$",
|
def __init__(self, *args, **kwargs): raise NotImplementedException() # TODO(joi) Implement.
|
chromium_utils.RemoveDirectory(args[2])
|
gclient_utils.RemoveDirectory(args[2])
|
def CaptureMatchingLines(line): match = compiled_pattern.search(line) if match: file_list.append(match.group(1)) if line.startswith('svn: '): failure.append(line)
|
cl = git_cl.Changelist() issue = cl.GetIssue() patchset = cl.GetPatchset()
|
issue = BackquoteAsInteger(['git', 'cl', 'status', '--field=id']) patchset = BackquoteAsInteger(['git', 'cl', 'status', '--field=patch'])
|
def __init__(self, commit=None, upstream_branch=None): self.commit = commit self.verbose = None self.default_presubmit = None self.may_prompt = None
|
description = cl.GetDescription()
|
description = Backquote(['git', 'cl', 'status', '--field=desc'])
|
def __init__(self, commit=None, upstream_branch=None): self.commit = commit self.verbose = None self.default_presubmit = None self.may_prompt = None
|
def addKill(): """Add kill() method to subprocess.Popen for python <2.6""" if getattr(subprocess.Popen, 'kill', None): return if sys.platform.startswith('win'): def kill_win(process): import win32process return win32process.TerminateProcess(process._handle, -1) subprocess.kill = kill_win else: def kill_nix(process): import signal return os.kill(process.pid, signal.SIGKILL) subprocess.kill = kill_nix
|
class GClientSmokeBase(unittest.TestCase): ROOT_DIR = os.path.join(TRIAL_DIR, 'smoke') def setUp(self): self.env = os.environ.copy() self.env['DEPOT_TOOLS_UPDATE'] = '0' self.root_dir = os.path.join(self.ROOT_DIR, self.id()) rmtree(self.root_dir) if not os.path.exists(self.ROOT_DIR): os.mkdir(self.ROOT_DIR) os.mkdir(self.root_dir) self.svn_base = 'svn://%s/svn/' % HOST self.git_base = 'git://%s/git/' % HOST def tearDown(self): if not SHOULD_LEAK: rmtree(self.root_dir) def gclient(self, cmd, cwd=None): if not cwd: cwd = self.root_dir process = subprocess.Popen([GCLIENT_PATH] + cmd, cwd=cwd, env=self.env, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=sys.platform.startswith('win')) (stdout, stderr) = process.communicate() return (stdout, stderr, process.returncode) def check(self, expected, results): def checkString(expected, result): if expected != result: while expected and result and expected[0] == result[0]: expected = expected[1:] result = result[1:] self.assertEquals(expected, result) checkString(expected[0], results[0]) checkString(expected[1], results[1]) self.assertEquals(expected[2], results[2])
|
def addKill(): """Add kill() method to subprocess.Popen for python <2.6""" if getattr(subprocess.Popen, 'kill', None): return if sys.platform.startswith('win'): def kill_win(process): import win32process return win32process.TerminateProcess(process._handle, -1) subprocess.kill = kill_win else: def kill_nix(process): import signal return os.kill(process.pid, signal.SIGKILL) subprocess.kill = kill_nix
|
def rmtree(path): """Delete a directory.""" if os.path.exists(path): shutil.rmtree(path)
|
class GClientSmoke(GClientSmokeBase): def testCommands(self): """This test is to make sure no new command was added.""" result = self.gclient(['help']) self.assertEquals(3189, len(result[0])) self.assertEquals(0, len(result[1])) self.assertEquals(0, result[2]) def testNotConfigured(self): res = ("", "Error: client not configured; see 'gclient config'\n", 1) self.check(res, self.gclient(['cleanup'])) self.check(res, self.gclient(['diff'])) self.check(res, self.gclient(['export', 'foo'])) self.check(res, self.gclient(['pack'])) self.check(res, self.gclient(['revert'])) self.check(res, self.gclient(['revinfo'])) self.check(res, self.gclient(['runhooks'])) self.check(res, self.gclient(['status'])) self.check(res, self.gclient(['sync'])) self.check(res, self.gclient(['update']))
|
def rmtree(path): """Delete a directory.""" if os.path.exists(path): shutil.rmtree(path)
|
def write(path, content): f = open(path, 'wb') f.write(content) f.close()
|
class GClientSmokeSync(GClientSmokeBase): """sync is the most important command. Hence test it more.""" def testSyncSvn(self): """Test pure gclient svn checkout, example of Chromium checkout""" self.gclient(['config', self.svn_base + 'trunk/src/']) results = self.gclient(['sync']) self.assertEquals(0, results[2]) results = self.gclient(['sync', '--revision', 'a@32']) self.assertEquals(0, results[2]) def testSyncGit(self): """Test pure gclient git checkout, example of Chromium OS checkout""" self.gclient(['config', self.git_base + 'repo_1']) results = self.gclient(['sync']) print results[0] print results[1] self.assertEquals(0, results[2])
|
def write(path, content): f = open(path, 'wb') f.write(content) f.close()
|
class FakeRepos(object): def __init__(self, trial_dir, leak, local_only): self.trial_dir = trial_dir self.repos_dir = os.path.join(self.trial_dir, 'repos') self.leak = leak self.local_only = local_only self.svnserve = [] self.gitdaemon = [] addKill() rmtree(self.trial_dir) os.mkdir(self.trial_dir) os.mkdir(self.repos_dir)
|
class GClientSmokeRevert(GClientSmokeBase): """revert is the second most important command. Hence test it more.""" def setUp(self): GClientSmokeBase.setUp(self) self.gclient(['config', self.URL_BASE])
|
def write(path, content): f = open(path, 'wb') f.write(content) f.close()
|
self.setUpSVN() self.setUpGIT()
|
GClientSmokeBase.setUp(self) self.gclient(['config', self.URL_BASE])
|
def setUp(self): self.setUpSVN() self.setUpGIT()
|
def tearDown(self): for i in self.svnserve: i.kill() for i in self.gitdaemon: i.kill() if not self.leak: rmtree(self.trial_dir) def setUpSVN(self): """Creates subversion repositories and start the servers.""" assert not self.svnserve join = os.path.join root = join(self.repos_dir, 'svn') rmtree(root) subprocess.check_call(['svnadmin', 'create', root]) write(join(root, 'conf', 'svnserve.conf'), '[general]\n' 'anon-access = read\n' 'auth-access = write\n' 'password-db = passwd\n') write(join(root, 'conf', 'passwd'), '[users]\n' 'user1 = foo\n' 'user2 = bar\n') repo = join(self.repos_dir, 'svn_import') rmtree(repo) os.mkdir(repo) os.mkdir(join(repo, 'trunk')) os.mkdir(join(repo, 'trunk', 'src')) write(join(repo, 'trunk', 'src', 'DEPS'), """ deps = { 'src/other': 'svn://%(host)s/svn/trunk/other', 'src/third_party': '/trunk/third_party', } deps_os = { 'mac': 'repo_4' } """ % { 'host': 'localhost', }) write(join(repo, 'trunk', 'src', 'origin'), "svn/trunk/src") os.mkdir(join(repo, 'trunk', 'other')) write(join(repo, 'trunk', 'other', 'origin'), "svn/trunk/other") os.mkdir(join(repo, 'trunk', 'third_party')) write(join(repo, 'trunk', 'third_party', 'origin'), "svn/trunk/third_party") cmd = ['svnserve', '-d', '--foreground', '-r', self.repos_dir] if self.local_only: cmd.append('--listen-host=127.0.0.1') self.svnserve.append(subprocess.Popen(cmd, cwd=root)) subprocess.check_call(['svn', 'import', repo, 'svn://127.0.0.1/svn', '-m', 'foo', '-q', '--no-auto-props', '--non-interactive', '--no-auth-cache', '--username', 'user1', '--password', 'foo']) def setUpGIT(self): """Creates git repositories and start the servers.""" assert not self.gitdaemon join = os.path.join root = join(self.repos_dir, 'git') rmtree(root) os.mkdir(root) repo = join(root, 'repo_1') subprocess.check_call(['git', 'init', '-q', repo]) write(join(repo, 'DEPS'), """ deps = { 'repo2': 'git://%(host)s/git/repo_2', 'repo2/repo3': '/repo_3', } deps_os = { 'mac': 'repo_4' } """ % { 'host': 'localhost', }) write(join(repo, 'origin'), "git/repo_1") subprocess.check_call(['git', 'add', '-A', '-f'], cwd=repo) subprocess.check_call(['git', 'commit', '-q', '-m', 'foo'], cwd=repo) repo = join(root, 'repo_2') subprocess.check_call(['git', 'init', '-q', repo]) write(join(repo, 'origin'), "git/repo_2") subprocess.check_call(['git', 'add', '-A', '-f'], cwd=repo) subprocess.check_call(['git', 'commit', '-q', '-m', 'foo'], cwd=repo) repo = join(root, 'repo_3') subprocess.check_call(['git', 'init', '-q', repo]) write(join(repo, 'origin'), "git/repo_3") subprocess.check_call(['git', 'add', '-A', '-f'], cwd=repo) subprocess.check_call(['git', 'commit', '-q', '-m', 'foo'], cwd=repo) cmd = ['git', 'daemon', '--export-all', '--base-path=' + self.repos_dir] if self.local_only: cmd.append('--listen=127.0.0.1') self.gitdaemon.append(subprocess.Popen(cmd, cwd=self.repos_dir, stderr=subprocess.PIPE))
|
def tearDown(self): for i in self.svnserve: i.kill() for i in self.gitdaemon: i.kill() if not self.leak: rmtree(self.trial_dir)
|
|
fake = FakeRepos(os.path.dirname(os.path.abspath(__file__)), False)
|
fake = FakeRepos(TRIAL_DIR, SHOULD_LEAK, True)
|
def setUpGIT(self): """Creates git repositories and start the servers.""" assert not self.gitdaemon join = os.path.join root = join(self.repos_dir, 'git') rmtree(root) os.mkdir(root) # Repo 1 repo = join(root, 'repo_1') subprocess.check_call(['git', 'init', '-q', repo]) write(join(repo, 'DEPS'), """
|
sys.stdin.readline()
|
unittest.main()
|
def setUpGIT(self): """Creates git repositories and start the servers.""" assert not self.gitdaemon join = os.path.join root = join(self.repos_dir, 'git') rmtree(root) os.mkdir(root) # Repo 1 repo = join(root, 'repo_1') subprocess.check_call(['git', 'init', '-q', repo]) write(join(repo, 'DEPS'), """
|
for _ in range(10):
|
i = 0 while True: i += 1
|
def RunAndGetFileList(verbose, args, cwd, file_list, stdout=None): """Runs svn checkout, update, or status, output to stdout.
|
'FindGclientRoot', 'GetNamedNodeText', 'GetNodeNamedAttributeText', 'IsUsingGit', 'PathDifference', 'ParseXML', 'PrintableObject', 'RemoveDirectory', 'SplitUrlRevision', 'SubprocessCall', 'SubprocessCallAndFilter', 'errno', 'logging', 'os', 're', 'stat', 'subprocess', 'sys', 'time', 'xml',
|
'FindFileUpwards', 'FindGclientRoot', 'GetGClientRootAndEntries', 'GetNamedNodeText', 'GetNodeNamedAttributeText', 'IsUsingGit', 'PathDifference', 'ParseXML', 'PrintableObject', 'RemoveDirectory', 'SplitUrlRevision', 'SubprocessCall', 'SubprocessCallAndFilter', 'errno', 'logging', 'os', 're', 'stat', 'subprocess', 'sys', 'time', 'xml',
|
def testMembersChanged(self): members = [ 'CheckCall', 'CheckCallError', 'Error', 'FileRead', 'FileWrite', 'FindGclientRoot', 'GetNamedNodeText', 'GetNodeNamedAttributeText', 'IsUsingGit', 'PathDifference', 'ParseXML', 'PrintableObject', 'RemoveDirectory', 'SplitUrlRevision', 'SubprocessCall', 'SubprocessCallAndFilter', 'errno', 'logging', 'os', 're', 'stat', 'subprocess', 'sys', 'time', 'xml', ] # If this test fails, you should add the relevant test. self.compareMembers(gclient_utils, members)
|
group.add_option("--webkit", action="append_const", const="third_party/WebKit", dest="sub_rep", help="Shorthand for -s third_party/WebKit")
|
try: group.add_option("--webkit", action="append_const", const="third_party/WebKit", dest="sub_rep", help="Shorthand for -s third_party/WebKit") except optparse.OptionError: pass
|
def TryChange(argv, file_list, swallow_exception, prog=None): """ Args: argv: Arguments and options. file_list: Default value to pass to --file. swallow_exception: Whether we raise or swallow exceptions. """ # Parse argv parser = optparse.OptionParser(usage=USAGE, version=__version__, prog=prog) parser.add_option("-v", "--verbose", action="count", default=0, help="Prints debugging infos") group = optparse.OptionGroup(parser, "Result and status") group.add_option("-u", "--user", default=getpass.getuser(), help="Owner user name [default: %default]") group.add_option("-e", "--email", default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS', os.environ.get('EMAIL_ADDRESS')), help="Email address where to send the results. Use either " "the TRYBOT_RESULTS_EMAIL_ADDRESS environment " "variable or EMAIL_ADDRESS to set the email address " "the try bots report results to [default: %default]") group.add_option("-n", "--name", help="Descriptive name of the try job") group.add_option("--issue", type='int', help="Update rietveld issue try job status") group.add_option("--patchset", type='int', help="Update rietveld issue try job status") group.add_option("--dry_run", action='store_true', help="Just prints the diff and quits") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Try job options") group.add_option("-b", "--bot", action="append", help="Only use specifics build slaves, ex: '--bot win' to " "run the try job only on the 'win' slave; see the try " "server waterfall for the slave's name") group.add_option("-r", "--revision", help="Revision to use for the try job; default: the " "revision will be determined by the try server; see " "its waterfall for more info") group.add_option("-c", "--clobber", action="store_true", help="Force a clobber before building; e.g. don't do an " "incremental build") # TODO(maruel): help="Select a specific configuration, usually 'debug' or " # "'release'" group.add_option("--target", help=optparse.SUPPRESS_HELP) group.add_option("--project", help="Override which project to use. Projects are defined " "server-side to define what default bot set to use") # Override the list of tests to run, use multiple times to list many tests # (or comma separated) group.add_option("-t", "--tests", action="append", help=optparse.SUPPRESS_HELP) parser.add_option_group(group) group = optparse.OptionGroup(parser, "Patch to run") group.add_option("-f", "--file", default=file_list, dest="files", metavar="FILE", action="append", help="Use many times to list the files to include in the " "try, relative to the repository root") group.add_option("--diff", help="File containing the diff to try") group.add_option("--url", help="Url where to grab a patch") group.add_option("--root", help="Root to use for the patch; base subdirectory for " "patch created in a subdirectory") group.add_option("-p", "--patchlevel", type='int', metavar="LEVEL", help="Used as -pN parameter to patch") group.add_option("-s", "--sub_rep", action="append", default=[], help="Subcheckout to use in addition. This is mainly " "useful for gclient-style checkouts. 
Use @rev or " "@branch or @branch1..branch2 to specify the " "revision/branch to diff against.") # Mostly chromium-specific group.add_option("--webkit", action="append_const", const="third_party/WebKit", dest="sub_rep", help="Shorthand for -s third_party/WebKit") group.add_option("--no_gclient", action="store_true", help="Disable automatic search for gclient checkout.") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server by HTTP") group.add_option("--use_http", action="store_const", const=_SendChangeHTTP, dest="send_patch", help="Use HTTP to talk to the try server [default]") group.add_option("-H", "--host", help="Host address") group.add_option("-P", "--port", help="HTTP port") group.add_option("--proxy", help="HTTP proxy") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server with SVN") group.add_option("--use_svn", action="store_const", const=_SendChangeSVN, dest="send_patch", help="Use SVN to talk to the try server") group.add_option("-S", "--svn_repo", metavar="SVN_URL", help="SVN url to use to write the changes in; --use_svn is " "implied when using --svn_repo") parser.add_option_group(group) options, args = parser.parse_args(argv) if len(args) == 1 and args[0] == 'help': parser.print_help() if not swallow_exception: if options.verbose == 0: logging.basicConfig(level=logging.ERROR) elif options.verbose == 1: logging.basicConfig(level=logging.WARNING) elif options.verbose == 2: logging.basicConfig(level=logging.INFO) elif options.verbose > 2: logging.basicConfig(level=logging.DEBUG) logging.debug(argv) try: # Always include os.getcwd() in the checkout settings. checkouts = [] checkouts.append(GuessVCS(options, os.getcwd())) checkouts[0].AutomagicalSettings() for item in options.sub_rep: checkout = GuessVCS(options, os.path.join(checkouts[0].checkout_root, item)) if checkout.checkout_root in [c.checkout_root for c in checkouts]: parser.error('Specified the root %s two times.' % checkout.checkout_root) checkouts.append(checkout) can_http = options.port and options.host can_svn = options.svn_repo # If there was no transport selected yet, now we must have enough data to # select one. if not options.send_patch and not (can_http or can_svn): parser.error('Please specify an access method.') # Convert options.diff into the content of the diff. if options.url: if options.files: parser.error('You cannot specify files and --url at the same time.') options.diff = urllib.urlopen(options.url).read() elif options.diff: if options.files: parser.error('You cannot specify files and --diff at the same time.') options.diff = gclient_utils.FileRead(options.diff, 'rb') else: # Use this as the base. root = checkouts[0].checkout_root diffs = [] for checkout in checkouts: diff = checkout.GenerateDiff().splitlines(True) # Munge it. path_diff = gclient_utils.PathDifference(root, checkout.checkout_root) for i in range(len(diff)): if diff[i].startswith('--- ') or diff[i].startswith('+++ '): diff[i] = diff[i][0:4] + posixpath.join(path_diff, diff[i][4:]) diffs.extend(diff) options.diff = ''.join(diffs) if not options.bot: # Get try slaves from PRESUBMIT.py files if not specified. # Even if the diff comes from options.url, use the local checkout for bot # selection. 
try: import presubmit_support root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py') options.bot = presubmit_support.DoGetTrySlaves( checkouts[0].GetFileNames(), checkouts[0].checkout_root, root_presubmit, False, sys.stdout) except ImportError: pass # If no bot is specified, either the default pool will be selected or the # try server will refuse the job. Either case we don't need to interfere. if options.name is None: if options.issue: options.name = 'Issue %s' % options.issue else: options.name = 'Unnamed' print('Note: use --name NAME to change the try job name.') if not options.email: parser.error('Using an anonymous checkout. Please use --email or set ' 'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.') else: print('Results will be emailed to: ' + options.email) # Send the patch. if options.send_patch: # If forced. options.send_patch(options) PrintSuccess(options) return 0 try: if can_http: _SendChangeHTTP(options) PrintSuccess(options) return 0 except NoTryServerAccess: if not can_svn: raise _SendChangeSVN(options) PrintSuccess(options) return 0 except (InvalidScript, NoTryServerAccess), e: if swallow_exception: return 1 print e return 1 return 0
|
result[0:2] = '{ \''
|
result = '{ \'' + result[2:]
|
def _SaveEntries(self, entries): """Creates a .gclient_entries file to record the list of unique checkouts.
|
if self._options.snapshot: url = entries.pop(name)
|
def GetURLAndRev(name, original_url): if not original_url: return None url, _ = gclient_utils.SplitUrlRevision(original_url) scm = gclient_scm.CreateSCM(original_url, self.root_dir(), name) return '%s@%s' % (url, scm.revinfo(self._options, [], None))
|
|
command.extend(['--revision', str(revision)])
|
command.extend(['--revision', str(revision).strip()])
|
def update(self, options, args, file_list): """Runs svn to update or transparently checkout the working copy.
|
command.extend(['--revision', str(options.revision)])
|
command.extend(['--revision', str(options.revision).strip()])
|
def updatesingle(self, options, args, file_list): checkout_path = os.path.join(self._root_dir, self.relpath) filename = args.pop() if scm.SVN.AssertVersion("1.5")[0]: if not os.path.exists(os.path.join(checkout_path, '.svn')): # Create an empty checkout and then update the one file we want. Future # operations will only apply to the one file we checked out. command = ["checkout", "--depth", "empty", self.url, checkout_path] scm.SVN.Run(command, self._root_dir) if os.path.exists(os.path.join(checkout_path, filename)): os.remove(os.path.join(checkout_path, filename)) command = ["update", filename] scm.SVN.RunAndGetFileList(options, command, checkout_path, file_list) # After the initial checkout, we can use update as if it were any other # dep. self.update(options, args, file_list) else: # If the installed version of SVN doesn't support --depth, fallback to # just exporting the file. This has the downside that revision # information is not stored next to the file, so we will have to # re-export the file every time we sync. if not os.path.exists(checkout_path): os.makedirs(checkout_path) command = ["export", os.path.join(self.url, filename), os.path.join(checkout_path, filename)] if options.revision: command.extend(['--revision', str(options.revision)]) scm.SVN.Run(command, self._root_dir)
|
if input_api.re.match(closed, status):
|
if input_api.re.match(closed, status, input_api.re.IGNORECASE):
|
def CheckTreeIsOpen(input_api, output_api, url, closed): """Checks that an url's content doesn't match a regexp that would mean that the tree is closed.""" if not input_api.is_committing: return [] try: connection = input_api.urllib2.urlopen(url) status = connection.read() connection.close() if input_api.re.match(closed, status): long_text = status + '\n' + url return [output_api.PresubmitError('The tree is closed dude!', long_text=long_text)] except IOError: pass return []
|
if command in ('update', 'revert') and sys.stdout.isatty():
|
if (command in ('update', 'revert') and sys.stdout.isatty() and not self._options.verbose):
|
def RunOnDeps(self, command, args): """Runs a command on each dependency in a client and its dependencies.
|
data = "Index: %s\n" % filename
|
data = "Index: %s\n" % filename.replace(os.sep, '/')
|
def _DiffItemInternal(filename, info, bogus_dir, full_move=False, revision=None): """Grabs the diff data.""" command = ["diff", "--config-dir", bogus_dir, filename] if revision: command.extend(['--revision', revision]) data = None if SVN.IsMovedInfo(info): if full_move: if info.get("Node Kind") == "directory": # Things become tricky here. It's a directory copy/move. We need to # diff all the files inside it. # This will put a lot of pressure on the heap. This is why StringIO # is used and converted back into a string at the end. The reason to # return a string instead of a StringIO is that StringIO.write() # doesn't accept a StringIO object. *sigh*. for (dirpath, dirnames, filenames) in os.walk(filename): # Cleanup all files starting with a '.'. for d in dirnames: if d.startswith('.'): dirnames.remove(d) for f in filenames: if f.startswith('.'): filenames.remove(f) for f in filenames: if data is None: data = cStringIO.StringIO() data.write(GenFakeDiff(os.path.join(dirpath, f))) if data: tmp = data.getvalue() data.close() data = tmp else: data = GenFakeDiff(filename) else: if info.get("Node Kind") != "directory": # svn diff on a mv/cp'd file outputs nothing if there was no change. data = SVN.Capture(command, None) if not data: # We put in an empty Index entry so upload.py knows about them. data = "Index: %s\n" % filename # Otherwise silently ignore directories. else: if info.get("Node Kind") != "directory": # Normal simple case. data = SVN.Capture(command, None) # Otherwise silently ignore directories. return data
|
sp = subprocess.Popen(cmd, cwd=cwd, stdout=stdout) output = sp.communicate()[0]
|
logging.debug(cmd) try: sp = subprocess.Popen(cmd, cwd=cwd, stdout=stdout) output = sp.communicate()[0] except OSError: raise gclient_utils.Error("git command '%s' failed to run." % ' '.join(cmd) + "\nCheck that you have git installed.")
|
def _Run(self, args, cwd=None, checkrc=True, redirect_stdout=True): # TODO(maruel): Merge with Capture? if cwd is None: cwd = self.checkout_path stdout=None if redirect_stdout: stdout=subprocess.PIPE if cwd == None: cwd = self.checkout_path cmd = [self.COMMAND] cmd.extend(args) sp = subprocess.Popen(cmd, cwd=cwd, stdout=stdout) output = sp.communicate()[0] if checkrc and sp.returncode: raise gclient_utils.Error('git command %s returned %d' % (args[0], sp.returncode)) if output is not None: return output.strip()
|
print "Argument%s \"%s\" not understood" % (plural, " ".join(args[1:]))
|
print >> sys.stderr, ( 'Argument%s \"%s\" not understood' % (plural, ' '.join(args[1:])))
|
def TryChange(argv, file_list, swallow_exception, prog=None, extra_epilog=None): """ Args: argv: Arguments and options. file_list: Default value to pass to --file. swallow_exception: Whether we raise or swallow exceptions. """ # Parse argv parser = optparse.OptionParser(usage=USAGE, version=__version__, prog=prog) epilog = EPILOG % { 'prog': prog } if extra_epilog: epilog += extra_epilog parser.epilog = epilog # Remove epilog formatting parser.format_epilog = lambda x: parser.epilog parser.add_option("-v", "--verbose", action="count", default=0, help="Prints debugging infos") group = optparse.OptionGroup(parser, "Result and status") group.add_option("-u", "--user", default=getpass.getuser(), help="Owner user name [default: %default]") group.add_option("-e", "--email", default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS', os.environ.get('EMAIL_ADDRESS')), help="Email address where to send the results. Use either " "the TRYBOT_RESULTS_EMAIL_ADDRESS environment " "variable or EMAIL_ADDRESS to set the email address " "the try bots report results to [default: %default]") group.add_option("-n", "--name", help="Descriptive name of the try job") group.add_option("--issue", type='int', help="Update rietveld issue try job status") group.add_option("--patchset", type='int', help="Update rietveld issue try job status. This is " "optional if --issue is used, In that case, the " "latest patchset will be used.") group.add_option("--dry_run", action='store_true', help="Just prints the diff and quits") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Try job options") group.add_option("-b", "--bot", action="append", help="Only use specifics build slaves, ex: '--bot win' to " "run the try job only on the 'win' slave; see the try " "server waterfall for the slave's name") group.add_option("-r", "--revision", help="Revision to use for the try job; default: the " "revision will be determined by the try server; see " "its waterfall for more info") group.add_option("-c", "--clobber", action="store_true", help="Force a clobber before building; e.g. don't do an " "incremental build") # TODO(maruel): help="Select a specific configuration, usually 'debug' or " # "'release'" group.add_option("--target", help=optparse.SUPPRESS_HELP) group.add_option("--project", help="Override which project to use. Projects are defined " "server-side to define what default bot set to use") group.add_option("-t", "--testfilter", action="append", help="Add a gtest_filter to a test. Use multiple times to " "specify filters for different tests. (i.e. " "--testfilter base_unittests:ThreadTest.* " "--testfilter ui_tests) If you specify any testfilters " "the test results will not be reported in rietveld and " "only tests with filters will run.") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Patch to run") group.add_option("-f", "--file", default=file_list, dest="files", metavar="FILE", action="append", help="Use many times to list the files to include in the " "try, relative to the repository root") group.add_option("--diff", help="File containing the diff to try") group.add_option("--url", help="Url where to grab a patch, e.g. 
" "http://example.com/x.diff") group.add_option("-R", "--rietveld_url", default="codereview.appspot.com", metavar="URL", help="Has 2 usages, both refer to the rietveld instance: " "Specify which code review patch to use as the try job " "or rietveld instance to update the try job results " "Default:%default") group.add_option("--root", help="Root to use for the patch; base subdirectory for " "patch created in a subdirectory") group.add_option("-p", "--patchlevel", type='int', metavar="LEVEL", help="Used as -pN parameter to patch") group.add_option("-s", "--sub_rep", action="append", default=[], help="Subcheckout to use in addition. This is mainly " "useful for gclient-style checkouts. Use @rev or " "@branch or @branch1..branch2 to specify the " "revision/branch to diff against.") group.add_option("--no_gclient", action="store_true", help="Disable automatic search for gclient checkout.") group.add_option("-E", "--exclude", action="append", default=['ChangeLog'], metavar='REGEXP', help="Regexp patterns to exclude files. Default: %default") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server by HTTP") group.add_option("--use_http", action="store_const", const=_SendChangeHTTP, dest="send_patch", help="Use HTTP to talk to the try server [default]") group.add_option("-H", "--host", help="Host address") group.add_option("-P", "--port", help="HTTP port") group.add_option("--proxy", help="HTTP proxy") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server with SVN") group.add_option("--use_svn", action="store_const", const=_SendChangeSVN, dest="send_patch", help="Use SVN to talk to the try server") group.add_option("-S", "--svn_repo", metavar="SVN_URL", help="SVN url to use to write the changes in; --use_svn is " "implied when using --svn_repo") parser.add_option_group(group) options, args = parser.parse_args(argv) # Note that the args array includes the script name, so # a single argument results in len(args) == 2. # If they've asked for help, give it to them if len(args) == 2 and args[1] == 'help': parser.print_help() return 0 # If they've said something confusing, don't spawn a try job until you # understand what they want. if len(args) > 1: plural = "" if len(args) > 2: plural = "s" print "Argument%s \"%s\" not understood" % (plural, " ".join(args[1:])) parser.print_help() return 1 LOG_FORMAT = '%(levelname)s %(filename)s(%(lineno)d): %(message)s' if not swallow_exception: if options.verbose == 0: logging.basicConfig(level=logging.WARNING, format=LOG_FORMAT) elif options.verbose == 1: logging.basicConfig(level=logging.INFO, format=LOG_FORMAT) elif options.verbose > 1: logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT) logging.debug(argv) # Strip off any @ in the user, otherwise svn gets confused. options.user = options.user.split('@', 1)[0] if options.rietveld_url: # Try to extract the review number if possible and fix the protocol. if not '://' in options.rietveld_url: options.rietveld_url = 'http://' + options.rietveld_url match = re.match(r'^(.*)/(\d+)$', options.rietveld_url) if match: if options.issue or options.patchset: parser.error('Cannot use both --issue and use a review number url') options.issue = int(match.group(2)) options.rietveld_url = match.group(1) try: # Always include os.getcwd() in the checkout settings. 
checkouts = [] checkouts.append(GuessVCS(options, os.getcwd())) checkouts[0].AutomagicalSettings() for item in options.sub_rep: checkout = GuessVCS(options, os.path.join(checkouts[0].checkout_root, item)) if checkout.checkout_root in [c.checkout_root for c in checkouts]: parser.error('Specified the root %s two times.' % checkout.checkout_root) checkouts.append(checkout) can_http = options.port and options.host can_svn = options.svn_repo # If there was no transport selected yet, now we must have enough data to # select one. if not options.send_patch and not (can_http or can_svn): parser.error('Please specify an access method.') # Convert options.diff into the content of the diff. if options.url: if options.files: parser.error('You cannot specify files and --url at the same time.') options.diff = urllib.urlopen(options.url).read() elif options.diff: if options.files: parser.error('You cannot specify files and --diff at the same time.') options.diff = gclient_utils.FileRead(options.diff, 'rb') elif options.issue and options.patchset is None: # Retrieve the patch from rietveld when the diff is not specified. # When patchset is specified, it's because it's done by gcl/git-try. if json is None: parser.error('json or simplejson library is missing, please install.') api_url = '%s/api/%d' % (options.rietveld_url, options.issue) logging.debug(api_url) contents = json.loads(urllib.urlopen(api_url).read()) options.patchset = contents['patchsets'][-1] diff_url = ('%s/download/issue%d_%d.diff' % (options.rietveld_url, options.issue, options.patchset)) diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines()) options.diff = ''.join(diff) else: # Use this as the base. root = checkouts[0].checkout_root diffs = [] for checkout in checkouts: diff = checkout.GenerateDiff().splitlines(True) path_diff = gclient_utils.PathDifference(root, checkout.checkout_root) # Munge it. diffs.extend(GetMungedDiff(path_diff, diff)) options.diff = ''.join(diffs) if not options.bot: # Get try slaves from PRESUBMIT.py files if not specified. # Even if the diff comes from options.url, use the local checkout for bot # selection. try: import presubmit_support root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py') options.bot = presubmit_support.DoGetTrySlaves( checkouts[0].GetFileNames(), checkouts[0].checkout_root, root_presubmit, False, sys.stdout) except ImportError: pass # If no bot is specified, either the default pool will be selected or the # try server will refuse the job. Either case we don't need to interfere. if options.name is None: if options.issue: options.name = 'Issue %s' % options.issue else: options.name = 'Unnamed' print('Note: use --name NAME to change the try job name.') if not options.email: parser.error('Using an anonymous checkout. Please use --email or set ' 'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.') else: print('Results will be emailed to: ' + options.email) # Prevent rietveld updates if we aren't running all the tests. if options.testfilter is not None: options.issue = None options.patchset = None # Send the patch. if options.send_patch: # If forced. options.send_patch(options) PrintSuccess(options) return 0 try: if can_http: _SendChangeHTTP(options) PrintSuccess(options) return 0 except NoTryServerAccess: if not can_svn: raise _SendChangeSVN(options) PrintSuccess(options) return 0 except (InvalidScript, NoTryServerAccess), e: if swallow_exception: return 1 print e return 1 return 0
|
print e
|
print >> sys.stderr, e return 1 except gclient_utils.Error, e: print >> sys.stderr, e
|
def TryChange(argv, file_list, swallow_exception, prog=None, extra_epilog=None): """ Args: argv: Arguments and options. file_list: Default value to pass to --file. swallow_exception: Whether we raise or swallow exceptions. """ # Parse argv parser = optparse.OptionParser(usage=USAGE, version=__version__, prog=prog) epilog = EPILOG % { 'prog': prog } if extra_epilog: epilog += extra_epilog parser.epilog = epilog # Remove epilog formatting parser.format_epilog = lambda x: parser.epilog parser.add_option("-v", "--verbose", action="count", default=0, help="Prints debugging infos") group = optparse.OptionGroup(parser, "Result and status") group.add_option("-u", "--user", default=getpass.getuser(), help="Owner user name [default: %default]") group.add_option("-e", "--email", default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS', os.environ.get('EMAIL_ADDRESS')), help="Email address where to send the results. Use either " "the TRYBOT_RESULTS_EMAIL_ADDRESS environment " "variable or EMAIL_ADDRESS to set the email address " "the try bots report results to [default: %default]") group.add_option("-n", "--name", help="Descriptive name of the try job") group.add_option("--issue", type='int', help="Update rietveld issue try job status") group.add_option("--patchset", type='int', help="Update rietveld issue try job status. This is " "optional if --issue is used, In that case, the " "latest patchset will be used.") group.add_option("--dry_run", action='store_true', help="Just prints the diff and quits") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Try job options") group.add_option("-b", "--bot", action="append", help="Only use specifics build slaves, ex: '--bot win' to " "run the try job only on the 'win' slave; see the try " "server waterfall for the slave's name") group.add_option("-r", "--revision", help="Revision to use for the try job; default: the " "revision will be determined by the try server; see " "its waterfall for more info") group.add_option("-c", "--clobber", action="store_true", help="Force a clobber before building; e.g. don't do an " "incremental build") # TODO(maruel): help="Select a specific configuration, usually 'debug' or " # "'release'" group.add_option("--target", help=optparse.SUPPRESS_HELP) group.add_option("--project", help="Override which project to use. Projects are defined " "server-side to define what default bot set to use") group.add_option("-t", "--testfilter", action="append", help="Add a gtest_filter to a test. Use multiple times to " "specify filters for different tests. (i.e. " "--testfilter base_unittests:ThreadTest.* " "--testfilter ui_tests) If you specify any testfilters " "the test results will not be reported in rietveld and " "only tests with filters will run.") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Patch to run") group.add_option("-f", "--file", default=file_list, dest="files", metavar="FILE", action="append", help="Use many times to list the files to include in the " "try, relative to the repository root") group.add_option("--diff", help="File containing the diff to try") group.add_option("--url", help="Url where to grab a patch, e.g. 
" "http://example.com/x.diff") group.add_option("-R", "--rietveld_url", default="codereview.appspot.com", metavar="URL", help="Has 2 usages, both refer to the rietveld instance: " "Specify which code review patch to use as the try job " "or rietveld instance to update the try job results " "Default:%default") group.add_option("--root", help="Root to use for the patch; base subdirectory for " "patch created in a subdirectory") group.add_option("-p", "--patchlevel", type='int', metavar="LEVEL", help="Used as -pN parameter to patch") group.add_option("-s", "--sub_rep", action="append", default=[], help="Subcheckout to use in addition. This is mainly " "useful for gclient-style checkouts. Use @rev or " "@branch or @branch1..branch2 to specify the " "revision/branch to diff against.") group.add_option("--no_gclient", action="store_true", help="Disable automatic search for gclient checkout.") group.add_option("-E", "--exclude", action="append", default=['ChangeLog'], metavar='REGEXP', help="Regexp patterns to exclude files. Default: %default") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server by HTTP") group.add_option("--use_http", action="store_const", const=_SendChangeHTTP, dest="send_patch", help="Use HTTP to talk to the try server [default]") group.add_option("-H", "--host", help="Host address") group.add_option("-P", "--port", help="HTTP port") group.add_option("--proxy", help="HTTP proxy") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server with SVN") group.add_option("--use_svn", action="store_const", const=_SendChangeSVN, dest="send_patch", help="Use SVN to talk to the try server") group.add_option("-S", "--svn_repo", metavar="SVN_URL", help="SVN url to use to write the changes in; --use_svn is " "implied when using --svn_repo") parser.add_option_group(group) options, args = parser.parse_args(argv) # Note that the args array includes the script name, so # a single argument results in len(args) == 2. # If they've asked for help, give it to them if len(args) == 2 and args[1] == 'help': parser.print_help() return 0 # If they've said something confusing, don't spawn a try job until you # understand what they want. if len(args) > 1: plural = "" if len(args) > 2: plural = "s" print "Argument%s \"%s\" not understood" % (plural, " ".join(args[1:])) parser.print_help() return 1 LOG_FORMAT = '%(levelname)s %(filename)s(%(lineno)d): %(message)s' if not swallow_exception: if options.verbose == 0: logging.basicConfig(level=logging.WARNING, format=LOG_FORMAT) elif options.verbose == 1: logging.basicConfig(level=logging.INFO, format=LOG_FORMAT) elif options.verbose > 1: logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT) logging.debug(argv) # Strip off any @ in the user, otherwise svn gets confused. options.user = options.user.split('@', 1)[0] if options.rietveld_url: # Try to extract the review number if possible and fix the protocol. if not '://' in options.rietveld_url: options.rietveld_url = 'http://' + options.rietveld_url match = re.match(r'^(.*)/(\d+)$', options.rietveld_url) if match: if options.issue or options.patchset: parser.error('Cannot use both --issue and use a review number url') options.issue = int(match.group(2)) options.rietveld_url = match.group(1) try: # Always include os.getcwd() in the checkout settings. 
checkouts = [] checkouts.append(GuessVCS(options, os.getcwd())) checkouts[0].AutomagicalSettings() for item in options.sub_rep: checkout = GuessVCS(options, os.path.join(checkouts[0].checkout_root, item)) if checkout.checkout_root in [c.checkout_root for c in checkouts]: parser.error('Specified the root %s two times.' % checkout.checkout_root) checkouts.append(checkout) can_http = options.port and options.host can_svn = options.svn_repo # If there was no transport selected yet, now we must have enough data to # select one. if not options.send_patch and not (can_http or can_svn): parser.error('Please specify an access method.') # Convert options.diff into the content of the diff. if options.url: if options.files: parser.error('You cannot specify files and --url at the same time.') options.diff = urllib.urlopen(options.url).read() elif options.diff: if options.files: parser.error('You cannot specify files and --diff at the same time.') options.diff = gclient_utils.FileRead(options.diff, 'rb') elif options.issue and options.patchset is None: # Retrieve the patch from rietveld when the diff is not specified. # When patchset is specified, it's because it's done by gcl/git-try. if json is None: parser.error('json or simplejson library is missing, please install.') api_url = '%s/api/%d' % (options.rietveld_url, options.issue) logging.debug(api_url) contents = json.loads(urllib.urlopen(api_url).read()) options.patchset = contents['patchsets'][-1] diff_url = ('%s/download/issue%d_%d.diff' % (options.rietveld_url, options.issue, options.patchset)) diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines()) options.diff = ''.join(diff) else: # Use this as the base. root = checkouts[0].checkout_root diffs = [] for checkout in checkouts: diff = checkout.GenerateDiff().splitlines(True) path_diff = gclient_utils.PathDifference(root, checkout.checkout_root) # Munge it. diffs.extend(GetMungedDiff(path_diff, diff)) options.diff = ''.join(diffs) if not options.bot: # Get try slaves from PRESUBMIT.py files if not specified. # Even if the diff comes from options.url, use the local checkout for bot # selection. try: import presubmit_support root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py') options.bot = presubmit_support.DoGetTrySlaves( checkouts[0].GetFileNames(), checkouts[0].checkout_root, root_presubmit, False, sys.stdout) except ImportError: pass # If no bot is specified, either the default pool will be selected or the # try server will refuse the job. Either case we don't need to interfere. if options.name is None: if options.issue: options.name = 'Issue %s' % options.issue else: options.name = 'Unnamed' print('Note: use --name NAME to change the try job name.') if not options.email: parser.error('Using an anonymous checkout. Please use --email or set ' 'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.') else: print('Results will be emailed to: ' + options.email) # Prevent rietveld updates if we aren't running all the tests. if options.testfilter is not None: options.issue = None options.patchset = None # Send the patch. if options.send_patch: # If forced. options.send_patch(options) PrintSuccess(options) return 0 try: if can_http: _SendChangeHTTP(options) PrintSuccess(options) return 0 except NoTryServerAccess: if not can_svn: raise _SendChangeSVN(options) PrintSuccess(options) return 0 except (InvalidScript, NoTryServerAccess), e: if swallow_exception: return 1 print e return 1 return 0
|
if not argv: argv = ['help'] command = Command(argv[0])
|
def main(argv): try: GetRepositoryRoot() except gclient_utils.Error: print('To use gcl, you need to be in a subversion checkout.') return 1 # Create the directories where we store information about changelists if it # doesn't exist. if not os.path.exists(GetInfoDir()): os.mkdir(GetInfoDir()) if not os.path.exists(GetChangesDir()): os.mkdir(GetChangesDir()) if not os.path.exists(GetCacheDir()): os.mkdir(GetCacheDir()) if not argv: argv = ['help'] command = Command(argv[0]) if command: return command(argv[1:]) # Unknown command, try to pass that to svn return CMDpassthru(argv)
|
|
root = os.path.abspath(self.gclient_root)
|
def ReadRootFile(self, filename): if not self.options.root: filepath = os.path.join(self.checkout_root, filename) if os.path.isfile(filepath): logging.info('Found %s at %s' % (filename, self.checkout_root)) return gclient_util.FileRead(filepath) return None root = os.path.abspath(self.gclient_root) cur = os.path.abspath(self.checkout_root) assert cur.startswith(root), (root, cur) while cur.startswith(root): filepath = os.path.join(cur, filename) if os.path.isfile(filepath): logging.info('Found %s at %s' % (filename, cur)) return gclient_utils.FileRead(filepath) cur = os.path.dirname(cur) logging.warning('Didn\'t find %s' % filename) return None
|
|
ErrorExit("Changelist file %s is corrupt" % info_file)
|
ErrorExit( ('Changelist file %s is corrupt.\n' 'Either run "gcl delete %s" or manually edit the file') % ( info_file, changename))
|
def Load(changename, local_root, fail_on_not_found, update_status): """Gets information about a changelist.
|
for item in files:
|
for item in files[:]:
|
def Load(changename, local_root, fail_on_not_found, update_status): """Gets information about a changelist.
|
@need_change def CMDdelete(change_info):
|
def CMDdelete(args):
|
def CMDdescription(change_info): """Prints the description of the specified change to stdout.""" print change_info.description return 0
|
change_info.Delete()
|
if not len(args) == 1: ErrorExit('You need to pass a change list name') os.remove(GetChangelistInfoFile(args[0]))
|
def CMDdelete(change_info): """Deletes a changelist.""" change_info.Delete() return 0
|
self._Run(['prune'], redirect_stdout=False) self._Run(['fsck'], redirect_stdout=False) self._Run(['gc'], redirect_stdout=False)
|
def cleanup(self, options, args, file_list): """Cleanup working copy.""" __pychecker__ = 'unusednames=options,args,file_list' self._Run(['prune'], redirect_stdout=False) self._Run(['fsck'], redirect_stdout=False) self._Run(['gc'], redirect_stdout=False)
|
|
if rev_type == "branch": remote_output, remote_err = scm.GIT.Capture( ['fetch'] + verbose + ['origin', revision], self.checkout_path, print_error=False) else: remote_output, remote_err = scm.GIT.Capture(
|
remote_output, remote_err = scm.GIT.Capture(
|
def update(self, options, args, file_list): """Runs git to update or transparently checkout the working copy.
|
"Returns a human-readable hierarchical reference to a Dependency."
|
def hierarchy(self): "Returns a human-readable hierarchical reference to a Dependency." out = '%s(%s)' % (self.name, self.url) i = self.parent while i and i.name: out = '%s(%s) -> %s' % (i.name, i.url, out) i = i.parent return out
|
|
print('To use gcl, you need to be in a subversion checkout.')
|
print >> sys.stderr, 'To use gcl, you need to be in a subversion checkout.'
|
def main(argv): if not argv: argv = ['help'] command = Command(argv[0]) # Help can be run from anywhere. if command == CMDhelp: return command(argv[1:]) try: GetRepositoryRoot() except gclient_utils.Error: print('To use gcl, you need to be in a subversion checkout.') return 1 # Create the directories where we store information about changelists if it # doesn't exist. try: if not os.path.exists(GetInfoDir()): os.mkdir(GetInfoDir()) if not os.path.exists(GetChangesDir()): os.mkdir(GetChangesDir()) if not os.path.exists(GetCacheDir()): os.mkdir(GetCacheDir()) if command: return command(argv[1:]) # Unknown command, try to pass that to svn return CMDpassthru(argv) except gclient_utils.Error, e: print('Got an exception') print(str(e))
|
print('Got an exception') print(str(e))
|
print >> sys.stderr, 'Got an exception' print >> sys.stderr, str(e) return 1 except urllib2.HTTPError, e: if e.code != 500: raise print >> sys.stderr, ( 'AppEngine is misbehaving and returned HTTP %d, again. Keep faith ' 'and retry or visit go/isgaeup.\n%s') % (e.code, e.reason) return 1
|
def main(argv): if not argv: argv = ['help'] command = Command(argv[0]) # Help can be run from anywhere. if command == CMDhelp: return command(argv[1:]) try: GetRepositoryRoot() except gclient_utils.Error: print('To use gcl, you need to be in a subversion checkout.') return 1 # Create the directories where we store information about changelists if it # doesn't exist. try: if not os.path.exists(GetInfoDir()): os.mkdir(GetInfoDir()) if not os.path.exists(GetChangesDir()): os.mkdir(GetChangesDir()) if not os.path.exists(GetCacheDir()): os.mkdir(GetCacheDir()) if command: return command(argv[1:]) # Unknown command, try to pass that to svn return CMDpassthru(argv) except gclient_utils.Error, e: print('Got an exception') print(str(e))
|
checkouts.append(GuessVCS(options, os.getcwd()))
|
path = os.getcwd() if options.upstream_branch: path += '@' + options.upstream_branch checkouts.append(GuessVCS(options, path))
|
def TryChange(argv, file_list, swallow_exception, prog=None, extra_epilog=None): """ Args: argv: Arguments and options. file_list: Default value to pass to --file. swallow_exception: Whether we raise or swallow exceptions. """ # Parse argv parser = optparse.OptionParser(usage=USAGE, version=__version__, prog=prog) epilog = EPILOG % { 'prog': prog } if extra_epilog: epilog += extra_epilog parser.epilog = epilog # Remove epilog formatting parser.format_epilog = lambda x: parser.epilog parser.add_option("-v", "--verbose", action="count", default=0, help="Prints debugging infos") group = optparse.OptionGroup(parser, "Result and status") group.add_option("-u", "--user", default=getpass.getuser(), help="Owner user name [default: %default]") group.add_option("-e", "--email", default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS', os.environ.get('EMAIL_ADDRESS')), help="Email address where to send the results. Use either " "the TRYBOT_RESULTS_EMAIL_ADDRESS environment " "variable or EMAIL_ADDRESS to set the email address " "the try bots report results to [default: %default]") group.add_option("-n", "--name", help="Descriptive name of the try job") group.add_option("--issue", type='int', help="Update rietveld issue try job status") group.add_option("--patchset", type='int', help="Update rietveld issue try job status. This is " "optional if --issue is used, In that case, the " "latest patchset will be used.") group.add_option("--dry_run", action='store_true', help="Just prints the diff and quits") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Try job options") group.add_option("-b", "--bot", action="append", help="Only use specifics build slaves, ex: " "'--bot win,layout_mac'; see the try " "server waterfall for the slave's name") group.add_option("-r", "--revision", help="Revision to use for the try job; default: the " "revision will be determined by the try server; see " "its waterfall for more info") group.add_option("-c", "--clobber", action="store_true", help="Force a clobber before building; e.g. don't do an " "incremental build") # TODO(maruel): help="Select a specific configuration, usually 'debug' or " # "'release'" group.add_option("--target", help=optparse.SUPPRESS_HELP) group.add_option("--project", help="Override which project to use. Projects are defined " "server-side to define what default bot set to use") group.add_option("-t", "--testfilter", action="append", help="Add a gtest_filter to a test. Use multiple times to " "specify filters for different tests. (i.e. " "--testfilter base_unittests:ThreadTest.* " "--testfilter ui_tests) If you specify any testfilters " "the test results will not be reported in rietveld and " "only tests with filters will run.") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Patch to run") group.add_option("-f", "--file", default=file_list, dest="files", metavar="FILE", action="append", help="Use many times to list the files to include in the " "try, relative to the repository root") group.add_option("--diff", help="File containing the diff to try") group.add_option("--url", help="Url where to grab a patch, e.g. 
" "http://example.com/x.diff") group.add_option("-R", "--rietveld_url", default="codereview.appspot.com", metavar="URL", help="Has 2 usages, both refer to the rietveld instance: " "Specify which code review patch to use as the try job " "or rietveld instance to update the try job results " "Default:%default") group.add_option("--root", help="Root to use for the patch; base subdirectory for " "patch created in a subdirectory") group.add_option("-p", "--patchlevel", type='int', metavar="LEVEL", help="Used as -pN parameter to patch") group.add_option("-s", "--sub_rep", action="append", default=[], help="Subcheckout to use in addition. This is mainly " "useful for gclient-style checkouts. In git, checkout " "the branch with changes first. Use @rev or " "@branch to specify the " "revision/branch to diff against. If no @branch is " "given the diff will be against the upstream branch. " "If @branch then the diff is branch..HEAD. " "All edits must be checked in.") group.add_option("--no_gclient", action="store_true", help="Disable automatic search for gclient checkout.") group.add_option("-E", "--exclude", action="append", default=['ChangeLog'], metavar='REGEXP', help="Regexp patterns to exclude files. Default: %default") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server by HTTP") group.add_option("--use_http", action="store_const", const=_SendChangeHTTP, dest="send_patch", help="Use HTTP to talk to the try server [default]") group.add_option("-H", "--host", help="Host address") group.add_option("-P", "--port", help="HTTP port") group.add_option("--proxy", help="HTTP proxy") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server with SVN") group.add_option("--use_svn", action="store_const", const=_SendChangeSVN, dest="send_patch", help="Use SVN to talk to the try server") group.add_option("-S", "--svn_repo", metavar="SVN_URL", help="SVN url to use to write the changes in; --use_svn is " "implied when using --svn_repo") parser.add_option_group(group) options, args = parser.parse_args(argv) # Note that the args array includes the script name, so # a single argument results in len(args) == 2. # If they've asked for help, give it to them if len(args) == 2 and args[1] == 'help': parser.print_help() return 0 # If they've said something confusing, don't spawn a try job until you # understand what they want. if len(args) > 1: plural = "" if len(args) > 2: plural = "s" print >> sys.stderr, ( 'Argument%s \"%s\" not understood' % (plural, ' '.join(args[1:]))) parser.print_help() return 1 LOG_FORMAT = '%(levelname)s %(filename)s(%(lineno)d): %(message)s' if not swallow_exception: if options.verbose == 0: logging.basicConfig(level=logging.WARNING, format=LOG_FORMAT) elif options.verbose == 1: logging.basicConfig(level=logging.INFO, format=LOG_FORMAT) elif options.verbose > 1: logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT) logging.debug(argv) # Strip off any @ in the user, otherwise svn gets confused. options.user = options.user.split('@', 1)[0] if options.rietveld_url: # Try to extract the review number if possible and fix the protocol. 
if not '://' in options.rietveld_url: options.rietveld_url = 'http://' + options.rietveld_url match = re.match(r'^(.*)/(\d+)$', options.rietveld_url) if match: if options.issue or options.patchset: parser.error('Cannot use both --issue and use a review number url') options.issue = int(match.group(2)) options.rietveld_url = match.group(1) try: # Always include os.getcwd() in the checkout settings. checkouts = [] checkouts.append(GuessVCS(options, os.getcwd())) checkouts[0].AutomagicalSettings() for item in options.sub_rep: checkout = GuessVCS(options, os.path.join(checkouts[0].checkout_root, item)) if checkout.checkout_root in [c.checkout_root for c in checkouts]: parser.error('Specified the root %s two times.' % checkout.checkout_root) checkouts.append(checkout) can_http = options.port and options.host can_svn = options.svn_repo # If there was no transport selected yet, now we must have enough data to # select one. if not options.send_patch and not (can_http or can_svn): parser.error('Please specify an access method.') # Convert options.diff into the content of the diff. if options.url: if options.files: parser.error('You cannot specify files and --url at the same time.') options.diff = urllib.urlopen(options.url).read() elif options.diff: if options.files: parser.error('You cannot specify files and --diff at the same time.') options.diff = gclient_utils.FileRead(options.diff, 'rb') elif options.issue and options.patchset is None: # Retrieve the patch from rietveld when the diff is not specified. # When patchset is specified, it's because it's done by gcl/git-try. if json is None: parser.error('json or simplejson library is missing, please install.') api_url = '%s/api/%d' % (options.rietveld_url, options.issue) logging.debug(api_url) contents = json.loads(urllib.urlopen(api_url).read()) options.patchset = contents['patchsets'][-1] diff_url = ('%s/download/issue%d_%d.diff' % (options.rietveld_url, options.issue, options.patchset)) diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines()) options.diff = ''.join(diff) else: # Use this as the base. root = checkouts[0].checkout_root diffs = [] for checkout in checkouts: diff = checkout.GenerateDiff().splitlines(True) path_diff = gclient_utils.PathDifference(root, checkout.checkout_root) # Munge it. diffs.extend(GetMungedDiff(path_diff, diff)) options.diff = ''.join(diffs) if not options.bot: # Get try slaves from PRESUBMIT.py files if not specified. # Even if the diff comes from options.url, use the local checkout for bot # selection. try: import presubmit_support root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py') options.bot = presubmit_support.DoGetTrySlaves( checkouts[0].GetFileNames(), checkouts[0].checkout_root, root_presubmit, False, sys.stdout) except ImportError: pass # If no bot is specified, either the default pool will be selected or the # try server will refuse the job. Either case we don't need to interfere. if options.name is None: if options.issue: options.name = 'Issue %s' % options.issue else: options.name = 'Unnamed' print('Note: use --name NAME to change the try job name.') if not options.email: parser.error('Using an anonymous checkout. Please use --email or set ' 'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.') else: print('Results will be emailed to: ' + options.email) # Prevent rietveld updates if we aren't running all the tests. if options.testfilter is not None: options.issue = None options.patchset = None # Send the patch. if options.send_patch: # If forced. 
options.send_patch(options) PrintSuccess(options) return 0 try: if can_http: _SendChangeHTTP(options) PrintSuccess(options) return 0 except NoTryServerAccess: if not can_svn: raise _SendChangeSVN(options) PrintSuccess(options) return 0 except (InvalidScript, NoTryServerAccess), e: if swallow_exception: return 1 print >> sys.stderr, e return 1 except gclient_utils.Error, e: print >> sys.stderr, e return 1 return 0
|
SendStack(repr(last_value), ''.join(traceback.format_tb(last_tb)))
|
SendStack(last_value, ''.join(traceback.format_tb(last_tb)))
|
def CheckForException(): """Runs at exit. Look if there was an exception active.""" last_value = getattr(sys, 'last_value', None) if last_value and not isinstance(last_value, KeyboardInterrupt): last_tb = getattr(sys, 'last_traceback', None) if last_tb: SendStack(repr(last_value), ''.join(traceback.format_tb(last_tb)))
|