rem
stringlengths
0
322k
add
stringlengths
0
2.05M
context
stringlengths
8
228k
('running', self.root_dir + '/src/file/other'),
('running', os.path.join(self.root_dir, 'src', 'file', 'other')),
def testSyncJobs(self): if not self.enabled: return # TODO(maruel): safesync. self.gclient(['config', self.svn_base + 'trunk/src/']) # Test unversioned checkout. self.parseGclient( ['sync', '--deps', 'mac', '--jobs', '8'], ['running', 'running', # This is due to the way svn update is called for a # single file when File() is used in a DEPS file. ('running', self.root_dir + '/src/file/other'), 'running', 'running', 'running', 'running'], untangle=True) tree = self.mangle_svn_tree( ('trunk/src@2', 'src'), ('trunk/third_party/foo@1', 'src/third_party/foo'), ('trunk/other@2', 'src/other')) tree['src/file/other/DEPS'] = ( self.FAKE_REPOS.svn_revs[2]['trunk/other/DEPS']) tree['src/svn_hooked1'] = 'svn_hooked1' self.assertTree(tree)
('running', self.root_dir + '/src/file/other'),
('running', os.path.join(self.root_dir, 'src', 'file', 'other')),
def testInitialCheckoutNotYetDone(self): # Check that gclient can be executed when the initial checkout hasn't been # done yet. if not self.enabled: return self.gclient(['config', self.svn_base + 'trunk/src/']) self.parseGclient(['sync', '--jobs', '1'], ['running', 'running', # This is due to the way svn update is called for a # single file when File() is used in a DEPS file. ('running', self.root_dir + '/src/file/other'), 'running', 'running', 'running', 'running'])
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
def SendStack(last_tb, stack, url=None): if not url: url = DEFAULT_URL
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'): print 'Sending crash report ...' try: params = { 'args': sys.argv, 'stack': stack, 'user': getpass.getuser(), } request = urllib.urlopen(url, urllib.urlencode(params)) print request.read() request.close() except IOError: print('There was a failure while trying to send the stack trace. Too bad.')
last_tb = getattr(sys, 'last_traceback', None) if last_tb and sys.last_type is not KeyboardInterrupt: SendStack(''.join(traceback.format_tb(last_tb)))
last_value = getattr(sys, 'last_value', None) if last_value and not isinstance(last_value, KeyboardInterrupt): last_tb = getattr(sys, 'last_traceback', None) if last_tb: SendStack(repr(last_value), ''.join(traceback.format_tb(last_tb)))
def CheckForException(): last_tb = getattr(sys, 'last_traceback', None) if last_tb and sys.last_type is not KeyboardInterrupt: SendStack(''.join(traceback.format_tb(last_tb)))
self._AttemptRebase('origin', files=files, verbose=options.verbose, printed_path=printed_path)
upstream_branch = 'origin' if options.revision: upstream_branch = revision self._AttemptRebase(upstream_branch, files=files, verbose=options.verbose, printed_path=printed_path)
def update(self, options, args, file_list): """Runs git to update or transparently checkout the working copy.
[SVN.DiffItem(RelativePath(f, root, revision), full_move=full_move)
[SVN.DiffItem(RelativePath(f, root), full_move=full_move, revision=revision)
def RelativePath(path, root): """We must use relative paths.""" if path.startswith(root): return path[len(root):] return path
'RunOnDeps', 'SaveConfig', 'SetConfig', 'SetDefaultConfig', 'supported_commands', 'PrintRevInfo',
'PrintRevInfo', 'RunOnDeps', 'SaveConfig', 'SetConfig', 'SetDefaultConfig', 'deps_os_choices', 'supported_commands',
def testDir(self): members = [ 'ConfigContent', 'DEFAULT_CLIENT_FILE_TEXT', 'DEFAULT_SNAPSHOT_FILE_TEXT', 'DEFAULT_SNAPSHOT_SOLUTION_TEXT', 'DEPS_FILE', 'FileImpl', 'FromImpl', 'GetVar', 'LoadCurrentConfig', 'RunOnDeps', 'SaveConfig', 'SetConfig', 'SetDefaultConfig', 'supported_commands', 'PrintRevInfo', ]
if isSVNDirty(): print "Working copy contains uncommitted files"
if (isSVNDirty() and not prompt("Working copy contains uncommitted files. Continue?")):
def drover(options, args): revision = options.revert or options.merge # Initialize some variables used below. They can be overwritten by # the drover.properties file. BASE_URL = "svn://svn.chromium.org/chrome" TRUNK_URL = BASE_URL + "/trunk/src" BRANCH_URL = BASE_URL + "/branches/$branch/src" SKIP_CHECK_WORKING = True PROMPT_FOR_AUTHOR = False DEFAULT_WORKING = "drover_" + str(revision) if options.branch: DEFAULT_WORKING += ("_" + options.branch) if not isMinimumSVNVersion(1, 5): print "You need to use at least SVN version 1.5.x" return 1 # Override the default properties if there is a drover.properties file. global file_pattern_ if os.path.exists("drover.properties"): f = open("drover.properties") exec(f) f.close() if FILE_PATTERN: file_pattern_ = FILE_PATTERN if options.revert and options.branch: url = BRANCH_URL.replace("$branch", options.branch) elif options.merge and options.sbranch: url = BRANCH_URL.replace("$branch", options.sbranch) else: url = TRUNK_URL working = options.workdir or DEFAULT_WORKING if options.local: working = os.getcwd() if not inCheckoutRoot(working): print "'%s' appears not to be the root of a working copy" % working return 1 if isSVNDirty(): print "Working copy contains uncommitted files" return 1 command = 'svn log ' + url + " -r "+str(revision) + " -v" os.system(command) if not (options.revertbot or prompt("Is this the correct revision?")): return 0 if (os.path.exists(working)) and not options.local: if not (options.revertbot or SKIP_CHECK_WORKING or prompt("Working directory: '%s' already exists, clobber?" 
% working)): return 0 deltree(working) if not options.local: os.makedirs(working) os.chdir(working) if options.merge: action = "Merge" if not options.local: branch_url = BRANCH_URL.replace("$branch", options.branch) # Checkout everything but stuff that got added into a new dir checkoutRevision(url, revision, branch_url) # Merge everything that changed mergeRevision(url, revision) # "Export" files that were added from the source and add them to branch exportRevision(url, revision) # Delete directories that were deleted (file deletes are handled in the # merge). deleteRevision(url, revision) elif options.revert: action = "Revert" if options.branch: url = BRANCH_URL.replace("$branch", options.branch) checkoutRevision(url, revision, url, True) revertRevision(url, revision) revertExportRevision(url, revision) # Check the base url so we actually find the author who made the change if options.auditor: author = options.auditor else: author = getAuthor(url, revision) if not author: author = getAuthor(TRUNK_URL, revision) filename = str(revision)+".txt" out = open(filename,"w") out.write(action +" " + str(revision) + " - ") out.write(getRevisionLog(url, revision)) if (author): out.write("\nTBR=" + author) out.close() change_cmd = 'change ' + str(revision) + " " + filename if options.revertbot: change_cmd += ' --silent' runGcl(change_cmd) os.unlink(filename) if options.local: return 0 print author print revision print ("gcl upload " + str(revision) + " --send_mail --no_presubmit --reviewers=" + author) if options.revertbot or prompt("Would you like to upload?"): if PROMPT_FOR_AUTHOR: author = text_prompt("Enter new author or press enter to accept default", author) if options.revertbot and options.revertbot_reviewers: author += "," author += options.revertbot_reviewers gclUpload(revision, author) else: print "Deleting the changelist." 
print "gcl delete " + str(revision) runGcl("delete " + str(revision)) return 0 # We commit if the reverbot is set to commit automatically, or if this is # not the revertbot and the user agrees. if options.revertbot_commit or (not options.revertbot and prompt("Would you like to commit?")): print "gcl commit " + str(revision) + " --no_presubmit --force" return runGcl("commit " + str(revision) + " --no_presubmit --force") else: return 0
help="Only use specifics build slaves, ex: '--bot win' to " "run the try job only on the 'win' slave; see the try "
help="Only use specifics build slaves, ex: " "'--bot win,layout_mac'; see the try "
def TryChange(argv, file_list, swallow_exception, prog=None, extra_epilog=None): """ Args: argv: Arguments and options. file_list: Default value to pass to --file. swallow_exception: Whether we raise or swallow exceptions. """ # Parse argv parser = optparse.OptionParser(usage=USAGE, version=__version__, prog=prog) epilog = EPILOG % { 'prog': prog } if extra_epilog: epilog += extra_epilog parser.epilog = epilog # Remove epilog formatting parser.format_epilog = lambda x: parser.epilog parser.add_option("-v", "--verbose", action="count", default=0, help="Prints debugging infos") group = optparse.OptionGroup(parser, "Result and status") group.add_option("-u", "--user", default=getpass.getuser(), help="Owner user name [default: %default]") group.add_option("-e", "--email", default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS', os.environ.get('EMAIL_ADDRESS')), help="Email address where to send the results. Use either " "the TRYBOT_RESULTS_EMAIL_ADDRESS environment " "variable or EMAIL_ADDRESS to set the email address " "the try bots report results to [default: %default]") group.add_option("-n", "--name", help="Descriptive name of the try job") group.add_option("--issue", type='int', help="Update rietveld issue try job status") group.add_option("--patchset", type='int', help="Update rietveld issue try job status. 
This is " "optional if --issue is used, In that case, the " "latest patchset will be used.") group.add_option("--dry_run", action='store_true', help="Just prints the diff and quits") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Try job options") group.add_option("-b", "--bot", action="append", help="Only use specifics build slaves, ex: '--bot win' to " "run the try job only on the 'win' slave; see the try " "server waterfall for the slave's name") group.add_option("-r", "--revision", help="Revision to use for the try job; default: the " "revision will be determined by the try server; see " "its waterfall for more info") group.add_option("-c", "--clobber", action="store_true", help="Force a clobber before building; e.g. don't do an " "incremental build") # TODO(maruel): help="Select a specific configuration, usually 'debug' or " # "'release'" group.add_option("--target", help=optparse.SUPPRESS_HELP) group.add_option("--project", help="Override which project to use. Projects are defined " "server-side to define what default bot set to use") group.add_option("-t", "--testfilter", action="append", help="Add a gtest_filter to a test. Use multiple times to " "specify filters for different tests. (i.e. " "--testfilter base_unittests:ThreadTest.* " "--testfilter ui_tests) If you specify any testfilters " "the test results will not be reported in rietveld and " "only tests with filters will run.") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Patch to run") group.add_option("-f", "--file", default=file_list, dest="files", metavar="FILE", action="append", help="Use many times to list the files to include in the " "try, relative to the repository root") group.add_option("--diff", help="File containing the diff to try") group.add_option("--url", help="Url where to grab a patch, e.g. 
" "http://example.com/x.diff") group.add_option("-R", "--rietveld_url", default="codereview.appspot.com", metavar="URL", help="Has 2 usages, both refer to the rietveld instance: " "Specify which code review patch to use as the try job " "or rietveld instance to update the try job results " "Default:%default") group.add_option("--root", help="Root to use for the patch; base subdirectory for " "patch created in a subdirectory") group.add_option("-p", "--patchlevel", type='int', metavar="LEVEL", help="Used as -pN parameter to patch") group.add_option("-s", "--sub_rep", action="append", default=[], help="Subcheckout to use in addition. This is mainly " "useful for gclient-style checkouts. Use @rev or " "@branch or @branch1..branch2 to specify the " "revision/branch to diff against.") group.add_option("--no_gclient", action="store_true", help="Disable automatic search for gclient checkout.") group.add_option("-E", "--exclude", action="append", default=['ChangeLog'], metavar='REGEXP', help="Regexp patterns to exclude files. 
Default: %default") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server by HTTP") group.add_option("--use_http", action="store_const", const=_SendChangeHTTP, dest="send_patch", help="Use HTTP to talk to the try server [default]") group.add_option("-H", "--host", help="Host address") group.add_option("-P", "--port", help="HTTP port") group.add_option("--proxy", help="HTTP proxy") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server with SVN") group.add_option("--use_svn", action="store_const", const=_SendChangeSVN, dest="send_patch", help="Use SVN to talk to the try server") group.add_option("-S", "--svn_repo", metavar="SVN_URL", help="SVN url to use to write the changes in; --use_svn is " "implied when using --svn_repo") parser.add_option_group(group) options, args = parser.parse_args(argv) # Note that the args array includes the script name, so # a single argument results in len(args) == 2. # If they've asked for help, give it to them if len(args) == 2 and args[1] == 'help': parser.print_help() return 0 # If they've said something confusing, don't spawn a try job until you # understand what they want. if len(args) > 1: plural = "" if len(args) > 2: plural = "s" print >> sys.stderr, ( 'Argument%s \"%s\" not understood' % (plural, ' '.join(args[1:]))) parser.print_help() return 1 LOG_FORMAT = '%(levelname)s %(filename)s(%(lineno)d): %(message)s' if not swallow_exception: if options.verbose == 0: logging.basicConfig(level=logging.WARNING, format=LOG_FORMAT) elif options.verbose == 1: logging.basicConfig(level=logging.INFO, format=LOG_FORMAT) elif options.verbose > 1: logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT) logging.debug(argv) # Strip off any @ in the user, otherwise svn gets confused. options.user = options.user.split('@', 1)[0] if options.rietveld_url: # Try to extract the review number if possible and fix the protocol. 
if not '://' in options.rietveld_url: options.rietveld_url = 'http://' + options.rietveld_url match = re.match(r'^(.*)/(\d+)$', options.rietveld_url) if match: if options.issue or options.patchset: parser.error('Cannot use both --issue and use a review number url') options.issue = int(match.group(2)) options.rietveld_url = match.group(1) try: # Always include os.getcwd() in the checkout settings. checkouts = [] checkouts.append(GuessVCS(options, os.getcwd())) checkouts[0].AutomagicalSettings() for item in options.sub_rep: checkout = GuessVCS(options, os.path.join(checkouts[0].checkout_root, item)) if checkout.checkout_root in [c.checkout_root for c in checkouts]: parser.error('Specified the root %s two times.' % checkout.checkout_root) checkouts.append(checkout) can_http = options.port and options.host can_svn = options.svn_repo # If there was no transport selected yet, now we must have enough data to # select one. if not options.send_patch and not (can_http or can_svn): parser.error('Please specify an access method.') # Convert options.diff into the content of the diff. if options.url: if options.files: parser.error('You cannot specify files and --url at the same time.') options.diff = urllib.urlopen(options.url).read() elif options.diff: if options.files: parser.error('You cannot specify files and --diff at the same time.') options.diff = gclient_utils.FileRead(options.diff, 'rb') elif options.issue and options.patchset is None: # Retrieve the patch from rietveld when the diff is not specified. # When patchset is specified, it's because it's done by gcl/git-try. 
if json is None: parser.error('json or simplejson library is missing, please install.') api_url = '%s/api/%d' % (options.rietveld_url, options.issue) logging.debug(api_url) contents = json.loads(urllib.urlopen(api_url).read()) options.patchset = contents['patchsets'][-1] diff_url = ('%s/download/issue%d_%d.diff' % (options.rietveld_url, options.issue, options.patchset)) diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines()) options.diff = ''.join(diff) else: # Use this as the base. root = checkouts[0].checkout_root diffs = [] for checkout in checkouts: diff = checkout.GenerateDiff().splitlines(True) path_diff = gclient_utils.PathDifference(root, checkout.checkout_root) # Munge it. diffs.extend(GetMungedDiff(path_diff, diff)) options.diff = ''.join(diffs) if not options.bot: # Get try slaves from PRESUBMIT.py files if not specified. # Even if the diff comes from options.url, use the local checkout for bot # selection. try: import presubmit_support root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py') options.bot = presubmit_support.DoGetTrySlaves( checkouts[0].GetFileNames(), checkouts[0].checkout_root, root_presubmit, False, sys.stdout) except ImportError: pass # If no bot is specified, either the default pool will be selected or the # try server will refuse the job. Either case we don't need to interfere. if options.name is None: if options.issue: options.name = 'Issue %s' % options.issue else: options.name = 'Unnamed' print('Note: use --name NAME to change the try job name.') if not options.email: parser.error('Using an anonymous checkout. Please use --email or set ' 'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.') else: print('Results will be emailed to: ' + options.email) # Prevent rietveld updates if we aren't running all the tests. if options.testfilter is not None: options.issue = None options.patchset = None # Send the patch. if options.send_patch: # If forced. 
options.send_patch(options) PrintSuccess(options) return 0 try: if can_http: _SendChangeHTTP(options) PrintSuccess(options) return 0 except NoTryServerAccess: if not can_svn: raise _SendChangeSVN(options) PrintSuccess(options) return 0 except (InvalidScript, NoTryServerAccess), e: if swallow_exception: return 1 print >> sys.stderr, e return 1 except gclient_utils.Error, e: print >> sys.stderr, e return 1 return 0
"useful for gclient-style checkouts. Use @rev or " "@branch or @branch1..branch2 to specify the " "revision/branch to diff against.")
"useful for gclient-style checkouts. In git, checkout " "the branch with changes first. Use @rev or " "@branch to specify the " "revision/branch to diff against. If no @branch is " "given the diff will be against the upstream branch. " "If @branch then the diff is branch..HEAD. " "All edits must be checked in.")
def TryChange(argv, file_list, swallow_exception, prog=None, extra_epilog=None): """ Args: argv: Arguments and options. file_list: Default value to pass to --file. swallow_exception: Whether we raise or swallow exceptions. """ # Parse argv parser = optparse.OptionParser(usage=USAGE, version=__version__, prog=prog) epilog = EPILOG % { 'prog': prog } if extra_epilog: epilog += extra_epilog parser.epilog = epilog # Remove epilog formatting parser.format_epilog = lambda x: parser.epilog parser.add_option("-v", "--verbose", action="count", default=0, help="Prints debugging infos") group = optparse.OptionGroup(parser, "Result and status") group.add_option("-u", "--user", default=getpass.getuser(), help="Owner user name [default: %default]") group.add_option("-e", "--email", default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS', os.environ.get('EMAIL_ADDRESS')), help="Email address where to send the results. Use either " "the TRYBOT_RESULTS_EMAIL_ADDRESS environment " "variable or EMAIL_ADDRESS to set the email address " "the try bots report results to [default: %default]") group.add_option("-n", "--name", help="Descriptive name of the try job") group.add_option("--issue", type='int', help="Update rietveld issue try job status") group.add_option("--patchset", type='int', help="Update rietveld issue try job status. 
This is " "optional if --issue is used, In that case, the " "latest patchset will be used.") group.add_option("--dry_run", action='store_true', help="Just prints the diff and quits") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Try job options") group.add_option("-b", "--bot", action="append", help="Only use specifics build slaves, ex: '--bot win' to " "run the try job only on the 'win' slave; see the try " "server waterfall for the slave's name") group.add_option("-r", "--revision", help="Revision to use for the try job; default: the " "revision will be determined by the try server; see " "its waterfall for more info") group.add_option("-c", "--clobber", action="store_true", help="Force a clobber before building; e.g. don't do an " "incremental build") # TODO(maruel): help="Select a specific configuration, usually 'debug' or " # "'release'" group.add_option("--target", help=optparse.SUPPRESS_HELP) group.add_option("--project", help="Override which project to use. Projects are defined " "server-side to define what default bot set to use") group.add_option("-t", "--testfilter", action="append", help="Add a gtest_filter to a test. Use multiple times to " "specify filters for different tests. (i.e. " "--testfilter base_unittests:ThreadTest.* " "--testfilter ui_tests) If you specify any testfilters " "the test results will not be reported in rietveld and " "only tests with filters will run.") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Patch to run") group.add_option("-f", "--file", default=file_list, dest="files", metavar="FILE", action="append", help="Use many times to list the files to include in the " "try, relative to the repository root") group.add_option("--diff", help="File containing the diff to try") group.add_option("--url", help="Url where to grab a patch, e.g. 
" "http://example.com/x.diff") group.add_option("-R", "--rietveld_url", default="codereview.appspot.com", metavar="URL", help="Has 2 usages, both refer to the rietveld instance: " "Specify which code review patch to use as the try job " "or rietveld instance to update the try job results " "Default:%default") group.add_option("--root", help="Root to use for the patch; base subdirectory for " "patch created in a subdirectory") group.add_option("-p", "--patchlevel", type='int', metavar="LEVEL", help="Used as -pN parameter to patch") group.add_option("-s", "--sub_rep", action="append", default=[], help="Subcheckout to use in addition. This is mainly " "useful for gclient-style checkouts. Use @rev or " "@branch or @branch1..branch2 to specify the " "revision/branch to diff against.") group.add_option("--no_gclient", action="store_true", help="Disable automatic search for gclient checkout.") group.add_option("-E", "--exclude", action="append", default=['ChangeLog'], metavar='REGEXP', help="Regexp patterns to exclude files. 
Default: %default") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server by HTTP") group.add_option("--use_http", action="store_const", const=_SendChangeHTTP, dest="send_patch", help="Use HTTP to talk to the try server [default]") group.add_option("-H", "--host", help="Host address") group.add_option("-P", "--port", help="HTTP port") group.add_option("--proxy", help="HTTP proxy") parser.add_option_group(group) group = optparse.OptionGroup(parser, "Access the try server with SVN") group.add_option("--use_svn", action="store_const", const=_SendChangeSVN, dest="send_patch", help="Use SVN to talk to the try server") group.add_option("-S", "--svn_repo", metavar="SVN_URL", help="SVN url to use to write the changes in; --use_svn is " "implied when using --svn_repo") parser.add_option_group(group) options, args = parser.parse_args(argv) # Note that the args array includes the script name, so # a single argument results in len(args) == 2. # If they've asked for help, give it to them if len(args) == 2 and args[1] == 'help': parser.print_help() return 0 # If they've said something confusing, don't spawn a try job until you # understand what they want. if len(args) > 1: plural = "" if len(args) > 2: plural = "s" print >> sys.stderr, ( 'Argument%s \"%s\" not understood' % (plural, ' '.join(args[1:]))) parser.print_help() return 1 LOG_FORMAT = '%(levelname)s %(filename)s(%(lineno)d): %(message)s' if not swallow_exception: if options.verbose == 0: logging.basicConfig(level=logging.WARNING, format=LOG_FORMAT) elif options.verbose == 1: logging.basicConfig(level=logging.INFO, format=LOG_FORMAT) elif options.verbose > 1: logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT) logging.debug(argv) # Strip off any @ in the user, otherwise svn gets confused. options.user = options.user.split('@', 1)[0] if options.rietveld_url: # Try to extract the review number if possible and fix the protocol. 
if not '://' in options.rietveld_url: options.rietveld_url = 'http://' + options.rietveld_url match = re.match(r'^(.*)/(\d+)$', options.rietveld_url) if match: if options.issue or options.patchset: parser.error('Cannot use both --issue and use a review number url') options.issue = int(match.group(2)) options.rietveld_url = match.group(1) try: # Always include os.getcwd() in the checkout settings. checkouts = [] checkouts.append(GuessVCS(options, os.getcwd())) checkouts[0].AutomagicalSettings() for item in options.sub_rep: checkout = GuessVCS(options, os.path.join(checkouts[0].checkout_root, item)) if checkout.checkout_root in [c.checkout_root for c in checkouts]: parser.error('Specified the root %s two times.' % checkout.checkout_root) checkouts.append(checkout) can_http = options.port and options.host can_svn = options.svn_repo # If there was no transport selected yet, now we must have enough data to # select one. if not options.send_patch and not (can_http or can_svn): parser.error('Please specify an access method.') # Convert options.diff into the content of the diff. if options.url: if options.files: parser.error('You cannot specify files and --url at the same time.') options.diff = urllib.urlopen(options.url).read() elif options.diff: if options.files: parser.error('You cannot specify files and --diff at the same time.') options.diff = gclient_utils.FileRead(options.diff, 'rb') elif options.issue and options.patchset is None: # Retrieve the patch from rietveld when the diff is not specified. # When patchset is specified, it's because it's done by gcl/git-try. 
if json is None: parser.error('json or simplejson library is missing, please install.') api_url = '%s/api/%d' % (options.rietveld_url, options.issue) logging.debug(api_url) contents = json.loads(urllib.urlopen(api_url).read()) options.patchset = contents['patchsets'][-1] diff_url = ('%s/download/issue%d_%d.diff' % (options.rietveld_url, options.issue, options.patchset)) diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines()) options.diff = ''.join(diff) else: # Use this as the base. root = checkouts[0].checkout_root diffs = [] for checkout in checkouts: diff = checkout.GenerateDiff().splitlines(True) path_diff = gclient_utils.PathDifference(root, checkout.checkout_root) # Munge it. diffs.extend(GetMungedDiff(path_diff, diff)) options.diff = ''.join(diffs) if not options.bot: # Get try slaves from PRESUBMIT.py files if not specified. # Even if the diff comes from options.url, use the local checkout for bot # selection. try: import presubmit_support root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py') options.bot = presubmit_support.DoGetTrySlaves( checkouts[0].GetFileNames(), checkouts[0].checkout_root, root_presubmit, False, sys.stdout) except ImportError: pass # If no bot is specified, either the default pool will be selected or the # try server will refuse the job. Either case we don't need to interfere. if options.name is None: if options.issue: options.name = 'Issue %s' % options.issue else: options.name = 'Unnamed' print('Note: use --name NAME to change the try job name.') if not options.email: parser.error('Using an anonymous checkout. Please use --email or set ' 'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.') else: print('Results will be emailed to: ' + options.email) # Prevent rietveld updates if we aren't running all the tests. if options.testfilter is not None: options.issue = None options.patchset = None # Send the patch. if options.send_patch: # If forced. 
options.send_patch(options) PrintSuccess(options) return 0 try: if can_http: _SendChangeHTTP(options) PrintSuccess(options) return 0 except NoTryServerAccess: if not can_svn: raise _SendChangeSVN(options) PrintSuccess(options) return 0 except (InvalidScript, NoTryServerAccess), e: if swallow_exception: return 1 print >> sys.stderr, e return 1 except gclient_utils.Error, e: print >> sys.stderr, e return 1 return 0
raise Error('failed to run command: %s' % ' '.join(args))
raise CheckCallError(args, kwargs.get('cwd', None), rv, None)
def CheckCallAndFilter(args, stdout=None, filter_fn=None, print_stdout=None, call_filter_on_first_line=False, **kwargs): """Runs a command and calls back a filter function if needed. Accepts all subprocess.Popen() parameters plus: print_stdout: If True, the command's stdout is forwarded to stdout. filter_fn: A function taking a single string argument called with each line of the subprocess's output. Each line has the trailing newline character trimmed. stdout: Can be any bufferable output. stderr is always redirected to stdout. """ assert print_stdout or filter_fn stdout = stdout or sys.stdout filter_fn = filter_fn or (lambda x: None) assert not 'stderr' in kwargs kid = Popen(args, bufsize=0, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kwargs) # Do a flush of stdout before we begin reading from the subprocess's stdout last_flushed_at = time.time() stdout.flush() # Also, we need to forward stdout to prevent weird re-ordering of output. # This has to be done on a per byte basis to make sure it is not buffered: # normally buffering is done for each line, but if svn requests input, no # end-of-line character is output after the prompt and it would not show up. in_byte = kid.stdout.read(1) if in_byte: if call_filter_on_first_line: filter_fn(None) in_line = '' while in_byte: if in_byte != '\r': if print_stdout: stdout.write(in_byte) if in_byte != '\n': in_line += in_byte else: filter_fn(in_line) in_line = '' # Flush at least 10 seconds between line writes. We wait at least 10 # seconds to avoid overloading the reader that called us with output, # which can slow busy readers down. if (time.time() - last_flushed_at) > 10: last_flushed_at = time.time() stdout.flush() in_byte = kid.stdout.read(1) # Flush the rest of buffered output. This is only an issue with # stdout/stderr not ending with a \n. if len(in_line): filter_fn(in_line) rv = kid.wait() if rv: raise Error('failed to run command: %s' % ' '.join(args)) return 0
def _GetAuthCookie(self, auth_token):
def _GetAuthCookie(self, host, auth_token):
def _GetAuthCookie(self, auth_token): """Fetches authentication cookies for an authentication token.
req = self._CreateRequest("%s/_ah/login?%s" % (self.host, urllib.urlencode(args))) try: response = self.opener.open(req) except urllib2.HTTPError, e: response = e
tries = 0 url = "%s/_ah/login?%s" % (host, urllib.urlencode(args)) while tries < 3: req = self._CreateRequest(url) try: response = self.opener.open(req) except urllib2.HTTPError, e: response = e if e.code == 301: url = e.info()["location"] continue break
def _GetAuthCookie(self, auth_token): """Fetches authentication cookies for an authentication token.
def _Authenticate(self):
def _Authenticate(self, host):
def _Authenticate(self): """Authenticates the user.
self._GetAuthCookie(auth_token)
self._GetAuthCookie(host, auth_token)
def _Authenticate(self): """Authenticates the user.
self._Authenticate()
self._Authenticate(self.host)
def Send(self, request_path, payload=None, content_type="application/octet-stream", timeout=None, extra_headers=None, **kwargs): """Sends an RPC and returns the response.
args = dict(kwargs) url = "%s%s" % (self.host, request_path) if args: url += "?" + urllib.urlencode(args)
def Send(self, request_path, payload=None, content_type="application/octet-stream", timeout=None, extra_headers=None, **kwargs): """Sends an RPC and returns the response.
self._Authenticate()
url_loc = urlparse.urlparse(url) self._Authenticate('%s://%s' % (url_loc[0], url_loc[1]))
def Send(self, request_path, payload=None, content_type="application/octet-stream", timeout=None, extra_headers=None, **kwargs): """Sends an RPC and returns the response.
def _Authenticate(self):
def _Authenticate(self, *args):
def _Authenticate(self): """Save the cookie jar after authentication.""" super(HttpRpcServer, self)._Authenticate() if self.save_cookies: StatusUpdate("Saving authentication cookies to %s" % self.cookie_file) self.cookie_jar.save()
super(HttpRpcServer, self)._Authenticate()
super(HttpRpcServer, self)._Authenticate(*args)
def _Authenticate(self): """Save the cookie jar after authentication.""" super(HttpRpcServer, self)._Authenticate() if self.save_cookies: StatusUpdate("Saving authentication cookies to %s" % self.cookie_file) self.cookie_jar.save()
"and verify you are a human. Then try again.")
"and verify you are a human. Then try again.\n" "If you are using a Google Apps account the URL is:\n" "https://www.google.com/a/yourdomain.com/UnlockCaptcha")
def _Authenticate(self): """Authenticates the user.
default="codereview.appspot.com",
default=DEFAULT_REVIEW_SERVER,
def _GetOpener(self): """Returns an OpenerDirector that supports cookies and ignores redirects.
password = getpass.getpass("Password for %s: " % local_email)
password = None if keyring: password = keyring.get_password(host, local_email) if password is not None: print "Using password from system keyring." else: password = getpass.getpass("Password for %s: " % local_email) if keyring: answer = raw_input("Store password in system keyring?(y/N) ").strip() if answer == "y": keyring.set_password(host, local_email, password)
def GetUserCredentials(): """Prompts the user for a username and password.""" # Create a local alias to the email variable to avoid Python's crazy # scoping rules. local_email = email if local_email is None: local_email = GetEmail("Email (login for uploading to %s)" % server) password = getpass.getpass("Password for %s: " % local_email) return (local_email, password)
custom_vars, deps_file):
custom_vars, deps_file, should_process):
def __init__(self, parent, name, url, safesync_url, custom_deps, custom_vars, deps_file): GClientKeywords.__init__(self) self.parent = parent self.name = name self.url = url self.parsed_url = None # These 2 are only set in .gclient and not in DEPS files. self.safesync_url = safesync_url self.custom_vars = custom_vars or {} self.custom_deps = custom_deps or {} self.deps_hooks = [] self.dependencies = [] self.deps_file = deps_file or self.DEPS_FILE # A cache of the files affected by the current operation, necessary for # hooks. self._file_list = [] # If it is not set to True, the dependency wasn't processed for its child # dependency, i.e. its DEPS wasn't read. self.deps_parsed = False # A direct reference is dependency that is referenced by a deps, deps_os or # solution. A indirect one is one that was loaded with From() or that # exceeded recursion limit. self.direct_reference = False # This dependency has been processed, i.e. checked out self.processed = False # This dependency had its hook run self.hooks_ran = False # Required dependencies to run before running this one: self.requirements = [] if self.parent and self.parent.name: self.requirements.append(self.parent.name) if isinstance(self.url, self.FromImpl): self.requirements.append(self.url.module_name)
self.direct_reference = False
self.should_process = should_process
def __init__(self, parent, name, url, safesync_url, custom_deps, custom_vars, deps_file): GClientKeywords.__init__(self) self.parent = parent self.name = name self.url = url self.parsed_url = None # These 2 are only set in .gclient and not in DEPS files. self.safesync_url = safesync_url self.custom_vars = custom_vars or {} self.custom_deps = custom_deps or {} self.deps_hooks = [] self.dependencies = [] self.deps_file = deps_file or self.DEPS_FILE # A cache of the files affected by the current operation, necessary for # hooks. self._file_list = [] # If it is not set to True, the dependency wasn't processed for its child # dependency, i.e. its DEPS wasn't read. self.deps_parsed = False # A direct reference is dependency that is referenced by a deps, deps_os or # solution. A indirect one is one that was loaded with From() or that # exceeded recursion limit. self.direct_reference = False # This dependency has been processed, i.e. checked out self.processed = False # This dependency had its hook run self.hooks_ran = False # Required dependencies to run before running this one: self.requirements = [] if self.parent and self.parent.name: self.requirements.append(self.parent.name) if isinstance(self.url, self.FromImpl): self.requirements.append(self.url.module_name)
ref.ParseDepsFile(False)
ref.ParseDepsFile()
def LateOverride(self, url): """Resolves the parsed url from url.
def ParseDepsFile(self, direct_reference):
def ParseDepsFile(self):
def ParseDepsFile(self, direct_reference): """Parses the DEPS file for this dependency.""" if direct_reference: # Maybe it was referenced earlier by a From() keyword but it's now # directly referenced. self.direct_reference = direct_reference if self.deps_parsed: logging.debug('%s was already parsed' % self.name) return self.deps_parsed = True filepath = os.path.join(self.root_dir(), self.name, self.deps_file) if not os.path.isfile(filepath): logging.info('%s: No DEPS file found at %s' % (self.name, filepath)) return deps_content = gclient_utils.FileRead(filepath) logging.debug(deps_content)
if direct_reference: self.direct_reference = direct_reference
assert self.processed == True
def ParseDepsFile(self, direct_reference): """Parses the DEPS file for this dependency.""" if direct_reference: # Maybe it was referenced earlier by a From() keyword but it's now # directly referenced. self.direct_reference = direct_reference if self.deps_parsed: logging.debug('%s was already parsed' % self.name) return self.deps_parsed = True filepath = os.path.join(self.root_dir(), self.name, self.deps_file) if not os.path.isfile(filepath): logging.info('%s: No DEPS file found at %s' % (self.name, filepath)) return deps_content = gclient_utils.FileRead(filepath) logging.debug(deps_content)
None))
None, should_process))
def ParseDepsFile(self, direct_reference): """Parses the DEPS file for this dependency.""" if direct_reference: # Maybe it was referenced earlier by a From() keyword but it's now # directly referenced. self.direct_reference = direct_reference if self.deps_parsed: logging.debug('%s was already parsed' % self.name) return self.deps_parsed = True filepath = os.path.join(self.root_dir(), self.name, self.deps_file) if not os.path.isfile(filepath): logging.info('%s: No DEPS file found at %s' % (self.name, filepath)) return deps_content = gclient_utils.FileRead(filepath) logging.debug(deps_content)
if self.recursion_limit():
if self.recursion_limit() > 0:
def run(self, options, revision_overrides, command, args, work_queue): """Runs 'command' before parsing the DEPS in case it's a initial checkout or a revert.""" assert self._file_list == [] # When running runhooks, there's no need to consult the SCM. # All known hooks are expected to run unconditionally regardless of working # copy state, so skip the SCM status check. run_scm = command not in ('runhooks', None) self.parsed_url = self.LateOverride(self.url) if run_scm and self.parsed_url: if isinstance(self.parsed_url, self.FileImpl): # Special support for single-file checkout. if not command in (None, 'cleanup', 'diff', 'pack', 'status'): options.revision = self.parsed_url.GetRevision() scm = gclient_scm.SVNWrapper(self.parsed_url.GetPath(), self.root_dir(), self.name) scm.RunCommand('updatesingle', options, args + [self.parsed_url.GetFilename()], self._file_list) else: options.revision = revision_overrides.get(self.name) scm = gclient_scm.CreateSCM(self.parsed_url, self.root_dir(), self.name) scm.RunCommand(command, options, args, self._file_list) self._file_list = [os.path.join(self.name, f.strip()) for f in self._file_list] options.revision = None self.processed = True if self.recursion_limit(): # Then we can parse the DEPS file. self.ParseDepsFile(True) # Adjust the implicit dependency requirement; e.g. if a DEPS file contains # both src/foo and src/foo/bar, src/foo/bar is implicitly dependent of # src/foo. Yes, it's O(n^2)... It's important to do that before # enqueueing them. for s in self.dependencies: for s2 in self.dependencies: if s is s2: continue if s.name.startswith(posixpath.join(s2.name, '')): s.requirements.append(s2.name)
self.ParseDepsFile(True)
self.ParseDepsFile()
def run(self, options, revision_overrides, command, args, work_queue): """Runs 'command' before parsing the DEPS in case it's a initial checkout or a revert.""" assert self._file_list == [] # When running runhooks, there's no need to consult the SCM. # All known hooks are expected to run unconditionally regardless of working # copy state, so skip the SCM status check. run_scm = command not in ('runhooks', None) self.parsed_url = self.LateOverride(self.url) if run_scm and self.parsed_url: if isinstance(self.parsed_url, self.FileImpl): # Special support for single-file checkout. if not command in (None, 'cleanup', 'diff', 'pack', 'status'): options.revision = self.parsed_url.GetRevision() scm = gclient_scm.SVNWrapper(self.parsed_url.GetPath(), self.root_dir(), self.name) scm.RunCommand('updatesingle', options, args + [self.parsed_url.GetFilename()], self._file_list) else: options.revision = revision_overrides.get(self.name) scm = gclient_scm.CreateSCM(self.parsed_url, self.root_dir(), self.name) scm.RunCommand(command, options, args, self._file_list) self._file_list = [os.path.join(self.name, f.strip()) for f in self._file_list] options.revision = None self.processed = True if self.recursion_limit(): # Then we can parse the DEPS file. self.ParseDepsFile(True) # Adjust the implicit dependency requirement; e.g. if a DEPS file contains # both src/foo and src/foo/bar, src/foo/bar is implicitly dependent of # src/foo. Yes, it's O(n^2)... It's important to do that before # enqueueing them. for s in self.dependencies: for s2 in self.dependencies: if s is s2: continue if s.name.startswith(posixpath.join(s2.name, '')): s.requirements.append(s2.name)
if self.deps_hooks and self.direct_reference:
if self.deps_hooks:
def RunHooksRecursively(self, options): """Evaluates all hooks, running actions as needed. run() must have been called before to load the DEPS.""" # If "--force" was specified, run all hooks regardless of what files have # changed. if self.deps_hooks and self.direct_reference: # TODO(maruel): If the user is using git or git-svn, then we don't know # what files have changed so we always run all hooks. It'd be nice to fix # that. if (options.force or isinstance(self.parsed_url, self.FileImpl) or gclient_scm.GetScmName(self.parsed_url) in ('git', None) or os.path.isdir(os.path.join(self.root_dir(), self.name, '.git'))): for hook_dict in self.deps_hooks: self._RunHookAction(hook_dict, []) else: # TODO(phajdan.jr): We should know exactly when the paths are absolute. # Convert all absolute paths to relative. file_list = self.file_list() for i in range(len(file_list)): # It depends on the command being executed (like runhooks vs sync). if not os.path.isabs(file_list[i]): continue
if self.recursion_limit(): for s in self.dependencies: s.RunHooksRecursively(options)
for s in self.dependencies: s.RunHooksRecursively(options)
def RunHooksRecursively(self, options): """Evaluates all hooks, running actions as needed. run() must have been called before to load the DEPS.""" # If "--force" was specified, run all hooks regardless of what files have # changed. if self.deps_hooks and self.direct_reference: # TODO(maruel): If the user is using git or git-svn, then we don't know # what files have changed so we always run all hooks. It'd be nice to fix # that. if (options.force or isinstance(self.parsed_url, self.FileImpl) or gclient_scm.GetScmName(self.parsed_url) in ('git', None) or os.path.isdir(os.path.join(self.root_dir(), self.name, '.git'))): for hook_dict in self.deps_hooks: self._RunHookAction(hook_dict, []) else: # TODO(phajdan.jr): We should know exactly when the paths are absolute. # Convert all absolute paths to relative. file_list = self.file_list() for i in range(len(file_list)): # It depends on the command being executed (like runhooks vs sync). if not os.path.isabs(file_list[i]): continue
if self.direct_reference or include_all: for d in self.dependencies:
for d in self.dependencies: if d.should_process or include_all:
def subtree(self, include_all): result = [] # Add breadth-first. if self.direct_reference or include_all: for d in self.dependencies: result.append(d) for d in self.dependencies: result.extend(d.subtree(include_all)) return result
'custom_vars', 'deps_hooks', '_file_list', 'processed', 'hooks_ran', 'deps_parsed', 'requirements', 'direct_reference'):
'custom_vars', 'deps_hooks', '_file_list', 'should_process', 'processed', 'hooks_ran', 'deps_parsed', 'requirements'):
def __str__(self): out = [] for i in ('name', 'url', 'parsed_url', 'safesync_url', 'custom_deps', 'custom_vars', 'deps_hooks', '_file_list', 'processed', 'hooks_ran', 'deps_parsed', 'requirements', 'direct_reference'): # 'deps_file' if self.__dict__[i]: out.append('%s: %s' % (i, self.__dict__[i]))
Dependency.__init__(self, None, None, None, None, None, None, None)
Dependency.__init__(self, None, None, None, None, None, None, None, True)
def __init__(self, root_dir, options): # Do not change previous behavior. Only solution level and immediate DEPS # are processed. self._recursion_limit = 2 Dependency.__init__(self, None, None, None, None, None, None, None) self._options = options if options.deps_os: enforced_os = options.deps_os.split(',') else: enforced_os = [self.DEPS_OS_CHOICES.get(sys.platform, 'unix')] if 'all' in enforced_os: enforced_os = self.DEPS_OS_CHOICES.itervalues() self._enforced_os = list(set(enforced_os)) self._root_dir = root_dir self.config_content = None
None))
None, True))
def SetConfig(self, content): assert self.dependencies == [] config_dict = {} self.config_content = content try: exec(content, config_dict) except SyntaxError, e: gclient_utils.SyntaxErrorToError('.gclient', e) for s in config_dict.get('solutions', []): try: self.dependencies.append(Dependency( self, s['name'], s['url'], s.get('safesync_url', None), s.get('custom_deps', {}), s.get('custom_vars', {}), None)) except KeyError: raise gclient_utils.Error('Invalid .gclient file. Solution is ' 'incomplete: %s' % s) # .gclient can have hooks. self.deps_hooks = config_dict.get('hooks', []) self.direct_reference = True self.deps_parsed = True
def ParseDepsFile(self, direct_reference):
def ParseDepsFile(self):
def ParseDepsFile(self, direct_reference): """No DEPS to parse for a .gclient file.""" raise gclient_utils.Error('Internal error')
if 'CHROME_HEADLESS' not in os.environ: jobs = 8 else: jobs = 1 parser.add_option('-j', '--jobs', default=jobs, type='int',
parser.add_option('-j', '--jobs', default=1, type='int',
def Main(argv): """Doesn't parse the arguments here, just find the right subcommand to execute.""" try: # Do it late so all commands are listed. CMDhelp.usage = ('\n\nCommands are:\n' + '\n'.join([ ' %-10s %s' % (fn[3:], Command(fn[3:]).__doc__.split('\n')[0].strip()) for fn in dir(sys.modules[__name__]) if fn.startswith('CMD')])) parser = optparse.OptionParser(version='%prog ' + __version__) # TODO(maruel): Temporary workaround to disable parallel checkout on # buildbots until they can correctly parse its output. stdout being # redirected is not a good signal, at least on linux. Check for environment # variable instead. if 'CHROME_HEADLESS' not in os.environ: jobs = 8 else: jobs = 1 parser.add_option('-j', '--jobs', default=jobs, type='int', help='Specify how many SCM commands can run in parallel; ' 'default=%default') parser.add_option('-v', '--verbose', action='count', default=0, help='Produces additional output for diagnostics. Can be ' 'used up to three times for more logging info.') parser.add_option('--gclientfile', dest='config_filename', default=os.environ.get('GCLIENT_FILE', '.gclient'), help='Specify an alternate %default file') # Integrate standard options processing. old_parser = parser.parse_args def Parse(args): (options, args) = old_parser(args) level = None if options.verbose == 2: level = logging.INFO elif options.verbose > 2: level = logging.DEBUG logging.basicConfig(level=level, format='%(module)s(%(lineno)d) %(funcName)s:%(message)s') options.entries_filename = options.config_filename + '_entries' if options.jobs < 1: parser.error('--jobs must be 1 or higher') # Always autoflush so buildbot doesn't kill us during lengthy operations. options.stdout = gclient_utils.StdoutAutoFlush(sys.stdout) # These hacks need to die. if not hasattr(options, 'revisions'): # GClient.RunOnDeps expects it even if not applicable. 
options.revisions = [] if not hasattr(options, 'head'): options.head = None if not hasattr(options, 'nohooks'): options.nohooks = True if not hasattr(options, 'deps_os'): options.deps_os = None if not hasattr(options, 'manually_grab_svn_rev'): options.manually_grab_svn_rev = None if not hasattr(options, 'force'): options.force = None return (options, args) parser.parse_args = Parse # We don't want wordwrapping in epilog (usually examples) parser.format_epilog = lambda _: parser.epilog or '' if argv: command = Command(argv[0]) if command: # 'fix' the usage and the description now that we know the subcommand. GenUsage(parser, argv[0]) return command(parser, argv[1:]) # Not a known command. Default to help. GenUsage(parser, 'help') return CMDhelp(parser, argv) except gclient_utils.Error, e: print >> sys.stderr, 'Error: %s' % str(e) return 1
self._long_text)
self._long_text.encode('ascii', 'replace'))
def _Handle(self, output_stream, input_stream, may_prompt=True): """Writes this result to the output stream.
if options.dry_run: return
def _SendChangeHTTP(options): """Send a change to the try server using the HTTP protocol.""" if not options.host: raise NoTryServerAccess('Please use the --host option to specify the try ' 'server host to connect to.') if not options.port: raise NoTryServerAccess('Please use the --port option to specify the try ' 'server port to connect to.') values = _ParseSendChangeOptions(options) description = ''.join("%s=%s\n" % (k, v) for (k, v) in values.iteritems()) values['patch'] = options.diff url = 'http://%s:%s/send_try_patch' % (options.host, options.port) proxies = None if options.proxy: if options.proxy.lower() == 'none': # Effectively disable HTTP_PROXY or Internet settings proxy setup. proxies = {} else: proxies = {'http': options.proxy, 'https': options.proxy} logging.info('Sending by HTTP') logging.info(description) logging.info(url) logging.info(options.diff) if options.dry_run: return try: connection = urllib.urlopen(url, urllib.urlencode(values), proxies=proxies) except IOError, e: logging.warning(str(e)) if (values.get('bot') and len(e.args) > 2 and e.args[2] == 'got a bad status line'): raise NoTryServerAccess('%s is unaccessible. Bad --bot argument?' % url) else: raise NoTryServerAccess('%s is unaccessible. Reason: %s' % (url, str(e.args))) if not connection: raise NoTryServerAccess('%s is unaccessible.' % url) response = connection.read() if response != 'OK': raise NoTryServerAccess('%s is unaccessible. Got:\n%s' % (url, response))
url, revision = gclient_utils.SplitUrlRevision(self.url)
url, deps_revision = gclient_utils.SplitUrlRevision(self.url)
def update(self, options, args, file_list): """Runs git to update or transparently checkout the working copy.
if options.revision:
if options.revision or deps_revision:
def update(self, options, args, file_list): """Runs git to update or transparently checkout the working copy.
description = gcl.GetIssueDescription(issue)
description = Backquote(['git', 'cl', 'status', '--field=desc'])
def __init__(self, commit=None, upstream_branch=None): self.commit = commit self.verbose = None self.default_presubmit = None self.may_prompt = None
merge_base = self._Run(['merge-base', 'HEAD', 'origin']) self._Run(['diff', merge_base], redirect_stdout=False)
merge_base = self._Capture(['merge-base', 'HEAD', 'origin']) self._Run(['diff', merge_base])
def diff(self, options, args, file_list): merge_base = self._Run(['merge-base', 'HEAD', 'origin']) self._Run(['diff', merge_base], redirect_stdout=False)
self._Run(['checkout-index', '-a', '--prefix=%s/' % export_path], redirect_stdout=False)
self._Run(['checkout-index', '-a', '--prefix=%s/' % export_path])
def export(self, options, args, file_list): """Export a clean directory tree into the given path.
merge_base = self._Run(['merge-base', 'HEAD', 'origin'])
merge_base = self._Capture(['merge-base', 'HEAD', 'origin'])
def pack(self, options, args, file_list): """Generates a patch file which can be applied to the root of the repository.
files = self._Run(['ls-files']).split()
files = self._Capture(['ls-files']).split()
def update(self, options, args, file_list): """Runs git to update or transparently checkout the working copy.
self._Run(['reset', '--hard', 'HEAD'], redirect_stdout=False)
self._Run(['reset', '--hard', 'HEAD'])
def update(self, options, args, file_list): """Runs git to update or transparently checkout the working copy.
self._Run(['checkout', '--quiet', '%s^0' % revision])
self._Capture(['checkout', '--quiet', '%s^0' % revision])
def update(self, options, args, file_list): """Runs git to update or transparently checkout the working copy.
files = self._Run(['diff', upstream_branch, '--name-only']).split()
files = self._Capture(['diff', upstream_branch, '--name-only']).split()
def update(self, options, args, file_list): """Runs git to update or transparently checkout the working copy.
files = self._Run(['diff', deps_revision, '--name-only']).split() self._Run(['reset', '--hard', deps_revision], redirect_stdout=False)
files = self._Capture(['diff', deps_revision, '--name-only']).split() self._Run(['reset', '--hard', deps_revision])
def revert(self, options, args, file_list): """Reverts local modifications.
return self._Run(['rev-parse', 'HEAD'])
return self._Capture(['rev-parse', 'HEAD'])
def revinfo(self, options, args, file_list): """Display revision""" return self._Run(['rev-parse', 'HEAD'])
merge_base = self._Run(['merge-base', 'HEAD', 'origin']) self._Run(['diff', '--name-status', merge_base], redirect_stdout=False) files = self._Run(['diff', '--name-only', merge_base]).split()
merge_base = self._Capture(['merge-base', 'HEAD', 'origin']) self._Run(['diff', '--name-status', merge_base]) files = self._Capture(['diff', '--name-only', merge_base]).split()
def status(self, options, args, file_list): """Display status information.""" if not os.path.isdir(self.checkout_path): options.stdout.write( ('\n________ couldn\'t run status in %s:\nThe directory ' 'does not exist.\n') % self.checkout_path) else: merge_base = self._Run(['merge-base', 'HEAD', 'origin']) self._Run(['diff', '--name-status', merge_base], redirect_stdout=False) files = self._Run(['diff', '--name-only', merge_base]).split() file_list.extend([os.path.join(self.checkout_path, f) for f in files])
self._Run(clone_cmd, cwd=self._root_dir, redirect_stdout=False)
self._Run(clone_cmd, cwd=self._root_dir)
def _Clone(self, revision, url, options): """Clone a git repository from the given URL.
self._Run(['checkout', '--quiet', '%s^0' % revision])
self._Capture(['checkout', '--quiet', '%s^0' % revision])
def _Clone(self, revision, url, options): """Clone a git repository from the given URL.
files.extend(self._Run(['diff', upstream, '--name-only']).split())
files.extend(self._Capture(['diff', upstream, '--name-only']).split())
def _AttemptRebase(self, upstream, files, options, newbase=None, branch=None, printed_path=False): """Attempt to rebase onto either upstream or, if specified, newbase.""" files.extend(self._Run(['diff', upstream, '--name-only']).split()) revision = upstream if newbase: revision = newbase if not printed_path: options.stdout.write('\n_____ %s : Attempting rebase onto %s...\n' % ( self.relpath, revision)) printed_path = True else: options.stdout.write('Attempting rebase onto %s...\n' % revision)
self._Run(['reset', '--hard', 'HEAD'], redirect_stdout=False)
self._Run(['reset', '--hard', 'HEAD'])
def _AttemptRebase(self, upstream, files, options, newbase=None, branch=None, printed_path=False): """Attempt to rebase onto either upstream or, if specified, newbase.""" files.extend(self._Run(['diff', upstream, '--name-only']).split()) revision = upstream if newbase: revision = newbase if not printed_path: options.stdout.write('\n_____ %s : Attempting rebase onto %s...\n' % ( self.relpath, revision)) printed_path = True else: options.stdout.write('Attempting rebase onto %s...\n' % revision)
name = "saved-by-gclient-" + self._Run(["rev-parse", "--short", "HEAD"]) self._Run(["branch", name])
name = ('saved-by-gclient-' + self._Capture(['rev-parse', '--short', 'HEAD'])) self._Capture(['branch', name])
def _CheckDetachedHead(self, rev_str, options): # HEAD is detached. Make sure it is safe to move away from (i.e., it is # reference by a commit). If not, error out -- most likely a rebase is # in progress, try to detect so we can give a better error. try: _, _ = scm.GIT.Capture( ['name-rev', '--no-undefined', 'HEAD'], self.checkout_path, print_error=False) except gclient_utils.CheckCallError: # Commit is not contained by any rev. See if the user is rebasing: if self._IsRebasing(): # Punt to the user raise gclient_utils.Error('\n____ %s%s\n' '\tAlready in a conflict, i.e. (no branch).\n' '\tFix the conflict and run gclient again.\n' '\tOr to abort run:\n\t\tgit-rebase --abort\n' '\tSee man git-rebase for details.\n' % (self.relpath, rev_str)) # Let's just save off the commit so we can proceed. name = "saved-by-gclient-" + self._Run(["rev-parse", "--short", "HEAD"]) self._Run(["branch", name]) options.stdout.write( '\n_____ found an unreferenced commit and saved it as \'%s\'\n' % name)
branch = self._Run(['rev-parse', '--abbrev-ref=strict', 'HEAD'])
branch = self._Capture(['rev-parse', '--abbrev-ref=strict', 'HEAD'])
def _GetCurrentBranch(self): # Returns name of current branch or None for detached HEAD branch = self._Run(['rev-parse', '--abbrev-ref=strict', 'HEAD']) if branch == 'HEAD': return None return branch
def _Run(self, args, cwd=None, redirect_stdout=True): if cwd is None: cwd = self.checkout_path stdout = None if redirect_stdout: stdout = subprocess.PIPE if cwd == None: cwd = self.checkout_path cmd = ['git'] + args logging.debug(cmd)
def _Capture(self, args): return gclient_utils.CheckCall(['git'] + args, cwd=self.checkout_path)[0].strip() def _Run(self, args, **kwargs): kwargs.setdefault('cwd', self.checkout_path)
def _Run(self, args, cwd=None, redirect_stdout=True): # TODO(maruel): Merge with Capture or better gclient_utils.CheckCall(). if cwd is None: cwd = self.checkout_path stdout = None if redirect_stdout: stdout = subprocess.PIPE if cwd == None: cwd = self.checkout_path cmd = ['git'] + args logging.debug(cmd) try: sp = gclient_utils.Popen(cmd, cwd=cwd, stdout=stdout) output = sp.communicate()[0] except OSError: raise gclient_utils.Error("git command '%s' failed to run." % ' '.join(cmd) + "\nCheck that you have git installed.") if sp.returncode: raise gclient_utils.Error('git command %s returned %d' % (args[0], sp.returncode)) if output is not None: return output.strip()
sp = gclient_utils.Popen(cmd, cwd=cwd, stdout=stdout) output = sp.communicate()[0]
gclient_utils.Popen(['git'] + args, **kwargs).communicate()
def _Run(self, args, cwd=None, redirect_stdout=True): # TODO(maruel): Merge with Capture or better gclient_utils.CheckCall(). if cwd is None: cwd = self.checkout_path stdout = None if redirect_stdout: stdout = subprocess.PIPE if cwd == None: cwd = self.checkout_path cmd = ['git'] + args logging.debug(cmd) try: sp = gclient_utils.Popen(cmd, cwd=cwd, stdout=stdout) output = sp.communicate()[0] except OSError: raise gclient_utils.Error("git command '%s' failed to run." % ' '.join(cmd) + "\nCheck that you have git installed.") if sp.returncode: raise gclient_utils.Error('git command %s returned %d' % (args[0], sp.returncode)) if output is not None: return output.strip()
if sp.returncode: raise gclient_utils.Error('git command %s returned %d' % (args[0], sp.returncode)) if output is not None: return output.strip()
def _Run(self, args, cwd=None, redirect_stdout=True): # TODO(maruel): Merge with Capture or better gclient_utils.CheckCall(). if cwd is None: cwd = self.checkout_path stdout = None if redirect_stdout: stdout = subprocess.PIPE if cwd == None: cwd = self.checkout_path cmd = ['git'] + args logging.debug(cmd) try: sp = gclient_utils.Popen(cmd, cwd=cwd, stdout=stdout) output = sp.communicate()[0] except OSError: raise gclient_utils.Error("git command '%s' failed to run." % ' '.join(cmd) + "\nCheck that you have git installed.") if sp.returncode: raise gclient_utils.Error('git command %s returned %d' % (args[0], sp.returncode)) if output is not None: return output.strip()
'random', 're', 'shutil', 'string', 'subprocess', 'sys', 'tempfile',
'random', 're', 'string', 'subprocess', 'sys', 'tempfile',
def testMembersChanged(self): self.mox.ReplayAll() members = [ 'CODEREVIEW_SETTINGS', 'CODEREVIEW_SETTINGS_FILE', 'CMDchange', 'CMDchanges', 'CMDcommit', 'CMDdelete', 'CMDdeleteempties', 'CMDdescription', 'CMDdiff', 'CMDhelp', 'CMDlint', 'CMDnothave', 'CMDopened', 'CMDpassthru', 'CMDpresubmit', 'CMDrename', 'CMDsettings', 'CMDstatus', 'CMDtry', 'CMDupload', 'ChangeInfo', 'Command', 'DEFAULT_LINT_IGNORE_REGEX', 'DEFAULT_LINT_REGEX', 'CheckHomeForFile', 'DoPresubmitChecks', 'ErrorExit', 'FILES_CACHE', 'FilterFlag', 'GenUsage', 'GenerateChangeName', 'GenerateDiff', 'GetCLs', 'GetCacheDir', 'GetCachedFile', 'GetChangelistInfoFile', 'GetChangesDir', 'GetCodeReviewSetting', 'GetEditor', 'GetFilesNotInCL', 'GetInfoDir', 'GetIssueDescription', 'GetModifiedFiles', 'GetRepositoryRoot', 'ListFiles', 'LoadChangelistInfoForMultiple', 'MISSING_TEST_MSG', 'OptionallyDoPresubmitChecks', 'REPOSITORY_ROOT', 'RunShell', 'RunShellWithReturnCode', 'SVN', 'SendToRietveld', 'TryChange', 'UnknownFiles', 'Warn', 'attrs', 'breakpad', 'defer_attributes', 'gclient_utils', 'getpass', 'main', 'need_change', 'need_change_and_args', 'no_args', 'os', 'random', 're', 'shutil', 'string', 'subprocess', 'sys', 'tempfile', 'time', 'upload', 'urllib2', ] # If this test fails, you should add the relevant test. self.compareMembers(gcl, members)
files = GetFilesNotInCL()
files = [f[1] for f in GetFilesNotInCL()]
def CMDdiff(args): """Diffs all files in the changelist or all files that aren't in a CL.""" files = None if args: change_info = ChangeInfo.Load(args.pop(0), GetRepositoryRoot(), True, True) files = change_info.GetFileNames() else: files = GetFilesNotInCL() root = GetRepositoryRoot() cmd = ['svn', 'diff'] cmd.extend([os.path.join(root, x) for x in files]) cmd.extend(args) return RunShellWithReturnCode(cmd, print_output=True)[1]
group.add_option("--webkit", action="append_const", const="third_party/WebKit", dest="PATH", help="Shorthand for -s third_party/WebKit")
def WebKitRevision(options, opt, value, parser): if parser.rargs and not parser.rargs[0].startswith('-'): options.sub_rep.append('third_party/WebKit@%s' % parser.rargs.pop(0)) else: options.sub_rep.append('third_party/WebKit') group.add_option("-W", "--webkit", action="callback", callback=WebKitRevision, metavar="BRANCH", help="Shorthand for -s third_party/WebKit@BRANCH. " "BRANCH is optional and is the branch the current " "checkout will be diff'ed against.")
def TryChange(argv, file_list, swallow_exception, prog=None):
  """Sends a try job to the try server and reports the outcome.

  Args:
    argv: Arguments and options.
    file_list: Default value to pass to --file.
    swallow_exception: Whether we raise or swallow exceptions.
  """
  # Parse argv
  parser = optparse.OptionParser(usage=USAGE,
                                 version=__version__,
                                 prog=prog)
  parser.add_option("-v", "--verbose", action="count", default=0,
                    help="Prints debugging infos")

  # --- Result / status options -------------------------------------------
  group = optparse.OptionGroup(parser, "Result and status")
  group.add_option("-u", "--user", default=getpass.getuser(),
                   help="Owner user name [default: %default]")
  group.add_option("-e", "--email",
                   default=os.environ.get('TRYBOT_RESULTS_EMAIL_ADDRESS',
                                          os.environ.get('EMAIL_ADDRESS')),
                   help="Email address where to send the results. Use either "
                        "the TRYBOT_RESULTS_EMAIL_ADDRESS environment "
                        "variable or EMAIL_ADDRESS to set the email address "
                        "the try bots report results to [default: %default]")
  group.add_option("-n", "--name",
                   help="Descriptive name of the try job")
  group.add_option("--issue", type='int',
                   help="Update rietveld issue try job status")
  group.add_option("--patchset", type='int',
                   help="Update rietveld issue try job status. This is "
                        "optional if --issue is used, In that case, the "
                        "latest patchset will be used.")
  group.add_option("--dry_run", action='store_true',
                   help="Just prints the diff and quits")
  group.add_option("-R", "--rietveld_url", default="codereview.appspot.com",
                   metavar="URL",
                   help="The root code review url. Default:%default")
  parser.add_option_group(group)

  # --- Try job options ---------------------------------------------------
  group = optparse.OptionGroup(parser, "Try job options")
  group.add_option("-b", "--bot", action="append",
                   help="Only use specifics build slaves, ex: '--bot win' to "
                        "run the try job only on the 'win' slave; see the try "
                        "server waterfall for the slave's name")
  group.add_option("-r", "--revision",
                   help="Revision to use for the try job; default: the "
                        "revision will be determined by the try server; see "
                        "its waterfall for more info")
  group.add_option("-c", "--clobber", action="store_true",
                   help="Force a clobber before building; e.g. don't do an "
                        "incremental build")
  # TODO(maruel): help="Select a specific configuration, usually 'debug' or "
  #                    "'release'"
  group.add_option("--target", help=optparse.SUPPRESS_HELP)
  group.add_option("--project",
                   help="Override which project to use. Projects are defined "
                        "server-side to define what default bot set to use")
  # Override the list of tests to run, use multiple times to list many tests
  # (or comma separated)
  group.add_option("-t", "--tests", action="append",
                   help=optparse.SUPPRESS_HELP)
  parser.add_option_group(group)

  # --- Patch selection options -------------------------------------------
  group = optparse.OptionGroup(parser, "Patch to run")
  group.add_option("-f", "--file", default=file_list, dest="files",
                   metavar="FILE", action="append",
                   help="Use many times to list the files to include in the "
                        "try, relative to the repository root")
  group.add_option("--diff",
                   help="File containing the diff to try")
  group.add_option("--url",
                   help="Url where to grab a patch")
  group.add_option("--root",
                   help="Root to use for the patch; base subdirectory for "
                        "patch created in a subdirectory")
  group.add_option("-p", "--patchlevel", type='int', metavar="LEVEL",
                   help="Used as -pN parameter to patch")
  group.add_option("-s", "--sub_rep", action="append", default=[],
                   help="Subcheckout to use in addition. This is mainly "
                        "useful for gclient-style checkouts. Use @rev or "
                        "@branch or @branch1..branch2 to specify the "
                        "revision/branch to diff against.")
  # Mostly chromium-specific
  try:
    group.add_option("--webkit", action="append_const",
                     const="third_party/WebKit",
                     dest="PATH",
                     help="Shorthand for -s third_party/WebKit")
  except optparse.OptionError:
    # append_const is not supported on 2.4. Too bad.
    pass
  group.add_option("--no_gclient", action="store_true",
                   help="Disable automatic search for gclient checkout.")
  group.add_option("-E", "--exclude", action="append",
                   default=['ChangeLog'], metavar='REGEXP',
                   help="Regexp patterns to exclude files. Default: %default")
  parser.add_option_group(group)

  # --- Transport options -------------------------------------------------
  group = optparse.OptionGroup(parser, "Access the try server by HTTP")
  group.add_option("--use_http",
                   action="store_const",
                   const=_SendChangeHTTP,
                   dest="send_patch",
                   help="Use HTTP to talk to the try server [default]")
  group.add_option("-H", "--host",
                   help="Host address")
  group.add_option("-P", "--port",
                   help="HTTP port")
  group.add_option("--proxy",
                   help="HTTP proxy")
  parser.add_option_group(group)
  group = optparse.OptionGroup(parser, "Access the try server with SVN")
  group.add_option("--use_svn",
                   action="store_const",
                   const=_SendChangeSVN,
                   dest="send_patch",
                   help="Use SVN to talk to the try server")
  group.add_option("-S", "--svn_repo",
                   metavar="SVN_URL",
                   help="SVN url to use to write the changes in; --use_svn is "
                        "implied when using --svn_repo")
  parser.add_option_group(group)

  options, args = parser.parse_args(argv)
  if len(args) == 1 and args[0] == 'help':
    # NOTE(review): execution falls through after printing help instead of
    # returning -- confirm this is intended.
    parser.print_help()
  if not swallow_exception:
    if options.verbose == 0:
      logging.basicConfig(level=logging.ERROR)
    elif options.verbose == 1:
      logging.basicConfig(level=logging.WARNING)
    elif options.verbose == 2:
      logging.basicConfig(level=logging.INFO)
    elif options.verbose > 2:
      logging.basicConfig(level=logging.DEBUG)

  logging.debug(argv)

  if options.rietveld_url:
    # Try to extract the review number if possible and fix the protocol.
    if not '://' in options.rietveld_url:
      options.rietveld_url = 'http://' + options.rietveld_url
    match = re.match(r'^(.*)/(\d+)$', options.rietveld_url)
    if match:
      if options.issue or options.patchset:
        parser.error('Cannot use both --issue and use a review number url')
      options.issue = int(match.group(2))
      options.rietveld_url = match.group(1)

  try:
    # Always include os.getcwd() in the checkout settings.
    checkouts = []
    checkouts.append(GuessVCS(options, os.getcwd()))
    checkouts[0].AutomagicalSettings()
    for item in options.sub_rep:
      checkout = GuessVCS(options, os.path.join(checkouts[0].checkout_root,
                                                item))
      if checkout.checkout_root in [c.checkout_root for c in checkouts]:
        parser.error('Specified the root %s two times.' %
                     checkout.checkout_root)
      checkouts.append(checkout)

    can_http = options.port and options.host
    can_svn = options.svn_repo
    # If there was no transport selected yet, now we must have enough data to
    # select one.
    if not options.send_patch and not (can_http or can_svn):
      parser.error('Please specify an access method.')

    # Convert options.diff into the content of the diff.
    if options.url:
      if options.files:
        parser.error('You cannot specify files and --url at the same time.')
      options.diff = urllib.urlopen(options.url).read()
    elif options.diff:
      if options.files:
        parser.error('You cannot specify files and --diff at the same time.')
      options.diff = gclient_utils.FileRead(options.diff, 'rb')
    elif options.issue and options.patchset is None:
      # Retrieve the patch from rietveld when the diff is not specified.
      # When patchset is specified, it's because it's done by gcl/git-try.
      try:
        import simplejson
      except ImportError:
        parser.error('simplejson library is missing, please install.')
      api_url = '%s/api/%d' % (options.rietveld_url, options.issue)
      logging.debug(api_url)
      contents = simplejson.loads(urllib.urlopen(api_url).read())
      # The last patchset is the most recent one.
      options.patchset = contents['patchsets'][-1]
      diff_url = ('%s/download/issue%d_%d.diff' %
          (options.rietveld_url,  options.issue, options.patchset))
      diff = GetMungedDiff('', urllib.urlopen(diff_url).readlines())
      options.diff = ''.join(diff)
    else:
      # Use this as the base.
      root = checkouts[0].checkout_root
      diffs = []
      for checkout in checkouts:
        diff = checkout.GenerateDiff().splitlines(True)
        path_diff = gclient_utils.PathDifference(root, checkout.checkout_root)
        # Munge it.
        diffs.extend(GetMungedDiff(path_diff, diff))
      options.diff = ''.join(diffs)

    if not options.bot:
      # Get try slaves from PRESUBMIT.py files if not specified.
      # Even if the diff comes from options.url, use the local checkout for bot
      # selection.
      try:
        import presubmit_support
        root_presubmit = checkouts[0].ReadRootFile('PRESUBMIT.py')
        options.bot = presubmit_support.DoGetTrySlaves(
            checkouts[0].GetFileNames(),
            checkouts[0].checkout_root,
            root_presubmit,
            False,
            sys.stdout)
      except ImportError:
        # presubmit_support is optional; missing module means no bot hints.
        pass
      # If no bot is specified, either the default pool will be selected or the
      # try server will refuse the job. Either case we don't need to interfere.

    if options.name is None:
      if options.issue:
        options.name = 'Issue %s' % options.issue
      else:
        options.name = 'Unnamed'
        print('Note: use --name NAME to change the try job name.')
    if not options.email:
      parser.error('Using an anonymous checkout. Please use --email or set '
                   'the TRYBOT_RESULTS_EMAIL_ADDRESS environment variable.')
    else:
      print('Results will be emailed to: ' + options.email)

    # Send the patch.
    if options.send_patch:
      # If forced.
      options.send_patch(options)
      PrintSuccess(options)
      return 0
    # Otherwise try HTTP first, falling back to SVN when possible.
    try:
      if can_http:
        _SendChangeHTTP(options)
        PrintSuccess(options)
        return 0
    except NoTryServerAccess:
      if not can_svn:
        raise
    _SendChangeSVN(options)
    PrintSuccess(options)
    return 0
  except (InvalidScript, NoTryServerAccess), e:
    if swallow_exception:
      return 1
    print e
    return 1
  return 0
if not self._options.delete_unversioned_trees or modified_files:
if (not self._options.delete_unversioned_trees or (modified_files and not self._options.force)):
def RunOnDeps(self, command, args): """Runs a command on each dependency in a client and its dependencies.
help='delete any unexpected unversioned trees ' 'that are in the checkout')
help='delete any dependency that have been removed from ' 'last sync as long as there is no local modification. ' 'Coupled with --force, it will remove them even with ' 'local modifications')
def CMDsync(parser, args):
  """Checkout/update all modules."""
  parser.add_option('-f', '--force', action='store_true',
                    help='force update even for unchanged modules')
  parser.add_option('-n', '--nohooks', action='store_true',
                    help='don\'t run hooks after the update is complete')
  parser.add_option('-r', '--revision', action='append',
                    dest='revisions', metavar='REV', default=[],
                    help='Enforces revision/hash for the solutions with the '
                         'format src@rev. The src@ part is optional and can be '
                         'skipped. -r can be used multiple times when .gclient '
                         'has multiple solutions configured and will work even '
                         'if the src@ part is skipped. Note that specifying '
                         '--revision means your safesync_url gets ignored.')
  parser.add_option('-H', '--head', action='store_true',
                    help='skips any safesync_urls specified in '
                         'configured solutions and sync to head instead')
  # Help text fixed to describe the actual semantics: removed dependencies
  # are deleted (not arbitrary unversioned trees), and --force extends the
  # deletion to locally modified ones.
  parser.add_option('-D', '--delete_unversioned_trees', action='store_true',
                    help='delete any dependency that have been removed from '
                         'last sync as long as there is no local modification. '
                         'Coupled with --force, it will remove them even with '
                         'local modifications')
  parser.add_option('-R', '--reset', action='store_true',
                    help='resets any local changes before updating (git only)')
  parser.add_option('--deps', dest='deps_os', metavar='OS_LIST',
                    help='override deps for the specified (comma-separated) '
                         'platform(s); \'all\' will process all deps_os '
                         'references')
  parser.add_option('-m', '--manually_grab_svn_rev', action='store_true',
                    help='Skip svn up whenever possible by requesting '
                         'actual HEAD revision from the repository')
  (options, args) = parser.parse_args(args)
  client = GClient.LoadCurrentConfig(options)

  if not client:
    raise gclient_utils.Error('client not configured; see \'gclient config\'')

  if options.revisions and options.head:
    # TODO(maruel): Make it a parser.error if it doesn't break any builder.
    print('Warning: you cannot use both --head and --revision')

  if options.verbose:
    # Print out the .gclient file. This is longer than if we just printed the
    # client dict, but more legible, and it might contain helpful comments.
    print(client.config_content)
  return client.RunOnDeps('update', args)
entries = {} entries_deps_content = {}
def GetURLAndRev(name, original_url):
  """Returns the (url, revision) pair 'name' should be checked out at.

  Precedence: an explicit entry in revision_overrides always wins; then a
  revision embedded in the url itself; finally, the revision currently
  checked out according to the SCM.
  """
  url, revision = gclient_utils.SplitUrlRevision(original_url)
  # The original code tested the override in both branches; check it once
  # up front instead (also drops the deprecated dict.has_key()).
  if name in revision_overrides:
    return (url, revision_overrides[name])
  if revision:
    return (url, revision)
  # Nothing pinned anywhere: ask the SCM what is currently checked out.
  scm = gclient_scm.CreateSCM(solution["url"], self._root_dir, name)
  return (url, scm.revinfo(self._options, [], None))
parser.add_option('-j', '--jobs', default=1, type='int', help='Specify how many SCM commands can run in parallel')
parser.add_option('-j', '--jobs', default=8, type='int', help='Specify how many SCM commands can run in parallel; ' 'default=%default')
def Main(argv): """Doesn't parse the arguments here, just find the right subcommand to execute.""" try: # Do it late so all commands are listed. CMDhelp.usage = ('\n\nCommands are:\n' + '\n'.join([ ' %-10s %s' % (fn[3:], Command(fn[3:]).__doc__.split('\n')[0].strip()) for fn in dir(sys.modules[__name__]) if fn.startswith('CMD')])) parser = optparse.OptionParser(version='%prog ' + __version__) parser.add_option('-j', '--jobs', default=1, type='int', help='Specify how many SCM commands can run in parallel') parser.add_option('-v', '--verbose', action='count', default=0, help='Produces additional output for diagnostics. Can be ' 'used up to three times for more logging info.') parser.add_option('--gclientfile', dest='config_filename', default=os.environ.get('GCLIENT_FILE', '.gclient'), help='Specify an alternate %default file') # Integrate standard options processing. old_parser = parser.parse_args def Parse(args): (options, args) = old_parser(args) level = None if options.verbose == 2: level = logging.INFO elif options.verbose > 2: level = logging.DEBUG logging.basicConfig(level=level, format='%(module)s(%(lineno)d) %(funcName)s:%(message)s') options.entries_filename = options.config_filename + '_entries' if options.jobs < 1: parser.error('--jobs must be 1 or higher') # Always autoflush so buildbot doesn't kill us during lengthy operations. options.stdout = gclient_utils.StdoutAutoFlush(sys.stdout) # These hacks need to die. if not hasattr(options, 'revisions'): # GClient.RunOnDeps expects it even if not applicable. 
options.revisions = [] if not hasattr(options, 'head'): options.head = None if not hasattr(options, 'nohooks'): options.nohooks = True if not hasattr(options, 'deps_os'): options.deps_os = None if not hasattr(options, 'manually_grab_svn_rev'): options.manually_grab_svn_rev = None if not hasattr(options, 'force'): options.force = None return (options, args) parser.parse_args = Parse # We don't want wordwrapping in epilog (usually examples) parser.format_epilog = lambda _: parser.epilog or '' if argv: command = Command(argv[0]) if command: # 'fix' the usage and the description now that we know the subcommand. GenUsage(parser, argv[0]) return command(parser, argv[1:]) # Not a known command. Default to help. GenUsage(parser, 'help') return CMDhelp(parser, argv) except gclient_utils.Error, e: print >> sys.stderr, 'Error: %s' % str(e) return 1
return gclient_util.FileRead(filepath)
return gclient_utils.FileRead(filepath)
def ReadRootFile(self, filename):
  """Returns the content of 'filename' found at or above the checkout root.

  With --root set, only looks directly in the checkout root.  Otherwise
  walks up from the checkout root to the gclient root.  Returns None when
  the file is not found.
  """
  if not self.options.root:
    filepath = os.path.join(self.checkout_root, filename)
    if os.path.isfile(filepath):
      logging.info('Found %s at %s' % (filename, self.checkout_root))
      # Fixed typo: the module is gclient_utils (as used below), not
      # gclient_util, which raised NameError on this path.
      return gclient_utils.FileRead(filepath)
    return None
  cur = os.path.abspath(self.checkout_root)
  if self.gclient_root:
    root = os.path.abspath(self.gclient_root)
  else:
    root = gclient_utils.FindGclientRoot(cur)
  assert cur.startswith(root), (root, cur)
  while cur.startswith(root):
    filepath = os.path.join(cur, filename)
    if os.path.isfile(filepath):
      logging.info('Found %s at %s' % (filename, cur))
      return gclient_utils.FileRead(filepath)
    cur = os.path.dirname(cur)
  logging.warning('Didn\'t find %s' % filename)
  return None
if current_type == "branch": remote_output, remote_err = scm.GIT.Capture( ['fetch'] + verbose + ['origin', revision], self.checkout_path, print_error=False) else: remote_output, remote_err = scm.GIT.Capture(
remote_output, remote_err = scm.GIT.Capture(
def update(self, options, args, file_list): """Runs git to update or transparently checkout the working copy.
x.startswith('svn: Unknown hostname')):
x.startswith('svn: Unknown hostname') or x.startswith('svn: Server sent unexpected return value')):
def IsKnownFailure():
  """Returns True if the captured svn output matches a known transient
  network/server failure that is worth retrying."""
  for x in failure:
    if (x.startswith('svn: OPTIONS of') or
        x.startswith('svn: PROPFIND of') or
        x.startswith('svn: REPORT of') or
        x.startswith('svn: Unknown hostname') or
        # Flaky proxies/servers answer with unexpected HTTP codes; retryable.
        x.startswith('svn: Server sent unexpected return value')):
      return True
  return False
sub_target = url.sub_target_name or url
sub_target = url.sub_target_name or self.name
def LateOverride(self, url):
  """Resolves this dependency's url into its final, concrete form.

  Handles (in order): custom_deps overrides, From() indirections, relative
  string urls and File() entries.  The result is cached in self.parsed_url
  and returned.
  """
  # custom_deps from the .gclient file trumps everything else.
  overriden_url = self.get_custom_deps(self.name, url)
  if overriden_url != url:
    self.parsed_url = overriden_url
    logging.debug('%s, %s was overriden to %s' % (self.name, url,
        self.parsed_url))
  elif isinstance(url, self.FromImpl):
    ref = [dep for dep in self.tree(True) if url.module_name == dep.name]
    if not len(ref) == 1:
      raise Exception('Failed to find one reference to %s. %s' % (
          url.module_name, ref))
    ref = ref[0]
    # Bug fix: default to this dependency's own name, not the From()
    # wrapper object itself, which could never match a dependency name.
    sub_target = url.sub_target_name or self.name
    # Make sure the referenced dependency DEPS file is loaded and file the
    # inner referenced dependency.
    ref.ParseDepsFile(False)
    found_dep = None
    for d in ref.dependencies:
      if d.name == sub_target:
        found_dep = d
        break
    if not found_dep:
      raise Exception('Couldn\'t find %s in %s, referenced by %s' % (
          sub_target, ref.name, self.name))
    # Call LateOverride() again.
    self.parsed_url = found_dep.LateOverride(found_dep.url)
    logging.debug('%s, %s to %s' % (self.name, url, self.parsed_url))
  elif isinstance(url, basestring):
    parsed_url = urlparse.urlparse(url)
    if not parsed_url[0]:
      # A relative url. Fetch the real base.
      path = parsed_url[2]
      if not path.startswith('/'):
        raise gclient_utils.Error(
            'relative DEPS entry \'%s\' must begin with a slash' % url)
      # Create a scm just to query the full url.
      parent_url = self.parent.parsed_url
      if isinstance(parent_url, self.FileImpl):
        parent_url = parent_url.file_location
      scm = gclient_scm.CreateSCM(parent_url, self.root_dir(), None)
      self.parsed_url = scm.FullUrlForRelativeUrl(url)
    else:
      self.parsed_url = url
    logging.debug('%s, %s -> %s' % (self.name, url, self.parsed_url))
  elif isinstance(url, self.FileImpl):
    self.parsed_url = url
    logging.debug('%s, %s -> %s (File)' % (self.name, url, self.parsed_url))
  return self.parsed_url
self.direct_reference = direct_reference
self.direct_reference = True
def ParseDepsFile(self, direct_reference):
  """No DEPS to parse for a .gclient file.

  The 'direct_reference' argument is kept for interface compatibility with
  the generic traversal code but is ignored: the .gclient solutions are by
  definition always directly referenced.
  """
  self.direct_reference = True
  self.deps_parsed = True
issue = BackquoteAsInteger(['git', 'cl', 'status', '--field=id']) patchset = BackquoteAsInteger(['git', 'cl', 'status', '--field=patch'])
cl = git_cl.Changelist() issue = cl.GetIssue() patchset = cl.GetPatchset()
def __init__(self, commit=None, upstream_branch=None):
  # Minimal option holder mimicking what the presubmit machinery expects.
  # NOTE(review): 'upstream_branch' is accepted but never stored -- confirm
  # whether any caller relies on it being ignored, or whether it should be
  # saved as self.upstream_branch.
  self.commit = commit
  self.verbose = None
  self.default_presubmit = None
  self.may_prompt = None
description = Backquote(['git', 'cl', 'status', '--field=desc'])
description = cl.GetDescription()
def __init__(self, commit=None, upstream_branch=None):
  # Minimal option holder mimicking what the presubmit machinery expects.
  # NOTE(review): 'upstream_branch' is accepted but never stored -- confirm
  # whether any caller relies on it being ignored, or whether it should be
  # saved as self.upstream_branch.
  self.commit = commit
  self.verbose = None
  self.default_presubmit = None
  self.may_prompt = None
gcl_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "gcl")
gcl_path = os.path.join(depot_tools_dir_, "gcl")
def runGcl(subcommand):
  # Runs the 'gcl' tool with the given subcommand string, preferring the
  # copy that lives next to this script and falling back to whatever 'gcl'
  # is on PATH.  Returns the os.system() exit status.
  # NOTE(review): locating gcl via __file__ assumes this script runs from
  # its checked-in depot_tools location -- confirm this still holds when the
  # script is copied or symlinked elsewhere.
  gcl_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "gcl")
  if not os.path.exists(gcl_path):
    print "WARNING: gcl not found beside drover.py. Using system gcl instead..."
    gcl_path = 'gcl'
  command = "%s %s" % (gcl_path, subcommand)
  return os.system(command)
data = gcl.GetCachedFile(filename, use_root=True)
data = gcl.GetCachedFile(filename)
def ReadRootFile(self, filename):
  """Returns the content of 'filename', preferring the cached copy gcl keeps
  from the subversion repository, else reading it from the checkout root.

  Returns None when the file cannot be read either way.
  """
  try:
    # Try to search on the subversion repository for the file.
    import gcl
    # gcl.GetCachedFile() already resolves against the repository root; the
    # obsolete use_root=True keyword was dropped from its signature.
    data = gcl.GetCachedFile(filename)
    logging.debug('%s:\n%s' % (filename, data))
    return data
  except ImportError:
    # gcl unavailable (non-svn checkout): fall back to a plain file read.
    try:
      data = gclient_utils.FileRead(os.path.join(self.checkout_root,
                                                 filename))
      logging.debug('%s:\n%s' % (filename, data))
      return data
    except (IOError, OSError):
      logging.debug('%s:\nNone' % filename)
      return None
if command == 'update' and not self._options.verbose:
if command in ('update', 'revert') and sys.stdout.isatty():
def RunOnDeps(self, command, args): """Runs a command on each dependency in a client and its dependencies.
self._Authenticate()
url_loc = urlparse.urlparse(url) self._Authenticate('%s://%s' % (url_loc.scheme, url_loc.netloc))
def Send(self, request_path, payload=None, content_type="application/octet-stream", timeout=None, extra_headers=None, **kwargs): """Sends an RPC and returns the response.
data = SVN.Capture(command, None) if data: pass elif SVN.IsMoved(filename):
if SVN.IsMoved(filename):
def DiffItem(filename, full_move=False, revision=None): """Diffs a single file.
data = "Index: %s\n" % filename
data = SVN.Capture(command, None) if not data: data = "Index: %s\n" % filename
def DiffItem(filename, full_move=False, revision=None): """Diffs a single file.
pass
data = SVN.Capture(command, None)
def DiffItem(filename, full_move=False, revision=None): """Diffs a single file.
ErrorExit(msg, exit=False) def ErrorExit(msg, do_exit=True): """Print an error message to stderr and optionally exit."""
def Warn(msg):
  """Prints 'msg' to stderr without terminating the process."""
  # Bug fix: ErrorExit's keyword argument is 'do_exit', not 'exit'; the old
  # call raised TypeError whenever a warning was emitted.
  ErrorExit(msg, do_exit=False)
if do_exit: sys.exit(1)
def ErrorExit(msg): print >> sys.stderr, msg sys.exit(1)
def ErrorExit(msg, do_exit=True):
  """Print an error message to stderr and optionally exit."""
  # Writing directly to the stream keeps this compatible with redirected
  # stderr; str(msg) plus a newline matches the previous print behavior.
  sys.stderr.write('%s\n' % msg)
  if do_exit:
    sys.exit(1)
path = os.path.join(self._root_dir, self.relpath)
def pack(self, options, args, file_list): """Generates a patch file which can be applied to the root of the repository.
command = ['git', 'diff', merge_base] filterer = DiffFilterer(self.relpath)
def pack(self, options, args, file_list): """Generates a patch file which can be applied to the root of the repository.
command, cwd=path, filter_fn=filterer.Filter, stdout=options.stdout)
['git', 'diff', merge_base], cwd=self.checkout_path, filter_fn=DiffFilterer(self.relpath, options.stdout).Filter, stdout=options.stdout)
def pack(self, options, args, file_list): """Generates a patch file which can be applied to the root of the repository.
path = os.path.join(self._root_dir, self.relpath) if not os.path.isdir(path):
if not os.path.isdir(self.checkout_path):
def revert(self, options, args, file_list): """Reverts local modifications.
path = os.path.join(self._root_dir, self.relpath) if not os.path.isdir(path): raise gclient_utils.Error('Directory %s is not present.' % path)
if not os.path.isdir(self.checkout_path): raise gclient_utils.Error('Directory %s is not present.' % self.checkout_path)
def diff(self, options, args, file_list):
  """Runs 'svn diff' against the working copy.

  file_list is accepted for interface parity with the other SCM wrappers
  but is not modified here.
  """
  target = os.path.join(self._root_dir, self.relpath)
  if not os.path.isdir(target):
    # Nothing to diff if the checkout never happened.
    raise gclient_utils.Error('Directory %s is not present.' % target)
  self._Run(['diff'] + args, options)
path = os.path.join(self._root_dir, self.relpath) if not os.path.isdir(path): raise gclient_utils.Error('Directory %s is not present.' % path) command = ['svn', 'diff', '-x', '--ignore-eol-style'] command.extend(args) filterer = DiffFilterer(self.relpath, options.stdout) gclient_utils.CheckCallAndFilter(command, cwd=path, always=False, print_stdout=False, filter_fn=filterer.Filter,
if not os.path.isdir(self.checkout_path): raise gclient_utils.Error('Directory %s is not present.' % self.checkout_path) gclient_utils.CheckCallAndFilter( ['svn', 'diff', '-x', '--ignore-eol-style'] + args, cwd=self.checkout_path, print_stdout=False, filter_fn=DiffFilterer(self.relpath, options.stdout).Filter,
def pack(self, options, args, file_list): """Generates a patch file which can be applied to the root of the repository.""" path = os.path.join(self._root_dir, self.relpath) if not os.path.isdir(path): raise gclient_utils.Error('Directory %s is not present.' % path) command = ['svn', 'diff', '-x', '--ignore-eol-style'] command.extend(args)
checkout_path = os.path.join(self._root_dir, self.relpath) git_path = os.path.join(self._root_dir, self.relpath, '.git')
git_path = os.path.join(self.checkout_path, '.git')
def update(self, options, args, file_list): """Runs svn to update or transparently checkout the working copy.
if not os.path.exists(checkout_path):
if not os.path.exists(self.checkout_path):
def update(self, options, args, file_list): """Runs svn to update or transparently checkout the working copy.
command = ['checkout', url, checkout_path]
command = ['checkout', url, self.checkout_path]
def update(self, options, args, file_list): """Runs svn to update or transparently checkout the working copy.
from_info = scm.SVN.CaptureInfo(os.path.join(checkout_path, '.'), '.')
from_info = scm.SVN.CaptureInfo(os.path.join(self.checkout_path, '.'), '.')
def update(self, options, args, file_list): """Runs svn to update or transparently checkout the working copy.
checkout_path)
self.checkout_path)
def update(self, options, args, file_list): """Runs svn to update or transparently checkout the working copy.
dir_info = scm.SVN.CaptureStatus(os.path.join(checkout_path, '.')) if [True for d in dir_info if d[0][2] == 'L' and d[1] == checkout_path]:
dir_info = scm.SVN.CaptureStatus(os.path.join(self.checkout_path, '.')) if [True for d in dir_info if d[0][2] == 'L' and d[1] == self.checkout_path]:
def update(self, options, args, file_list): """Runs svn to update or transparently checkout the working copy.
for status in scm.SVN.CaptureStatus(checkout_path):
for status in scm.SVN.CaptureStatus(self.checkout_path):
def update(self, options, args, file_list): """Runs svn to update or transparently checkout the working copy.
'try again.') % (url, checkout_path))
'try again.') % (url, self.checkout_path))
def update(self, options, args, file_list): """Runs svn to update or transparently checkout the working copy.