Merge "Fix removing broken symlink in reference dir"
diff --git a/.flake8 b/.flake8
new file mode 100644
index 0000000..45ab656
--- /dev/null
+++ b/.flake8
@@ -0,0 +1,3 @@
+[flake8]
+max-line-length=80
+ignore=E111,E114,E402
diff --git a/.mailmap b/.mailmap
index f070b49..eb64bd2 100644
--- a/.mailmap
+++ b/.mailmap
@@ -1,8 +1,11 @@
Anthony Newnam <anthony.newnam@garmin.com> Anthony <anthony@bnovc.com>
-Shawn Pearce <sop@google.com> Shawn O. Pearce <sop@google.com>
+Hu Xiuyun <xiuyun.hu@hisilicon.com> Hu xiuyun <xiuyun.hu@hisilicon.com>
+Hu Xiuyun <xiuyun.hu@hisilicon.com> Hu Xiuyun <clouds08@qq.com>
+Jelly Chen <chenguodong@huawei.com> chenguodong <chenguodong@huawei.com>
Jia Bi <bijia@xiaomi.com> bijia <bijia@xiaomi.com>
JoonCheol Park <jooncheol@gmail.com> Jooncheol Park <jooncheol@gmail.com>
Sergii Pylypenko <x.pelya.x@gmail.com> pelya <x.pelya.x@gmail.com>
+Shawn Pearce <sop@google.com> Shawn O. Pearce <sop@google.com>
Ulrik Sjölin <ulrik.sjolin@sonyericsson.com> Ulrik Sjolin <ulrik.sjolin@gmail.com>
Ulrik Sjölin <ulrik.sjolin@sonyericsson.com> Ulrik Sjolin <ulrik.sjolin@sonyericsson.com>
Ulrik Sjölin <ulrik.sjolin@sonyericsson.com> Ulrik Sjölin <ulrik.sjolin@sonyericsson.com>
diff --git a/.pylintrc b/.pylintrc
deleted file mode 100644
index 413d66a..0000000
--- a/.pylintrc
+++ /dev/null
@@ -1,298 +0,0 @@
-# lint Python modules using external checkers.
-#
-# This is the main checker controling the other ones and the reports
-# generation. It is itself both a raw checker and an astng checker in order
-# to:
-# * handle message activation / deactivation at the module level
-# * handle some basic but necessary stats'data (number of classes, methods...)
-#
-[MASTER]
-
-# Specify a configuration file.
-#rcfile=
-
-# Python code to execute, usually for sys.path manipulation such as
-# pygtk.require().
-#init-hook=
-
-# Profiled execution.
-profile=no
-
-# Add <file or directory> to the black list. It should be a base name, not a
-# path. You may set this option multiple times.
-ignore=SVN
-
-# Pickle collected data for later comparisons.
-persistent=yes
-
-# Set the cache size for astng objects.
-cache-size=500
-
-# List of plugins (as comma separated values of python modules names) to load,
-# usually to register additional checkers.
-load-plugins=
-
-
-[MESSAGES CONTROL]
-
-# Enable only checker(s) with the given id(s). This option conflicts with the
-# disable-checker option
-#enable-checker=
-
-# Enable all checker(s) except those with the given id(s). This option
-# conflicts with the enable-checker option
-#disable-checker=
-
-# Enable all messages in the listed categories.
-#enable-msg-cat=
-
-# Disable all messages in the listed categories.
-#disable-msg-cat=
-
-# Enable the message(s) with the given id(s).
-enable=RP0004
-
-# Disable the message(s) with the given id(s).
-disable=C0326,R0903,R0912,R0913,R0914,R0915,W0141,C0111,C0103,W0603,W0703,R0911,C0301,C0302,R0902,R0904,W0142,W0212,E1101,E1103,R0201,W0201,W0122,W0232,RP0001,RP0003,RP0101,RP0002,RP0401,RP0701,RP0801,F0401,E0611,R0801,I0011
-
-[REPORTS]
-
-# set the output format. Available formats are text, parseable, colorized, msvs
-# (visual studio) and html
-output-format=text
-
-# Put messages in a separate file for each module / package specified on the
-# command line instead of printing them on stdout. Reports (if any) will be
-# written in a file name "pylint_global.[txt|html]".
-files-output=no
-
-# Tells whether to display a full report or only the messages
-reports=yes
-
-# Python expression which should return a note less than 10 (10 is the highest
-# note).You have access to the variables errors warning, statement which
-# respectivly contain the number of errors / warnings messages and the total
-# number of statements analyzed. This is used by the global evaluation report
-# (R0004).
-evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
-
-# Add a comment according to your evaluation note. This is used by the global
-# evaluation report (R0004).
-comment=no
-
-# checks for
-# * unused variables / imports
-# * undefined variables
-# * redefinition of variable from builtins or from an outer scope
-# * use of variable before assigment
-#
-[VARIABLES]
-
-# Tells whether we should check for unused import in __init__ files.
-init-import=no
-
-# A regular expression matching names used for dummy variables (i.e. not used).
-dummy-variables-rgx=_|dummy
-
-# List of additional names supposed to be defined in builtins. Remember that
-# you should avoid to define new builtins when possible.
-additional-builtins=
-
-
-# try to find bugs in the code using type inference
-#
-[TYPECHECK]
-
-# Tells whether missing members accessed in mixin class should be ignored. A
-# mixin class is detected if its name ends with "mixin" (case insensitive).
-ignore-mixin-members=yes
-
-# List of classes names for which member attributes should not be checked
-# (useful for classes with attributes dynamicaly set).
-ignored-classes=SQLObject
-
-# When zope mode is activated, consider the acquired-members option to ignore
-# access to some undefined attributes.
-zope=no
-
-# List of members which are usually get through zope's acquisition mecanism and
-# so shouldn't trigger E0201 when accessed (need zope=yes to be considered).
-acquired-members=REQUEST,acl_users,aq_parent
-
-
-# checks for :
-# * doc strings
-# * modules / classes / functions / methods / arguments / variables name
-# * number of arguments, local variables, branchs, returns and statements in
-# functions, methods
-# * required module attributes
-# * dangerous default values as arguments
-# * redefinition of function / method / class
-# * uses of the global statement
-#
-[BASIC]
-
-# Required attributes for module, separated by a comma
-required-attributes=
-
-# Regular expression which should only match functions or classes name which do
-# not require a docstring
-no-docstring-rgx=_main|__.*__
-
-# Regular expression which should only match correct module names
-module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
-
-# Regular expression which should only match correct module level names
-const-rgx=(([A-Z_][A-Z1-9_]*)|(__.*__))|(log)$
-
-# Regular expression which should only match correct class names
-class-rgx=[A-Z_][a-zA-Z0-9]+$
-
-# Regular expression which should only match correct function names
-function-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct method names
-method-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct instance attribute names
-attr-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct argument names
-argument-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct variable names
-variable-rgx=[a-z_][a-z0-9_]{2,30}$
-
-# Regular expression which should only match correct list comprehension /
-# generator expression variable names
-inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
-
-# Good variable names which should always be accepted, separated by a comma
-good-names=i,j,k,ex,Run,_,e,d1,d2,v,f,l,d
-
-# Bad variable names which should always be refused, separated by a comma
-bad-names=foo,bar,baz,toto,tutu,tata
-
-# List of builtins function names that should not be used, separated by a comma
-bad-functions=map,filter,apply,input
-
-
-# checks for sign of poor/misdesign:
-# * number of methods, attributes, local variables...
-# * size, complexity of functions, methods
-#
-[DESIGN]
-
-# Maximum number of arguments for function / method
-max-args=5
-
-# Maximum number of locals for function / method body
-max-locals=15
-
-# Maximum number of return / yield for function / method body
-max-returns=6
-
-# Maximum number of branch for function / method body
-max-branchs=12
-
-# Maximum number of statements in function / method body
-max-statements=50
-
-# Maximum number of parents for a class (see R0901).
-max-parents=7
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=20
-
-# Minimum number of public methods for a class (see R0903).
-min-public-methods=2
-
-# Maximum number of public methods for a class (see R0904).
-max-public-methods=30
-
-
-# checks for
-# * external modules dependencies
-# * relative / wildcard imports
-# * cyclic imports
-# * uses of deprecated modules
-#
-[IMPORTS]
-
-# Deprecated modules which should not be used, separated by a comma
-deprecated-modules=regsub,string,TERMIOS,Bastion,rexec
-
-# Create a graph of every (i.e. internal and external) dependencies in the
-# given file (report R0402 must not be disabled)
-import-graph=
-
-# Create a graph of external dependencies in the given file (report R0402 must
-# not be disabled)
-ext-import-graph=
-
-# Create a graph of internal dependencies in the given file (report R0402 must
-# not be disabled)
-int-import-graph=
-
-
-# checks for :
-# * methods without self as first argument
-# * overridden methods signature
-# * access only to existant members via self
-# * attributes not defined in the __init__ method
-# * supported interfaces implementation
-# * unreachable code
-#
-[CLASSES]
-
-# List of interface methods to ignore, separated by a comma. This is used for
-# instance to not check methods defines in Zope's Interface base class.
-ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by
-
-# List of method names used to declare (i.e. assign) instance attributes.
-defining-attr-methods=__init__,__new__,setUp
-
-
-# checks for similarities and duplicated code. This computation may be
-# memory / CPU intensive, so you should disable it if you experiments some
-# problems.
-#
-[SIMILARITIES]
-
-# Minimum lines number of a similarity.
-min-similarity-lines=4
-
-# Ignore comments when computing similarities.
-ignore-comments=yes
-
-# Ignore docstrings when computing similarities.
-ignore-docstrings=yes
-
-
-# checks for:
-# * warning notes in the code like FIXME, XXX
-# * PEP 263: source code with non ascii character but no encoding declaration
-#
-[MISCELLANEOUS]
-
-# List of note tags to take in consideration, separated by a comma.
-notes=FIXME,XXX,TODO
-
-
-# checks for :
-# * unauthorized constructions
-# * strict indentation
-# * line length
-# * use of <> instead of !=
-#
-[FORMAT]
-
-# Maximum number of characters on a single line.
-max-line-length=80
-
-# Maximum number of lines in a module
-max-module-lines=1000
-
-# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
-# tab). In repo it is 2 spaces.
-indent-string=' '
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..e35f8e9
--- /dev/null
+++ b/README.md
@@ -0,0 +1,14 @@
+# repo
+
+Repo is a tool built on top of Git. Repo helps manage many Git repositories,
+does the uploads to revision control systems, and automates parts of the
+development workflow. Repo is not meant to replace Git, only to make it
+easier to work with Git. The repo command is an executable Python script
+that you can put anywhere in your path.
+
+* Homepage: https://code.google.com/p/git-repo/
+* Bug reports: https://code.google.com/p/git-repo/issues/
+* Source: https://code.google.com/p/git-repo/
+* Overview: https://source.android.com/source/developing.html
+* Docs: https://source.android.com/source/using-repo.html
+* [Submitting patches](./SUBMITTING_PATCHES.md)
diff --git a/SUBMITTING_PATCHES b/SUBMITTING_PATCHES.md
similarity index 70%
rename from SUBMITTING_PATCHES
rename to SUBMITTING_PATCHES.md
index 8656ee7..07f7661 100644
--- a/SUBMITTING_PATCHES
+++ b/SUBMITTING_PATCHES.md
@@ -1,17 +1,17 @@
-Short Version:
+# Short Version
- Make small logical changes.
- Provide a meaningful commit message.
- - Check for coding errors with pylint
+ - Check for coding errors and style nits with pyflakes and flake8
- Make sure all code is under the Apache License, 2.0.
- Publish your changes for review.
- Make corrections if requested.
- Verify your changes on gerrit so they can be submitted.
- git push https://gerrit-review.googlesource.com/git-repo HEAD:refs/for/master
+ `git push https://gerrit-review.googlesource.com/git-repo HEAD:refs/for/master`
-Long Version:
+# Long Version
I wanted a file describing how to submit patches for repo,
so I started with the one found in the core Git distribution
@@ -19,10 +19,10 @@
patch submission guidelines for the Linux kernel.
However there are some differences, so please review and familiarize
-yourself with the following relevant bits:
+yourself with the following relevant bits.
-(1) Make separate commits for logically separate changes.
+## Make separate commits for logically separate changes.
Unless your patch is really trivial, you should not be sending
out a patch that was generated between your working tree and your
@@ -36,14 +36,34 @@
probably need to split up your commit to finer grained pieces.
-(2) Check for coding errors with pylint
+## Check for coding errors and style nits with pyflakes and flake8
-Run pylint on changed modules using the provided configuration:
+### Coding errors
- pylint --rcfile=.pylintrc file.py
+Run `pyflakes` on changed modules:
+ pyflakes file.py
-(3) Check the license
+Ideally there should be no new errors or warnings introduced.
+
+### Style violations
+
+Run `flake8` on changed modules:
+
+ flake8 file.py
+
+Note that repo generally follows [Google's python style guide]
+(https://google.github.io/styleguide/pyguide.html) rather than [PEP 8]
+(https://www.python.org/dev/peps/pep-0008/), so it's possible that
+the output of `flake8` will be quite noisy. It's not mandatory to
+avoid all warnings, but at least the maximum line length should be
+followed.
+
+If there are many occurrences of the same warning that cannot be
+avoided without going against the Google style guide, these may be
+suppressed in the included `.flake8` file.
+
+## Check the license
repo is licensed under the Apache License, 2.0.
@@ -59,7 +79,7 @@
has been applied and pushed out.
-(4) Sending your patches.
+## Sending your patches.
Do not email your patches to anyone.
@@ -91,23 +111,23 @@
Push your patches over HTTPS to the review server, possibly through
a remembered remote to make this easier in the future:
- git config remote.review.url https://gerrit-review.googlesource.com/git-repo
- git config remote.review.push HEAD:refs/for/master
+ git config remote.review.url https://gerrit-review.googlesource.com/git-repo
+ git config remote.review.push HEAD:refs/for/master
- git push review
+ git push review
You will be automatically emailed a copy of your commits, and any
comments made by the project maintainers.
-(5) Make changes if requested
+## Make changes if requested
The project maintainer who reviews your changes might request changes to your
commit. If you make the requested changes you will need to amend your commit
and push it to the review server again.
-(6) Verify your changes on gerrit
+## Verify your changes on gerrit
After you receive a Code-Review+2 from the maintainer, select the Verified
button on the gerrit page for the change. This verifies that you have tested
diff --git a/command.py b/command.py
index bc2f950..2ff0a34 100644
--- a/command.py
+++ b/command.py
@@ -119,6 +119,11 @@
except KeyError:
oldpath = path
path = os.path.dirname(path)
+ if not project and path == manifest.topdir:
+ try:
+ project = self._by_path[path]
+ except KeyError:
+ pass
else:
try:
project = self._by_path[path]
diff --git a/docs/manifest-format.txt b/docs/manifest-format.txt
index 8fd9137..2a07f19 100644
--- a/docs/manifest-format.txt
+++ b/docs/manifest-format.txt
@@ -35,6 +35,7 @@
<!ATTLIST remote name ID #REQUIRED>
<!ATTLIST remote alias CDATA #IMPLIED>
<!ATTLIST remote fetch CDATA #REQUIRED>
+ <!ATTLIST remote pushurl CDATA #IMPLIED>
<!ATTLIST remote review CDATA #IMPLIED>
<!ATTLIST remote revision CDATA #IMPLIED>
@@ -125,6 +126,12 @@
this remote. Each project's name is appended to this prefix to
form the actual URL used to clone the project.
+Attribute `pushurl`: The Git "push" URL prefix for all projects
+which use this remote. Each project's name is appended to this
+prefix to form the actual URL used to "git push" the project.
+This attribute is optional; if not specified then "git push"
+will use the same URL as the `fetch` attribute.
+
Attribute `review`: Hostname of the Gerrit server where reviews
are uploaded to by `repo upload`. This attribute is optional;
if not specified then `repo upload` will not function.
diff --git a/git_config.py b/git_config.py
index 0379181..e223678 100644
--- a/git_config.py
+++ b/git_config.py
@@ -464,9 +464,13 @@
% (host,port, str(e)), file=sys.stderr)
return False
+ time.sleep(1)
+ ssh_died = (p.poll() is not None)
+ if ssh_died:
+ return False
+
_master_processes.append(p)
_master_keys.add(key)
- time.sleep(1)
return True
finally:
_master_keys_lock.release()
@@ -568,6 +572,7 @@
self._config = config
self.name = name
self.url = self._Get('url')
+ self.pushUrl = self._Get('pushurl')
self.review = self._Get('review')
self.projectname = self._Get('projectname')
self.fetch = list(map(RefSpec.FromString,
@@ -694,6 +699,10 @@
"""Save this remote to the configuration.
"""
self._Set('url', self.url)
+ if self.pushUrl is not None:
+ self._Set('pushurl', self.pushUrl + '/' + self.projectname)
+ else:
+ self._Set('pushurl', self.pushUrl)
self._Set('review', self.review)
self._Set('projectname', self.projectname)
self._Set('fetch', list(map(str, self.fetch)))
diff --git a/gitc_utils.py b/gitc_utils.py
index a388dc2..0d4a5c3 100644
--- a/gitc_utils.py
+++ b/gitc_utils.py
@@ -24,7 +24,9 @@
import git_config
import wrapper
-NUM_BATCH_RETRIEVE_REVISIONID = 300
+from error import ManifestParseError
+
+NUM_BATCH_RETRIEVE_REVISIONID = 32
def get_gitc_manifest_dir():
return wrapper.Wrapper().get_gitc_manifest_dir()
@@ -54,7 +56,11 @@
if gitcmd.Wait():
print('FATAL: Failed to retrieve revisionExpr for %s' % proj)
sys.exit(1)
- proj.revisionExpr = gitcmd.stdout.split('\t')[0]
+ revisionExpr = gitcmd.stdout.split('\t')[0]
+ if not revisionExpr:
+ raise(ManifestParseError('Invalid SHA-1 revision project %s (%s)' %
+ (proj.remote.url, proj.revisionExpr)))
+ proj.revisionExpr = revisionExpr
def _manifest_groups(manifest):
"""Returns the manifest group string that should be synced
diff --git a/manifest_xml.py b/manifest_xml.py
index 295493d..0859e1f 100644
--- a/manifest_xml.py
+++ b/manifest_xml.py
@@ -40,8 +40,18 @@
LOCAL_MANIFESTS_DIR_NAME = 'local_manifests'
# urljoin gets confused if the scheme is not known.
-urllib.parse.uses_relative.extend(['ssh', 'git', 'persistent-https', 'rpc'])
-urllib.parse.uses_netloc.extend(['ssh', 'git', 'persistent-https', 'rpc'])
+urllib.parse.uses_relative.extend([
+ 'ssh',
+ 'git',
+ 'persistent-https',
+ 'sso',
+ 'rpc'])
+urllib.parse.uses_netloc.extend([
+ 'ssh',
+ 'git',
+ 'persistent-https',
+ 'sso',
+ 'rpc'])
class _Default(object):
"""Project defaults within the manifest."""
@@ -64,11 +74,13 @@
name,
alias=None,
fetch=None,
+ pushUrl=None,
manifestUrl=None,
review=None,
revision=None):
self.name = name
self.fetchUrl = fetch
+ self.pushUrl = pushUrl
self.manifestUrl = manifestUrl
self.remoteAlias = alias
self.reviewUrl = review
@@ -104,6 +116,7 @@
remoteName = self.remoteAlias
return RemoteSpec(remoteName,
url=url,
+ pushUrl=self.pushUrl,
review=self.reviewUrl,
orig_name=self.name)
@@ -160,6 +173,8 @@
root.appendChild(e)
e.setAttribute('name', r.name)
e.setAttribute('fetch', r.fetchUrl)
+ if r.pushUrl is not None:
+ e.setAttribute('pushurl', r.pushUrl)
if r.remoteAlias is not None:
e.setAttribute('alias', r.remoteAlias)
if r.reviewUrl is not None:
@@ -639,6 +654,9 @@
if alias == '':
alias = None
fetch = self._reqatt(node, 'fetch')
+ pushUrl = node.getAttribute('pushurl')
+ if pushUrl == '':
+ pushUrl = None
review = node.getAttribute('review')
if review == '':
review = None
@@ -646,7 +664,7 @@
if revision == '':
revision = None
manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
- return _XmlRemote(name, alias, fetch, manifestUrl, review, revision)
+ return _XmlRemote(name, alias, fetch, pushUrl, manifestUrl, review, revision)
def _ParseDefault(self, node):
"""
diff --git a/project.py b/project.py
index 9a7128a..0d60fc6 100644
--- a/project.py
+++ b/project.py
@@ -40,7 +40,13 @@
from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M
from pyversion import is_python3
-if not is_python3():
+if is_python3():
+ import urllib.parse
+else:
+ import imp
+ import urlparse
+ urllib = imp.new_module('urllib')
+ urllib.parse = urlparse
# pylint:disable=W0622
input = raw_input
# pylint:enable=W0622
@@ -314,11 +320,13 @@
def __init__(self,
name,
url=None,
+ pushUrl=None,
review=None,
revision=None,
orig_name=None):
self.name = name
self.url = url
+ self.pushUrl = pushUrl
self.review = review
self.revision = revision
self.orig_name = orig_name
@@ -343,6 +351,7 @@
hook_type,
hooks_project,
topdir,
+ manifest_url,
abort_if_user_denies=False):
"""RepoHook constructor.
@@ -356,11 +365,13 @@
topdir: Repo's top directory (the one containing the .repo directory).
Scripts will run with CWD as this directory. If you have a manifest,
this is manifest.topdir
+ manifest_url: The URL to the manifest git repo.
abort_if_user_denies: If True, we'll throw a HookError() if the user
doesn't allow us to run the hook.
"""
self._hook_type = hook_type
self._hooks_project = hooks_project
+ self._manifest_url = manifest_url
self._topdir = topdir
self._abort_if_user_denies = abort_if_user_denies
@@ -409,9 +420,9 @@
def _CheckForHookApproval(self):
"""Check to see whether this hook has been approved.
- We'll look at the hash of all of the hooks. If this matches the hash that
- the user last approved, we're done. If it doesn't, we'll ask the user
- about approval.
+ We'll accept approval of manifest URLs if they're using secure transports.
+ This way the user can say they trust the manifest hoster. For insecure
+ hosts, we fall back to checking the hash of the hooks repo.
Note that we ask permission for each individual hook even though we use
the hash of all hooks when detecting changes. We'd like the user to be
@@ -425,44 +436,58 @@
HookError: Raised if the user doesn't approve and abort_if_user_denies
was passed to the consturctor.
"""
+ if self._ManifestUrlHasSecureScheme():
+ return self._CheckForHookApprovalManifest()
+ else:
+ return self._CheckForHookApprovalHash()
+
+ def _CheckForHookApprovalHelper(self, subkey, new_val, main_prompt,
+ changed_prompt):
+ """Check for approval for a particular attribute and hook.
+
+ Args:
+ subkey: The git config key under [repo.hooks.<hook_type>] to store the
+ last approved string.
+ new_val: The new value to compare against the last approved one.
+ main_prompt: Message to display to the user to ask for approval.
+ changed_prompt: Message explaining why we're re-asking for approval.
+
+ Returns:
+ True if this hook is approved to run; False otherwise.
+
+ Raises:
+ HookError: Raised if the user doesn't approve and abort_if_user_denies
+        was passed to the constructor.
+ """
hooks_config = self._hooks_project.config
- git_approval_key = 'repo.hooks.%s.approvedhash' % self._hook_type
+ git_approval_key = 'repo.hooks.%s.%s' % (self._hook_type, subkey)
- # Get the last hash that the user approved for this hook; may be None.
- old_hash = hooks_config.GetString(git_approval_key)
+ # Get the last value that the user approved for this hook; may be None.
+ old_val = hooks_config.GetString(git_approval_key)
- # Get the current hash so we can tell if scripts changed since approval.
- new_hash = self._GetHash()
-
- if old_hash is not None:
+ if old_val is not None:
# User previously approved hook and asked not to be prompted again.
- if new_hash == old_hash:
+ if new_val == old_val:
# Approval matched. We're done.
return True
else:
# Give the user a reason why we're prompting, since they last told
# us to "never ask again".
- prompt = 'WARNING: Scripts have changed since %s was allowed.\n\n' % (
- self._hook_type)
+ prompt = 'WARNING: %s\n\n' % (changed_prompt,)
else:
prompt = ''
# Prompt the user if we're not on a tty; on a tty we'll assume "no".
if sys.stdout.isatty():
- prompt += ('Repo %s run the script:\n'
- ' %s\n'
- '\n'
- 'Do you want to allow this script to run '
- '(yes/yes-never-ask-again/NO)? ') % (self._GetMustVerb(),
- self._script_fullpath)
+ prompt += main_prompt + ' (yes/always/NO)? '
response = input(prompt).lower()
print()
# User is doing a one-time approval.
if response in ('y', 'yes'):
return True
- elif response == 'yes-never-ask-again':
- hooks_config.SetString(git_approval_key, new_hash)
+ elif response == 'always':
+ hooks_config.SetString(git_approval_key, new_val)
return True
# For anything else, we'll assume no approval.
@@ -472,6 +497,40 @@
return False
+ def _ManifestUrlHasSecureScheme(self):
+ """Check if the URI for the manifest is a secure transport."""
+ secure_schemes = ('file', 'https', 'ssh', 'persistent-https', 'sso', 'rpc')
+ parse_results = urllib.parse.urlparse(self._manifest_url)
+ return parse_results.scheme in secure_schemes
+
+ def _CheckForHookApprovalManifest(self):
+ """Check whether the user has approved this manifest host.
+
+ Returns:
+ True if this hook is approved to run; False otherwise.
+ """
+ return self._CheckForHookApprovalHelper(
+ 'approvedmanifest',
+ self._manifest_url,
+ 'Run hook scripts from %s' % (self._manifest_url,),
+ 'Manifest URL has changed since %s was allowed.' % (self._hook_type,))
+
+ def _CheckForHookApprovalHash(self):
+ """Check whether the user has approved the hooks repo.
+
+ Returns:
+ True if this hook is approved to run; False otherwise.
+ """
+ prompt = ('Repo %s run the script:\n'
+ ' %s\n'
+ '\n'
+ 'Do you want to allow this script to run')
+ return self._CheckForHookApprovalHelper(
+ 'approvedhash',
+ self._GetHash(),
+ prompt % (self._GetMustVerb(), self._script_fullpath),
+ 'Scripts have changed since %s was allowed.' % (self._hook_type,))
+
def _ExecuteHook(self, **kwargs):
"""Actually execute the given hook.
@@ -628,7 +687,7 @@
self.gitdir = gitdir.replace('\\', '/')
self.objdir = objdir.replace('\\', '/')
if worktree:
- self.worktree = worktree.replace('\\', '/')
+ self.worktree = os.path.normpath(worktree.replace('\\', '/'))
else:
self.worktree = None
self.relpath = relpath
@@ -852,11 +911,13 @@
else:
return False
- def PrintWorkTreeStatus(self, output_redir=None):
+ def PrintWorkTreeStatus(self, output_redir=None, quiet=False):
"""Prints the status of the repository to stdout.
Args:
output: If specified, redirect the output to this object.
+ quiet: If True then only print the project name. Do not print
+ the modified files, branch name, etc.
"""
if not os.path.isdir(self.worktree):
if output_redir is None:
@@ -882,6 +943,10 @@
out.redirect(output_redir)
out.project('project %-40s', self.relpath + '/ ')
+ if quiet:
+ out.nl()
+ return 'DIRTY'
+
branch = self.CurrentBranch
if branch is None:
out.nobranch('(*** NO BRANCH ***)')
@@ -1199,13 +1264,18 @@
elif self.manifest.default.sync_c:
current_branch_only = True
+ if self.clone_depth:
+ depth = self.clone_depth
+ else:
+ depth = self.manifest.manifestProject.config.GetString('repo.depth')
+
need_to_fetch = not (optimized_fetch and
(ID_RE.match(self.revisionExpr) and
self._CheckForSha1()))
if (need_to_fetch and
not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
current_branch_only=current_branch_only,
- no_tags=no_tags, prune=prune)):
+ no_tags=no_tags, prune=prune, depth=depth)):
return False
if self.worktree:
@@ -1768,6 +1838,7 @@
remote = RemoteSpec(self.remote.name,
url=url,
+ pushUrl=self.remote.pushUrl,
review=self.remote.review,
revision=self.remote.revision)
subproject = Project(manifest=self.manifest,
@@ -1777,7 +1848,7 @@
objdir=objdir,
worktree=worktree,
relpath=relpath,
- revisionExpr=self.revisionExpr,
+ revisionExpr=rev,
revisionId=rev,
rebase=self.rebase,
groups=self.groups,
@@ -1820,23 +1891,17 @@
quiet=False,
alt_dir=None,
no_tags=False,
- prune=False):
+ prune=False,
+ depth=None):
is_sha1 = False
tag_name = None
- depth = None
-
# The depth should not be used when fetching to a mirror because
# it will result in a shallow repository that cannot be cloned or
# fetched from.
- if not self.manifest.IsMirror:
- if self.clone_depth:
- depth = self.clone_depth
- else:
- depth = self.manifest.manifestProject.config.GetString('repo.depth')
- # The repo project should never be synced with partial depth
- if self.relpath == '.repo/repo':
- depth = None
+ # The repo project should also never be synced with partial depth.
+ if self.manifest.IsMirror or self.relpath == '.repo/repo':
+ depth = None
if depth:
current_branch_only = True
@@ -1997,21 +2062,22 @@
os.remove(packed_refs)
self.bare_git.pack_refs('--all', '--prune')
- if is_sha1 and current_branch_only and self.upstream:
+ if is_sha1 and current_branch_only:
# We just synced the upstream given branch; verify we
# got what we wanted, else trigger a second run of all
# refs.
if not self._CheckForSha1():
- if not depth:
- # Avoid infinite recursion when depth is True (since depth implies
- # current_branch_only)
- return self._RemoteFetch(name=name, current_branch_only=False,
- initial=False, quiet=quiet, alt_dir=alt_dir)
- if self.clone_depth:
- self.clone_depth = None
+ if current_branch_only and depth:
+ # Sync the current branch only with depth set to None
return self._RemoteFetch(name=name,
current_branch_only=current_branch_only,
- initial=False, quiet=quiet, alt_dir=alt_dir)
+ initial=False, quiet=quiet, alt_dir=alt_dir,
+ depth=None)
+ else:
+ # Avoid infinite recursion: sync all branches with depth set to None
+ return self._RemoteFetch(name=name, current_branch_only=False,
+ initial=False, quiet=quiet, alt_dir=alt_dir,
+ depth=None)
return ok
@@ -2235,6 +2301,7 @@
for key in ['user.name', 'user.email']:
if m.Has(key, include_defaults=False):
self.config.SetString(key, m.GetString(key))
+ self.config.SetString('filter.lfs.smudge', 'git-lfs smudge --skip -- %f')
if self.manifest.IsMirror:
self.config.SetString('core.bare', 'true')
else:
@@ -2288,6 +2355,7 @@
if self.remote.url:
remote = self.GetRemote(self.remote.name)
remote.url = self.remote.url
+ remote.pushUrl = self.remote.pushUrl
remote.review = self.remote.review
remote.projectname = self.name
@@ -2332,6 +2400,7 @@
src = os.path.realpath(os.path.join(srcdir, name))
# Fail if the links are pointing to the wrong place
if src != dst:
+ _error('%s is different in %s vs %s', name, destdir, srcdir)
raise GitError('--force-sync not enabled; cannot overwrite a local '
'work tree. If you\'re comfortable with the '
'possibility of losing the work tree\'s git metadata,'
diff --git a/repo b/repo
index e5cb890..4293c79 100755
--- a/repo
+++ b/repo
@@ -23,10 +23,13 @@
# limitations under the License.
# increment this whenever we make important changes to this script
-VERSION = (1, 22)
+VERSION = (1, 23)
# increment this if the MAINTAINER_KEYS block is modified
KEYRING_VERSION = (1, 2)
+
+# Each individual key entry is created by using:
+# gpg --armor --export keyid
MAINTAINER_KEYS = """
Repo Maintainer <repo@android.kernel.org>
@@ -196,6 +199,9 @@
help='restrict manifest projects to ones with a specified '
'platform group [auto|all|none|linux|darwin|...]',
metavar='PLATFORM')
+group.add_option('--no-clone-bundle',
+ dest='no_clone_bundle', action='store_true',
+ help='disable use of /clone.bundle on HTTP/HTTPS')
# Tool
@@ -339,7 +345,11 @@
can_verify = True
dst = os.path.abspath(os.path.join(repodir, S_repo))
- _Clone(url, dst, opt.quiet)
+ _Clone(url, dst, opt.quiet, not opt.no_clone_bundle)
+
+ if not os.path.isfile('%s/repo' % dst):
+ _print("warning: '%s' does not look like a git-repo repository, is "
+ "REPO_URL set correctly?" % url, file=sys.stderr)
if can_verify and not opt.no_repo_verify:
rev = _Verify(dst, branch, opt.quiet)
@@ -432,7 +442,10 @@
sys.exit(1)
env = os.environ.copy()
- env['GNUPGHOME'] = gpg_dir.encode()
+ try:
+ env['GNUPGHOME'] = gpg_dir
+ except UnicodeEncodeError:
+ env['GNUPGHOME'] = gpg_dir.encode()
cmd = ['gpg', '--import']
try:
@@ -574,7 +587,7 @@
os.remove(path)
-def _Clone(url, local, quiet):
+def _Clone(url, local, quiet, clone_bundle):
"""Clones a git repository to a new subdirectory of repodir
"""
try:
@@ -604,7 +617,7 @@
_SetConfig(local,
'remote.origin.fetch',
'+refs/heads/*:refs/remotes/origin/*')
- if _DownloadBundle(url, local, quiet):
+ if clone_bundle and _DownloadBundle(url, local, quiet):
_ImportBundle(local)
_Fetch(url, local, 'origin', quiet)
@@ -638,7 +651,10 @@
_print(file=sys.stderr)
env = os.environ.copy()
- env['GNUPGHOME'] = gpg_dir.encode()
+ try:
+ env['GNUPGHOME'] = gpg_dir
+ except UnicodeEncodeError:
+ env['GNUPGHOME'] = gpg_dir.encode()
cmd = [GIT, 'tag', '-v', cur]
proc = subprocess.Popen(cmd,
@@ -841,7 +857,10 @@
try:
_Init(args, gitc_init=(cmd == 'gitc-init'))
except CloneFailure:
- shutil.rmtree(os.path.join(repodir, S_repo), ignore_errors=True)
+ path = os.path.join(repodir, S_repo)
+ _print("fatal: cloning the git-repo repository failed, will remove "
+ "'%s' " % path, file=sys.stderr)
+ shutil.rmtree(path, ignore_errors=True)
sys.exit(1)
repo_main, rel_repo_dir = _FindRepo()
else:
diff --git a/subcmds/abandon.py b/subcmds/abandon.py
index b94ccdd..6f78da7 100644
--- a/subcmds/abandon.py
+++ b/subcmds/abandon.py
@@ -16,6 +16,7 @@
from __future__ import print_function
import sys
from command import Command
+from collections import defaultdict
from git_command import git
from progress import Progress
@@ -23,49 +24,75 @@
common = True
helpSummary = "Permanently abandon a development branch"
helpUsage = """
-%prog <branchname> [<project>...]
+%prog [--all | <branchname>] [<project>...]
This subcommand permanently abandons a development branch by
deleting it (and all its history) from your local repository.
It is equivalent to "git branch -D <branchname>".
"""
+ def _Options(self, p):
+ p.add_option('--all',
+ dest='all', action='store_true',
+ help='delete all branches in all projects')
def Execute(self, opt, args):
- if not args:
+ if not opt.all and not args:
self.Usage()
- nb = args[0]
- if not git.check_ref_format('heads/%s' % nb):
- print("error: '%s' is not a valid name" % nb, file=sys.stderr)
- sys.exit(1)
+ if not opt.all:
+ nb = args[0]
+ if not git.check_ref_format('heads/%s' % nb):
+ print("error: '%s' is not a valid name" % nb, file=sys.stderr)
+ sys.exit(1)
+ else:
+ args.insert(0,None)
+ nb = "'All local branches'"
- nb = args[0]
- err = []
- success = []
+ err = defaultdict(list)
+ success = defaultdict(list)
all_projects = self.GetProjects(args[1:])
pm = Progress('Abandon %s' % nb, len(all_projects))
for project in all_projects:
pm.update()
- status = project.AbandonBranch(nb)
- if status is not None:
- if status:
- success.append(project)
- else:
- err.append(project)
+ if opt.all:
+ branches = project.GetBranches().keys()
+ else:
+ branches = [nb]
+
+ for name in branches:
+ status = project.AbandonBranch(name)
+ if status is not None:
+ if status:
+ success[name].append(project)
+ else:
+ err[name].append(project)
pm.end()
+ width = 25
+    for name in set(err.keys()) | set(success.keys()):
+ if width < len(name):
+ width = len(name)
+
if err:
- for p in err:
- print("error: %s/: cannot abandon %s" % (p.relpath, nb),
- file=sys.stderr)
+ for br in err.keys():
+ err_msg = "error: cannot abandon %s" %br
+ print(err_msg, file=sys.stderr)
+ for proj in err[br]:
+          print(' '*len(err_msg) + " | %s" % proj.relpath, file=sys.stderr)
sys.exit(1)
elif not success:
- print('error: no project has branch %s' % nb, file=sys.stderr)
+ print('error: no project has local branch(es) : %s' % nb,
+ file=sys.stderr)
sys.exit(1)
else:
- print('Abandoned in %d project(s):\n %s'
- % (len(success), '\n '.join(p.relpath for p in success)),
- file=sys.stderr)
+ print('Abandoned branches:', file=sys.stderr)
+ for br in success.keys():
+ if len(all_projects) > 1 and len(all_projects) == len(success[br]):
+          result = "all projects"
+ else:
+ result = "%s" % (
+ ('\n'+' '*width + '| ').join(p.relpath for p in success[br]))
+ print("%s%s| %s\n" % (br,' '*(width-len(br)), result),file=sys.stderr)
diff --git a/subcmds/init.py b/subcmds/init.py
index b8e3de5..45d69b7 100644
--- a/subcmds/init.py
+++ b/subcmds/init.py
@@ -61,6 +61,11 @@
directory when fetching from the server. This will make the sync
go a lot faster by reducing data traffic on the network.
+The --no-clone-bundle option disables any attempt to use
+$URL/clone.bundle to bootstrap a new Git repository from a
+resumable bundle file on a content delivery network. This
+may be necessary if there are problems with the local Python
+HTTP client or proxy configuration, but the Git binary works.
Switching Manifest Branches
---------------------------
@@ -113,6 +118,9 @@
help='restrict manifest projects to ones with a specified '
'platform group [auto|all|none|linux|darwin|...]',
metavar='PLATFORM')
+ g.add_option('--no-clone-bundle',
+ dest='no_clone_bundle', action='store_true',
+ help='disable use of /clone.bundle on HTTP/HTTPS')
# Tool
g = p.add_option_group('repo Version options')
@@ -222,7 +230,8 @@
'in another location.', file=sys.stderr)
sys.exit(1)
- if not m.Sync_NetworkHalf(is_new=is_new):
+ if not m.Sync_NetworkHalf(is_new=is_new, quiet=opt.quiet,
+ clone_bundle=not opt.no_clone_bundle):
r = m.GetRemote(m.remote.name)
print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
diff --git a/subcmds/start.py b/subcmds/start.py
index d1430a9..290b689 100644
--- a/subcmds/start.py
+++ b/subcmds/start.py
@@ -54,8 +54,7 @@
if not opt.all:
projects = args[1:]
if len(projects) < 1:
- print("error: at least one project must be specified", file=sys.stderr)
- sys.exit(1)
+ projects = ['.',] # start it in the local project by default
all_projects = self.GetProjects(projects,
missing_ok=bool(self.gitc_manifest))
diff --git a/subcmds/status.py b/subcmds/status.py
index 38c229b..60e26ff 100644
--- a/subcmds/status.py
+++ b/subcmds/status.py
@@ -89,8 +89,10 @@
p.add_option('-o', '--orphans',
dest='orphans', action='store_true',
help="include objects in working directory outside of repo projects")
+ p.add_option('-q', '--quiet', action='store_true',
+ help="only print the name of modified projects")
- def _StatusHelper(self, project, clean_counter, sem):
+ def _StatusHelper(self, project, clean_counter, sem, quiet):
"""Obtains the status for a specific project.
Obtains the status for a project, redirecting the output to
@@ -104,7 +106,7 @@
output: Where to output the status.
"""
try:
- state = project.PrintWorkTreeStatus()
+ state = project.PrintWorkTreeStatus(quiet=quiet)
if state == 'CLEAN':
next(clean_counter)
finally:
@@ -132,7 +134,7 @@
if opt.jobs == 1:
for project in all_projects:
- state = project.PrintWorkTreeStatus()
+ state = project.PrintWorkTreeStatus(quiet=opt.quiet)
if state == 'CLEAN':
next(counter)
else:
@@ -142,13 +144,13 @@
sem.acquire()
t = _threading.Thread(target=self._StatusHelper,
- args=(project, counter, sem))
+ args=(project, counter, sem, opt.quiet))
threads.append(t)
t.daemon = True
t.start()
for t in threads:
t.join()
- if len(all_projects) == next(counter):
+ if not opt.quiet and len(all_projects) == next(counter):
print('nothing to commit (working directory clean)')
if opt.orphans:
diff --git a/subcmds/sync.py b/subcmds/sync.py
index 9124a65..bbb166c 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -255,7 +255,7 @@
dest='repo_upgraded', action='store_true',
help=SUPPRESS_HELP)
- def _FetchProjectList(self, opt, projects, *args, **kwargs):
+ def _FetchProjectList(self, opt, projects, sem, *args, **kwargs):
"""Main function of the fetch threads when jobs are > 1.
Delegates most of the work to _FetchHelper.
@@ -263,15 +263,20 @@
Args:
opt: Program options returned from optparse. See _Options().
projects: Projects to fetch.
+ sem: We'll release() this semaphore when we exit so that another thread
+ can be started up.
*args, **kwargs: Remaining arguments to pass to _FetchHelper. See the
_FetchHelper docstring for details.
"""
- for project in projects:
- success = self._FetchHelper(opt, project, *args, **kwargs)
- if not success and not opt.force_broken:
- break
+ try:
+ for project in projects:
+ success = self._FetchHelper(opt, project, *args, **kwargs)
+ if not success and not opt.force_broken:
+ break
+ finally:
+ sem.release()
- def _FetchHelper(self, opt, project, lock, fetched, pm, sem, err_event):
+ def _FetchHelper(self, opt, project, lock, fetched, pm, err_event):
"""Fetch git objects for a single project.
Args:
@@ -283,8 +288,6 @@
(with our lock held).
pm: Instance of a Project object. We will call pm.update() (with our
lock held).
- sem: We'll release() this semaphore when we exit so that another thread
- can be started up.
err_event: We'll set this event in the case of an error (after printing
out info about the error).
@@ -340,7 +343,6 @@
finally:
if did_lock:
lock.release()
- sem.release()
return success
@@ -365,10 +367,10 @@
sem.acquire()
kwargs = dict(opt=opt,
projects=project_list,
+ sem=sem,
lock=lock,
fetched=fetched,
pm=pm,
- sem=sem,
err_event=err_event)
if self.jobs > 1:
t = _threading.Thread(target = self._FetchProjectList,
@@ -397,9 +399,12 @@
return fetched
def _GCProjects(self, projects):
- gitdirs = {}
+ gc_gitdirs = {}
for project in projects:
- gitdirs[project.gitdir] = project.bare_git
+ if len(project.manifest.GetProjectsWithName(project.name)) > 1:
+ print('Shared project %s found, disabling pruning.' % project.name)
+ project.bare_git.config('--replace-all', 'gc.pruneExpire', 'never')
+ gc_gitdirs[project.gitdir] = project.bare_git
has_dash_c = git_require((1, 7, 2))
if multiprocessing and has_dash_c:
@@ -409,7 +414,7 @@
jobs = min(self.jobs, cpu_count)
if jobs < 2:
- for bare_git in gitdirs.values():
+ for bare_git in gc_gitdirs.values():
bare_git.gc('--auto')
return
@@ -431,7 +436,7 @@
finally:
sem.release()
- for bare_git in gitdirs.values():
+ for bare_git in gc_gitdirs.values():
if err_event.isSet():
break
sem.acquire()
@@ -454,6 +459,65 @@
else:
self.manifest._Unload()
+ def _DeleteProject(self, path):
+ print('Deleting obsolete path %s' % path, file=sys.stderr)
+
+ # Delete the .git directory first, so we're less likely to have a partially
+ # working git repository around. There shouldn't be any git projects here,
+ # so rmtree works.
+ try:
+ shutil.rmtree(os.path.join(path, '.git'))
+ except OSError:
+ print('Failed to remove %s' % os.path.join(path, '.git'), file=sys.stderr)
+ print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
+ print(' remove manually, then run sync again', file=sys.stderr)
+ return -1
+
+ # Delete everything under the worktree, except for directories that contain
+ # another git project
+ dirs_to_remove = []
+ failed = False
+ for root, dirs, files in os.walk(path):
+ for f in files:
+ try:
+ os.remove(os.path.join(root, f))
+ except OSError:
+ print('Failed to remove %s' % os.path.join(root, f), file=sys.stderr)
+ failed = True
+ dirs[:] = [d for d in dirs
+ if not os.path.lexists(os.path.join(root, d, '.git'))]
+ dirs_to_remove += [os.path.join(root, d) for d in dirs
+ if os.path.join(root, d) not in dirs_to_remove]
+ for d in reversed(dirs_to_remove):
+ if os.path.islink(d):
+ try:
+ os.remove(d)
+ except OSError:
+ print('Failed to remove %s' % os.path.join(root, d), file=sys.stderr)
+ failed = True
+ elif len(os.listdir(d)) == 0:
+ try:
+ os.rmdir(d)
+ except OSError:
+ print('Failed to remove %s' % os.path.join(root, d), file=sys.stderr)
+ failed = True
+ continue
+ if failed:
+ print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
+ print(' remove manually, then run sync again', file=sys.stderr)
+ return -1
+
+ # Try deleting parent dirs if they are empty
+ project_dir = path
+ while project_dir != self.manifest.topdir:
+ if len(os.listdir(project_dir)) == 0:
+ os.rmdir(project_dir)
+ else:
+ break
+ project_dir = os.path.dirname(project_dir)
+
+ return 0
+
def UpdateProjectList(self):
new_project_paths = []
for project in self.GetProjects(None, missing_ok=True):
@@ -474,8 +538,8 @@
continue
if path not in new_project_paths:
# If the path has already been deleted, we don't need to do it
- if os.path.exists(self.manifest.topdir + '/' + path):
- gitdir = os.path.join(self.manifest.topdir, path, '.git')
+ gitdir = os.path.join(self.manifest.topdir, path, '.git')
+ if os.path.exists(gitdir):
project = Project(
manifest = self.manifest,
name = path,
@@ -494,18 +558,8 @@
print(' commit changes, then run sync again',
file=sys.stderr)
return -1
- else:
- print('Deleting obsolete path %s' % project.worktree,
- file=sys.stderr)
- shutil.rmtree(project.worktree)
- # Try deleting parent subdirs if they are empty
- project_dir = os.path.dirname(project.worktree)
- while project_dir != self.manifest.topdir:
- try:
- os.rmdir(project_dir)
- except OSError:
- break
- project_dir = os.path.dirname(project_dir)
+ elif self._DeleteProject(project.worktree):
+ return -1
new_project_paths.sort()
fd = open(file_path, 'w')
diff --git a/subcmds/upload.py b/subcmds/upload.py
index 674fc17..1172dad 100644
--- a/subcmds/upload.py
+++ b/subcmds/upload.py
@@ -454,9 +454,15 @@
if avail:
pending.append((project, avail))
- if pending and (not opt.bypass_hooks):
+ if not pending:
+ print("no branches ready for upload", file=sys.stderr)
+ return
+
+ if not opt.bypass_hooks:
hook = RepoHook('pre-upload', self.manifest.repo_hooks_project,
- self.manifest.topdir, abort_if_user_denies=True)
+ self.manifest.topdir,
+ self.manifest.manifestProject.GetRemote('origin').url,
+ abort_if_user_denies=True)
pending_proj_names = [project.name for (project, avail) in pending]
pending_worktrees = [project.worktree for (project, avail) in pending]
try:
@@ -472,9 +478,7 @@
cc = _SplitEmails(opt.cc)
people = (reviewers, cc)
- if not pending:
- print("no branches ready for upload", file=sys.stderr)
- elif len(pending) == 1 and len(pending[0][1]) == 1:
+ if len(pending) == 1 and len(pending[0][1]) == 1:
self._SingleBranch(opt, pending[0][1][0], people)
else:
self._MultipleBranches(opt, pending, people)