Merge "Fix UrlInsteadOf to handle multiple strings"
diff --git a/docs/manifest-format.txt b/docs/manifest-format.txt
index e48b75f..f187bfa 100644
--- a/docs/manifest-format.txt
+++ b/docs/manifest-format.txt
@@ -35,6 +35,7 @@
<!ATTLIST remote alias CDATA #IMPLIED>
<!ATTLIST remote fetch CDATA #REQUIRED>
<!ATTLIST remote review CDATA #IMPLIED>
+ <!ATTLIST remote revision CDATA #IMPLIED>
<!ELEMENT default (EMPTY)>
<!ATTLIST default remote IDREF #IMPLIED>
@@ -112,6 +113,10 @@
are uploaded to by `repo upload`. This attribute is optional;
if not specified then `repo upload` will not function.
+Attribute `revision`: Name of a Git branch (e.g. `master` or
+`refs/heads/master`). Remotes with their own revision will override
+the default revision.
+
Element default
---------------
@@ -208,7 +213,8 @@
(e.g. just "master") or absolute (e.g. "refs/heads/master").
Tags and/or explicit SHA-1s should work in theory, but have not
been extensively tested. If not supplied the revision given by
-the default element is used.
+the remote element is used if applicable, else the default
+element is used.
Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
When using `repo upload`, changes will be submitted for code
diff --git a/error.py b/error.py
index 7e52b01..ff948f9 100644
--- a/error.py
+++ b/error.py
@@ -24,6 +24,13 @@
class NoManifestException(Exception):
"""The required manifest does not exist.
"""
+ def __init__(self, path, reason):
+ super(NoManifestException, self).__init__()
+ self.path = path
+ self.reason = reason
+
+ def __str__(self):
+ return self.reason
class EditorError(Exception):
"""Unspecified error from the user's text editor.
diff --git a/git_config.py b/git_config.py
index c1a6b55..a667b3f 100644
--- a/git_config.py
+++ b/git_config.py
@@ -15,8 +15,8 @@
from __future__ import print_function
+import json
import os
-import pickle
import re
import subprocess
import sys
@@ -80,7 +80,7 @@
return cls(configfile = os.path.join(gitdir, 'config'),
defaults = defaults)
- def __init__(self, configfile, defaults=None, pickleFile=None):
+ def __init__(self, configfile, defaults=None, jsonFile=None):
self.file = configfile
self.defaults = defaults
self._cache_dict = None
@@ -88,12 +88,11 @@
self._remotes = {}
self._branches = {}
- if pickleFile is None:
- self._pickle = os.path.join(
+ self._json = jsonFile
+ if self._json is None:
+ self._json = os.path.join(
os.path.dirname(self.file),
- '.repopickle_' + os.path.basename(self.file))
- else:
- self._pickle = pickleFile
+ '.repo_' + os.path.basename(self.file) + '.json')
def Has(self, name, include_defaults = True):
"""Return true if this configuration file has the key.
@@ -248,50 +247,41 @@
return self._cache_dict
def _Read(self):
- d = self._ReadPickle()
+ d = self._ReadJson()
if d is None:
d = self._ReadGit()
- self._SavePickle(d)
+ self._SaveJson(d)
return d
- def _ReadPickle(self):
+ def _ReadJson(self):
try:
- if os.path.getmtime(self._pickle) \
+ if os.path.getmtime(self._json) \
<= os.path.getmtime(self.file):
- os.remove(self._pickle)
+ os.remove(self._json)
return None
except OSError:
return None
try:
- Trace(': unpickle %s', self.file)
- fd = open(self._pickle, 'rb')
+ Trace(': parsing %s', self.file)
+ fd = open(self._json)
try:
- return pickle.load(fd)
+ return json.load(fd)
finally:
fd.close()
- except EOFError:
- os.remove(self._pickle)
- return None
- except IOError:
- os.remove(self._pickle)
- return None
- except pickle.PickleError:
- os.remove(self._pickle)
+ except (IOError, ValueError):
+ os.remove(self._json)
return None
- def _SavePickle(self, cache):
+ def _SaveJson(self, cache):
try:
- fd = open(self._pickle, 'wb')
+ fd = open(self._json, 'w')
try:
- pickle.dump(cache, fd, pickle.HIGHEST_PROTOCOL)
+ json.dump(cache, fd, indent=2)
finally:
fd.close()
- except IOError:
- if os.path.exists(self._pickle):
- os.remove(self._pickle)
- except pickle.PickleError:
- if os.path.exists(self._pickle):
- os.remove(self._pickle)
+ except (IOError, TypeError):
+      if os.path.exists(self._json):
+ os.remove(self._json)
def _ReadGit(self):
"""
diff --git a/main.py b/main.py
index 3661776..72fb39b 100755
--- a/main.py
+++ b/main.py
@@ -129,8 +129,15 @@
file=sys.stderr)
return 1
- copts, cargs = cmd.OptionParser.parse_args(argv)
- copts = cmd.ReadEnvironmentOptions(copts)
+ try:
+ copts, cargs = cmd.OptionParser.parse_args(argv)
+ copts = cmd.ReadEnvironmentOptions(copts)
+ except NoManifestException as e:
+ print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
+ file=sys.stderr)
+ print('error: manifest missing or unreadable -- please run init',
+ file=sys.stderr)
+ return 1
if not gopts.no_pager and not isinstance(cmd, InteractiveCommand):
config = cmd.manifest.globalConfig
@@ -146,15 +153,13 @@
start = time.time()
try:
result = cmd.Execute(copts, cargs)
- except DownloadError as e:
- print('error: %s' % str(e), file=sys.stderr)
- result = 1
- except ManifestInvalidRevisionError as e:
- print('error: %s' % str(e), file=sys.stderr)
- result = 1
- except NoManifestException as e:
- print('error: manifest required for this command -- please run init',
- file=sys.stderr)
+ except (DownloadError, ManifestInvalidRevisionError,
+ NoManifestException) as e:
+ print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
+ file=sys.stderr)
+ if isinstance(e, NoManifestException):
+ print('error: manifest missing or unreadable -- please run init',
+ file=sys.stderr)
result = 1
except NoSuchProjectError as e:
if e.name:
diff --git a/manifest_xml.py b/manifest_xml.py
index 3c8fadd..fdc3177 100644
--- a/manifest_xml.py
+++ b/manifest_xml.py
@@ -63,12 +63,14 @@
alias=None,
fetch=None,
manifestUrl=None,
- review=None):
+ review=None,
+ revision=None):
self.name = name
self.fetchUrl = fetch
self.manifestUrl = manifestUrl
self.remoteAlias = alias
self.reviewUrl = review
+ self.revision = revision
self.resolvedFetchUrl = self._resolveFetchUrl()
def __eq__(self, other):
@@ -159,6 +161,8 @@
e.setAttribute('alias', r.remoteAlias)
if r.reviewUrl is not None:
e.setAttribute('review', r.reviewUrl)
+ if r.revision is not None:
+ e.setAttribute('revision', r.revision)
def Save(self, fd, peg_rev=False, peg_rev_upstream=True):
"""Write the current manifest out to the given file descriptor.
@@ -240,7 +244,8 @@
if d.remote:
remoteName = d.remote.remoteAlias or d.remote.name
if not d.remote or p.remote.name != remoteName:
- e.setAttribute('remote', p.remote.name)
+ remoteName = p.remote.name
+ e.setAttribute('remote', remoteName)
if peg_rev:
if self.IsMirror:
value = p.bare_git.rev_parse(p.revisionExpr + '^0')
@@ -252,8 +257,10 @@
# isn't our value, and the if the default doesn't already have that
# covered.
e.setAttribute('upstream', p.revisionExpr)
- elif not d.revisionExpr or p.revisionExpr != d.revisionExpr:
- e.setAttribute('revision', p.revisionExpr)
+ else:
+ revision = self.remotes[remoteName].revision or d.revisionExpr
+ if not revision or revision != p.revisionExpr:
+ e.setAttribute('revision', p.revisionExpr)
for c in p.copyfiles:
ce = doc.createElement('copyfile')
@@ -261,6 +268,12 @@
ce.setAttribute('dest', c.dest)
e.appendChild(ce)
+ for l in p.linkfiles:
+ le = doc.createElement('linkfile')
+ le.setAttribute('src', l.src)
+ le.setAttribute('dest', l.dest)
+ e.appendChild(le)
+
default_groups = ['all', 'name:%s' % p.name, 'path:%s' % p.relpath]
egroups = [g for g in p.groups if g not in default_groups]
if egroups:
@@ -304,7 +317,7 @@
@property
def projects(self):
self._Load()
- return self._paths.values()
+ return list(self._paths.values())
@property
def remotes(self):
@@ -586,8 +599,11 @@
review = node.getAttribute('review')
if review == '':
review = None
+ revision = node.getAttribute('revision')
+ if revision == '':
+ revision = None
manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
- return _XmlRemote(name, alias, fetch, manifestUrl, review)
+ return _XmlRemote(name, alias, fetch, manifestUrl, review, revision)
def _ParseDefault(self, node):
"""
@@ -680,7 +696,7 @@
raise ManifestParseError("no remote for project %s within %s" %
(name, self.manifestFile))
- revisionExpr = node.getAttribute('revision')
+ revisionExpr = node.getAttribute('revision') or remote.revision
if not revisionExpr:
revisionExpr = self._default.revisionExpr
if not revisionExpr:
@@ -765,6 +781,8 @@
for n in node.childNodes:
if n.nodeName == 'copyfile':
self._ParseCopyFile(project, n)
+ if n.nodeName == 'linkfile':
+ self._ParseLinkFile(project, n)
if n.nodeName == 'annotation':
self._ParseAnnotation(project, n)
if n.nodeName == 'project':
@@ -814,6 +832,14 @@
# dest is relative to the top of the tree
project.AddCopyFile(src, dest, os.path.join(self.topdir, dest))
+ def _ParseLinkFile(self, project, node):
+ src = self._reqatt(node, 'src')
+ dest = self._reqatt(node, 'dest')
+ if not self.IsMirror:
+ # src is project relative;
+ # dest is relative to the top of the tree
+ project.AddLinkFile(src, dest, os.path.join(self.topdir, dest))
+
def _ParseAnnotation(self, project, node):
name = self._reqatt(node, 'name')
value = self._reqatt(node, 'value')
@@ -856,10 +882,8 @@
fromProjects = self.paths
toProjects = manifest.paths
- fromKeys = fromProjects.keys()
- fromKeys.sort()
- toKeys = toProjects.keys()
- toKeys.sort()
+ fromKeys = sorted(fromProjects.keys())
+ toKeys = sorted(toProjects.keys())
diff = {'added': [], 'removed': [], 'changed': [], 'unreachable': []}
diff --git a/project.py b/project.py
index d07b521..460bf31 100644
--- a/project.py
+++ b/project.py
@@ -231,14 +231,40 @@
except IOError:
_error('Cannot copy file %s to %s', src, dest)
+class _LinkFile:
+ def __init__(self, src, dest, abssrc, absdest):
+ self.src = src
+ self.dest = dest
+ self.abs_src = abssrc
+ self.abs_dest = absdest
+
+ def _Link(self):
+ src = self.abs_src
+ dest = self.abs_dest
+ # link file if it does not exist or is out of date
+ if not os.path.islink(dest) or os.readlink(dest) != src:
+ try:
+ # remove existing file first, since it might be read-only
+ if os.path.exists(dest):
+ os.remove(dest)
+ else:
+ dest_dir = os.path.dirname(dest)
+ if not os.path.isdir(dest_dir):
+ os.makedirs(dest_dir)
+ os.symlink(src, dest)
+ except IOError:
+ _error('Cannot link file %s to %s', src, dest)
+
class RemoteSpec(object):
def __init__(self,
name,
url = None,
- review = None):
+ review = None,
+ revision = None):
self.name = name
self.url = url
self.review = review
+ self.revision = revision
class RepoHook(object):
"""A RepoHook contains information about a script to run as a hook.
@@ -414,7 +440,8 @@
# and convert to a HookError w/ just the failing traceback.
context = {}
try:
- execfile(self._script_fullpath, context)
+ exec(compile(open(self._script_fullpath).read(),
+ self._script_fullpath, 'exec'), context)
except Exception:
raise HookError('%s\nFailed to import %s hook; see traceback above.' % (
traceback.format_exc(), self._hook_type))
@@ -555,6 +582,7 @@
self.snapshots = {}
self.copyfiles = []
+ self.linkfiles = []
self.annotations = []
self.config = GitConfig.ForRepository(
gitdir = self.gitdir,
@@ -1040,7 +1068,7 @@
except OSError as e:
print("warn: Cannot remove archive %s: "
"%s" % (tarpath, str(e)), file=sys.stderr)
- self._CopyFiles()
+ self._CopyAndLinkFiles()
return True
if is_new is None:
@@ -1078,17 +1106,12 @@
elif self.manifest.default.sync_c:
current_branch_only = True
- is_sha1 = False
- if ID_RE.match(self.revisionExpr) is not None:
- is_sha1 = True
- if is_sha1 and self._CheckForSha1():
- # Don't need to fetch since we already have this revision
- return True
-
- if not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
- current_branch_only=current_branch_only,
- no_tags=no_tags):
- return False
+ has_sha1 = ID_RE.match(self.revisionExpr) and self._CheckForSha1()
+    if (not has_sha1  # Need to fetch since we don't already have this revision
+ and not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
+ current_branch_only=current_branch_only,
+ no_tags=no_tags)):
+ return False
if self.worktree:
self._InitMRef()
@@ -1103,9 +1126,11 @@
def PostRepoUpgrade(self):
self._InitHooks()
- def _CopyFiles(self):
+ def _CopyAndLinkFiles(self):
for copyfile in self.copyfiles:
copyfile._Copy()
+ for linkfile in self.linkfiles:
+ linkfile._Link()
def GetCommitRevisionId(self):
"""Get revisionId of a commit.
@@ -1152,7 +1177,7 @@
def _doff():
self._FastForward(revid)
- self._CopyFiles()
+ self._CopyAndLinkFiles()
head = self.work_git.GetHead()
if head.startswith(R_HEADS):
@@ -1188,7 +1213,7 @@
except GitError as e:
syncbuf.fail(self, e)
return
- self._CopyFiles()
+ self._CopyAndLinkFiles()
return
if head == revid:
@@ -1210,7 +1235,7 @@
except GitError as e:
syncbuf.fail(self, e)
return
- self._CopyFiles()
+ self._CopyAndLinkFiles()
return
upstream_gain = self._revlist(not_rev(HEAD), revid)
@@ -1283,12 +1308,12 @@
if cnt_mine > 0 and self.rebase:
def _dorebase():
self._Rebase(upstream = '%s^1' % last_mine, onto = revid)
- self._CopyFiles()
+ self._CopyAndLinkFiles()
syncbuf.later2(self, _dorebase)
elif local_changes:
try:
self._ResetHard(revid)
- self._CopyFiles()
+ self._CopyAndLinkFiles()
except GitError as e:
syncbuf.fail(self, e)
return
@@ -1301,6 +1326,12 @@
abssrc = os.path.join(self.worktree, src)
self.copyfiles.append(_CopyFile(src, dest, abssrc, absdest))
+ def AddLinkFile(self, src, dest, absdest):
+ # dest should already be an absolute path, but src is project relative
+ # make src an absolute path
+ abssrc = os.path.join(self.worktree, src)
+ self.linkfiles.append(_LinkFile(src, dest, abssrc, absdest))
+
def AddAnnotation(self, name, value, keep):
self.annotations.append(_Annotation(name, value, keep))
@@ -1629,7 +1660,8 @@
remote = RemoteSpec(self.remote.name,
url = url,
- review = self.remote.review)
+ review = self.remote.review,
+ revision = self.remote.revision)
subproject = Project(manifest = self.manifest,
name = name,
remote = remote,
@@ -1674,6 +1706,7 @@
if command.Wait() != 0:
raise GitError('git archive %s: %s' % (self.name, command.stderr))
+
def _RemoteFetch(self, name=None,
current_branch_only=False,
initial=False,
@@ -1683,11 +1716,17 @@
is_sha1 = False
tag_name = None
+ depth = None
- if self.clone_depth:
- depth = self.clone_depth
- else:
- depth = self.manifest.manifestProject.config.GetString('repo.depth')
+ # The depth should not be used when fetching to a mirror because
+ # it will result in a shallow repository that cannot be cloned or
+ # fetched from.
+ if not self.manifest.IsMirror:
+ if self.clone_depth:
+ depth = self.clone_depth
+ else:
+ depth = self.manifest.manifestProject.config.GetString('repo.depth')
+
if depth:
current_branch_only = True
@@ -1763,26 +1802,37 @@
cmd.append('--update-head-ok')
cmd.append(name)
+ # If using depth then we should not get all the tags since they may
+ # be outside of the depth.
+ if no_tags or depth:
+ cmd.append('--no-tags')
+ else:
+ cmd.append('--tags')
+
+ spec = []
if not current_branch_only:
# Fetch whole repo
- # If using depth then we should not get all the tags since they may
- # be outside of the depth.
- if no_tags or depth:
- cmd.append('--no-tags')
- else:
- cmd.append('--tags')
-
- cmd.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
+ spec.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
elif tag_name is not None:
- cmd.append('tag')
- cmd.append(tag_name)
+ spec.append('tag')
+ spec.append(tag_name)
else:
branch = self.revisionExpr
if is_sha1:
branch = self.upstream
if branch.startswith(R_HEADS):
branch = branch[len(R_HEADS):]
- cmd.append(str((u'+refs/heads/%s:' % branch) + remote.ToLocal('refs/heads/%s' % branch)))
+ spec.append(str((u'+refs/heads/%s:' % branch) + remote.ToLocal('refs/heads/%s' % branch)))
+ cmd.extend(spec)
+
+ shallowfetch = self.config.GetString('repo.shallowfetch')
+ if shallowfetch and shallowfetch != ' '.join(spec):
+ GitCommand(self, ['fetch', '--unshallow', name] + shallowfetch.split(),
+ bare=True, ssh_proxy=ssh_proxy).Wait()
+ if depth:
+ self.config.SetString('repo.shallowfetch', ' '.join(spec))
+ else:
+ self.config.SetString('repo.shallowfetch', None)
ok = False
for _i in range(2):
@@ -1801,7 +1851,7 @@
# Ensure that some refs exist. Otherwise, we probably aren't looking
# at a real git repository and may have a bad url.
if not self.bare_ref.all:
- ok = False
+ ok = False
if alt_dir:
if old_packed != '':
@@ -2147,7 +2197,7 @@
symlink_dirs = ['hooks', 'objects', 'rr-cache', 'svn']
if share_refs:
# These objects can only be used by a single working tree.
- symlink_files += ['config', 'packed-refs']
+ symlink_files += ['config', 'packed-refs', 'shallow']
symlink_dirs += ['logs', 'refs']
to_symlink = symlink_files + symlink_dirs
@@ -2167,6 +2217,14 @@
if name in symlink_dirs and not os.path.lexists(src):
os.makedirs(src)
+ # If the source file doesn't exist, ensure the destination
+ # file doesn't either.
+ if name in symlink_files and not os.path.lexists(src):
+ try:
+ os.remove(dst)
+ except OSError:
+ pass
+
if name in to_symlink:
os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
elif copy_all and not os.path.islink(dst):
@@ -2195,7 +2253,7 @@
if GitCommand(self, cmd).Wait() != 0:
raise GitError("cannot initialize work tree")
- self._CopyFiles()
+ self._CopyAndLinkFiles()
def _gitdir_path(self, path):
return os.path.realpath(os.path.join(self.gitdir, path))
@@ -2287,8 +2345,8 @@
out = iter(out[:-1].split('\0')) # pylint: disable=W1401
while out:
try:
- info = out.next()
- path = out.next()
+ info = next(out)
+ path = next(out)
except StopIteration:
break
@@ -2314,7 +2372,7 @@
info = _Info(path, *info)
if info.status in ('R', 'C'):
info.src_path = info.path
- info.path = out.next()
+ info.path = next(out)
r[info.path] = info
return r
finally:
@@ -2327,8 +2385,8 @@
path = os.path.join(self._project.worktree, '.git', HEAD)
try:
fd = open(path, 'rb')
- except IOError:
- raise NoManifestException(path)
+ except IOError as e:
+ raise NoManifestException(path, str(e))
try:
line = fd.read()
finally:
diff --git a/repo b/repo
index 768f11f..3fd0166 100755
--- a/repo
+++ b/repo
@@ -114,6 +114,7 @@
import optparse
import os
import re
+import shutil
import stat
import subprocess
import sys
@@ -138,10 +139,6 @@
# Python version check
ver = sys.version_info
-if ver[0] == 3:
- _print('warning: Python 3 support is currently experimental. YMMV.\n'
- 'Please use Python 2.6 - 2.7 instead.',
- file=sys.stderr)
if (ver[0], ver[1]) < MIN_PYTHON_VERSION:
_print('error: Python version %s unsupported.\n'
'Please use Python 2.6 - 2.7 instead.'
@@ -741,12 +738,7 @@
try:
_Init(args)
except CloneFailure:
- for root, dirs, files in os.walk(repodir, topdown=False):
- for name in files:
- os.remove(os.path.join(root, name))
- for name in dirs:
- os.rmdir(os.path.join(root, name))
- os.rmdir(repodir)
+ shutil.rmtree(repodir, ignore_errors=True)
sys.exit(1)
repo_main, rel_repo_dir = _FindRepo()
else:
@@ -772,4 +764,8 @@
if __name__ == '__main__':
+ if ver[0] == 3:
+ _print('warning: Python 3 support is currently experimental. YMMV.\n'
+ 'Please use Python 2.6 - 2.7 instead.',
+ file=sys.stderr)
main(sys.argv[1:])
diff --git a/subcmds/forall.py b/subcmds/forall.py
index e2a420a..7771ec1 100644
--- a/subcmds/forall.py
+++ b/subcmds/forall.py
@@ -14,7 +14,9 @@
# limitations under the License.
from __future__ import print_function
+import errno
import fcntl
+import multiprocessing
import re
import os
import select
@@ -31,6 +33,7 @@
'log',
]
+
class ForallColoring(Coloring):
def __init__(self, config):
Coloring.__init__(self, config, 'forall')
@@ -87,6 +90,12 @@
REPO_RREV is the name of the revision from the manifest, exactly
as written in the manifest.
+REPO_COUNT is the total number of projects being iterated.
+
+REPO_I is the current (1-based) iteration count. Can be used in
+conjunction with REPO_COUNT to add a simple progress indicator to your
+command.
+
REPO__* are any extra environment variables, specified by the
"annotation" element under any project element. This can be useful
for differentiating trees based on user-specific criteria, or simply
@@ -126,9 +135,31 @@
g.add_option('-v', '--verbose',
dest='verbose', action='store_true',
help='Show command error messages')
+ g.add_option('-j', '--jobs',
+ dest='jobs', action='store', type='int', default=1,
+ help='number of commands to execute simultaneously')
def WantPager(self, opt):
- return opt.project_header
+ return opt.project_header and opt.jobs == 1
+
+ def _SerializeProject(self, project):
+ """ Serialize a project._GitGetByExec instance.
+
+ project._GitGetByExec is not pickle-able. Instead of trying to pass it
+ around between processes, make a dict ourselves containing only the
+ attributes that we need.
+
+ """
+ return {
+ 'name': project.name,
+ 'relpath': project.relpath,
+ 'remote_name': project.remote.name,
+ 'lrev': project.GetRevisionId(),
+ 'rrev': project.revisionExpr,
+ 'annotations': dict((a.name, a.value) for a in project.annotations),
+ 'gitdir': project.gitdir,
+ 'worktree': project.worktree,
+ }
def Execute(self, opt, args):
if not opt.command:
@@ -167,123 +198,165 @@
# pylint: enable=W0631
mirror = self.manifest.IsMirror
- out = ForallColoring(self.manifest.manifestProject.config)
- out.redirect(sys.stdout)
-
rc = 0
- first = True
if not opt.regex:
projects = self.GetProjects(args)
else:
projects = self.FindProjects(args)
- for project in projects:
- env = os.environ.copy()
- def setenv(name, val):
- if val is None:
- val = ''
- env[name] = val.encode()
+ os.environ['REPO_COUNT'] = str(len(projects))
- setenv('REPO_PROJECT', project.name)
- setenv('REPO_PATH', project.relpath)
- setenv('REPO_REMOTE', project.remote.name)
- setenv('REPO_LREV', project.GetRevisionId())
- setenv('REPO_RREV', project.revisionExpr)
- for a in project.annotations:
- setenv("REPO__%s" % (a.name), a.value)
-
- if mirror:
- setenv('GIT_DIR', project.gitdir)
- cwd = project.gitdir
- else:
- cwd = project.worktree
-
- if not os.path.exists(cwd):
- if (opt.project_header and opt.verbose) \
- or not opt.project_header:
- print('skipping %s/' % project.relpath, file=sys.stderr)
- continue
-
- if opt.project_header:
- stdin = subprocess.PIPE
- stdout = subprocess.PIPE
- stderr = subprocess.PIPE
- else:
- stdin = None
- stdout = None
- stderr = None
-
- p = subprocess.Popen(cmd,
- cwd = cwd,
- shell = shell,
- env = env,
- stdin = stdin,
- stdout = stdout,
- stderr = stderr)
-
- if opt.project_header:
- class sfd(object):
- def __init__(self, fd, dest):
- self.fd = fd
- self.dest = dest
- def fileno(self):
- return self.fd.fileno()
-
- empty = True
- errbuf = ''
-
- p.stdin.close()
- s_in = [sfd(p.stdout, sys.stdout),
- sfd(p.stderr, sys.stderr)]
-
- for s in s_in:
- flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
- fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
-
- while s_in:
- in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
- for s in in_ready:
- buf = s.fd.read(4096)
- if not buf:
- s.fd.close()
- s_in.remove(s)
- continue
-
- if not opt.verbose:
- if s.fd != p.stdout:
- errbuf += buf
- continue
-
- if empty:
- if first:
- first = False
- else:
- out.nl()
-
- if mirror:
- project_header_path = project.name
- else:
- project_header_path = project.relpath
- out.project('project %s/', project_header_path)
- out.nl()
- out.flush()
- if errbuf:
- sys.stderr.write(errbuf)
- sys.stderr.flush()
- errbuf = ''
- empty = False
-
- s.dest.write(buf)
- s.dest.flush()
-
- r = p.wait()
- if r != 0:
- if r != rc:
- rc = r
- if opt.abort_on_errors:
- print("error: %s: Aborting due to previous error" % project.relpath,
- file=sys.stderr)
- sys.exit(r)
+ pool = multiprocessing.Pool(opt.jobs)
+ try:
+ config = self.manifest.manifestProject.config
+ results_it = pool.imap(
+ DoWorkWrapper,
+ [[mirror, opt, cmd, shell, cnt, config, self._SerializeProject(p)]
+ for cnt, p in enumerate(projects)]
+ )
+ pool.close()
+ for r in results_it:
+ rc = rc or r
+ if r != 0 and opt.abort_on_errors:
+ raise Exception('Aborting due to previous error')
+ except (KeyboardInterrupt, WorkerKeyboardInterrupt):
+ # Catch KeyboardInterrupt raised inside and outside of workers
+ print('Interrupted - terminating the pool')
+ pool.terminate()
+ rc = rc or errno.EINTR
+ except Exception as e:
+ # Catch any other exceptions raised
+ print('Got an error, terminating the pool: %r' % e,
+ file=sys.stderr)
+ pool.terminate()
+ rc = rc or getattr(e, 'errno', 1)
+ finally:
+ pool.join()
if rc != 0:
sys.exit(rc)
+
+
+class WorkerKeyboardInterrupt(Exception):
+ """ Keyboard interrupt exception for worker processes. """
+ pass
+
+
+def DoWorkWrapper(args):
+ """ A wrapper around the DoWork() method.
+
+ Catch the KeyboardInterrupt exceptions here and re-raise them as a different,
+ ``Exception``-based exception to stop it flooding the console with stacktraces
+ and making the parent hang indefinitely.
+
+ """
+ project = args.pop()
+ try:
+ return DoWork(project, *args)
+ except KeyboardInterrupt:
+ print('%s: Worker interrupted' % project['name'])
+ raise WorkerKeyboardInterrupt()
+
+
+def DoWork(project, mirror, opt, cmd, shell, cnt, config):
+ env = os.environ.copy()
+ def setenv(name, val):
+ if val is None:
+ val = ''
+ env[name] = val.encode()
+
+ setenv('REPO_PROJECT', project['name'])
+ setenv('REPO_PATH', project['relpath'])
+ setenv('REPO_REMOTE', project['remote_name'])
+ setenv('REPO_LREV', project['lrev'])
+ setenv('REPO_RREV', project['rrev'])
+ setenv('REPO_I', str(cnt + 1))
+ for name in project['annotations']:
+ setenv("REPO__%s" % (name), project['annotations'][name])
+
+ if mirror:
+ setenv('GIT_DIR', project['gitdir'])
+ cwd = project['gitdir']
+ else:
+ cwd = project['worktree']
+
+ if not os.path.exists(cwd):
+ if (opt.project_header and opt.verbose) \
+ or not opt.project_header:
+ print('skipping %s/' % project['relpath'], file=sys.stderr)
+ return
+
+ if opt.project_header:
+ stdin = subprocess.PIPE
+ stdout = subprocess.PIPE
+ stderr = subprocess.PIPE
+ else:
+ stdin = None
+ stdout = None
+ stderr = None
+
+ p = subprocess.Popen(cmd,
+ cwd=cwd,
+ shell=shell,
+ env=env,
+ stdin=stdin,
+ stdout=stdout,
+ stderr=stderr)
+
+ if opt.project_header:
+ out = ForallColoring(config)
+ out.redirect(sys.stdout)
+ class sfd(object):
+ def __init__(self, fd, dest):
+ self.fd = fd
+ self.dest = dest
+ def fileno(self):
+ return self.fd.fileno()
+
+ empty = True
+ errbuf = ''
+
+ p.stdin.close()
+ s_in = [sfd(p.stdout, sys.stdout),
+ sfd(p.stderr, sys.stderr)]
+
+ for s in s_in:
+ flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
+ fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
+
+ while s_in:
+ in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
+ for s in in_ready:
+ buf = s.fd.read(4096)
+ if not buf:
+ s.fd.close()
+ s_in.remove(s)
+ continue
+
+ if not opt.verbose:
+ if s.fd != p.stdout:
+ errbuf += buf
+ continue
+
+ if empty and out:
+ if not cnt == 0:
+ out.nl()
+
+ if mirror:
+ project_header_path = project['name']
+ else:
+ project_header_path = project['relpath']
+ out.project('project %s/', project_header_path)
+ out.nl()
+ out.flush()
+ if errbuf:
+ sys.stderr.write(errbuf)
+ sys.stderr.flush()
+ errbuf = ''
+ empty = False
+
+ s.dest.write(buf)
+ s.dest.flush()
+
+ r = p.wait()
+ return r
diff --git a/subcmds/status.py b/subcmds/status.py
index 41c4429..b42675e 100644
--- a/subcmds/status.py
+++ b/subcmds/status.py
@@ -113,7 +113,7 @@
try:
state = project.PrintWorkTreeStatus(output)
if state == 'CLEAN':
- clean_counter.next()
+ next(clean_counter)
finally:
sem.release()
@@ -141,7 +141,7 @@
for project in all_projects:
state = project.PrintWorkTreeStatus()
if state == 'CLEAN':
- counter.next()
+ next(counter)
else:
sem = _threading.Semaphore(opt.jobs)
threads_and_output = []
@@ -164,7 +164,7 @@
t.join()
output.dump(sys.stdout)
output.close()
- if len(all_projects) == counter.next():
+ if len(all_projects) == next(counter):
print('nothing to commit (working directory clean)')
if opt.orphans:
diff --git a/subcmds/sync.py b/subcmds/sync.py
index a0a6896..6f77310 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -14,10 +14,10 @@
# limitations under the License.
from __future__ import print_function
+import json
import netrc
from optparse import SUPPRESS_HELP
import os
-import pickle
import re
import shutil
import socket
@@ -760,7 +760,7 @@
_ALPHA = 0.5
def __init__(self, manifest):
- self._path = os.path.join(manifest.repodir, '.repopickle_fetchtimes')
+ self._path = os.path.join(manifest.repodir, '.repo_fetchtimes.json')
self._times = None
self._seen = set()
@@ -779,22 +779,17 @@
def _Load(self):
if self._times is None:
try:
- f = open(self._path, 'rb')
- except IOError:
- self._times = {}
- return self._times
- try:
+ f = open(self._path)
try:
- self._times = pickle.load(f)
- except IOError:
- try:
- os.remove(self._path)
- except OSError:
- pass
- self._times = {}
- finally:
- f.close()
- return self._times
+ self._times = json.load(f)
+ finally:
+ f.close()
+ except (IOError, ValueError):
+ try:
+ os.remove(self._path)
+ except OSError:
+ pass
+ self._times = {}
def Save(self):
if self._times is None:
@@ -808,13 +803,13 @@
del self._times[name]
try:
- f = open(self._path, 'wb')
+ f = open(self._path, 'w')
try:
- pickle.dump(self._times, f)
- except (IOError, OSError, pickle.PickleError):
- try:
- os.remove(self._path)
- except OSError:
- pass
- finally:
- f.close()
+ json.dump(self._times, f, indent=2)
+ finally:
+ f.close()
+ except (IOError, TypeError):
+ try:
+ os.remove(self._path)
+ except OSError:
+ pass
diff --git a/subcmds/upload.py b/subcmds/upload.py
index e2fa261..0ee36df 100644
--- a/subcmds/upload.py
+++ b/subcmds/upload.py
@@ -25,10 +25,12 @@
from project import RepoHook
from pyversion import is_python3
+# pylint:disable=W0622
if not is_python3():
- # pylint:disable=W0622
input = raw_input
- # pylint:enable=W0622
+else:
+ unicode = str
+# pylint:enable=W0622
UNUSUAL_COMMIT_THRESHOLD = 5