Merge "Provide detail print-out when not all projects of a branch are current."
diff --git a/docs/manifest-format.txt b/docs/manifest-format.txt
index e48b75f..28a21bb 100644
--- a/docs/manifest-format.txt
+++ b/docs/manifest-format.txt
@@ -35,6 +35,7 @@
<!ATTLIST remote alias CDATA #IMPLIED>
<!ATTLIST remote fetch CDATA #REQUIRED>
<!ATTLIST remote review CDATA #IMPLIED>
+ <!ATTLIST remote revision CDATA #IMPLIED>
<!ELEMENT default (EMPTY)>
<!ATTLIST default remote IDREF #IMPLIED>
@@ -112,6 +113,10 @@
are uploaded to by `repo upload`. This attribute is optional;
if not specified then `repo upload` will not function.
+Attribute `revision`: Name of a Git branch (e.g. `master` or
+`refs/heads/master`). Remotes with their own revision will override
+the default revision.
+
Element default
---------------
@@ -132,14 +137,14 @@
this value. If this value is not set, projects will use `revision`
by default instead.
-Attribute `sync_j`: Number of parallel jobs to use when synching.
+Attribute `sync-j`: Number of parallel jobs to use when synching.
-Attribute `sync_c`: Set to true to only sync the given Git
+Attribute `sync-c`: Set to true to only sync the given Git
branch (specified in the `revision` attribute) rather than the
-whole ref space. Project elements lacking a sync_c element of
+whole ref space. Project elements lacking a sync-c element of
their own will use this value.
-Attribute `sync_s`: Set to true to also sync sub-projects.
+Attribute `sync-s`: Set to true to also sync sub-projects.
Element manifest-server
@@ -208,7 +213,8 @@
(e.g. just "master") or absolute (e.g. "refs/heads/master").
Tags and/or explicit SHA-1s should work in theory, but have not
been extensively tested. If not supplied the revision given by
-the default element is used.
+the remote element is used if applicable, else the default
+element is used.
Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
When using `repo upload`, changes will be submitted for code
@@ -226,11 +232,11 @@
If the project has a parent element, the `name` and `path` here
are the prefixed ones.
-Attribute `sync_c`: Set to true to only sync the given Git
+Attribute `sync-c`: Set to true to only sync the given Git
branch (specified in the `revision` attribute) rather than the
whole ref space.
-Attribute `sync_s`: Set to true to also sync sub-projects.
+Attribute `sync-s`: Set to true to also sync sub-projects.
Attribute `upstream`: Name of the Git branch in which a sha1
can be found. Used when syncing a revision locked manifest in
diff --git a/git_command.py b/git_command.py
index 354fc71..53b3e75 100644
--- a/git_command.py
+++ b/git_command.py
@@ -80,13 +80,13 @@
def version(self):
p = GitCommand(None, ['--version'], capture_stdout=True)
if p.Wait() == 0:
- return p.stdout
+ return p.stdout.decode('utf-8')
return None
def version_tuple(self):
global _git_version
if _git_version is None:
- ver_str = git.version().decode('utf-8')
+ ver_str = git.version()
_git_version = Wrapper().ParseGitVersion(ver_str)
if _git_version is None:
print('fatal: "%s" unsupported' % ver_str, file=sys.stderr)
diff --git a/git_config.py b/git_config.py
index 32879ec..aa07d1b 100644
--- a/git_config.py
+++ b/git_config.py
@@ -15,8 +15,8 @@
from __future__ import print_function
+import json
import os
-import pickle
import re
import subprocess
import sys
@@ -80,7 +80,7 @@
return cls(configfile = os.path.join(gitdir, 'config'),
defaults = defaults)
- def __init__(self, configfile, defaults=None, pickleFile=None):
+ def __init__(self, configfile, defaults=None, jsonFile=None):
self.file = configfile
self.defaults = defaults
self._cache_dict = None
@@ -88,12 +88,11 @@
self._remotes = {}
self._branches = {}
- if pickleFile is None:
- self._pickle = os.path.join(
+ self._json = jsonFile
+ if self._json is None:
+ self._json = os.path.join(
os.path.dirname(self.file),
- '.repopickle_' + os.path.basename(self.file))
- else:
- self._pickle = pickleFile
+ '.repo_' + os.path.basename(self.file) + '.json')
def Has(self, name, include_defaults = True):
"""Return true if this configuration file has the key.
@@ -217,9 +216,9 @@
"""Resolve any url.*.insteadof references.
"""
for new_url in self.GetSubSections('url'):
- old_url = self.GetString('url.%s.insteadof' % new_url)
- if old_url is not None and url.startswith(old_url):
- return new_url + url[len(old_url):]
+ for old_url in self.GetString('url.%s.insteadof' % new_url, True):
+ if old_url is not None and url.startswith(old_url):
+ return new_url + url[len(old_url):]
return url
@property
@@ -248,50 +247,41 @@
return self._cache_dict
def _Read(self):
- d = self._ReadPickle()
+ d = self._ReadJson()
if d is None:
d = self._ReadGit()
- self._SavePickle(d)
+ self._SaveJson(d)
return d
- def _ReadPickle(self):
+ def _ReadJson(self):
try:
- if os.path.getmtime(self._pickle) \
+ if os.path.getmtime(self._json) \
<= os.path.getmtime(self.file):
- os.remove(self._pickle)
+ os.remove(self._json)
return None
except OSError:
return None
try:
- Trace(': unpickle %s', self.file)
- fd = open(self._pickle, 'rb')
+ Trace(': parsing %s', self.file)
+ fd = open(self._json)
try:
- return pickle.load(fd)
+ return json.load(fd)
finally:
fd.close()
- except EOFError:
- os.remove(self._pickle)
- return None
- except IOError:
- os.remove(self._pickle)
- return None
- except pickle.PickleError:
- os.remove(self._pickle)
+ except (IOError, ValueError):
+ os.remove(self._json)
return None
- def _SavePickle(self, cache):
+ def _SaveJson(self, cache):
try:
- fd = open(self._pickle, 'wb')
+ fd = open(self._json, 'w')
try:
- pickle.dump(cache, fd, pickle.HIGHEST_PROTOCOL)
+ json.dump(cache, fd, indent=2)
finally:
fd.close()
- except IOError:
- if os.path.exists(self._pickle):
- os.remove(self._pickle)
- except pickle.PickleError:
- if os.path.exists(self._pickle):
- os.remove(self._pickle)
+ except (IOError, TypeError):
+      if os.path.exists(self._json):
+ os.remove(self._json)
def _ReadGit(self):
"""
@@ -707,7 +697,7 @@
self._Set('merge', self.merge)
else:
- fd = open(self._config.file, 'ab')
+ fd = open(self._config.file, 'a')
try:
fd.write('[branch "%s"]\n' % self.name)
if self.remote:
diff --git a/hooks/commit-msg b/hooks/commit-msg
index 5ca2b11..d8f009b 100755
--- a/hooks/commit-msg
+++ b/hooks/commit-msg
@@ -1,5 +1,4 @@
#!/bin/sh
-# From Gerrit Code Review 2.6
#
# Part of Gerrit Code Review (http://code.google.com/p/gerrit/)
#
@@ -27,7 +26,7 @@
#
add_ChangeId() {
clean_message=`sed -e '
- /^diff --git a\/.*/{
+ /^diff --git .*/{
s///
q
}
@@ -39,6 +38,11 @@
return
fi
+ if test "false" = "`git config --bool --get gerrit.createChangeId`"
+ then
+ return
+ fi
+
# Does Change-Id: already exist? if so, exit (no change).
if grep -i '^Change-Id:' "$MSG" >/dev/null
then
@@ -77,7 +81,7 @@
# Skip the line starting with the diff command and everything after it,
# up to the end of the file, assuming it is only patch data.
# If more than one line before the diff was empty, strip all but one.
- /^diff --git a/ {
+ /^diff --git / {
blankLines = 0
while (getline) { }
next
diff --git a/manifest_xml.py b/manifest_xml.py
index e2f58e6..3517c15 100644
--- a/manifest_xml.py
+++ b/manifest_xml.py
@@ -63,12 +63,14 @@
alias=None,
fetch=None,
manifestUrl=None,
- review=None):
+ review=None,
+ revision=None):
self.name = name
self.fetchUrl = fetch
self.manifestUrl = manifestUrl
self.remoteAlias = alias
self.reviewUrl = review
+ self.revision = revision
self.resolvedFetchUrl = self._resolveFetchUrl()
def __eq__(self, other):
@@ -159,6 +161,8 @@
e.setAttribute('alias', r.remoteAlias)
if r.reviewUrl is not None:
e.setAttribute('review', r.reviewUrl)
+ if r.revision is not None:
+ e.setAttribute('revision', r.revision)
def Save(self, fd, peg_rev=False, peg_rev_upstream=True):
"""Write the current manifest out to the given file descriptor.
@@ -240,7 +244,8 @@
if d.remote:
remoteName = d.remote.remoteAlias or d.remote.name
if not d.remote or p.remote.name != remoteName:
- e.setAttribute('remote', p.remote.name)
+ remoteName = p.remote.name
+ e.setAttribute('remote', remoteName)
if peg_rev:
if self.IsMirror:
value = p.bare_git.rev_parse(p.revisionExpr + '^0')
@@ -252,8 +257,12 @@
# isn't our value, and the if the default doesn't already have that
# covered.
e.setAttribute('upstream', p.revisionExpr)
- elif not d.revisionExpr or p.revisionExpr != d.revisionExpr:
- e.setAttribute('revision', p.revisionExpr)
+ else:
+ revision = self.remotes[remoteName].revision or d.revisionExpr
+ if not revision or revision != p.revisionExpr:
+ e.setAttribute('revision', p.revisionExpr)
+ if p.upstream and p.upstream != p.revisionExpr:
+ e.setAttribute('upstream', p.upstream)
for c in p.copyfiles:
ce = doc.createElement('copyfile')
@@ -310,7 +319,7 @@
@property
def projects(self):
self._Load()
- return self._paths.values()
+ return list(self._paths.values())
@property
def remotes(self):
@@ -592,8 +601,11 @@
review = node.getAttribute('review')
if review == '':
review = None
+ revision = node.getAttribute('revision')
+ if revision == '':
+ revision = None
manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
- return _XmlRemote(name, alias, fetch, manifestUrl, review)
+ return _XmlRemote(name, alias, fetch, manifestUrl, review, revision)
def _ParseDefault(self, node):
"""
@@ -686,7 +698,7 @@
raise ManifestParseError("no remote for project %s within %s" %
(name, self.manifestFile))
- revisionExpr = node.getAttribute('revision')
+ revisionExpr = node.getAttribute('revision') or remote.revision
if not revisionExpr:
revisionExpr = self._default.revisionExpr
if not revisionExpr:
@@ -872,10 +884,8 @@
fromProjects = self.paths
toProjects = manifest.paths
- fromKeys = fromProjects.keys()
- fromKeys.sort()
- toKeys = toProjects.keys()
- toKeys.sort()
+ fromKeys = sorted(fromProjects.keys())
+ toKeys = sorted(toProjects.keys())
diff = {'added': [], 'removed': [], 'changed': [], 'unreachable': []}
diff --git a/project.py b/project.py
index 127176e..95403cc 100644
--- a/project.py
+++ b/project.py
@@ -46,7 +46,7 @@
def _lwrite(path, content):
lock = '%s.lock' % path
- fd = open(lock, 'wb')
+ fd = open(lock, 'w')
try:
fd.write(content)
finally:
@@ -259,10 +259,12 @@
def __init__(self,
name,
url = None,
- review = None):
+ review = None,
+ revision = None):
self.name = name
self.url = url
self.review = review
+ self.revision = revision
class RepoHook(object):
"""A RepoHook contains information about a script to run as a hook.
@@ -438,7 +440,8 @@
# and convert to a HookError w/ just the failing traceback.
context = {}
try:
- execfile(self._script_fullpath, context)
+ exec(compile(open(self._script_fullpath).read(),
+ self._script_fullpath, 'exec'), context)
except Exception:
raise HookError('%s\nFailed to import %s hook; see traceback above.' % (
traceback.format_exc(), self._hook_type))
@@ -1657,7 +1660,8 @@
remote = RemoteSpec(self.remote.name,
url = url,
- review = self.remote.review)
+ review = self.remote.review,
+ revision = self.remote.revision)
subproject = Project(manifest = self.manifest,
name = name,
remote = remote,
@@ -1702,6 +1706,7 @@
if command.Wait() != 0:
raise GitError('git archive %s: %s' % (self.name, command.stderr))
+
def _RemoteFetch(self, name=None,
current_branch_only=False,
initial=False,
@@ -1804,19 +1809,30 @@
else:
cmd.append('--tags')
+ spec = []
if not current_branch_only:
# Fetch whole repo
- cmd.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
+ spec.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
elif tag_name is not None:
- cmd.append('tag')
- cmd.append(tag_name)
+ spec.append('tag')
+ spec.append(tag_name)
else:
branch = self.revisionExpr
if is_sha1:
branch = self.upstream
if branch.startswith(R_HEADS):
branch = branch[len(R_HEADS):]
- cmd.append(str((u'+refs/heads/%s:' % branch) + remote.ToLocal('refs/heads/%s' % branch)))
+ spec.append(str((u'+refs/heads/%s:' % branch) + remote.ToLocal('refs/heads/%s' % branch)))
+ cmd.extend(spec)
+
+ shallowfetch = self.config.GetString('repo.shallowfetch')
+ if shallowfetch and shallowfetch != ' '.join(spec):
+ GitCommand(self, ['fetch', '--unshallow', name] + shallowfetch.split(),
+ bare=True, ssh_proxy=ssh_proxy).Wait()
+ if depth:
+ self.config.SetString('repo.shallowfetch', ' '.join(spec))
+ else:
+ self.config.SetString('repo.shallowfetch', None)
ok = False
for _i in range(2):
@@ -2201,6 +2217,14 @@
if name in symlink_dirs and not os.path.lexists(src):
os.makedirs(src)
+ # If the source file doesn't exist, ensure the destination
+ # file doesn't either.
+ if name in symlink_files and not os.path.lexists(src):
+ try:
+ os.remove(dst)
+ except OSError:
+ pass
+
if name in to_symlink:
os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
elif copy_all and not os.path.islink(dst):
@@ -2321,8 +2345,8 @@
out = iter(out[:-1].split('\0')) # pylint: disable=W1401
while out:
try:
- info = out.next()
- path = out.next()
+ info = next(out)
+ path = next(out)
except StopIteration:
break
@@ -2348,7 +2372,7 @@
info = _Info(path, *info)
if info.status in ('R', 'C'):
info.src_path = info.path
- info.path = out.next()
+ info.path = next(out)
r[info.path] = info
return r
finally:
diff --git a/repo b/repo
index b8c414b..3fd0166 100755
--- a/repo
+++ b/repo
@@ -139,10 +139,6 @@
# Python version check
ver = sys.version_info
-if ver[0] == 3:
- _print('warning: Python 3 support is currently experimental. YMMV.\n'
- 'Please use Python 2.6 - 2.7 instead.',
- file=sys.stderr)
if (ver[0], ver[1]) < MIN_PYTHON_VERSION:
_print('error: Python version %s unsupported.\n'
'Please use Python 2.6 - 2.7 instead.'
@@ -768,4 +764,8 @@
if __name__ == '__main__':
+ if ver[0] == 3:
+ _print('warning: Python 3 support is currently experimental. YMMV.\n'
+ 'Please use Python 2.6 - 2.7 instead.',
+ file=sys.stderr)
main(sys.argv[1:])
diff --git a/subcmds/forall.py b/subcmds/forall.py
index 03ebcb2..7771ec1 100644
--- a/subcmds/forall.py
+++ b/subcmds/forall.py
@@ -14,7 +14,9 @@
# limitations under the License.
from __future__ import print_function
+import errno
import fcntl
+import multiprocessing
import re
import os
import select
@@ -31,6 +33,7 @@
'log',
]
+
class ForallColoring(Coloring):
def __init__(self, config):
Coloring.__init__(self, config, 'forall')
@@ -132,9 +135,31 @@
g.add_option('-v', '--verbose',
dest='verbose', action='store_true',
help='Show command error messages')
+ g.add_option('-j', '--jobs',
+ dest='jobs', action='store', type='int', default=1,
+ help='number of commands to execute simultaneously')
def WantPager(self, opt):
- return opt.project_header
+ return opt.project_header and opt.jobs == 1
+
+ def _SerializeProject(self, project):
+ """ Serialize a project._GitGetByExec instance.
+
+ project._GitGetByExec is not pickle-able. Instead of trying to pass it
+ around between processes, make a dict ourselves containing only the
+ attributes that we need.
+
+ """
+ return {
+ 'name': project.name,
+ 'relpath': project.relpath,
+ 'remote_name': project.remote.name,
+ 'lrev': project.GetRevisionId(),
+ 'rrev': project.revisionExpr,
+ 'annotations': dict((a.name, a.value) for a in project.annotations),
+ 'gitdir': project.gitdir,
+ 'worktree': project.worktree,
+ }
def Execute(self, opt, args):
if not opt.command:
@@ -173,11 +198,7 @@
# pylint: enable=W0631
mirror = self.manifest.IsMirror
- out = ForallColoring(self.manifest.manifestProject.config)
- out.redirect(sys.stdout)
-
rc = 0
- first = True
if not opt.regex:
projects = self.GetProjects(args)
@@ -186,113 +207,156 @@
os.environ['REPO_COUNT'] = str(len(projects))
- for (cnt, project) in enumerate(projects):
- env = os.environ.copy()
- def setenv(name, val):
- if val is None:
- val = ''
- env[name] = val.encode()
-
- setenv('REPO_PROJECT', project.name)
- setenv('REPO_PATH', project.relpath)
- setenv('REPO_REMOTE', project.remote.name)
- setenv('REPO_LREV', project.GetRevisionId())
- setenv('REPO_RREV', project.revisionExpr)
- setenv('REPO_I', str(cnt + 1))
- for a in project.annotations:
- setenv("REPO__%s" % (a.name), a.value)
-
- if mirror:
- setenv('GIT_DIR', project.gitdir)
- cwd = project.gitdir
- else:
- cwd = project.worktree
-
- if not os.path.exists(cwd):
- if (opt.project_header and opt.verbose) \
- or not opt.project_header:
- print('skipping %s/' % project.relpath, file=sys.stderr)
- continue
-
- if opt.project_header:
- stdin = subprocess.PIPE
- stdout = subprocess.PIPE
- stderr = subprocess.PIPE
- else:
- stdin = None
- stdout = None
- stderr = None
-
- p = subprocess.Popen(cmd,
- cwd = cwd,
- shell = shell,
- env = env,
- stdin = stdin,
- stdout = stdout,
- stderr = stderr)
-
- if opt.project_header:
- class sfd(object):
- def __init__(self, fd, dest):
- self.fd = fd
- self.dest = dest
- def fileno(self):
- return self.fd.fileno()
-
- empty = True
- errbuf = ''
-
- p.stdin.close()
- s_in = [sfd(p.stdout, sys.stdout),
- sfd(p.stderr, sys.stderr)]
-
- for s in s_in:
- flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
- fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
-
- while s_in:
- in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
- for s in in_ready:
- buf = s.fd.read(4096)
- if not buf:
- s.fd.close()
- s_in.remove(s)
- continue
-
- if not opt.verbose:
- if s.fd != p.stdout:
- errbuf += buf
- continue
-
- if empty:
- if first:
- first = False
- else:
- out.nl()
-
- if mirror:
- project_header_path = project.name
- else:
- project_header_path = project.relpath
- out.project('project %s/', project_header_path)
- out.nl()
- out.flush()
- if errbuf:
- sys.stderr.write(errbuf)
- sys.stderr.flush()
- errbuf = ''
- empty = False
-
- s.dest.write(buf)
- s.dest.flush()
-
- r = p.wait()
- if r != 0:
- if r != rc:
- rc = r
- if opt.abort_on_errors:
- print("error: %s: Aborting due to previous error" % project.relpath,
- file=sys.stderr)
- sys.exit(r)
+ pool = multiprocessing.Pool(opt.jobs)
+ try:
+ config = self.manifest.manifestProject.config
+ results_it = pool.imap(
+ DoWorkWrapper,
+ [[mirror, opt, cmd, shell, cnt, config, self._SerializeProject(p)]
+ for cnt, p in enumerate(projects)]
+ )
+ pool.close()
+ for r in results_it:
+ rc = rc or r
+ if r != 0 and opt.abort_on_errors:
+ raise Exception('Aborting due to previous error')
+ except (KeyboardInterrupt, WorkerKeyboardInterrupt):
+ # Catch KeyboardInterrupt raised inside and outside of workers
+ print('Interrupted - terminating the pool')
+ pool.terminate()
+ rc = rc or errno.EINTR
+ except Exception as e:
+ # Catch any other exceptions raised
+ print('Got an error, terminating the pool: %r' % e,
+ file=sys.stderr)
+ pool.terminate()
+ rc = rc or getattr(e, 'errno', 1)
+ finally:
+ pool.join()
if rc != 0:
sys.exit(rc)
+
+
+class WorkerKeyboardInterrupt(Exception):
+ """ Keyboard interrupt exception for worker processes. """
+ pass
+
+
+def DoWorkWrapper(args):
+ """ A wrapper around the DoWork() method.
+
+ Catch the KeyboardInterrupt exceptions here and re-raise them as a different,
+ ``Exception``-based exception to stop it flooding the console with stacktraces
+ and making the parent hang indefinitely.
+
+ """
+ project = args.pop()
+ try:
+ return DoWork(project, *args)
+ except KeyboardInterrupt:
+ print('%s: Worker interrupted' % project['name'])
+ raise WorkerKeyboardInterrupt()
+
+
+def DoWork(project, mirror, opt, cmd, shell, cnt, config):
+ env = os.environ.copy()
+ def setenv(name, val):
+ if val is None:
+ val = ''
+ env[name] = val.encode()
+
+ setenv('REPO_PROJECT', project['name'])
+ setenv('REPO_PATH', project['relpath'])
+ setenv('REPO_REMOTE', project['remote_name'])
+ setenv('REPO_LREV', project['lrev'])
+ setenv('REPO_RREV', project['rrev'])
+ setenv('REPO_I', str(cnt + 1))
+ for name in project['annotations']:
+ setenv("REPO__%s" % (name), project['annotations'][name])
+
+ if mirror:
+ setenv('GIT_DIR', project['gitdir'])
+ cwd = project['gitdir']
+ else:
+ cwd = project['worktree']
+
+ if not os.path.exists(cwd):
+ if (opt.project_header and opt.verbose) \
+ or not opt.project_header:
+ print('skipping %s/' % project['relpath'], file=sys.stderr)
+ return
+
+ if opt.project_header:
+ stdin = subprocess.PIPE
+ stdout = subprocess.PIPE
+ stderr = subprocess.PIPE
+ else:
+ stdin = None
+ stdout = None
+ stderr = None
+
+ p = subprocess.Popen(cmd,
+ cwd=cwd,
+ shell=shell,
+ env=env,
+ stdin=stdin,
+ stdout=stdout,
+ stderr=stderr)
+
+ if opt.project_header:
+ out = ForallColoring(config)
+ out.redirect(sys.stdout)
+ class sfd(object):
+ def __init__(self, fd, dest):
+ self.fd = fd
+ self.dest = dest
+ def fileno(self):
+ return self.fd.fileno()
+
+ empty = True
+ errbuf = ''
+
+ p.stdin.close()
+ s_in = [sfd(p.stdout, sys.stdout),
+ sfd(p.stderr, sys.stderr)]
+
+ for s in s_in:
+ flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
+ fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
+
+ while s_in:
+ in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
+ for s in in_ready:
+ buf = s.fd.read(4096)
+ if not buf:
+ s.fd.close()
+ s_in.remove(s)
+ continue
+
+ if not opt.verbose:
+ if s.fd != p.stdout:
+ errbuf += buf
+ continue
+
+ if empty and out:
+ if not cnt == 0:
+ out.nl()
+
+ if mirror:
+ project_header_path = project['name']
+ else:
+ project_header_path = project['relpath']
+ out.project('project %s/', project_header_path)
+ out.nl()
+ out.flush()
+ if errbuf:
+ sys.stderr.write(errbuf)
+ sys.stderr.flush()
+ errbuf = ''
+ empty = False
+
+ s.dest.write(buf)
+ s.dest.flush()
+
+ r = p.wait()
+ return r
diff --git a/subcmds/status.py b/subcmds/status.py
index 41c4429..b42675e 100644
--- a/subcmds/status.py
+++ b/subcmds/status.py
@@ -113,7 +113,7 @@
try:
state = project.PrintWorkTreeStatus(output)
if state == 'CLEAN':
- clean_counter.next()
+ next(clean_counter)
finally:
sem.release()
@@ -141,7 +141,7 @@
for project in all_projects:
state = project.PrintWorkTreeStatus()
if state == 'CLEAN':
- counter.next()
+ next(counter)
else:
sem = _threading.Semaphore(opt.jobs)
threads_and_output = []
@@ -164,7 +164,7 @@
t.join()
output.dump(sys.stdout)
output.close()
- if len(all_projects) == counter.next():
+ if len(all_projects) == next(counter):
print('nothing to commit (working directory clean)')
if opt.orphans:
diff --git a/subcmds/sync.py b/subcmds/sync.py
index a0a6896..6f77310 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -14,10 +14,10 @@
# limitations under the License.
from __future__ import print_function
+import json
import netrc
from optparse import SUPPRESS_HELP
import os
-import pickle
import re
import shutil
import socket
@@ -760,7 +760,7 @@
_ALPHA = 0.5
def __init__(self, manifest):
- self._path = os.path.join(manifest.repodir, '.repopickle_fetchtimes')
+ self._path = os.path.join(manifest.repodir, '.repo_fetchtimes.json')
self._times = None
self._seen = set()
@@ -779,22 +779,17 @@
def _Load(self):
if self._times is None:
try:
- f = open(self._path, 'rb')
- except IOError:
- self._times = {}
- return self._times
- try:
+ f = open(self._path)
try:
- self._times = pickle.load(f)
- except IOError:
- try:
- os.remove(self._path)
- except OSError:
- pass
- self._times = {}
- finally:
- f.close()
- return self._times
+ self._times = json.load(f)
+ finally:
+ f.close()
+ except (IOError, ValueError):
+ try:
+ os.remove(self._path)
+ except OSError:
+ pass
+ self._times = {}
def Save(self):
if self._times is None:
@@ -808,13 +803,13 @@
del self._times[name]
try:
- f = open(self._path, 'wb')
+ f = open(self._path, 'w')
try:
- pickle.dump(self._times, f)
- except (IOError, OSError, pickle.PickleError):
- try:
- os.remove(self._path)
- except OSError:
- pass
- finally:
- f.close()
+ json.dump(self._times, f, indent=2)
+ finally:
+ f.close()
+ except (IOError, TypeError):
+ try:
+ os.remove(self._path)
+ except OSError:
+ pass
diff --git a/subcmds/upload.py b/subcmds/upload.py
index e2fa261..0ee36df 100644
--- a/subcmds/upload.py
+++ b/subcmds/upload.py
@@ -25,10 +25,12 @@
from project import RepoHook
from pyversion import is_python3
+# pylint:disable=W0622
if not is_python3():
- # pylint:disable=W0622
input = raw_input
- # pylint:enable=W0622
+else:
+ unicode = str
+# pylint:enable=W0622
UNUSUAL_COMMIT_THRESHOLD = 5