Merge "Update commit-msg hook to version from Gerrit 2.14.6"
diff --git a/.mailmap b/.mailmap
index eb64bd2..905139d 100644
--- a/.mailmap
+++ b/.mailmap
@@ -1,4 +1,5 @@
 Anthony Newnam <anthony.newnam@garmin.com>    Anthony <anthony@bnovc.com>
+He Ping <tdihp@hotmail.com>                   heping <tdihp@hotmail.com>
 Hu Xiuyun <xiuyun.hu@hisilicon.com>           Hu xiuyun <xiuyun.hu@hisilicon.com>
 Hu Xiuyun <xiuyun.hu@hisilicon.com>           Hu Xiuyun <clouds08@qq.com>
 Jelly Chen <chenguodong@huawei.com>           chenguodong <chenguodong@huawei.com>
diff --git a/README.md b/README.md
index e35f8e9..250d08e 100644
--- a/README.md
+++ b/README.md
@@ -11,4 +11,6 @@
 * Source: https://code.google.com/p/git-repo/
 * Overview: https://source.android.com/source/developing.html
 * Docs: https://source.android.com/source/using-repo.html
+* [repo Manifest Format](./docs/manifest-format.txt)
+* [repo Hooks](./docs/repo-hooks.md)
 * [Submitting patches](./SUBMITTING_PATCHES.md)
diff --git a/command.py b/command.py
index 2ff0a34..971f968 100644
--- a/command.py
+++ b/command.py
@@ -19,6 +19,7 @@
 import re
 import sys
 
+from event_log import EventLog
 from error import NoSuchProjectError
 from error import InvalidProjectGroupsError
 
@@ -28,6 +29,7 @@
   """
 
   common = False
+  event_log = EventLog()
   manifest = None
   _optparse = None
 
diff --git a/docs/manifest-format.txt b/docs/manifest-format.txt
index 2a07f19..7778409 100644
--- a/docs/manifest-format.txt
+++ b/docs/manifest-format.txt
@@ -27,11 +27,12 @@
                         remove-project*,
                         project*,
                         extend-project*,
-                        repo-hooks?)>
+                        repo-hooks?,
+                        include*)>
 
     <!ELEMENT notice (#PCDATA)>
 
-    <!ELEMENT remote (EMPTY)>
+    <!ELEMENT remote EMPTY>
     <!ATTLIST remote name         ID    #REQUIRED>
     <!ATTLIST remote alias        CDATA #IMPLIED>
     <!ATTLIST remote fetch        CDATA #REQUIRED>
@@ -39,7 +40,7 @@
     <!ATTLIST remote review       CDATA #IMPLIED>
     <!ATTLIST remote revision     CDATA #IMPLIED>
 
-    <!ELEMENT default (EMPTY)>
+    <!ELEMENT default EMPTY>
     <!ATTLIST default remote      IDREF #IMPLIED>
     <!ATTLIST default revision    CDATA #IMPLIED>
     <!ATTLIST default dest-branch CDATA #IMPLIED>
@@ -47,7 +48,7 @@
     <!ATTLIST default sync-c      CDATA #IMPLIED>
     <!ATTLIST default sync-s      CDATA #IMPLIED>
 
-    <!ELEMENT manifest-server (EMPTY)>
+    <!ELEMENT manifest-server EMPTY>
     <!ATTLIST manifest-server url CDATA #REQUIRED>
 
     <!ELEMENT project (annotation*,
@@ -66,32 +67,32 @@
     <!ATTLIST project clone-depth CDATA #IMPLIED>
     <!ATTLIST project force-path CDATA #IMPLIED>
 
-    <!ELEMENT annotation (EMPTY)>
+    <!ELEMENT annotation EMPTY>
     <!ATTLIST annotation name  CDATA #REQUIRED>
     <!ATTLIST annotation value CDATA #REQUIRED>
     <!ATTLIST annotation keep  CDATA "true">
 
-    <!ELEMENT copyfile (EMPTY)>
+    <!ELEMENT copyfile EMPTY>
     <!ATTLIST copyfile src  CDATA #REQUIRED>
     <!ATTLIST copyfile dest CDATA #REQUIRED>
 
-    <!ELEMENT linkfile (EMPTY)>
+    <!ELEMENT linkfile EMPTY>
     <!ATTLIST linkfile src CDATA #REQUIRED>
     <!ATTLIST linkfile dest CDATA #REQUIRED>
 
-    <!ELEMENT extend-project (EMPTY)>
+    <!ELEMENT extend-project EMPTY>
     <!ATTLIST extend-project name CDATA #REQUIRED>
     <!ATTLIST extend-project path CDATA #IMPLIED>
     <!ATTLIST extend-project groups CDATA #IMPLIED>
 
-    <!ELEMENT remove-project (EMPTY)>
+    <!ELEMENT remove-project EMPTY>
     <!ATTLIST remove-project name  CDATA #REQUIRED>
 
-    <!ELEMENT repo-hooks (EMPTY)>
+    <!ELEMENT repo-hooks EMPTY>
     <!ATTLIST repo-hooks in-project CDATA #REQUIRED>
     <!ATTLIST repo-hooks enabled-list CDATA #REQUIRED>
 
-    <!ELEMENT include      (EMPTY)>
+    <!ELEMENT include EMPTY>
     <!ATTLIST include name CDATA #REQUIRED>
   ]>
 
diff --git a/docs/repo-hooks.md b/docs/repo-hooks.md
new file mode 100644
index 0000000..c8eb945
--- /dev/null
+++ b/docs/repo-hooks.md
@@ -0,0 +1,105 @@
+# repo hooks
+
+[TOC]
+
+Repo provides a mechanism to hook specific stages of the runtime with custom
+python modules.  All the hooks live in one git project which is checked out by
+the manifest (specified during `repo init`), and the manifest itself defines
+which hooks are registered.
+
+These are useful to run linters, check formatting, and run quick unittests
+before allowing a step to proceed (e.g. before uploading a commit to Gerrit).
+
+A complete example can be found in the Android project.  It can be easily
+re-used by any repo based project and is not specific to Android.<br>
+https://android.googlesource.com/platform/tools/repohooks
+
+## Approvals
+
+When a hook is processed the first time, the user is prompted for approval.
+We don't want to execute arbitrary code without explicit consent.  For manifests
+fetched via secure protocols (e.g. https://), the user is prompted once.  For
+insecure protocols (e.g. http://), the user is prompted whenever the registered
+repohooks project is updated and a hook is triggered.
+
+## Manifest Settings
+
+For the full syntax, see the [repo manifest format](./manifest-format.txt).
+
+Here's a short example from
+[Android](https://android.googlesource.com/platform/manifest/+/master/default.xml).
+The `<project>` line checks out the repohooks git repo to the local
+`tools/repohooks/` path.  The `<repo-hooks>` line says to look in the project
+with the name `platform/tools/repohooks` for hooks to run during the
+`pre-upload` phase.
+
+```xml
+<project path="tools/repohooks" name="platform/tools/repohooks" />
+<repo-hooks in-project="platform/tools/repohooks" enabled-list="pre-upload" />
+```
+
+## Source Layout
+
+The repohooks git repo should have a python file with the same name as the hook.
+So if you want to support the `pre-upload` hook, you'll need to create a file
+named `pre-upload.py`.  Repo will dynamically load that module when processing
+the hook and then call the `main` function in it.
+
+Hooks should have their `main` accept `**kwargs` for future compatibility.
+
+## Runtime
+
+Hook return values are ignored.
+
+Any uncaught exceptions from the hook will cause the step to fail.  This is
+intended as a fallback safety check though rather than the normal flow.  If
+you want your hook to trigger a failure, it should call `sys.exit()` (after
+displaying relevant diagnostics).
+
+Output (stdout & stderr) is not filtered in any way.  Hooks should generally
+not be too verbose.  A short summary is nice, and some status information when
+long running operations occur, but long/verbose output should be used only if
+the hook ultimately fails.
+
+The hook runs from the top level of the git repo where the operation is started.
+e.g. If you're in the git repo `src/foo/`, that is where the hook runs, even if
+the `repo` command was started from a subdir like `src/foo/bar/`.
+
+Python's `sys.path` is modified so that the top of repohooks directory comes
+first.  This should help simplify the hook logic to easily allow importing of
+local modules.
+
+Repo does not modify the state of the git checkout.  This means that the hooks
+might be running in a dirty git repo with many commits and checked out to the
+latest one.  If the hook wants to operate on specific git commits, it needs to
+manually discover the list of pending commits, extract the diff/commit, and
+then check it directly.  Hooks should not normally modify the active git repo
+(such as checking out a specific commit to run checks) without first prompting
+the user.  Although user interaction is discouraged in the common case, it can
+be useful when deploying automatic fixes.
+
+## Hooks
+
+Here are all the points available for hooking.
+
+### pre-upload
+
+This hook runs when people run `repo upload`.
+
+The `pre-upload.py` file should be defined like:
+
+```py
+def main(project_list, worktree_list=None, **kwargs):
+    """Main function invoked directly by repo.
+
+    We must use the name "main" as that is what repo requires.
+
+    Args:
+      project_list: List of projects to run on.
+      worktree_list: A list of directories.  It should be the same length as
+          project_list, so that each entry in project_list matches with a
+          directory in worktree_list.  If None, we will attempt to calculate
+          the directories automatically.
+      kwargs: Leave this here for forward-compatibility.
+    """
+```
diff --git a/editor.py b/editor.py
index 883a1a8..7980f2b 100644
--- a/editor.py
+++ b/editor.py
@@ -21,6 +21,7 @@
 import tempfile
 
 from error import EditorError
+import platform_utils
 
 class Editor(object):
   """Manages the user's preferred text editor."""
@@ -82,7 +83,12 @@
       os.close(fd)
       fd = None
 
-      if re.compile("^.*[$ \t'].*$").match(editor):
+      if platform_utils.isWindows():
+        # Split on spaces, respecting quoted strings
+        import shlex
+        args = shlex.split(editor)
+        shell = False
+      elif re.compile("^.*[$ \t'].*$").match(editor):
         args = [editor + ' "$@"', 'sh']
         shell = True
       else:
@@ -107,4 +113,4 @@
     finally:
       if fd:
         os.close(fd)
-      os.remove(path)
+      platform_utils.remove(path)
diff --git a/event_log.py b/event_log.py
new file mode 100644
index 0000000..d73511d
--- /dev/null
+++ b/event_log.py
@@ -0,0 +1,177 @@
+#
+# Copyright (C) 2017 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import json
+import multiprocessing
+
+TASK_COMMAND = 'command'
+TASK_SYNC_NETWORK = 'sync-network'
+TASK_SYNC_LOCAL = 'sync-local'
+
+class EventLog(object):
+  """Event log that records events that occurred during a repo invocation.
+
+  Events are written to the log as a consecutive JSON entries, one per line.
+  Each entry contains the following keys:
+  - id: A ('RepoOp', ID) tuple, suitable for storing in a datastore.
+        The ID is only unique for the invocation of the repo command.
+  - name: Name of the object being operated upon.
+  - task_name: The task that was performed.
+  - start: Timestamp of when the operation started.
+  - finish: Timestamp of when the operation finished.
+  - success: Boolean indicating if the operation was successful.
+  - try_count: A counter indicating the try count of this task.
+
+  Optionally:
+  - parent: A ('RepoOp', ID) tuple indicating the parent event for nested
+            events.
+
+  Valid task_names include:
+  - command: The invocation of a subcommand.
+  - sync-network: The network component of a sync command.
+  - sync-local: The local component of a sync command.
+
+  Specific tasks may include additional informational properties.
+  """
+
+  def __init__(self):
+    """Initializes the event log."""
+    self._log = []
+    self._next_id = _EventIdGenerator()
+    self._parent = None
+
+  def Add(self, name, task_name, start, finish=None, success=None,
+          try_count=1, kind='RepoOp'):
+    """Add an event to the log.
+
+    Args:
+      name: Name of the object being operated upon.
+      task_name: A sub-task that was performed for name.
+      start: Timestamp of when the operation started.
+      finish: Timestamp of when the operation finished.
+      success: Boolean indicating if the operation was successful.
+      try_count: A counter indicating the try count of this task.
+      kind: The kind of the object for the unique identifier.
+
+    Returns:
+      A dictionary of the event added to the log.
+    """
+    event = {
+        'id': (kind, next(self._next_id)),
+        'name': name,
+        'task_name': task_name,
+        'start_time': start,
+        'try': try_count,
+    }
+
+    if self._parent:
+      event['parent'] = self._parent['id']
+
+    if success is not None or finish is not None:
+      self.FinishEvent(event, finish, success)
+
+    self._log.append(event)
+    return event
+
+  def AddSync(self, project, task_name, start, finish, success):
+    """Add an event to the log for a sync command.
+
+    Args:
+      project: Project being synced.
+      task_name: A sub-task that was performed for name.
+                 One of (TASK_SYNC_NETWORK, TASK_SYNC_LOCAL)
+      start: Timestamp of when the operation started.
+      finish: Timestamp of when the operation finished.
+      success: Boolean indicating if the operation was successful.
+
+    Returns:
+      A dictionary of the event added to the log.
+    """
+    event = self.Add(project.relpath, task_name, start, finish, success)
+    if event is not None:
+      event['project'] = project.name
+      if project.revisionExpr:
+        event['revision'] = project.revisionExpr
+      if project.remote.url:
+        event['project_url'] = project.remote.url
+      if project.remote.fetchUrl:
+        event['remote_url'] = project.remote.fetchUrl
+      try:
+        event['git_hash'] = project.GetCommitRevisionId()
+      except Exception:
+        pass
+    return event
+
+  def GetStatusString(self, success):
+    """Converts a boolean success to a status string.
+
+    Args:
+      success: Boolean indicating if the operation was successful.
+
+    Returns:
+      status string.
+    """
+    return 'pass' if success else 'fail'
+
+  def FinishEvent(self, event, finish, success):
+    """Finishes an incomplete event.
+
+    Args:
+      event: An event that has been added to the log.
+      finish: Timestamp of when the operation finished.
+      success: Boolean indicating if the operation was successful.
+
+    Returns:
+      A dictionary of the event added to the log.
+    """
+    event['status'] = self.GetStatusString(success)
+    event['finish_time'] = finish
+    return event
+
+  def SetParent(self, event):
+    """Set a parent event for all new entities.
+
+    Args:
+      event: The event to use as a parent.
+    """
+    self._parent = event
+
+  def Write(self, filename):
+    """Writes the log out to a file.
+
+    Args:
+      filename: The file to write the log to.
+    """
+    with open(filename, 'w+') as f:
+      for e in self._log:
+        json.dump(e, f, sort_keys=True)
+        f.write('\n')
+
+
+def _EventIdGenerator():
+  """Returns multi-process safe iterator that generates locally unique id.
+
+  Yields:
+    A unique, to this invocation of the program, integer id.
+  """
+  eid = multiprocessing.Value('i', 1)
+
+  while True:
+    with eid.get_lock():
+      val = eid.value
+      eid.value += 1
+    yield val
diff --git a/git_command.py b/git_command.py
index 9f7d293..b1e9e17 100644
--- a/git_command.py
+++ b/git_command.py
@@ -14,14 +14,14 @@
 # limitations under the License.
 
 from __future__ import print_function
-import fcntl
 import os
-import select
 import sys
 import subprocess
 import tempfile
 from signal import SIGTERM
+
 from error import GitError
+import platform_utils
 from trace import REPO_TRACE, IsTrace, Trace
 from wrapper import Wrapper
 
@@ -78,16 +78,6 @@
 
 _git_version = None
 
-class _sfd(object):
-  """select file descriptor class"""
-  def __init__(self, fd, dest, std_name):
-    assert std_name in ('stdout', 'stderr')
-    self.fd = fd
-    self.dest = dest
-    self.std_name = std_name
-  def fileno(self):
-    return self.fd.fileno()
-
 class _GitCall(object):
   def version(self):
     p = GitCommand(None, ['--version'], capture_stdout=True)
@@ -162,6 +152,7 @@
     if ssh_proxy:
       _setenv(env, 'REPO_SSH_SOCK', ssh_sock())
       _setenv(env, 'GIT_SSH', _ssh_proxy())
+      _setenv(env, 'GIT_SSH_VARIANT', 'ssh')
     if 'http_proxy' in env and 'darwin' == sys.platform:
       s = "'http.proxy=%s'" % (env['http_proxy'],)
       p = env.get('GIT_CONFIG_PARAMETERS')
@@ -253,19 +244,16 @@
 
   def _CaptureOutput(self):
     p = self.process
-    s_in = [_sfd(p.stdout, sys.stdout, 'stdout'),
-            _sfd(p.stderr, sys.stderr, 'stderr')]
+    s_in = platform_utils.FileDescriptorStreams.create()
+    s_in.add(p.stdout, sys.stdout, 'stdout')
+    s_in.add(p.stderr, sys.stderr, 'stderr')
     self.stdout = ''
     self.stderr = ''
 
-    for s in s_in:
-      flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
-      fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
-
-    while s_in:
-      in_ready, _, _ = select.select(s_in, [], [])
+    while not s_in.is_done:
+      in_ready = s_in.select()
       for s in in_ready:
-        buf = s.fd.read(4096)
+        buf = s.read()
         if not buf:
           s_in.remove(s)
           continue
diff --git a/git_config.py b/git_config.py
index e223678..3ba9dbd 100644
--- a/git_config.py
+++ b/git_config.py
@@ -20,6 +20,7 @@
 import json
 import os
 import re
+import ssl
 import subprocess
 import sys
 try:
@@ -41,6 +42,7 @@
 
 from signal import SIGTERM
 from error import GitError, UploadError
+import platform_utils
 from trace import Trace
 if is_python3():
   from http.client import HTTPException
@@ -50,16 +52,24 @@
 from git_command import GitCommand
 from git_command import ssh_sock
 from git_command import terminate_ssh_clients
+from git_refs import R_CHANGES, R_HEADS, R_TAGS
 
-R_HEADS = 'refs/heads/'
-R_TAGS  = 'refs/tags/'
 ID_RE = re.compile(r'^[0-9a-f]{40}$')
 
 REVIEW_CACHE = dict()
 
+def IsChange(rev):
+  return rev.startswith(R_CHANGES)
+
 def IsId(rev):
   return ID_RE.match(rev)
 
+def IsTag(rev):
+  return rev.startswith(R_TAGS)
+
+def IsImmutable(rev):
+  return IsChange(rev) or IsId(rev) or IsTag(rev)
+
 def _key(name):
   parts = name.split('.')
   if len(parts) < 2:
@@ -259,7 +269,7 @@
     try:
       if os.path.getmtime(self._json) \
       <= os.path.getmtime(self.file):
-        os.remove(self._json)
+        platform_utils.remove(self._json)
         return None
     except OSError:
       return None
@@ -271,7 +281,7 @@
       finally:
         fd.close()
     except (IOError, ValueError):
-      os.remove(self._json)
+      platform_utils.remove(self._json)
       return None
 
   def _SaveJson(self, cache):
@@ -283,7 +293,7 @@
         fd.close()
     except (IOError, TypeError):
       if os.path.exists(self._json):
-        os.remove(self._json)
+        platform_utils.remove(self._json)
 
   def _ReadGit(self):
     """
@@ -604,7 +614,7 @@
     connectionUrl = self._InsteadOf()
     return _preconnect(connectionUrl)
 
-  def ReviewUrl(self, userEmail):
+  def ReviewUrl(self, userEmail, validate_certs):
     if self._review_url is None:
       if self.review is None:
         return None
@@ -612,7 +622,7 @@
       u = self.review
       if u.startswith('persistent-'):
         u = u[len('persistent-'):]
-      if u.split(':')[0] not in ('http', 'https', 'sso'):
+      if u.split(':')[0] not in ('http', 'https', 'sso', 'ssh'):
         u = 'http://%s' % u
       if u.endswith('/Gerrit'):
         u = u[:len(u) - len('/Gerrit')]
@@ -628,13 +638,20 @@
         host, port = os.environ['REPO_HOST_PORT_INFO'].split()
         self._review_url = self._SshReviewUrl(userEmail, host, port)
         REVIEW_CACHE[u] = self._review_url
-      elif u.startswith('sso:'):
+      elif u.startswith('sso:') or u.startswith('ssh:'):
         self._review_url = u  # Assume it's right
         REVIEW_CACHE[u] = self._review_url
+      elif 'REPO_IGNORE_SSH_INFO' in os.environ:
+        self._review_url = http_url
+        REVIEW_CACHE[u] = self._review_url
       else:
         try:
           info_url = u + 'ssh_info'
-          info = urllib.request.urlopen(info_url).read()
+          if not validate_certs:
+              context = ssl._create_unverified_context()
+              info = urllib.request.urlopen(info_url, context=context).read()
+          else:
+              info = urllib.request.urlopen(info_url).read()
           if info == 'NOT_AVAILABLE' or '<' in info:
             # If `info` contains '<', we assume the server gave us some sort
             # of HTML response back, like maybe a login page.
diff --git a/git_refs.py b/git_refs.py
index 3c26606..7feaffb 100644
--- a/git_refs.py
+++ b/git_refs.py
@@ -16,11 +16,12 @@
 import os
 from trace import Trace
 
-HEAD    = 'HEAD'
-R_HEADS = 'refs/heads/'
-R_TAGS  = 'refs/tags/'
-R_PUB   = 'refs/published/'
-R_M     = 'refs/remotes/m/'
+HEAD      = 'HEAD'
+R_CHANGES = 'refs/changes/'
+R_HEADS   = 'refs/heads/'
+R_TAGS    = 'refs/tags/'
+R_PUB     = 'refs/published/'
+R_M       = 'refs/remotes/m/'
 
 
 class GitRefs(object):
@@ -138,7 +139,7 @@
 
   def _ReadLoose1(self, path, name):
     try:
-      fd = open(path, 'rb')
+      fd = open(path)
     except IOError:
       return
 
diff --git a/hooks/pre-auto-gc b/hooks/pre-auto-gc
index 4340302..c4107f5 100755
--- a/hooks/pre-auto-gc
+++ b/hooks/pre-auto-gc
@@ -1,9 +1,9 @@
 #!/bin/sh
 #
 # An example hook script to verify if you are on battery, in case you
-# are running Linux or OS X. Called by git-gc --auto with no arguments.
-# The hook should exit with non-zero status after issuing an appropriate
-# message if it wants to stop the auto repacking.
+# are running Windows, Linux or OS X. Called by git-gc --auto with no
+# arguments. The hook should exit with non-zero status after issuing an
+# appropriate message if it wants to stop the auto repacking.
 
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -19,6 +19,16 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 
+if uname -s | grep -q "_NT-"
+then
+	if test -x $SYSTEMROOT/System32/Wbem/wmic
+	then
+		STATUS=$(wmic path win32_battery get batterystatus /format:list | tr -d '\r\n')
+		[ "$STATUS" = "BatteryStatus=2" ] && exit 0 || exit 1
+	fi
+	exit 0
+fi
+
 if test -x /sbin/on_ac_power && /sbin/on_ac_power
 then
 	exit 0
diff --git a/main.py b/main.py
index c5f1e9c..a6538c2 100755
--- a/main.py
+++ b/main.py
@@ -37,6 +37,7 @@
   kerberos = None
 
 from color import SetDefaultColoring
+import event_log
 from trace import SetTrace
 from git_command import git, GitCommand
 from git_config import init_ssh, close_ssh
@@ -54,7 +55,7 @@
 from error import RepoChangedException
 import gitc_utils
 from manifest_xml import GitcManifest, XmlManifest
-from pager import RunPager
+from pager import RunPager, TerminatePager
 from wrapper import WrapperPath, Wrapper
 
 from subcmds import all_commands
@@ -85,6 +86,9 @@
 global_options.add_option('--version',
                           dest='show_version', action='store_true',
                           help='display this version of repo')
+global_options.add_option('--event-log',
+                          dest='event_log', action='store',
+                          help='filename of event log to append timeline to')
 
 class _Repo(object):
   def __init__(self, repodir):
@@ -176,6 +180,8 @@
         RunPager(config)
 
     start = time.time()
+    cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start)
+    cmd.event_log.SetParent(cmd_event)
     try:
       result = cmd.Execute(copts, cargs)
     except (DownloadError, ManifestInvalidRevisionError,
@@ -198,8 +204,13 @@
       else:
         print('error: project group must be enabled for the project in the current directory', file=sys.stderr)
       result = 1
+    except SystemExit as e:
+      if e.code:
+        result = e.code
+      raise
     finally:
-      elapsed = time.time() - start
+      finish = time.time()
+      elapsed = finish - start
       hours, remainder = divmod(elapsed, 3600)
       minutes, seconds = divmod(remainder, 60)
       if gopts.time:
@@ -209,6 +220,12 @@
           print('real\t%dh%dm%.3fs' % (hours, minutes, seconds),
                 file=sys.stderr)
 
+      cmd.event_log.FinishEvent(cmd_event, finish,
+                                result is None or result == 0)
+      if gopts.event_log:
+        cmd.event_log.Write(os.path.abspath(
+                            os.path.expanduser(gopts.event_log)))
+
     return result
 
 
@@ -525,6 +542,7 @@
       print('fatal: %s' % e, file=sys.stderr)
       result = 128
 
+  TerminatePager()
   sys.exit(result)
 
 if __name__ == '__main__':
diff --git a/manifest_xml.py b/manifest_xml.py
index 9c882af..9b5d784 100644
--- a/manifest_xml.py
+++ b/manifest_xml.py
@@ -32,6 +32,7 @@
 import gitc_utils
 from git_config import GitConfig
 from git_refs import R_HEADS, HEAD
+import platform_utils
 from project import RemoteSpec, Project, MetaProject
 from error import ManifestParseError, ManifestInvalidRevisionError
 
@@ -40,8 +41,18 @@
 LOCAL_MANIFESTS_DIR_NAME = 'local_manifests'
 
 # urljoin gets confused if the scheme is not known.
-urllib.parse.uses_relative.extend(['ssh', 'git', 'persistent-https', 'rpc'])
-urllib.parse.uses_netloc.extend(['ssh', 'git', 'persistent-https', 'rpc'])
+urllib.parse.uses_relative.extend([
+    'ssh',
+    'git',
+    'persistent-https',
+    'sso',
+    'rpc'])
+urllib.parse.uses_netloc.extend([
+    'ssh',
+    'git',
+    'persistent-https',
+    'sso',
+    'rpc'])
 
 class _Default(object):
   """Project defaults within the manifest."""
@@ -100,7 +111,8 @@
     return url
 
   def ToRemoteSpec(self, projectName):
-    url = self.resolvedFetchUrl.rstrip('/') + '/' + projectName
+    fetchUrl = self.resolvedFetchUrl.rstrip('/')
+    url = fetchUrl + '/' + projectName
     remoteName = self.name
     if self.remoteAlias:
       remoteName = self.remoteAlias
@@ -108,7 +120,8 @@
                       url=url,
                       pushUrl=self.pushUrl,
                       review=self.reviewUrl,
-                      orig_name=self.name)
+                      orig_name=self.name,
+                      fetchUrl=self.fetchUrl)
 
 class XmlManifest(object):
   """manages the repo configuration file"""
@@ -153,8 +166,8 @@
 
     try:
       if os.path.lexists(self.manifestFile):
-        os.remove(self.manifestFile)
-      os.symlink('manifests/%s' % name, self.manifestFile)
+        platform_utils.remove(self.manifestFile)
+      platform_utils.symlink(os.path.join('manifests', name), self.manifestFile)
     except OSError as e:
       raise ManifestParseError('cannot link manifest %s: %s' % (name, str(e)))
 
@@ -383,6 +396,10 @@
   def IsArchive(self):
     return self.manifestProject.config.GetBoolean('repo.archive')
 
+  @property
+  def HasSubmodules(self):
+    return self.manifestProject.config.GetBoolean('repo.submodules')
+
   def _Unload(self):
     self._loaded = False
     self._projects = {}
diff --git a/pager.py b/pager.py
index c621141..0521c0c 100755
--- a/pager.py
+++ b/pager.py
@@ -16,19 +16,53 @@
 from __future__ import print_function
 import os
 import select
+import subprocess
 import sys
 
+import platform_utils
+
 active = False
+pager_process = None
+old_stdout = None
+old_stderr = None
 
 def RunPager(globalConfig):
-  global active
-
   if not os.isatty(0) or not os.isatty(1):
     return
   pager = _SelectPager(globalConfig)
   if pager == '' or pager == 'cat':
     return
 
+  if platform_utils.isWindows():
+    _PipePager(pager)
+  else:
+    _ForkPager(pager)
+
+def TerminatePager():
+  global pager_process, old_stdout, old_stderr
+  if pager_process:
+    sys.stdout.flush()
+    sys.stderr.flush()
+    pager_process.stdin.close()
+    pager_process.wait()
+    pager_process = None
+    # Restore initial stdout/err in case there is more output in this process
+    # after shutting down the pager process
+    sys.stdout = old_stdout
+    sys.stderr = old_stderr
+
+def _PipePager(pager):
+  global pager_process, old_stdout, old_stderr
+  assert pager_process is None, "Only one active pager process at a time"
+  # Create pager process, piping stdout/err into its stdin
+  pager_process = subprocess.Popen([pager], stdin=subprocess.PIPE, stdout=sys.stdout, stderr=sys.stderr)
+  old_stdout = sys.stdout
+  old_stderr = sys.stderr
+  sys.stdout = pager_process.stdin
+  sys.stderr = pager_process.stdin
+
+def _ForkPager(pager):
+  global active
   # This process turns into the pager; a child it forks will
   # do the real processing and output back to the pager. This
   # is necessary to keep the pager in control of the tty.
diff --git a/platform_utils.py b/platform_utils.py
new file mode 100644
index 0000000..33cb2ec
--- /dev/null
+++ b/platform_utils.py
@@ -0,0 +1,315 @@
+#
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import errno
+import os
+import platform
+import select
+import shutil
+import stat
+
+from Queue import Queue
+from threading import Thread
+
+
+def isWindows():
+  """ Returns True when running with the native port of Python for Windows,
+  False when running on any other platform (including the Cygwin port of
+  Python).
+  """
+  # Note: The cygwin port of Python returns "CYGWIN_NT_xxx", so it is
+  # deliberately treated as non-Windows here.
+  return platform.system() == "Windows"
+
+
+class FileDescriptorStreams(object):
+  """ Platform agnostic abstraction enabling non-blocking I/O over a
+  collection of file descriptors. This abstraction is required because
+  fctnl(os.O_NONBLOCK) is not supported on Windows.
+  """
+  @classmethod
+  def create(cls):
+    """ Factory method: instantiates the concrete class according to the
+    current platform.
+    """
+    if isWindows():
+      return _FileDescriptorStreamsThreads()
+    else:
+      return _FileDescriptorStreamsNonBlocking()
+
+  def __init__(self):
+    self.streams = []
+
+  def add(self, fd, dest, std_name):
+    """ Wraps an existing file descriptor as a stream.
+    """
+    self.streams.append(self._create_stream(fd, dest, std_name))
+
+  def remove(self, stream):
+    """ Removes a stream, when done with it.
+    """
+    self.streams.remove(stream)
+
+  @property
+  def is_done(self):
+    """ Returns True when all streams have been processed.
+    """
+    return len(self.streams) == 0
+
+  def select(self):
+    """ Returns the set of streams that have data available to read.
+    The returned streams each expose a read() and a close() method.
+    When done with a stream, call the remove(stream) method.
+    """
+    raise NotImplementedError
+
+  def _create_stream(self, fd, dest, std_name):
+    """ Creates a new stream wrapping an existing file descriptor.
+    Must be overridden by subclasses.
+    """
+    # Fix: original signature was missing 'self', so a subclass that failed
+    # to override it would die with a TypeError instead of NotImplementedError.
+    raise NotImplementedError
+
+
+class _FileDescriptorStreamsNonBlocking(FileDescriptorStreams):
+  """ Implementation of FileDescriptorStreams for platforms that support
+  non blocking I/O.
+  """
+  class Stream(object):
+    """ Encapsulates a file descriptor """
+    def __init__(self, fd, dest, std_name):
+      self.fd = fd
+      self.dest = dest
+      self.std_name = std_name
+      self.set_non_blocking()
+
+    def set_non_blocking(self):
+      # fcntl only exists on Unix; this class is never instantiated on
+      # Windows (see FileDescriptorStreams.create), so import it locally.
+      import fcntl
+      flags = fcntl.fcntl(self.fd, fcntl.F_GETFL)
+      fcntl.fcntl(self.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
+
+    def fileno(self):
+      # Exposing fileno() makes Stream objects usable with select.select().
+      return self.fd.fileno()
+
+    def read(self):
+      return self.fd.read(4096)
+
+    def close(self):
+      self.fd.close()
+
+  def _create_stream(self, fd, dest, std_name):
+    return self.Stream(fd, dest, std_name)
+
+  def select(self):
+    ready_streams, _, _ = select.select(self.streams, [], [])
+    return ready_streams
+
+
+class _FileDescriptorStreamsThreads(FileDescriptorStreams):
+  """ Implementation of FileDescriptorStreams for platforms that don't support
+  non blocking I/O. This implementation requires creating threads issuing
+  blocking read operations on file descriptors.
+  """
+  def __init__(self):
+    super(_FileDescriptorStreamsThreads, self).__init__()
+    # The queue is shared accross all threads so we can simulate the
+    # behavior of the select() function
+    self.queue = Queue(10)  # Limit incoming data from streams
+
+  def _create_stream(self, fd, dest, std_name):
+    return self.Stream(fd, dest, std_name, self.queue)
+
+  def select(self):
+    # Return only one stream at a time, as it is the most straighforward
+    # thing to do and it is compatible with the select() function.
+    item = self.queue.get()
+    stream = item.stream
+    stream.data = item.data
+    return [stream]
+
+  class QueueItem(object):
+    """ Item put in the shared queue """
+    def __init__(self, stream, data):
+      self.stream = stream
+      self.data = data
+
+  class Stream(object):
+    """ Encapsulates a file descriptor """
+    def __init__(self, fd, dest, std_name, queue):
+      self.fd = fd
+      self.dest = dest
+      self.std_name = std_name
+      self.queue = queue
+      self.data = None
+      # Daemon thread so a blocked read does not prevent process exit.
+      self.thread = Thread(target=self.read_to_queue)
+      self.thread.daemon = True
+      self.thread.start()
+
+    def close(self):
+      self.fd.close()
+
+    def read(self):
+      # Returns the line most recently delivered via select(); None marks EOF.
+      data = self.data
+      self.data = None
+      return data
+
+    def read_to_queue(self):
+      """ The thread function: reads everything from the file descriptor into
+      the shared queue and terminates when reaching EOF.
+      """
+      for line in iter(self.fd.readline, b''):
+        self.queue.put(_FileDescriptorStreamsThreads.QueueItem(self, line))
+      self.fd.close()
+      # A QueueItem with data == None signals end-of-stream to select()/read().
+      self.queue.put(_FileDescriptorStreamsThreads.QueueItem(self, None))
+
+
+def symlink(source, link_name):
+  """Creates a symbolic link pointing to source named link_name.
+  Note: On Windows, source must exist on disk, as the implementation needs
+  to know whether to create a "File" or a "Directory" symbolic link.
+  """
+  if isWindows():
+    import platform_utils_win32
+    source = _validate_winpath(source)
+    link_name = _validate_winpath(link_name)
+    # Resolve |source| relative to the link's directory so we can test what
+    # it points at: Windows distinguishes file vs. directory symlinks.
+    target = os.path.join(os.path.dirname(link_name), source)
+    if os.path.isdir(target):
+      platform_utils_win32.create_dirsymlink(source, link_name)
+    else:
+      platform_utils_win32.create_filesymlink(source, link_name)
+  else:
+    return os.symlink(source, link_name)
+
+
+def _validate_winpath(path):
+  """Returns the normalized path if it is valid on Windows; raises ValueError
+  for ambiguous paths such as "x:foo" or "\\foo" (see _winpath_is_valid)."""
+  path = os.path.normpath(path)
+  if _winpath_is_valid(path):
+    return path
+  # Fix: original passed a %s-style string to str.format(), so the offending
+  # path was never substituted into the error message.
+  raise ValueError("Path \"%s\" must be a relative path or an absolute "
+                   "path starting with a drive letter" % path)
+
+
+def _winpath_is_valid(path):
+  """Windows only: returns True if path is relative (e.g. ".\\foo") or is
+  absolute including a drive letter (e.g. "c:\\foo"). Returns False if path
+  is ambiguous (e.g. "x:foo" or "\\foo").
+  """
+  assert isWindows()
+  path = os.path.normpath(path)
+  # splitdrive() separates the "c:" drive prefix from the rest; the branches
+  # below reject drive-relative ("x:foo") and rooted-but-driveless ("\\foo").
+  drive, tail = os.path.splitdrive(path)
+  if tail:
+    if not drive:
+      return tail[0] != os.sep  # "\\foo" is invalid
+    else:
+      return tail[0] == os.sep  # "x:foo" is invalid
+  else:
+    return not drive  # "x:" is invalid
+
+
+def rmtree(path):
+  """shutil.rmtree replacement; on Windows also removes read-only files."""
+  if isWindows():
+    shutil.rmtree(path, onerror=handle_rmtree_error)
+  else:
+    shutil.rmtree(path)
+
+
+def handle_rmtree_error(function, path, excinfo):
+  """onerror callback for shutil.rmtree: clears the read-only bit and retries
+  the failed operation."""
+  # Allow deleting read-only files
+  os.chmod(path, stat.S_IWRITE)
+  function(path)
+
+
+def rename(src, dst):
+  """os.rename replacement that overwrites an existing destination on
+  Windows, matching POSIX rename semantics."""
+  if isWindows():
+    # On Windows, rename fails if destination exists, see
+    # https://docs.python.org/2/library/os.html#os.rename
+    try:
+      os.rename(src, dst)
+    except OSError as e:
+      if e.errno == errno.EEXIST:
+        # Delete the destination and retry; note this remove+rename pair
+        # is not atomic.
+        os.remove(dst)
+        os.rename(src, dst)
+      else:
+        raise
+  else:
+    os.rename(src, dst)
+
+
+def remove(path):
+  """Remove (delete) the file path. This is a replacement for os.remove, but
+  allows deleting read-only files on Windows.
+  """
+  if isWindows():
+    try:
+      os.remove(path)
+    except OSError as e:
+      if e.errno == errno.EACCES:
+        # Read-only file: make it writable, then retry the delete.
+        os.chmod(path, stat.S_IWRITE)
+        os.remove(path)
+      else:
+        raise
+  else:
+    os.remove(path)
+
+
+def islink(path):
+  """Test whether a path is a symbolic link.
+
+  Availability: Windows, Unix.
+  """
+  if isWindows():
+    # Imported lazily: platform_utils_win32 loads kernel32 via ctypes and
+    # only works on Windows.
+    import platform_utils_win32
+    return platform_utils_win32.islink(path)
+  else:
+    return os.path.islink(path)
+
+
+def readlink(path):
+  """Return a string representing the path to which the symbolic link
+  points. The result may be either an absolute or relative pathname;
+  if it is relative, it may be converted to an absolute pathname using
+  os.path.join(os.path.dirname(path), result).
+
+  Availability: Windows, Unix.
+  """
+  if isWindows():
+    # Imported lazily: platform_utils_win32 loads kernel32 via ctypes and
+    # only works on Windows.
+    import platform_utils_win32
+    return platform_utils_win32.readlink(path)
+  else:
+    return os.readlink(path)
+
+
+def realpath(path):
+  """Return the canonical path of the specified filename, eliminating
+  any symbolic links encountered in the path.
+
+  Availability: Windows, Unix.
+  """
+  if isWindows():
+    current_path = os.path.abspath(path)
+    path_tail = []
+    # Walk from the leaf component up toward the root, resolving each
+    # symlink encountered and collecting resolved components in path_tail.
+    for c in range(0, 100):  # Avoid cycles
+      if islink(current_path):
+        target = readlink(current_path)
+        current_path = os.path.join(os.path.dirname(current_path), target)
+      else:
+        basename = os.path.basename(current_path)
+        if basename == '':
+          # Reached the drive root; include it and stop.
+          path_tail.append(current_path)
+          break
+        path_tail.append(basename)
+        current_path = os.path.dirname(current_path)
+    path_tail.reverse()
+    result = os.path.normpath(os.path.join(*path_tail))
+    return result
+  else:
+    return os.path.realpath(path)
diff --git a/platform_utils_win32.py b/platform_utils_win32.py
new file mode 100644
index 0000000..fe76b3d
--- /dev/null
+++ b/platform_utils_win32.py
@@ -0,0 +1,217 @@
+#
+# Copyright (C) 2016 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import errno
+
+from ctypes import WinDLL, get_last_error, FormatError, WinError, addressof
+from ctypes import c_buffer, c_ubyte
+from ctypes import Structure, Union, POINTER, byref
+from ctypes.wintypes import BOOL, LPCWSTR, DWORD, HANDLE
+from ctypes.wintypes import WCHAR, USHORT, LPVOID, ULONG
+
+# ctypes prototypes for the kernel32 entry points used below; declaring
+# restype/argtypes lets ctypes marshal arguments correctly.
+kernel32 = WinDLL('kernel32', use_last_error=True)
+
+LPDWORD = POINTER(DWORD)
+UCHAR = c_ubyte
+
+# Win32 error codes
+ERROR_SUCCESS = 0
+ERROR_NOT_SUPPORTED = 50
+ERROR_PRIVILEGE_NOT_HELD = 1314
+
+# Win32 API entry points
+CreateSymbolicLinkW = kernel32.CreateSymbolicLinkW
+CreateSymbolicLinkW.restype = BOOL
+CreateSymbolicLinkW.argtypes = (LPCWSTR,  # lpSymlinkFileName In
+                                LPCWSTR,  # lpTargetFileName In
+                                DWORD)    # dwFlags In
+
+# Symbolic link creation flags
+SYMBOLIC_LINK_FLAG_FILE = 0x00
+SYMBOLIC_LINK_FLAG_DIRECTORY = 0x01
+
+GetFileAttributesW = kernel32.GetFileAttributesW
+GetFileAttributesW.restype = DWORD
+GetFileAttributesW.argtypes = (LPCWSTR,)  # lpFileName In
+
+INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
+FILE_ATTRIBUTE_REPARSE_POINT = 0x00400
+
+CreateFileW = kernel32.CreateFileW
+CreateFileW.restype = HANDLE
+CreateFileW.argtypes = (LPCWSTR,  # lpFileName In
+                        DWORD,    # dwDesiredAccess In
+                        DWORD,    # dwShareMode In
+                        LPVOID,   # lpSecurityAttributes In_opt
+                        DWORD,    # dwCreationDisposition In
+                        DWORD,    # dwFlagsAndAttributes In
+                        HANDLE)   # hTemplateFile In_opt
+
+CloseHandle = kernel32.CloseHandle
+CloseHandle.restype = BOOL
+CloseHandle.argtypes = (HANDLE,)  # hObject In
+
+INVALID_HANDLE_VALUE = HANDLE(-1).value
+OPEN_EXISTING = 3
+FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
+FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000
+
+DeviceIoControl = kernel32.DeviceIoControl
+DeviceIoControl.restype = BOOL
+DeviceIoControl.argtypes = (HANDLE,   # hDevice In
+                            DWORD,    # dwIoControlCode In
+                            LPVOID,   # lpInBuffer In_opt
+                            DWORD,    # nInBufferSize In
+                            LPVOID,   # lpOutBuffer Out_opt
+                            DWORD,    # nOutBufferSize In
+                            LPDWORD,  # lpBytesReturned Out_opt
+                            LPVOID)   # lpOverlapped Inout_opt
+
+# Device I/O control flags and options
+FSCTL_GET_REPARSE_POINT = 0x000900A8
+IO_REPARSE_TAG_MOUNT_POINT = 0xA0000003
+IO_REPARSE_TAG_SYMLINK = 0xA000000C
+MAXIMUM_REPARSE_DATA_BUFFER_SIZE = 0x4000
+
+
+class GENERIC_REPARSE_BUFFER(Structure):
+  # Raw byte view of the reparse payload, for tags we do not interpret.
+  _fields_ = (('DataBuffer', UCHAR * 1),)
+
+
+class SYMBOLIC_LINK_REPARSE_BUFFER(Structure):
+  _fields_ = (('SubstituteNameOffset', USHORT),
+              ('SubstituteNameLength', USHORT),
+              ('PrintNameOffset', USHORT),
+              ('PrintNameLength', USHORT),
+              ('Flags', ULONG),
+              ('PathBuffer', WCHAR * 1))
+
+  @property
+  def PrintName(self):
+    # PrintNameLength is in bytes; WCHAR is 2 bytes, hence the // 2.
+    arrayt = WCHAR * (self.PrintNameLength // 2)
+    # The name is stored inline, PrintNameOffset bytes past PathBuffer.
+    offset = type(self).PathBuffer.offset + self.PrintNameOffset
+    return arrayt.from_address(addressof(self) + offset).value
+
+
+class MOUNT_POINT_REPARSE_BUFFER(Structure):
+  _fields_ = (('SubstituteNameOffset', USHORT),
+              ('SubstituteNameLength', USHORT),
+              ('PrintNameOffset', USHORT),
+              ('PrintNameLength', USHORT),
+              ('PathBuffer', WCHAR * 1))
+
+  @property
+  def PrintName(self):
+    # PrintNameLength is in bytes; WCHAR is 2 bytes, hence the // 2.
+    arrayt = WCHAR * (self.PrintNameLength // 2)
+    offset = type(self).PathBuffer.offset + self.PrintNameOffset
+    return arrayt.from_address(addressof(self) + offset).value
+
+
+class REPARSE_DATA_BUFFER(Structure):
+  # Python mirror of the buffer returned by FSCTL_GET_REPARSE_POINT.
+  class REPARSE_BUFFER(Union):
+    _fields_ = (('SymbolicLinkReparseBuffer', SYMBOLIC_LINK_REPARSE_BUFFER),
+                ('MountPointReparseBuffer', MOUNT_POINT_REPARSE_BUFFER),
+                ('GenericReparseBuffer', GENERIC_REPARSE_BUFFER))
+  _fields_ = (('ReparseTag', ULONG),
+              ('ReparseDataLength', USHORT),
+              ('Reserved', USHORT),
+              ('ReparseBuffer', REPARSE_BUFFER))
+  # Anonymous union: members are accessed directly on the outer struct,
+  # e.g. rdb.SymbolicLinkReparseBuffer.
+  _anonymous_ = ('ReparseBuffer',)
+
+
+def create_filesymlink(source, link_name):
+  """Creates a Windows file symbolic link at link_name pointing to source."""
+  _create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_FILE)
+
+
+def create_dirsymlink(source, link_name):
+  """Creates a Windows directory symbolic link at link_name pointing to
+  source.
+  """
+  _create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_DIRECTORY)
+
+
+def _create_symlink(source, link_name, dwFlags):
+  """Shared helper: creates the symlink and maps Win32 errors to exceptions.
+
+  Raises OSError(EPERM) when the process lacks the symlink privilege, or a
+  WinError for any other failure.
+  """
+  # Note: Win32 documentation for CreateSymbolicLink is incorrect.
+  # On success, the function returns "1".
+  # On error, the function returns some random value (e.g. 1280).
+  # The best bet seems to use "GetLastError" and check for error/success.
+  CreateSymbolicLinkW(link_name, source, dwFlags)
+  code = get_last_error()
+  if code != ERROR_SUCCESS:
+    error_desc = FormatError(code).strip()
+    if code == ERROR_PRIVILEGE_NOT_HELD:
+      raise OSError(errno.EPERM, error_desc, link_name)
+    # Fix: original used str.format() with a %s placeholder, so the link
+    # name was never substituted into the message.
+    _raise_winerror(
+        code,
+        'Error creating symbolic link \"%s\"' % link_name)
+
+
+def islink(path):
+  """Returns True if path carries the reparse-point file attribute."""
+  result = GetFileAttributesW(path)
+  if result == INVALID_FILE_ATTRIBUTES:
+    # Path does not exist or attributes are unreadable; not a link.
+    return False
+  return bool(result & FILE_ATTRIBUTE_REPARSE_POINT)
+
+
+def readlink(path):
+  """Returns the target of the symlink or mount point at |path|.
+
+  Opens the reparse point without following it, queries its reparse data
+  via DeviceIoControl, and returns the stored print name.
+  """
+  reparse_point_handle = CreateFileW(path,
+                                     0,
+                                     0,
+                                     None,
+                                     OPEN_EXISTING,
+                                     FILE_FLAG_OPEN_REPARSE_POINT |
+                                     FILE_FLAG_BACKUP_SEMANTICS,
+                                     None)
+  if reparse_point_handle == INVALID_HANDLE_VALUE:
+    # Fix: these messages used str.format() on %s-style strings, so the
+    # path was never substituted; also fixes the "symblic" typos.
+    _raise_winerror(
+        get_last_error(),
+        'Error opening symbolic link \"%s\"' % path)
+  target_buffer = c_buffer(MAXIMUM_REPARSE_DATA_BUFFER_SIZE)
+  n_bytes_returned = DWORD()
+  io_result = DeviceIoControl(reparse_point_handle,
+                              FSCTL_GET_REPARSE_POINT,
+                              None,
+                              0,
+                              target_buffer,
+                              len(target_buffer),
+                              byref(n_bytes_returned),
+                              None)
+  CloseHandle(reparse_point_handle)
+  if not io_result:
+    _raise_winerror(
+        get_last_error(),
+        'Error reading symbolic link \"%s\"' % path)
+  rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer)
+  if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK:
+    return _preserve_encoding(path, rdb.SymbolicLinkReparseBuffer.PrintName)
+  elif rdb.ReparseTag == IO_REPARSE_TAG_MOUNT_POINT:
+    return _preserve_encoding(path, rdb.MountPointReparseBuffer.PrintName)
+  # Unsupported reparse point type
+  _raise_winerror(
+      ERROR_NOT_SUPPORTED,
+      'Error reading symbolic link \"%s\"' % path)
+
+
+def _preserve_encoding(source, target):
+  """Ensures target is the same string type (i.e. unicode or str) as source."""
+  # NOTE(review): relies on the Python 2-only 'unicode' builtin; this module
+  # is not Python 3 compatible as written — confirm the supported runtime.
+  if isinstance(source, unicode):
+    return unicode(target)
+  return str(target)
+
+
+def _raise_winerror(code, error_desc):
+  """Raises WinError(code) with a message combining the caller's description
+  and the system's description of the Win32 error code."""
+  win_error_desc = FormatError(code).strip()
+  # Fix: original called str.format() on a %s-style template, so the raised
+  # message was the literal "%s: %s" instead of the actual descriptions.
+  error_desc = "%s: %s" % (error_desc, win_error_desc)
+  raise WinError(code, error_desc)
diff --git a/progress.py b/progress.py
index d948654..0dd5d1a 100644
--- a/progress.py
+++ b/progress.py
@@ -21,7 +21,8 @@
 _NOT_TTY = not os.isatty(2)
 
 class Progress(object):
-  def __init__(self, title, total=0, units=''):
+  def __init__(self, title, total=0, units='', print_newline=False,
+               always_print_percentage=False):
     self._title = title
     self._total = total
     self._done = 0
@@ -29,6 +30,8 @@
     self._start = time()
     self._show = False
     self._units = units
+    self._print_newline = print_newline
+    self._always_print_percentage = always_print_percentage
 
   def update(self, inc=1):
     self._done += inc
@@ -50,13 +53,14 @@
     else:
       p = (100 * self._done) / self._total
 
-      if self._lastp != p:
+      if self._lastp != p or self._always_print_percentage:
         self._lastp = p
-        sys.stderr.write('\r%s: %3d%% (%d%s/%d%s)  ' % (
+        sys.stderr.write('\r%s: %3d%% (%d%s/%d%s)%s' % (
           self._title,
           p,
           self._done, self._units,
-          self._total, self._units))
+          self._total, self._units,
+          "\n" if self._print_newline else ""))
         sys.stderr.flush()
 
   def end(self):
diff --git a/project.py b/project.py
index 142258e..83dcf55 100644
--- a/project.py
+++ b/project.py
@@ -35,6 +35,7 @@
 from error import GitError, HookError, UploadError, DownloadError
 from error import ManifestInvalidRevisionError
 from error import NoManifestException
+import platform_utils
 from trace import IsTrace, Trace
 
 from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M
@@ -62,9 +63,9 @@
     fd.close()
 
   try:
-    os.rename(lock, path)
+    platform_utils.rename(lock, path)
   except OSError:
-    os.remove(lock)
+    platform_utils.remove(lock)
     raise
 
 
@@ -102,7 +103,7 @@
   """
   global _project_hook_list
   if _project_hook_list is None:
-    d = os.path.realpath(os.path.abspath(os.path.dirname(__file__)))
+    d = platform_utils.realpath(os.path.abspath(os.path.dirname(__file__)))
     d = os.path.join(d, 'hooks')
     _project_hook_list = [os.path.join(d, x) for x in os.listdir(d)]
   return _project_hook_list
@@ -176,12 +177,20 @@
   def UploadForReview(self, people,
                       auto_topic=False,
                       draft=False,
-                      dest_branch=None):
+                      private=False,
+                      wip=False,
+                      dest_branch=None,
+                      validate_certs=True,
+                      push_options=None):
     self.project.UploadForReview(self.name,
                                  people,
                                  auto_topic=auto_topic,
                                  draft=draft,
-                                 dest_branch=dest_branch)
+                                 private=private,
+                                 wip=wip,
+                                 dest_branch=dest_branch,
+                                 validate_certs=validate_certs,
+                                 push_options=push_options)
 
   def GetPublishedRefs(self):
     refs = {}
@@ -243,7 +252,7 @@
       try:
         # remove existing file first, since it might be read-only
         if os.path.exists(dest):
-          os.remove(dest)
+          platform_utils.remove(dest)
         else:
           dest_dir = os.path.dirname(dest)
           if not os.path.isdir(dest_dir):
@@ -268,16 +277,16 @@
 
   def __linkIt(self, relSrc, absDest):
     # link file if it does not exist or is out of date
-    if not os.path.islink(absDest) or (os.readlink(absDest) != relSrc):
+    if not platform_utils.islink(absDest) or (platform_utils.readlink(absDest) != relSrc):
       try:
         # remove existing file first, since it might be read-only
         if os.path.lexists(absDest):
-          os.remove(absDest)
+          platform_utils.remove(absDest)
         else:
           dest_dir = os.path.dirname(absDest)
           if not os.path.isdir(dest_dir):
             os.makedirs(dest_dir)
-        os.symlink(relSrc, absDest)
+        platform_utils.symlink(relSrc, absDest)
       except IOError:
         _error('Cannot link file %s to %s', relSrc, absDest)
 
@@ -323,13 +332,15 @@
                pushUrl=None,
                review=None,
                revision=None,
-               orig_name=None):
+               orig_name=None,
+               fetchUrl=None):
     self.name = name
     self.url = url
     self.pushUrl = pushUrl
     self.review = review
     self.revision = revision
     self.orig_name = orig_name
+    self.fetchUrl = fetchUrl
 
 
 class RepoHook(object):
@@ -687,7 +698,7 @@
     self.gitdir = gitdir.replace('\\', '/')
     self.objdir = objdir.replace('\\', '/')
     if worktree:
-      self.worktree = os.path.normpath(worktree.replace('\\', '/'))
+      self.worktree = os.path.normpath(worktree).replace('\\', '/')
     else:
       self.worktree = None
     self.relpath = relpath
@@ -911,11 +922,13 @@
     else:
       return False
 
-  def PrintWorkTreeStatus(self, output_redir=None):
+  def PrintWorkTreeStatus(self, output_redir=None, quiet=False):
     """Prints the status of the repository to stdout.
 
     Args:
       output: If specified, redirect the output to this object.
+      quiet:  If True then only print the project name.  Do not print
+              the modified files, branch name, etc.
     """
     if not os.path.isdir(self.worktree):
       if output_redir is None:
@@ -941,6 +954,10 @@
       out.redirect(output_redir)
     out.project('project %-40s', self.relpath + '/ ')
 
+    if quiet:
+      out.nl()
+      return 'DIRTY'
+
     branch = self.CurrentBranch
     if branch is None:
       out.nobranch('(*** NO BRANCH ***)')
@@ -1099,7 +1116,11 @@
                       people=([], []),
                       auto_topic=False,
                       draft=False,
-                      dest_branch=None):
+                      private=False,
+                      wip=False,
+                      dest_branch=None,
+                      validate_certs=True,
+                      push_options=None):
     """Uploads the named branch for code review.
     """
     if branch is None:
@@ -1124,7 +1145,7 @@
       branch.remote.projectname = self.name
       branch.remote.Save()
 
-    url = branch.remote.ReviewUrl(self.UserEmail)
+    url = branch.remote.ReviewUrl(self.UserEmail, validate_certs)
     if url is None:
       raise UploadError('review not configured')
     cmd = ['push']
@@ -1137,6 +1158,10 @@
         rp.append('--cc=%s' % sq(e))
       cmd.append('--receive-pack=%s' % " ".join(rp))
 
+    for push_option in (push_options or []):
+      cmd.append('-o')
+      cmd.append(push_option)
+
     cmd.append(url)
 
     if dest_branch.startswith(R_HEADS):
@@ -1150,9 +1175,14 @@
                                   dest_branch)
     if auto_topic:
       ref_spec = ref_spec + '/' + branch.name
+
     if not url.startswith('ssh://'):
       rp = ['r=%s' % p for p in people[0]] + \
            ['cc=%s' % p for p in people[1]]
+      if private:
+        rp = rp + ['private']
+      if wip:
+        rp = rp + ['wip']
       if rp:
         ref_spec = ref_spec + '%' + ','.join(rp)
     cmd.append(ref_spec)
@@ -1192,7 +1222,8 @@
                        no_tags=False,
                        archive=False,
                        optimized_fetch=False,
-                       prune=False):
+                       prune=False,
+                       submodules=False):
     """Perform only the network IO portion of the sync process.
        Local working directory/branch state is not affected.
     """
@@ -1218,7 +1249,7 @@
       if not self._ExtractArchive(tarpath, path=topdir):
         return False
       try:
-        os.remove(tarpath)
+        platform_utils.remove(tarpath)
       except OSError as e:
         _warn("Cannot remove archive %s: %s", tarpath, str(e))
       self._CopyAndLinkFiles()
@@ -1234,7 +1265,7 @@
     if is_new:
       alt = os.path.join(self.gitdir, 'objects/info/alternates')
       try:
-        fd = open(alt, 'rb')
+        fd = open(alt)
         try:
           alt_dir = fd.readline().rstrip()
         finally:
@@ -1258,13 +1289,19 @@
       elif self.manifest.default.sync_c:
         current_branch_only = True
 
+    if self.clone_depth:
+      depth = self.clone_depth
+    else:
+      depth = self.manifest.manifestProject.config.GetString('repo.depth')
+
     need_to_fetch = not (optimized_fetch and
                          (ID_RE.match(self.revisionExpr) and
-                          self._CheckForSha1()))
+                          self._CheckForImmutableRevision()))
     if (need_to_fetch and
         not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir,
                               current_branch_only=current_branch_only,
-                              no_tags=no_tags, prune=prune)):
+                              no_tags=no_tags, prune=prune, depth=depth,
+                              submodules=submodules)):
       return False
 
     if self.worktree:
@@ -1272,7 +1309,7 @@
     else:
       self._InitMirrorHead()
       try:
-        os.remove(os.path.join(self.gitdir, 'FETCH_HEAD'))
+        platform_utils.remove(os.path.join(self.gitdir, 'FETCH_HEAD'))
       except OSError:
         pass
     return True
@@ -1320,11 +1357,11 @@
       raise ManifestInvalidRevisionError('revision %s in %s not found' %
                                          (self.revisionExpr, self.name))
 
-  def Sync_LocalHalf(self, syncbuf, force_sync=False):
+  def Sync_LocalHalf(self, syncbuf, force_sync=False, submodules=False):
     """Perform only the local IO portion of the sync process.
        Network access is not required.
     """
-    self._InitWorkTree(force_sync=force_sync)
+    self._InitWorkTree(force_sync=force_sync, submodules=submodules)
     all_refs = self.bare_ref.all
     self.CleanPublishedCache(all_refs)
     revid = self.GetRevisionId(all_refs)
@@ -1333,6 +1370,9 @@
       self._FastForward(revid)
       self._CopyAndLinkFiles()
 
+    def _dosubmodules():
+      self._SyncSubmodules(quiet=True)
+
     head = self.work_git.GetHead()
     if head.startswith(R_HEADS):
       branch = head[len(R_HEADS):]
@@ -1366,6 +1406,8 @@
 
       try:
         self._Checkout(revid, quiet=True)
+        if submodules:
+          self._SyncSubmodules(quiet=True)
       except GitError as e:
         syncbuf.fail(self, e)
         return
@@ -1390,6 +1432,8 @@
                    branch.name)
       try:
         self._Checkout(revid, quiet=True)
+        if submodules:
+          self._SyncSubmodules(quiet=True)
       except GitError as e:
         syncbuf.fail(self, e)
         return
@@ -1415,6 +1459,8 @@
         # strict subset.  We can fast-forward safely.
         #
         syncbuf.later1(self, _doff)
+        if submodules:
+          syncbuf.later1(self, _dosubmodules)
         return
 
     # Examine the local commits not in the remote.  Find the
@@ -1466,19 +1512,28 @@
     branch.Save()
 
     if cnt_mine > 0 and self.rebase:
+      def _docopyandlink():
+        self._CopyAndLinkFiles()
+
       def _dorebase():
         self._Rebase(upstream='%s^1' % last_mine, onto=revid)
-        self._CopyAndLinkFiles()
       syncbuf.later2(self, _dorebase)
+      if submodules:
+        syncbuf.later2(self, _dosubmodules)
+      syncbuf.later2(self, _docopyandlink)
     elif local_changes:
       try:
         self._ResetHard(revid)
+        if submodules:
+          self._SyncSubmodules(quiet=True)
         self._CopyAndLinkFiles()
       except GitError as e:
         syncbuf.fail(self, e)
         return
     else:
       syncbuf.later1(self, _doff)
+      if submodules:
+        syncbuf.later1(self, _dosubmodules)
 
   def AddCopyFile(self, src, dest, absdest):
     # dest should already be an absolute path, but src is project relative
@@ -1764,7 +1819,7 @@
       except GitError:
         return [], []
       finally:
-        os.remove(temp_gitmodules_path)
+        platform_utils.remove(temp_gitmodules_path)
 
       names = set()
       paths = {}
@@ -1851,7 +1906,7 @@
 
 
 # Direct Git Commands ##
-  def _CheckForSha1(self):
+  def _CheckForImmutableRevision(self):
     try:
       # if revision (sha or tag) is not present then following function
       # throws an error.
@@ -1880,23 +1935,18 @@
                    quiet=False,
                    alt_dir=None,
                    no_tags=False,
-                   prune=False):
+                   prune=False,
+                   depth=None,
+                   submodules=False):
 
     is_sha1 = False
     tag_name = None
-    depth = None
-
     # The depth should not be used when fetching to a mirror because
     # it will result in a shallow repository that cannot be cloned or
     # fetched from.
-    if not self.manifest.IsMirror:
-      if self.clone_depth:
-        depth = self.clone_depth
-      else:
-        depth = self.manifest.manifestProject.config.GetString('repo.depth')
-      # The repo project should never be synced with partial depth
-      if self.relpath == '.repo/repo':
-        depth = None
+    # The repo project should also never be synced with partial depth.
+    if self.manifest.IsMirror or self.relpath == '.repo/repo':
+      depth = None
 
     if depth:
       current_branch_only = True
@@ -1910,7 +1960,9 @@
         tag_name = self.revisionExpr[len(R_TAGS):]
 
       if is_sha1 or tag_name is not None:
-        if self._CheckForSha1():
+        if self._CheckForImmutableRevision():
+          print('Skipped fetching project %s (already have persistent ref)'
+                % self.name)
           return True
       if is_sha1 and not depth:
         # When syncing a specific commit and --depth is not set:
@@ -1958,15 +2010,17 @@
           ids.add(ref_id)
           tmp.add(r)
 
-        tmp_packed = ''
-        old_packed = ''
+        tmp_packed_lines = []
+        old_packed_lines = []
 
         for r in sorted(all_refs):
           line = '%s %s\n' % (all_refs[r], r)
-          tmp_packed += line
+          tmp_packed_lines.append(line)
           if r not in tmp:
-            old_packed += line
+            old_packed_lines.append(line)
 
+        tmp_packed = ''.join(tmp_packed_lines)
+        old_packed = ''.join(old_packed_lines)
         _lwrite(packed_refs, tmp_packed)
       else:
         alt_dir = None
@@ -1999,6 +2053,9 @@
     if prune:
       cmd.append('--prune')
 
+    if submodules:
+      cmd.append('--recurse-submodules=on-demand')
+
     spec = []
     if not current_branch_only:
       # Fetch whole repo
@@ -2054,24 +2111,25 @@
         if old_packed != '':
           _lwrite(packed_refs, old_packed)
         else:
-          os.remove(packed_refs)
+          platform_utils.remove(packed_refs)
       self.bare_git.pack_refs('--all', '--prune')
 
-    if is_sha1 and current_branch_only and self.upstream:
+    if is_sha1 and current_branch_only:
       # We just synced the upstream given branch; verify we
       # got what we wanted, else trigger a second run of all
       # refs.
-      if not self._CheckForSha1():
-        if not depth:
-          # Avoid infinite recursion when depth is True (since depth implies
-          # current_branch_only)
-          return self._RemoteFetch(name=name, current_branch_only=False,
-                                   initial=False, quiet=quiet, alt_dir=alt_dir)
-        if self.clone_depth:
-          self.clone_depth = None
+      if not self._CheckForImmutableRevision():
+        if current_branch_only and depth:
+          # Sync the current branch only with depth set to None
           return self._RemoteFetch(name=name,
                                    current_branch_only=current_branch_only,
-                                   initial=False, quiet=quiet, alt_dir=alt_dir)
+                                   initial=False, quiet=quiet, alt_dir=alt_dir,
+                                   depth=None)
+        else:
+          # Avoid infinite recursion: sync all branches with depth set to None
+          return self._RemoteFetch(name=name, current_branch_only=False,
+                                   initial=False, quiet=quiet, alt_dir=alt_dir,
+                                   depth=None)
 
     return ok
 
@@ -2115,14 +2173,14 @@
 
     ok = GitCommand(self, cmd, bare=True).Wait() == 0
     if os.path.exists(bundle_dst):
-      os.remove(bundle_dst)
+      platform_utils.remove(bundle_dst)
     if os.path.exists(bundle_tmp):
-      os.remove(bundle_tmp)
+      platform_utils.remove(bundle_tmp)
     return ok
 
   def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet):
     if os.path.exists(dstPath):
-      os.remove(dstPath)
+      platform_utils.remove(dstPath)
 
     cmd = ['curl', '--fail', '--output', tmpPath, '--netrc', '--location']
     if quiet:
@@ -2132,7 +2190,7 @@
       if size >= 1024:
         cmd += ['--continue-at', '%d' % (size,)]
       else:
-        os.remove(tmpPath)
+        platform_utils.remove(tmpPath)
     if 'http_proxy' in os.environ and 'darwin' == sys.platform:
       cmd += ['--proxy', os.environ['http_proxy']]
     with GetUrlCookieFile(srcUrl, quiet) as (cookiefile, _proxy):
@@ -2163,10 +2221,10 @@
 
     if os.path.exists(tmpPath):
       if curlret == 0 and self._IsValidBundle(tmpPath, quiet):
-        os.rename(tmpPath, dstPath)
+        platform_utils.rename(tmpPath, dstPath)
         return True
       else:
-        os.remove(tmpPath)
+        platform_utils.remove(tmpPath)
         return False
     else:
       return False
@@ -2218,6 +2276,13 @@
     if GitCommand(self, cmd).Wait() != 0:
       raise GitError('%s reset --hard %s ' % (self.name, rev))
 
+  def _SyncSubmodules(self, quiet=True):
+    cmd = ['submodule', 'update', '--init', '--recursive']
+    if quiet:
+      cmd.append('-q')
+    if GitCommand(self, cmd).Wait() != 0:
+      raise GitError('%s submodule update --init --recursive ' % self.name)
+
   def _Rebase(self, upstream, onto=None):
     cmd = ['rebase']
     if onto is not None:
@@ -2257,10 +2322,10 @@
             print("Retrying clone after deleting %s" %
                   self.gitdir, file=sys.stderr)
             try:
-              shutil.rmtree(os.path.realpath(self.gitdir))
-              if self.worktree and os.path.exists(os.path.realpath
+              platform_utils.rmtree(platform_utils.realpath(self.gitdir))
+              if self.worktree and os.path.exists(platform_utils.realpath
                                                   (self.worktree)):
-                shutil.rmtree(os.path.realpath(self.worktree))
+                platform_utils.rmtree(platform_utils.realpath(self.worktree))
               return self._InitGitDir(mirror_git=mirror_git, force_sync=False)
             except:
               raise e
@@ -2302,9 +2367,9 @@
           self.config.SetString('core.bare', None)
     except Exception:
       if init_obj_dir and os.path.exists(self.objdir):
-        shutil.rmtree(self.objdir)
+        platform_utils.rmtree(self.objdir)
       if init_git_dir and os.path.exists(self.gitdir):
-        shutil.rmtree(self.gitdir)
+        platform_utils.rmtree(self.gitdir)
       raise
 
   def _UpdateHooks(self):
@@ -2312,7 +2377,7 @@
       self._InitHooks()
 
   def _InitHooks(self):
-    hooks = os.path.realpath(self._gitdir_path('hooks'))
+    hooks = platform_utils.realpath(self._gitdir_path('hooks'))
     if not os.path.exists(hooks):
       os.makedirs(hooks)
     for stock_hook in _ProjectHooks():
@@ -2328,20 +2393,21 @@
         continue
 
       dst = os.path.join(hooks, name)
-      if os.path.islink(dst):
+      if platform_utils.islink(dst):
         continue
       if os.path.exists(dst):
         if filecmp.cmp(stock_hook, dst, shallow=False):
-          os.remove(dst)
+          platform_utils.remove(dst)
         else:
           _warn("%s: Not replacing locally modified %s hook",
                 self.relpath, name)
           continue
       try:
-        os.symlink(os.path.relpath(stock_hook, os.path.dirname(dst)), dst)
+        platform_utils.symlink(
+            os.path.relpath(stock_hook, os.path.dirname(dst)), dst)
       except OSError as e:
         if e.errno == errno.EPERM:
-          raise GitError('filesystem must support symlinks')
+          raise GitError(self._get_symlink_error_message())
         else:
           raise
 
@@ -2389,11 +2455,12 @@
       symlink_dirs += self.working_tree_dirs
     to_symlink = symlink_files + symlink_dirs
     for name in set(to_symlink):
-      dst = os.path.realpath(os.path.join(destdir, name))
+      dst = platform_utils.realpath(os.path.join(destdir, name))
       if os.path.lexists(dst):
-        src = os.path.realpath(os.path.join(srcdir, name))
+        src = platform_utils.realpath(os.path.join(srcdir, name))
         # Fail if the links are pointing to the wrong place
         if src != dst:
+          _error('%s is different in %s vs %s', name, destdir, srcdir)
           raise GitError('--force-sync not enabled; cannot overwrite a local '
                          'work tree. If you\'re comfortable with the '
                          'possibility of losing the work tree\'s git metadata,'
@@ -2422,10 +2489,10 @@
     if copy_all:
       to_copy = os.listdir(gitdir)
 
-    dotgit = os.path.realpath(dotgit)
+    dotgit = platform_utils.realpath(dotgit)
     for name in set(to_copy).union(to_symlink):
       try:
-        src = os.path.realpath(os.path.join(gitdir, name))
+        src = platform_utils.realpath(os.path.join(gitdir, name))
         dst = os.path.join(dotgit, name)
 
         if os.path.lexists(dst):
@@ -2435,28 +2502,30 @@
         if name in symlink_dirs and not os.path.lexists(src):
           os.makedirs(src)
 
-        # If the source file doesn't exist, ensure the destination
-        # file doesn't either.
-        if name in symlink_files and not os.path.lexists(src):
-          try:
-            os.remove(dst)
-          except OSError:
-            pass
-
         if name in to_symlink:
-          os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst)
-        elif copy_all and not os.path.islink(dst):
+          platform_utils.symlink(
+              os.path.relpath(src, os.path.dirname(dst)), dst)
+        elif copy_all and not platform_utils.islink(dst):
           if os.path.isdir(src):
             shutil.copytree(src, dst)
           elif os.path.isfile(src):
             shutil.copy(src, dst)
+
+        # If the source file doesn't exist, ensure the destination
+        # file doesn't either.
+        if name in symlink_files and not os.path.lexists(src):
+          try:
+            platform_utils.remove(dst)
+          except OSError:
+            pass
+
       except OSError as e:
         if e.errno == errno.EPERM:
-          raise DownloadError('filesystem must support symlinks')
+          raise DownloadError(self._get_symlink_error_message())
         else:
           raise
 
-  def _InitWorkTree(self, force_sync=False):
+  def _InitWorkTree(self, force_sync=False, submodules=False):
     dotgit = os.path.join(self.worktree, '.git')
     init_dotgit = not os.path.exists(dotgit)
     try:
@@ -2470,8 +2539,8 @@
       except GitError as e:
         if force_sync:
           try:
-            shutil.rmtree(dotgit)
-            return self._InitWorkTree(force_sync=False)
+            platform_utils.rmtree(dotgit)
+            return self._InitWorkTree(force_sync=False, submodules=submodules)
           except:
             raise e
         raise e
@@ -2485,14 +2554,24 @@
         if GitCommand(self, cmd).Wait() != 0:
           raise GitError("cannot initialize work tree")
 
+        if submodules:
+          self._SyncSubmodules(quiet=True)
         self._CopyAndLinkFiles()
     except Exception:
       if init_dotgit:
-        shutil.rmtree(dotgit)
+        platform_utils.rmtree(dotgit)
       raise
 
+  def _get_symlink_error_message(self):
+    if platform_utils.isWindows():
+      return ('Unable to create symbolic link. Please re-run the command as '
+              'Administrator, or see '
+              'https://github.com/git-for-windows/git/wiki/Symbolic-Links '
+              'for other options.')
+    return 'filesystem must support symlinks'
+
   def _gitdir_path(self, path):
-    return os.path.realpath(os.path.join(self.gitdir, path))
+    return platform_utils.realpath(os.path.join(self.gitdir, path))
 
   def _revlist(self, *args, **kw):
     a = []
@@ -2627,11 +2706,11 @@
       else:
         path = os.path.join(self._project.worktree, '.git', HEAD)
       try:
-        fd = open(path, 'rb')
+        fd = open(path)
       except IOError as e:
         raise NoManifestException(path, str(e))
       try:
-        line = fd.read()
+        line = fd.readline()
       finally:
         fd.close()
       try:
@@ -2833,13 +2912,14 @@
 
     self.detach_head = detach_head
     self.clean = True
+    self.recent_clean = True
 
   def info(self, project, fmt, *args):
     self._messages.append(_InfoMessage(project, fmt % args))
 
   def fail(self, project, err=None):
     self._failures.append(_Failure(project, err))
-    self.clean = False
+    self._MarkUnclean()
 
   def later1(self, project, what):
     self._later_queue1.append(_Later(project, what))
@@ -2853,6 +2933,15 @@
     self._PrintMessages()
     return self.clean
 
+  def Recently(self):
+    recent_clean = self.recent_clean
+    self.recent_clean = True
+    return recent_clean
+
+  def _MarkUnclean(self):
+    self.clean = False
+    self.recent_clean = False
+
   def _RunLater(self):
     for q in ['_later_queue1', '_later_queue2']:
       if not self._RunQueue(q):
@@ -2861,7 +2950,7 @@
   def _RunQueue(self, queue):
     for m in getattr(self, queue):
       if not m.Run(self):
-        self.clean = False
+        self._MarkUnclean()
         return False
     setattr(self, queue, [])
     return True
@@ -2903,14 +2992,14 @@
           self.revisionExpr = base
           self.revisionId = None
 
-  def MetaBranchSwitch(self):
+  def MetaBranchSwitch(self, submodules=False):
     """ Prepare MetaProject for manifest branch switch
     """
 
     # detach and delete manifest branch, allowing a new
     # branch to take over
     syncbuf = SyncBuffer(self.config, detach_head=True)
-    self.Sync_LocalHalf(syncbuf)
+    self.Sync_LocalHalf(syncbuf, submodules=submodules)
     syncbuf.Finish()
 
     return GitCommand(self,
diff --git a/repo b/repo
index acaa9c4..13ccd2b 100755
--- a/repo
+++ b/repo
@@ -23,7 +23,7 @@
 # limitations under the License.
 
 # increment this whenever we make important changes to this script
-VERSION = (1, 23)
+VERSION = (1, 24)
 
 # increment this if the MAINTAINER_KEYS block is modified
 KEYRING_VERSION = (1, 2)
@@ -120,6 +120,7 @@
 
 import errno
 import optparse
+import platform
 import re
 import shutil
 import stat
@@ -175,6 +176,9 @@
 group.add_option('-m', '--manifest-name',
                  dest='manifest_name',
                  help='initial manifest file', metavar='NAME.xml')
+group.add_option('-c', '--current-branch',
+                 dest='current_branch_only', action='store_true',
+                 help='fetch only current manifest branch from server')
 group.add_option('--mirror',
                  dest='mirror', action='store_true',
                  help='create a replica of the remote repositories '
@@ -189,6 +193,9 @@
                  dest='archive', action='store_true',
                  help='checkout an archive instead of a git repository for '
                       'each project. See git archive.')
+group.add_option('--submodules',
+                 dest='submodules', action='store_true',
+                 help='sync any submodules associated with the manifest repo')
 group.add_option('-g', '--groups',
                  dest='groups', default='default',
                  help='restrict manifest projects to ones with specified '
@@ -202,6 +209,9 @@
 group.add_option('--no-clone-bundle',
                  dest='no_clone_bundle', action='store_true',
                  help='disable use of /clone.bundle on HTTP/HTTPS')
+group.add_option('--no-tags',
+                 dest='no_tags', action='store_true',
+                 help="don't fetch tags in the manifest")
 
 
 # Tool
@@ -347,6 +357,10 @@
     dst = os.path.abspath(os.path.join(repodir, S_repo))
     _Clone(url, dst, opt.quiet, not opt.no_clone_bundle)
 
+    if not os.path.isfile('%s/repo' % dst):
+      _print("warning: '%s' does not look like a git-repo repository, is "
+             "REPO_URL set correctly?" % url, file=sys.stderr)
+
     if can_verify and not opt.no_repo_verify:
       rev = _Verify(dst, branch, opt.quiet)
     else:
@@ -853,7 +867,10 @@
       try:
         _Init(args, gitc_init=(cmd == 'gitc-init'))
       except CloneFailure:
-        shutil.rmtree(os.path.join(repodir, S_repo), ignore_errors=True)
+        path = os.path.join(repodir, S_repo)
+        _print("fatal: cloning the git-repo repository failed, will remove "
+               "'%s' " % path, file=sys.stderr)
+        shutil.rmtree(path, ignore_errors=True)
         sys.exit(1)
       repo_main, rel_repo_dir = _FindRepo()
     else:
@@ -871,7 +888,10 @@
   me.extend(orig_args)
   me.extend(extra_args)
   try:
-    os.execv(sys.executable, me)
+    if platform.system() == "Windows":
+      sys.exit(subprocess.call(me))
+    else:
+      os.execv(sys.executable, me)
   except OSError as e:
     _print("fatal: unable to start %s" % repo_main, file=sys.stderr)
     _print("fatal: %s" % e, file=sys.stderr)
diff --git a/subcmds/abandon.py b/subcmds/abandon.py
index b94ccdd..be32dc5 100644
--- a/subcmds/abandon.py
+++ b/subcmds/abandon.py
@@ -16,6 +16,7 @@
 from __future__ import print_function
 import sys
 from command import Command
+from collections import defaultdict
 from git_command import git
 from progress import Progress
 
@@ -23,49 +24,75 @@
   common = True
   helpSummary = "Permanently abandon a development branch"
   helpUsage = """
-%prog <branchname> [<project>...]
+%prog [--all | <branchname>] [<project>...]
 
 This subcommand permanently abandons a development branch by
 deleting it (and all its history) from your local repository.
 
 It is equivalent to "git branch -D <branchname>".
 """
+  def _Options(self, p):
+    p.add_option('--all',
+                 dest='all', action='store_true',
+                 help='delete all branches in all projects')
 
   def Execute(self, opt, args):
-    if not args:
+    if not opt.all and not args:
       self.Usage()
 
-    nb = args[0]
-    if not git.check_ref_format('heads/%s' % nb):
-      print("error: '%s' is not a valid name" % nb, file=sys.stderr)
-      sys.exit(1)
+    if not opt.all:
+      nb = args[0]
+      if not git.check_ref_format('heads/%s' % nb):
+        print("error: '%s' is not a valid name" % nb, file=sys.stderr)
+        sys.exit(1)
+    else:
+      args.insert(0, None)
+      nb = "'All local branches'"
 
-    nb = args[0]
-    err = []
-    success = []
+    err = defaultdict(list)
+    success = defaultdict(list)
     all_projects = self.GetProjects(args[1:])
 
     pm = Progress('Abandon %s' % nb, len(all_projects))
     for project in all_projects:
       pm.update()
 
-      status = project.AbandonBranch(nb)
-      if status is not None:
-        if status:
-          success.append(project)
-        else:
-          err.append(project)
+      if opt.all:
+        branches = project.GetBranches().keys()
+      else:
+        branches = [nb]
+
+      for name in branches:
+        status = project.AbandonBranch(name)
+        if status is not None:
+          if status:
+            success[name].append(project)
+          else:
+            err[name].append(project)
     pm.end()
 
+    width = 25
+    for name in list(err) + list(success):
+      if width < len(name):
+        width = len(name)
+
     if err:
-      for p in err:
-        print("error: %s/: cannot abandon %s" % (p.relpath, nb),
-              file=sys.stderr)
+      for br in err.keys():
+        err_msg = "error: cannot abandon %s" % br
+        print(err_msg, file=sys.stderr)
+        for proj in err[br]:
+          print(' '*len(err_msg) + " | %s" % proj.relpath, file=sys.stderr)
       sys.exit(1)
     elif not success:
-      print('error: no project has branch %s' % nb, file=sys.stderr)
+      print('error: no project has local branch(es) : %s' % nb,
+            file=sys.stderr)
       sys.exit(1)
     else:
-      print('Abandoned in %d project(s):\n  %s'
-            % (len(success), '\n  '.join(p.relpath for p in success)),
-            file=sys.stderr)
+      print('Abandoned branches:', file=sys.stderr)
+      for br in success.keys():
+        if len(all_projects) > 1 and len(all_projects) == len(success[br]):
+          result = "all project"
+        else:
+          result = "%s" % (
+            ('\n'+' '*width + '| ').join(p.relpath for p in success[br]))
+        print("%s%s| %s\n" % (br,' '*(width-len(br)), result),file=sys.stderr)
diff --git a/subcmds/download.py b/subcmds/download.py
index a029462..e1010aa 100644
--- a/subcmds/download.py
+++ b/subcmds/download.py
@@ -26,11 +26,12 @@
   common = True
   helpSummary = "Download and checkout a change"
   helpUsage = """
-%prog {project change[/patchset]}...
+%prog {[project] change[/patchset]}...
 """
   helpDescription = """
 The '%prog' command downloads a change from the review system and
 makes it available in your project's local working directory.
+If no project is specified try to use current directory as a project.
 """
 
   def _Options(self, p):
@@ -55,7 +56,7 @@
       m = CHANGE_RE.match(a)
       if m:
         if not project:
-          self.Usage()
+          project = self.GetProjects(".")[0]
         chg_id = int(m.group(1))
         if m.group(2):
           ps_id = int(m.group(2))
diff --git a/subcmds/forall.py b/subcmds/forall.py
index 07ee8d5..52eb5e2 100644
--- a/subcmds/forall.py
+++ b/subcmds/forall.py
@@ -15,17 +15,16 @@
 
 from __future__ import print_function
 import errno
-import fcntl
 import multiprocessing
 import re
 import os
-import select
 import signal
 import sys
 import subprocess
 
 from color import Coloring
 from command import Command, MirrorSafeCommand
+import platform_utils
 
 _CAN_COLOR = [
   'branch',
@@ -105,6 +104,13 @@
 shell positional arguments ($1, $2, .., $#) are set to any arguments
 following <command>.
 
+Example: to list projects:
+
+  %prog% forall -c 'echo $REPO_PROJECT'
+
+Notice that $REPO_PROJECT is quoted to ensure it is expanded in
+the context of running <command> instead of in the calling shell.
+
 Unless -p is used, stdin, stdout, stderr are inherited from the
 terminal and are not redirected.
 
@@ -344,35 +350,25 @@
   if opt.project_header:
     out = ForallColoring(config)
     out.redirect(sys.stdout)
-    class sfd(object):
-      def __init__(self, fd, dest):
-        self.fd = fd
-        self.dest = dest
-      def fileno(self):
-        return self.fd.fileno()
-
     empty = True
     errbuf = ''
 
     p.stdin.close()
-    s_in = [sfd(p.stdout, sys.stdout),
-            sfd(p.stderr, sys.stderr)]
+    s_in = platform_utils.FileDescriptorStreams.create()
+    s_in.add(p.stdout, sys.stdout, 'stdout')
+    s_in.add(p.stderr, sys.stderr, 'stderr')
 
-    for s in s_in:
-      flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
-      fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
-
-    while s_in:
-      in_ready, _out_ready, _err_ready = select.select(s_in, [], [])
+    while not s_in.is_done:
+      in_ready = s_in.select()
       for s in in_ready:
-        buf = s.fd.read(4096)
+        buf = s.read()
         if not buf:
-          s.fd.close()
+          s.close()
           s_in.remove(s)
           continue
 
         if not opt.verbose:
-          if s.fd != p.stdout:
+          if s.std_name == 'stderr':
             errbuf += buf
             continue
 
diff --git a/subcmds/gitc_delete.py b/subcmds/gitc_delete.py
index 7380c35..54f62f4 100644
--- a/subcmds/gitc_delete.py
+++ b/subcmds/gitc_delete.py
@@ -14,12 +14,10 @@
 # limitations under the License.
 
 from __future__ import print_function
-import os
-import shutil
 import sys
 
 from command import Command, GitcClientCommand
-import gitc_utils
+import platform_utils
 
 from pyversion import is_python3
 if not is_python3():
@@ -52,4 +50,4 @@
       if not response == 'yes':
         print('Response was not "yes"\n Exiting...')
         sys.exit(1)
-    shutil.rmtree(self.gitc_manifest.gitc_client_dir)
+    platform_utils.rmtree(self.gitc_manifest.gitc_client_dir)
diff --git a/subcmds/init.py b/subcmds/init.py
index 45d69b7..eeddca0 100644
--- a/subcmds/init.py
+++ b/subcmds/init.py
@@ -17,7 +17,6 @@
 import os
 import platform
 import re
-import shutil
 import sys
 
 from pyversion import is_python3
@@ -35,6 +34,7 @@
 from project import SyncBuffer
 from git_config import GitConfig
 from git_command import git_require, MIN_GIT_VERSION
+import platform_utils
 
 class Init(InteractiveCommand, MirrorSafeCommand):
   common = True
@@ -91,6 +91,9 @@
     g.add_option('-b', '--manifest-branch',
                  dest='manifest_branch',
                  help='manifest branch or revision', metavar='REVISION')
+    g.add_option('-c', '--current-branch',
+                 dest='current_branch_only', action='store_true',
+                 help='fetch only current manifest branch from server')
     g.add_option('-m', '--manifest-name',
                  dest='manifest_name', default='default.xml',
                  help='initial manifest file', metavar='NAME.xml')
@@ -108,6 +111,9 @@
                  dest='archive', action='store_true',
                  help='checkout an archive instead of a git repository for '
                       'each project. See git archive.')
+    g.add_option('--submodules',
+                 dest='submodules', action='store_true',
+                 help='sync any submodules associated with the manifest repo')
     g.add_option('-g', '--groups',
                  dest='groups', default='default',
                  help='restrict manifest projects to ones with specified '
@@ -121,6 +127,9 @@
     g.add_option('--no-clone-bundle',
                  dest='no_clone_bundle', action='store_true',
                  help='disable use of /clone.bundle on HTTP/HTTPS')
+    g.add_option('--no-tags',
+                 dest='no_tags', action='store_true',
+                 help="don't fetch tags in the manifest")
 
     # Tool
     g = p.add_option_group('repo Version options')
@@ -230,22 +239,27 @@
               'in another location.', file=sys.stderr)
         sys.exit(1)
 
+    if opt.submodules:
+      m.config.SetString('repo.submodules', 'true')
+
     if not m.Sync_NetworkHalf(is_new=is_new, quiet=opt.quiet,
-        clone_bundle=not opt.no_clone_bundle):
+        clone_bundle=not opt.no_clone_bundle,
+        current_branch_only=opt.current_branch_only,
+        no_tags=opt.no_tags, submodules=opt.submodules):
       r = m.GetRemote(m.remote.name)
       print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
 
       # Better delete the manifest git dir if we created it; otherwise next
       # time (when user fixes problems) we won't go through the "is_new" logic.
       if is_new:
-        shutil.rmtree(m.gitdir)
+        platform_utils.rmtree(m.gitdir)
       sys.exit(1)
 
     if opt.manifest_branch:
-      m.MetaBranchSwitch()
+      m.MetaBranchSwitch(submodules=opt.submodules)
 
     syncbuf = SyncBuffer(m.config)
-    m.Sync_LocalHalf(syncbuf)
+    m.Sync_LocalHalf(syncbuf, submodules=opt.submodules)
     syncbuf.Finish()
 
     if is_new or m.CurrentBranch is None:
@@ -387,7 +401,7 @@
     git_require(MIN_GIT_VERSION, fail=True)
 
     if opt.reference:
-      opt.reference = os.path.expanduser(opt.reference)
+      opt.reference = os.path.abspath(os.path.expanduser(opt.reference))
 
     # Check this here, else manifest will be tagged "not new" and init won't be
     # possible anymore without removing the .repo/manifests directory.
diff --git a/subcmds/stage.py b/subcmds/stage.py
index 2884976..9d35426 100644
--- a/subcmds/stage.py
+++ b/subcmds/stage.py
@@ -60,8 +60,8 @@
       out.nl()
 
       for i in range(len(all_projects)):
-        p = all_projects[i]
-        out.write('%3d:    %s', i + 1, p.relpath + '/')
+        project = all_projects[i]
+        out.write('%3d:    %s', i + 1, project.relpath + '/')
         out.nl()
       out.nl()
 
diff --git a/subcmds/start.py b/subcmds/start.py
index 290b689..c3ec303 100644
--- a/subcmds/start.py
+++ b/subcmds/start.py
@@ -18,7 +18,7 @@
 import sys
 
 from command import Command
-from git_config import IsId
+from git_config import IsImmutable
 from git_command import git
 import gitc_utils
 from progress import Progress
@@ -96,11 +96,11 @@
           project.Sync_LocalHalf(sync_buf)
           project.revisionId = gitc_project.old_revision
 
-      # If the current revision is a specific SHA1 then we can't push back
-      # to it; so substitute with dest_branch if defined, or with manifest
-      # default revision instead.
+      # If the current revision is immutable, such as a SHA1, a tag or
+      # a change, then we can't push back to it. Substitute with
+      # dest_branch, if defined; or with manifest default revision instead.
       branch_merge = ''
-      if IsId(project.revisionExpr):
+      if IsImmutable(project.revisionExpr):
         if project.dest_branch:
           branch_merge = project.dest_branch
         else:
diff --git a/subcmds/status.py b/subcmds/status.py
index 38c229b..60e26ff 100644
--- a/subcmds/status.py
+++ b/subcmds/status.py
@@ -89,8 +89,10 @@
     p.add_option('-o', '--orphans',
                  dest='orphans', action='store_true',
                  help="include objects in working directory outside of repo projects")
+    p.add_option('-q', '--quiet', action='store_true',
+                 help="only print the name of modified projects")
 
-  def _StatusHelper(self, project, clean_counter, sem):
+  def _StatusHelper(self, project, clean_counter, sem, quiet):
     """Obtains the status for a specific project.
 
     Obtains the status for a project, redirecting the output to
@@ -104,7 +106,7 @@
       output: Where to output the status.
     """
     try:
-      state = project.PrintWorkTreeStatus()
+      state = project.PrintWorkTreeStatus(quiet=quiet)
       if state == 'CLEAN':
         next(clean_counter)
     finally:
@@ -132,7 +134,7 @@
 
     if opt.jobs == 1:
       for project in all_projects:
-        state = project.PrintWorkTreeStatus()
+        state = project.PrintWorkTreeStatus(quiet=opt.quiet)
         if state == 'CLEAN':
           next(counter)
     else:
@@ -142,13 +144,13 @@
         sem.acquire()
 
         t = _threading.Thread(target=self._StatusHelper,
-                              args=(project, counter, sem))
+                              args=(project, counter, sem, opt.quiet))
         threads.append(t)
         t.daemon = True
         t.start()
       for t in threads:
         t.join()
-    if len(all_projects) == next(counter):
+    if not opt.quiet and len(all_projects) == next(counter):
       print('nothing to commit (working directory clean)')
 
     if opt.orphans:
diff --git a/subcmds/sync.py b/subcmds/sync.py
index 7ba9ebf..cda47fd 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -19,7 +19,6 @@
 from optparse import SUPPRESS_HELP
 import os
 import re
-import shutil
 import socket
 import subprocess
 import sys
@@ -64,6 +63,7 @@
 except ImportError:
   multiprocessing = None
 
+import event_log
 from git_command import GIT, git_require
 from git_config import GetUrlCookieFile
 from git_refs import R_HEADS, HEAD
@@ -72,6 +72,7 @@
 from project import RemoteSpec
 from command import Command, MirrorSafeCommand
 from error import RepoChangedException, GitError, ManifestParseError
+import platform_utils
 from project import SyncBuffer
 from progress import Progress
 from wrapper import Wrapper
@@ -255,7 +256,7 @@
                  dest='repo_upgraded', action='store_true',
                  help=SUPPRESS_HELP)
 
-  def _FetchProjectList(self, opt, projects, *args, **kwargs):
+  def _FetchProjectList(self, opt, projects, sem, *args, **kwargs):
     """Main function of the fetch threads when jobs are > 1.
 
     Delegates most of the work to _FetchHelper.
@@ -263,15 +264,20 @@
     Args:
       opt: Program options returned from optparse.  See _Options().
       projects: Projects to fetch.
+      sem: We'll release() this semaphore when we exit so that another thread
+          can be started up.
       *args, **kwargs: Remaining arguments to pass to _FetchHelper. See the
           _FetchHelper docstring for details.
     """
-    for project in projects:
-      success = self._FetchHelper(opt, project, *args, **kwargs)
-      if not success and not opt.force_broken:
-        break
+    try:
+        for project in projects:
+          success = self._FetchHelper(opt, project, *args, **kwargs)
+          if not success and not opt.force_broken:
+            break
+    finally:
+        sem.release()
 
-  def _FetchHelper(self, opt, project, lock, fetched, pm, sem, err_event):
+  def _FetchHelper(self, opt, project, lock, fetched, pm, err_event):
     """Fetch git objects for a single project.
 
     Args:
@@ -283,8 +289,6 @@
           (with our lock held).
       pm: Instance of a Project object.  We will call pm.update() (with our
           lock held).
-      sem: We'll release() this semaphore when we exit so that another thread
-          can be started up.
       err_event: We'll set this event in the case of an error (after printing
           out info about the error).
 
@@ -301,9 +305,10 @@
     # - We always set err_event in the case of an exception.
     # - We always make sure we call sem.release().
     # - We always make sure we unlock the lock if we locked it.
+    start = time.time()
+    success = False
     try:
       try:
-        start = time.time()
         success = project.Sync_NetworkHalf(
           quiet=opt.quiet,
           current_branch_only=opt.current_branch_only,
@@ -321,7 +326,9 @@
 
         if not success:
           err_event.set()
-          print('error: Cannot fetch %s' % project.name, file=sys.stderr)
+          print('error: Cannot fetch %s from %s'
+                % (project.name, project.remote.url),
+                file=sys.stderr)
           if opt.force_broken:
             print('warn: --force-broken, continuing to sync',
                   file=sys.stderr)
@@ -340,14 +347,18 @@
     finally:
       if did_lock:
         lock.release()
-      sem.release()
+      finish = time.time()
+      self.event_log.AddSync(project, event_log.TASK_SYNC_NETWORK,
+                             start, finish, success)
 
     return success
 
   def _Fetch(self, projects, opt):
     fetched = set()
     lock = _threading.Lock()
-    pm = Progress('Fetching projects', len(projects))
+    pm = Progress('Fetching projects', len(projects),
+                  print_newline=not(opt.quiet),
+                  always_print_percentage=opt.quiet)
 
     objdir_project_map = dict()
     for project in projects:
@@ -365,10 +376,10 @@
       sem.acquire()
       kwargs = dict(opt=opt,
                     projects=project_list,
+                    sem=sem,
                     lock=lock,
                     fetched=fetched,
                     pm=pm,
-                    sem=sem,
                     err_event=err_event)
       if self.jobs > 1:
         t = _threading.Thread(target = self._FetchProjectList,
@@ -384,7 +395,7 @@
       t.join()
 
     # If we saw an error, exit with code 1 so that other scripts can check.
-    if err_event.isSet():
+    if err_event.isSet() and not opt.force_broken:
       print('\nerror: Exited sync due to fetch errors', file=sys.stderr)
       sys.exit(1)
 
@@ -464,7 +475,7 @@
     # working git repository around. There shouldn't be any git projects here,
     # so rmtree works.
     try:
-      shutil.rmtree(os.path.join(path, '.git'))
+      platform_utils.rmtree(os.path.join(path, '.git'))
     except OSError:
       print('Failed to remove %s' % os.path.join(path, '.git'), file=sys.stderr)
       print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
@@ -478,7 +489,7 @@
     for root, dirs, files in os.walk(path):
       for f in files:
         try:
-          os.remove(os.path.join(root, f))
+          platform_utils.remove(os.path.join(root, f))
         except OSError:
           print('Failed to remove %s' % os.path.join(root, f), file=sys.stderr)
           failed = True
@@ -487,9 +498,9 @@
       dirs_to_remove += [os.path.join(root, d) for d in dirs
                          if os.path.join(root, d) not in dirs_to_remove]
     for d in reversed(dirs_to_remove):
-      if os.path.islink(d):
+      if platform_utils.islink(d):
         try:
-          os.remove(d)
+          platform_utils.remove(d)
         except OSError:
           print('Failed to remove %s' % os.path.join(root, d), file=sys.stderr)
           failed = True
@@ -701,7 +712,7 @@
     else:  # Not smart sync or smart tag mode
       if os.path.isfile(smart_sync_manifest_path):
         try:
-          os.remove(smart_sync_manifest_path)
+          platform_utils.remove(smart_sync_manifest_path)
         except OSError as e:
           print('error: failed to remove existing smart sync override manifest: %s' %
                 e, file=sys.stderr)
@@ -716,15 +727,24 @@
       _PostRepoUpgrade(self.manifest, quiet=opt.quiet)
 
     if not opt.local_only:
-      mp.Sync_NetworkHalf(quiet=opt.quiet,
-                          current_branch_only=opt.current_branch_only,
-                          no_tags=opt.no_tags,
-                          optimized_fetch=opt.optimized_fetch)
+      start = time.time()
+      success = mp.Sync_NetworkHalf(quiet=opt.quiet,
+                                    current_branch_only=opt.current_branch_only,
+                                    no_tags=opt.no_tags,
+                                    optimized_fetch=opt.optimized_fetch,
+                                    submodules=self.manifest.HasSubmodules)
+      finish = time.time()
+      self.event_log.AddSync(mp, event_log.TASK_SYNC_NETWORK,
+                             start, finish, success)
 
     if mp.HasChanges:
       syncbuf = SyncBuffer(mp.config)
-      mp.Sync_LocalHalf(syncbuf)
-      if not syncbuf.Finish():
+      start = time.time()
+      mp.Sync_LocalHalf(syncbuf, submodules=self.manifest.HasSubmodules)
+      clean = syncbuf.Finish()
+      self.event_log.AddSync(mp, event_log.TASK_SYNC_LOCAL,
+                             start, time.time(), clean)
+      if not clean:
         sys.exit(1)
       self._ReloadManifest(manifest_name)
       if opt.jobs is None:
@@ -761,8 +781,8 @@
       # generate a new args list to represent the opened projects.
       # TODO: make this more reliable -- if there's a project name/path overlap,
       # this may choose the wrong project.
-      args = [os.path.relpath(self.manifest.paths[p].worktree, os.getcwd())
-              for p in opened_projects]
+      args = [os.path.relpath(self.manifest.paths[path].worktree, os.getcwd())
+              for path in opened_projects]
       if not args:
         return
     all_projects = self.GetProjects(args,
@@ -818,7 +838,10 @@
     for project in all_projects:
       pm.update()
       if project.worktree:
+        start = time.time()
         project.Sync_LocalHalf(syncbuf, force_sync=opt.force_sync)
+        self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL,
+                               start, time.time(), syncbuf.Recently())
     pm.end()
     print(file=sys.stderr)
     if not syncbuf.Finish():
@@ -902,6 +925,7 @@
     return False
   return True
 
+
 class _FetchTimes(object):
   _ALPHA = 0.5
 
@@ -932,7 +956,7 @@
           f.close()
       except (IOError, ValueError):
         try:
-          os.remove(self._path)
+          platform_utils.remove(self._path)
         except OSError:
           pass
         self._times = {}
@@ -956,7 +980,7 @@
         f.close()
     except (IOError, TypeError):
       try:
-        os.remove(self._path)
+        platform_utils.remove(self._path)
       except OSError:
         pass
 
diff --git a/subcmds/upload.py b/subcmds/upload.py
index 1172dad..77eaf81 100644
--- a/subcmds/upload.py
+++ b/subcmds/upload.py
@@ -154,6 +154,16 @@
     p.add_option('-d', '--draft',
                  action='store_true', dest='draft', default=False,
                  help='If specified, upload as a draft.')
+    p.add_option('-p', '--private',
+                 action='store_true', dest='private', default=False,
+                 help='If specified, upload as a private change.')
+    p.add_option('-w', '--wip',
+                 action='store_true', dest='wip', default=False,
+                 help='If specified, upload as a work-in-progress change.')
+    p.add_option('-o', '--push-option',
+                 type='string', action='append', dest='push_options',
+                 default=[],
+                 help='Additional push options to transmit')
     p.add_option('-D', '--destination', '--dest',
                  type='string', action='store', dest='dest_branch',
                  metavar='BRANCH',
@@ -175,6 +185,9 @@
     #   Never run upload hooks, but upload anyway (AKA bypass hooks).
     # - no-verify=True, verify=True:
     #   Invalid
+    p.add_option('--no-cert-checks',
+                 dest='validate_certs', action='store_false', default=True,
+                 help='Disable verifying ssl certs (unsafe).')
     p.add_option('--no-verify',
                  dest='bypass_hooks', action='store_true',
                  help='Do not run the upload hook.')
@@ -198,7 +211,8 @@
       commit_list = branch.commits
 
       destination = opt.dest_branch or project.dest_branch or project.revisionExpr
-      print('Upload project %s/ to remote branch %s:' % (project.relpath, destination))
+      print('Upload project %s/ to remote branch %s%s:' %
+            (project.relpath, destination, ' (draft)' if opt.draft else ''))
       print('  branch %s (%2d commit%s, %s):' % (
                     name,
                     len(commit_list),
@@ -377,7 +391,15 @@
             branch.uploaded = False
             continue
 
-        branch.UploadForReview(people, auto_topic=opt.auto_topic, draft=opt.draft, dest_branch=destination)
+        branch.UploadForReview(people,
+                               auto_topic=opt.auto_topic,
+                               draft=opt.draft,
+                               private=opt.private,
+                               wip=opt.wip,
+                               dest_branch=destination,
+                               validate_certs=opt.validate_certs,
+                               push_options=opt.push_options)
+
         branch.uploaded = True
       except UploadError as e:
         branch.error = e
@@ -463,8 +485,8 @@
                       self.manifest.topdir,
                       self.manifest.manifestProject.GetRemote('origin').url,
                       abort_if_user_denies=True)
-      pending_proj_names = [project.name for (project, avail) in pending]
-      pending_worktrees = [project.worktree for (project, avail) in pending]
+      pending_proj_names = [project.name for (project, available) in pending]
+      pending_worktrees = [project.worktree for (project, available) in pending]
       try:
         hook.Run(opt.allow_all_hooks, project_list=pending_proj_names,
                  worktree_list=pending_worktrees)