-rw-r--r--  docs/repo-hooks.md          |  40
-rw-r--r--  git_superproject.py         |  48
-rw-r--r--  hooks.py                    |  25
-rw-r--r--  man/repo-smartsync.1        |  10
-rw-r--r--  man/repo-sync.1             |  10
-rw-r--r--  progress.py                 |  20
-rw-r--r--  subcmds/abandon.py          |   1
-rw-r--r--  subcmds/diff.py             |   1
-rw-r--r--  subcmds/diffmanifests.py    |   5
-rw-r--r--  subcmds/download.py         |   1
-rw-r--r--  subcmds/forall.py           |   7
-rw-r--r--  subcmds/grep.py             |   1
-rw-r--r--  subcmds/info.py             |   6
-rw-r--r--  subcmds/init.py             |   1
-rw-r--r--  subcmds/list.py             |   5
-rw-r--r--  subcmds/manifest.py         |   1
-rw-r--r--  subcmds/overview.py         |   1
-rw-r--r--  subcmds/rebase.py           |   7
-rw-r--r--  subcmds/selfupdate.py       |   1
-rw-r--r--  subcmds/stage.py            |   1
-rw-r--r--  subcmds/start.py            |   1
-rw-r--r--  subcmds/status.py           |   1
-rw-r--r--  subcmds/sync.py             | 847
-rw-r--r--  subcmds/upload.py           |  16
-rw-r--r--  tests/test_subcmds.py       |  41
-rw-r--r--  tests/test_subcmds_sync.py  | 405
26 files changed, 1342 insertions, 161 deletions
diff --git a/docs/repo-hooks.md b/docs/repo-hooks.md
index cbb1aac7..a56f261c 100644
--- a/docs/repo-hooks.md
+++ b/docs/repo-hooks.md
@@ -133,3 +133,43 @@ def main(project_list, worktree_list=None, **kwargs):
133 kwargs: Leave this here for forward-compatibility. 133 kwargs: Leave this here for forward-compatibility.
134 """ 134 """
135``` 135```
136
137### post-sync
138
139This hook runs when `repo sync` completes without errors.
140
141Note: This includes cases where no actual checkout occurs; the hook will still run.
142For example:
143- `repo sync -n` performs network fetches only and skips the checkout phase.
144- `repo sync <project>` only updates the specified project(s).
145- Partial failures may still result in a successful exit.
146
147This hook is useful for post-processing tasks such as setting up git hooks,
148bootstrapping configuration files, or running project initialization logic.
149
150The hook is defined using the existing `<repo-hooks>` manifest block and is
151optional. If the hook script fails or is missing, `repo sync` will still
152complete successfully, and the error will be printed as a warning.
153
154Example:
155
156```xml
157<project name="myorg/dev-tools" path="tools" revision="main" />
158<repo-hooks in-project="myorg/dev-tools" enabled-list="post-sync">
159 <hook name="post-sync" />
160</repo-hooks>
161```
162
163The `post-sync.py` file should be defined like:
164
165```py
166def main(repo_topdir=None, **kwargs):
167 """Main function invoked directly by repo.
168
169 We must use the name "main" as that is what repo requires.
170
171 Args:
172 repo_topdir: The absolute path to the top-level directory of the repo workspace.
173 kwargs: Leave this here for forward-compatibility.
174 """
175```
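
For instance, a `post-sync.py` that bootstraps a configuration file into the workspace might look like the following sketch (the `tools/default.cfg` source path and `.myproject.cfg` destination are placeholders for illustration, not part of this change):

```py
import os
import shutil


def main(repo_topdir=None, **kwargs):
    """Copy a default config into the workspace root if it is missing."""
    # Placeholder paths for illustration only.
    src = os.path.join(repo_topdir, "tools", "default.cfg")
    dst = os.path.join(repo_topdir, ".myproject.cfg")
    if os.path.exists(src) and not os.path.exists(dst):
        shutil.copyfile(src, dst)
```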
diff --git a/git_superproject.py b/git_superproject.py
index ce8161fd..d808bb09 100644
--- a/git_superproject.py
+++ b/git_superproject.py
@@ -28,6 +28,7 @@ import os
28import sys 28import sys
29import time 29import time
30from typing import NamedTuple 30from typing import NamedTuple
31import urllib.parse
31 32
32from git_command import git_require 33from git_command import git_require
33from git_command import GitCommand 34from git_command import GitCommand
@@ -129,6 +130,30 @@ class Superproject:
129 self._print_messages = value 130 self._print_messages = value
130 131
131 @property 132 @property
133 def commit_id(self):
134 """Returns the commit ID of the superproject checkout."""
135 cmd = ["rev-parse", self.revision]
136 p = GitCommand(
137 None, # project
138 cmd,
139 gitdir=self._work_git,
140 bare=True,
141 capture_stdout=True,
142 capture_stderr=True,
143 )
144 retval = p.Wait()
145 if retval != 0:
146 self._LogWarning(
147 "git rev-parse call failed, command: git {}, "
148 "return code: {}, stderr: {}",
149 cmd,
150 retval,
151 p.stderr,
152 )
153 return None
154 return p.stdout
155
156 @property
132 def project_commit_ids(self): 157 def project_commit_ids(self):
133 """Returns a dictionary of projects and their commit ids.""" 158 """Returns a dictionary of projects and their commit ids."""
134 return self._project_commit_ids 159 return self._project_commit_ids
@@ -140,6 +165,26 @@ class Superproject:
140 self._manifest_path if os.path.exists(self._manifest_path) else None 165 self._manifest_path if os.path.exists(self._manifest_path) else None
141 ) 166 )
142 167
168 @property
169 def repo_id(self):
170 """Returns the repo ID for the superproject.
171
172 For example, if the superproject points to:
173 https://android-review.googlesource.com/platform/superproject/
174 Then the repo_id would be:
175 android/platform/superproject
176 """
177 review_url = self.remote.review
178 if review_url:
179 parsed_url = urllib.parse.urlparse(review_url)
180 netloc = parsed_url.netloc
181 if netloc:
182 parts = netloc.split("-review", 1)
183 host = parts[0]
184 rev = GitRefs(self._work_git).get("HEAD")
185 return f"{host}/{self.name}@{rev}"
186 return None
187
143 def _LogMessage(self, fmt, *inputs): 188 def _LogMessage(self, fmt, *inputs):
144 """Logs message to stderr and _git_event_log.""" 189 """Logs message to stderr and _git_event_log."""
145 message = f"{self._LogMessagePrefix()} {fmt.format(*inputs)}" 190 message = f"{self._LogMessagePrefix()} {fmt.format(*inputs)}"
@@ -258,7 +303,7 @@ class Superproject:
258 Works only in git repositories. 303 Works only in git repositories.
259 304
260 Returns: 305 Returns:
261 data: data returned from 'git ls-tree ...' instead of None. 306 data: data returned from 'git ls-tree ...'. None on error.
262 """ 307 """
263 if not os.path.exists(self._work_git): 308 if not os.path.exists(self._work_git):
264 self._LogWarning( 309 self._LogWarning(
@@ -288,6 +333,7 @@ class Superproject:
288 retval, 333 retval,
289 p.stderr, 334 p.stderr,
290 ) 335 )
336 return None
291 return data 337 return data
292 338
293 def Sync(self, git_event_log): 339 def Sync(self, git_event_log):
diff --git a/hooks.py b/hooks.py
index 82bf7e36..fc31a5ef 100644
--- a/hooks.py
+++ b/hooks.py
@@ -22,6 +22,13 @@ from error import HookError
22from git_refs import HEAD 22from git_refs import HEAD
23 23
24 24
25# The API we've documented to hook authors. Keep in sync with repo-hooks.md.
26_API_ARGS = {
27 "pre-upload": {"project_list", "worktree_list"},
28 "post-sync": {"repo_topdir"},
29}
30
31
25class RepoHook: 32class RepoHook:
26 """A RepoHook contains information about a script to run as a hook. 33 """A RepoHook contains information about a script to run as a hook.
27 34
@@ -56,6 +63,7 @@ class RepoHook:
56 hooks_project, 63 hooks_project,
57 repo_topdir, 64 repo_topdir,
58 manifest_url, 65 manifest_url,
66 bug_url=None,
59 bypass_hooks=False, 67 bypass_hooks=False,
60 allow_all_hooks=False, 68 allow_all_hooks=False,
61 ignore_hooks=False, 69 ignore_hooks=False,
@@ -75,6 +83,7 @@ class RepoHook:
75 run with CWD as this directory. 83 run with CWD as this directory.
76 If you have a manifest, this is manifest.topdir. 84 If you have a manifest, this is manifest.topdir.
77 manifest_url: The URL to the manifest git repo. 85 manifest_url: The URL to the manifest git repo.
86 bug_url: The URL to report issues.
78 bypass_hooks: If True, then 'Do not run the hook'. 87 bypass_hooks: If True, then 'Do not run the hook'.
79 allow_all_hooks: If True, then 'Run the hook without prompting'. 88 allow_all_hooks: If True, then 'Run the hook without prompting'.
80 ignore_hooks: If True, then 'Do not abort action if hooks fail'. 89 ignore_hooks: If True, then 'Do not abort action if hooks fail'.
@@ -85,6 +94,7 @@ class RepoHook:
85 self._hooks_project = hooks_project 94 self._hooks_project = hooks_project
86 self._repo_topdir = repo_topdir 95 self._repo_topdir = repo_topdir
87 self._manifest_url = manifest_url 96 self._manifest_url = manifest_url
97 self._bug_url = bug_url
88 self._bypass_hooks = bypass_hooks 98 self._bypass_hooks = bypass_hooks
89 self._allow_all_hooks = allow_all_hooks 99 self._allow_all_hooks = allow_all_hooks
90 self._ignore_hooks = ignore_hooks 100 self._ignore_hooks = ignore_hooks
@@ -414,6 +424,20 @@ class RepoHook:
414 ignore the result through the option combinations as listed in 424 ignore the result through the option combinations as listed in
415 AddHookOptionGroup(). 425 AddHookOptionGroup().
416 """ 426 """
427 # Make sure our own callers use the documented API.
428 exp_kwargs = _API_ARGS.get(self._hook_type, set())
429 got_kwargs = set(kwargs.keys())
430 if exp_kwargs != got_kwargs:
431 print(
432 "repo internal error: "
433 f"hook '{self._hook_type}' called incorrectly\n"
434 f" got: {sorted(got_kwargs)}\n"
435 f" expected: {sorted(exp_kwargs)}\n"
436 f"Please file a bug: {self._bug_url}",
437 file=sys.stderr,
438 )
439 return False
440
417 # Do not do anything in case bypass_hooks is set, or 441 # Do not do anything in case bypass_hooks is set, or
418 # no-op if there is no hooks project or if hook is disabled. 442 # no-op if there is no hooks project or if hook is disabled.
419 if ( 443 if (
@@ -472,6 +496,7 @@ class RepoHook:
472 "manifest_url": manifest.manifestProject.GetRemote( 496 "manifest_url": manifest.manifestProject.GetRemote(
473 "origin" 497 "origin"
474 ).url, 498 ).url,
499 "bug_url": manifest.contactinfo.bugurl,
475 } 500 }
476 ) 501 )
477 return cls(*args, **kwargs) 502 return cls(*args, **kwargs)
diff --git a/man/repo-smartsync.1 b/man/repo-smartsync.1
index a793b7bc..dd36df01 100644
--- a/man/repo-smartsync.1
+++ b/man/repo-smartsync.1
@@ -1,5 +1,5 @@
1.\" DO NOT MODIFY THIS FILE! It was generated by help2man. 1.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
2.TH REPO "1" "September 2024" "repo smartsync" "Repo Manual" 2.TH REPO "1" "June 2025" "repo smartsync" "Repo Manual"
3.SH NAME 3.SH NAME
4repo \- repo smartsync - manual page for repo smartsync 4repo \- repo smartsync - manual page for repo smartsync
5.SH SYNOPSIS 5.SH SYNOPSIS
@@ -20,11 +20,12 @@ number of CPU cores)
20.TP 20.TP
21\fB\-\-jobs\-network\fR=\fI\,JOBS\/\fR 21\fB\-\-jobs\-network\fR=\fI\,JOBS\/\fR
22number of network jobs to run in parallel (defaults to 22number of network jobs to run in parallel (defaults to
23\fB\-\-jobs\fR or 1) 23\fB\-\-jobs\fR or 1). Ignored when \fB\-\-interleaved\fR is set
24.TP 24.TP
25\fB\-\-jobs\-checkout\fR=\fI\,JOBS\/\fR 25\fB\-\-jobs\-checkout\fR=\fI\,JOBS\/\fR
26number of local checkout jobs to run in parallel 26number of local checkout jobs to run in parallel
27(defaults to \fB\-\-jobs\fR or 8) 27(defaults to \fB\-\-jobs\fR or 8). Ignored when \fB\-\-interleaved\fR
28is set
28.TP 29.TP
29\fB\-f\fR, \fB\-\-force\-broken\fR 30\fB\-f\fR, \fB\-\-force\-broken\fR
30obsolete option (to be deleted in the future) 31obsolete option (to be deleted in the future)
@@ -58,6 +59,9 @@ only update working tree, don't fetch
58use the existing manifest checkout as\-is. (do not 59use the existing manifest checkout as\-is. (do not
59update to the latest revision) 60update to the latest revision)
60.TP 61.TP
62\fB\-\-interleaved\fR
63fetch and checkout projects in parallel (experimental)
64.TP
61\fB\-n\fR, \fB\-\-network\-only\fR 65\fB\-n\fR, \fB\-\-network\-only\fR
62fetch only, don't update working tree 66fetch only, don't update working tree
63.TP 67.TP
diff --git a/man/repo-sync.1 b/man/repo-sync.1
index 3ce9ec69..6e9dd8ad 100644
--- a/man/repo-sync.1
+++ b/man/repo-sync.1
@@ -1,5 +1,5 @@
1.\" DO NOT MODIFY THIS FILE! It was generated by help2man. 1.\" DO NOT MODIFY THIS FILE! It was generated by help2man.
2.TH REPO "1" "September 2024" "repo sync" "Repo Manual" 2.TH REPO "1" "June 2025" "repo sync" "Repo Manual"
3.SH NAME 3.SH NAME
4repo \- repo sync - manual page for repo sync 4repo \- repo sync - manual page for repo sync
5.SH SYNOPSIS 5.SH SYNOPSIS
@@ -20,11 +20,12 @@ number of CPU cores)
20.TP 20.TP
21\fB\-\-jobs\-network\fR=\fI\,JOBS\/\fR 21\fB\-\-jobs\-network\fR=\fI\,JOBS\/\fR
22number of network jobs to run in parallel (defaults to 22number of network jobs to run in parallel (defaults to
23\fB\-\-jobs\fR or 1) 23\fB\-\-jobs\fR or 1). Ignored when \fB\-\-interleaved\fR is set
24.TP 24.TP
25\fB\-\-jobs\-checkout\fR=\fI\,JOBS\/\fR 25\fB\-\-jobs\-checkout\fR=\fI\,JOBS\/\fR
26number of local checkout jobs to run in parallel 26number of local checkout jobs to run in parallel
27(defaults to \fB\-\-jobs\fR or 8) 27(defaults to \fB\-\-jobs\fR or 8). Ignored when \fB\-\-interleaved\fR
28is set
28.TP 29.TP
29\fB\-f\fR, \fB\-\-force\-broken\fR 30\fB\-f\fR, \fB\-\-force\-broken\fR
30obsolete option (to be deleted in the future) 31obsolete option (to be deleted in the future)
@@ -58,6 +59,9 @@ only update working tree, don't fetch
58use the existing manifest checkout as\-is. (do not 59use the existing manifest checkout as\-is. (do not
59update to the latest revision) 60update to the latest revision)
60.TP 61.TP
62\fB\-\-interleaved\fR
63fetch and checkout projects in parallel (experimental)
64.TP
61\fB\-n\fR, \fB\-\-network\-only\fR 65\fB\-n\fR, \fB\-\-network\-only\fR
62fetch only, don't update working tree 66fetch only, don't update working tree
63.TP 67.TP
diff --git a/progress.py b/progress.py
index fe246c74..31a4890a 100644
--- a/progress.py
+++ b/progress.py
@@ -101,6 +101,7 @@ class Progress:
101 self._units = units 101 self._units = units
102 self._elide = elide and _TTY 102 self._elide = elide and _TTY
103 self._quiet = quiet 103 self._quiet = quiet
104 self._ended = False
104 105
105 # Only show the active jobs section if we run more than one in parallel. 106 # Only show the active jobs section if we run more than one in parallel.
106 self._show_jobs = False 107 self._show_jobs = False
@@ -195,7 +196,26 @@ class Progress:
195 ) 196 )
196 ) 197 )
197 198
199 def display_message(self, msg):
200 """Clears the current progress line and prints a message above it.
201
202 The progress bar is then redrawn on the next line.
203 """
204 if not _TTY or IsTraceToStderr() or self._quiet:
205 return
206
207 # Erase the current line, print the message with a newline,
208 # and then immediately redraw the progress bar on the new line.
209 sys.stderr.write("\r" + CSI_ERASE_LINE)
210 sys.stderr.write(msg + "\n")
211 sys.stderr.flush()
212 self.update(inc=0)
213
198 def end(self): 214 def end(self):
215 if self._ended:
216 return
217 self._ended = True
218
199 self._update_event.set() 219 self._update_event.set()
200 if not _TTY or IsTraceToStderr() or self._quiet: 220 if not _TTY or IsTraceToStderr() or self._quiet:
201 return 221 return
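
A rough usage sketch of the new `Progress.display_message()` API, assuming it runs inside the repo source tree so `progress` is importable; the project list and the skip condition are made up for the example. The message is printed above the bar, and the bar is redrawn below it on the next update:

```py
import time

from progress import Progress


def sync_all(project_names):
    pm = Progress("Syncing", len(project_names))
    for name in project_names:
        if name.endswith("-mirror"):
            # Surface a one-off note above the progress bar.
            pm.display_message(f"note: skipping mirror project {name}")
        else:
            time.sleep(0.1)  # stand-in for real per-project work
        pm.update(msg=name)
    pm.end()


sync_all(["platform/build", "platform/art", "device/common-mirror"])
```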
diff --git a/subcmds/abandon.py b/subcmds/abandon.py
index 3208be6b..f1688e7b 100644
--- a/subcmds/abandon.py
+++ b/subcmds/abandon.py
@@ -48,7 +48,6 @@ It is equivalent to "git branch -D <branchname>".
48 def _Options(self, p): 48 def _Options(self, p):
49 p.add_option( 49 p.add_option(
50 "--all", 50 "--all",
51 dest="all",
52 action="store_true", 51 action="store_true",
53 help="delete all branches in all projects", 52 help="delete all branches in all projects",
54 ) 53 )
diff --git a/subcmds/diff.py b/subcmds/diff.py
index 7bb0cbbd..fe1a5139 100644
--- a/subcmds/diff.py
+++ b/subcmds/diff.py
@@ -35,7 +35,6 @@ to the Unix 'patch' command.
35 p.add_option( 35 p.add_option(
36 "-u", 36 "-u",
37 "--absolute", 37 "--absolute",
38 dest="absolute",
39 action="store_true", 38 action="store_true",
40 help="paths are relative to the repository root", 39 help="paths are relative to the repository root",
41 ) 40 )
diff --git a/subcmds/diffmanifests.py b/subcmds/diffmanifests.py
index 3eee3f94..66b3183d 100644
--- a/subcmds/diffmanifests.py
+++ b/subcmds/diffmanifests.py
@@ -67,7 +67,9 @@ synced and their revisions won't be found.
67 67
68 def _Options(self, p): 68 def _Options(self, p):
69 p.add_option( 69 p.add_option(
70 "--raw", dest="raw", action="store_true", help="display raw diff" 70 "--raw",
71 action="store_true",
72 help="display raw diff",
71 ) 73 )
72 p.add_option( 74 p.add_option(
73 "--no-color", 75 "--no-color",
@@ -78,7 +80,6 @@ synced and their revisions won't be found.
78 ) 80 )
79 p.add_option( 81 p.add_option(
80 "--pretty-format", 82 "--pretty-format",
81 dest="pretty_format",
82 action="store", 83 action="store",
83 metavar="<FORMAT>", 84 metavar="<FORMAT>",
84 help="print the log using a custom git pretty format string", 85 help="print the log using a custom git pretty format string",
diff --git a/subcmds/download.py b/subcmds/download.py
index 4396c9e7..1c0bf5ce 100644
--- a/subcmds/download.py
+++ b/subcmds/download.py
@@ -60,7 +60,6 @@ If no project is specified try to use current directory as a project.
60 p.add_option( 60 p.add_option(
61 "-r", 61 "-r",
62 "--revert", 62 "--revert",
63 dest="revert",
64 action="store_true", 63 action="store_true",
65 help="revert instead of checkout", 64 help="revert instead of checkout",
66 ) 65 )
diff --git a/subcmds/forall.py b/subcmds/forall.py
index 9da0c96e..4bae46af 100644
--- a/subcmds/forall.py
+++ b/subcmds/forall.py
@@ -133,7 +133,7 @@ without iterating through the remaining projects.
133 133
134 @staticmethod 134 @staticmethod
135 def _cmd_option(option, _opt_str, _value, parser): 135 def _cmd_option(option, _opt_str, _value, parser):
136 setattr(parser.values, option.dest, list(parser.rargs)) 136 setattr(parser.values, option.dest or "command", list(parser.rargs))
137 while parser.rargs: 137 while parser.rargs:
138 del parser.rargs[0] 138 del parser.rargs[0]
139 139
@@ -141,7 +141,6 @@ without iterating through the remaining projects.
141 p.add_option( 141 p.add_option(
142 "-r", 142 "-r",
143 "--regex", 143 "--regex",
144 dest="regex",
145 action="store_true", 144 action="store_true",
146 help="execute the command only on projects matching regex or " 145 help="execute the command only on projects matching regex or "
147 "wildcard expression", 146 "wildcard expression",
@@ -149,7 +148,6 @@ without iterating through the remaining projects.
149 p.add_option( 148 p.add_option(
150 "-i", 149 "-i",
151 "--inverse-regex", 150 "--inverse-regex",
152 dest="inverse_regex",
153 action="store_true", 151 action="store_true",
154 help="execute the command only on projects not matching regex or " 152 help="execute the command only on projects not matching regex or "
155 "wildcard expression", 153 "wildcard expression",
@@ -157,7 +155,6 @@ without iterating through the remaining projects.
157 p.add_option( 155 p.add_option(
158 "-g", 156 "-g",
159 "--groups", 157 "--groups",
160 dest="groups",
161 help="execute the command only on projects matching the specified " 158 help="execute the command only on projects matching the specified "
162 "groups", 159 "groups",
163 ) 160 )
@@ -165,14 +162,12 @@ without iterating through the remaining projects.
165 "-c", 162 "-c",
166 "--command", 163 "--command",
167 help="command (and arguments) to execute", 164 help="command (and arguments) to execute",
168 dest="command",
169 action="callback", 165 action="callback",
170 callback=self._cmd_option, 166 callback=self._cmd_option,
171 ) 167 )
172 p.add_option( 168 p.add_option(
173 "-e", 169 "-e",
174 "--abort-on-errors", 170 "--abort-on-errors",
175 dest="abort_on_errors",
176 action="store_true", 171 action="store_true",
177 help="abort if a command exits unsuccessfully", 172 help="abort if a command exits unsuccessfully",
178 ) 173 )
diff --git a/subcmds/grep.py b/subcmds/grep.py
index 918651d9..85977ce8 100644
--- a/subcmds/grep.py
+++ b/subcmds/grep.py
@@ -120,7 +120,6 @@ contain a line that matches both expressions:
120 g.add_option( 120 g.add_option(
121 "-r", 121 "-r",
122 "--revision", 122 "--revision",
123 dest="revision",
124 action="append", 123 action="append",
125 metavar="TREEish", 124 metavar="TREEish",
126 help="Search TREEish, instead of the work tree", 125 help="Search TREEish, instead of the work tree",
diff --git a/subcmds/info.py b/subcmds/info.py
index ab230ddd..2fbdae05 100644
--- a/subcmds/info.py
+++ b/subcmds/info.py
@@ -43,14 +43,12 @@ class Info(PagedCommand):
43 p.add_option( 43 p.add_option(
44 "-o", 44 "-o",
45 "--overview", 45 "--overview",
46 dest="overview",
47 action="store_true", 46 action="store_true",
48 help="show overview of all local commits", 47 help="show overview of all local commits",
49 ) 48 )
50 p.add_option( 49 p.add_option(
51 "-c", 50 "-c",
52 "--current-branch", 51 "--current-branch",
53 dest="current_branch",
54 action="store_true", 52 action="store_true",
55 help="consider only checked out branches", 53 help="consider only checked out branches",
56 ) 54 )
@@ -104,6 +102,10 @@ class Info(PagedCommand):
104 self.heading("Manifest groups: ") 102 self.heading("Manifest groups: ")
105 self.headtext(manifestGroups) 103 self.headtext(manifestGroups)
106 self.out.nl() 104 self.out.nl()
105 sp = self.manifest.superproject
106 srev = sp.commit_id if sp and sp.commit_id else "None"
107 self.heading("Superproject revision: ")
108 self.headtext(srev)
107 109
108 self.printSeparator() 110 self.printSeparator()
109 111
diff --git a/subcmds/init.py b/subcmds/init.py
index fb6d3eb5..f5a3892a 100644
--- a/subcmds/init.py
+++ b/subcmds/init.py
@@ -127,6 +127,7 @@ to update the working directory files.
127 return { 127 return {
128 "REPO_MANIFEST_URL": "manifest_url", 128 "REPO_MANIFEST_URL": "manifest_url",
129 "REPO_MIRROR_LOCATION": "reference", 129 "REPO_MIRROR_LOCATION": "reference",
130 "REPO_GIT_LFS": "git_lfs",
130 } 131 }
131 132
132 def _SyncManifest(self, opt): 133 def _SyncManifest(self, opt):
diff --git a/subcmds/list.py b/subcmds/list.py
index 4338e1c9..df9ce5f6 100644
--- a/subcmds/list.py
+++ b/subcmds/list.py
@@ -40,7 +40,6 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
40 p.add_option( 40 p.add_option(
41 "-r", 41 "-r",
42 "--regex", 42 "--regex",
43 dest="regex",
44 action="store_true", 43 action="store_true",
45 help="filter the project list based on regex or wildcard matching " 44 help="filter the project list based on regex or wildcard matching "
46 "of strings", 45 "of strings",
@@ -48,7 +47,6 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
48 p.add_option( 47 p.add_option(
49 "-g", 48 "-g",
50 "--groups", 49 "--groups",
51 dest="groups",
52 help="filter the project list based on the groups the project is " 50 help="filter the project list based on the groups the project is "
53 "in", 51 "in",
54 ) 52 )
@@ -61,21 +59,18 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
61 p.add_option( 59 p.add_option(
62 "-n", 60 "-n",
63 "--name-only", 61 "--name-only",
64 dest="name_only",
65 action="store_true", 62 action="store_true",
66 help="display only the name of the repository", 63 help="display only the name of the repository",
67 ) 64 )
68 p.add_option( 65 p.add_option(
69 "-p", 66 "-p",
70 "--path-only", 67 "--path-only",
71 dest="path_only",
72 action="store_true", 68 action="store_true",
73 help="display only the path of the repository", 69 help="display only the path of the repository",
74 ) 70 )
75 p.add_option( 71 p.add_option(
76 "-f", 72 "-f",
77 "--fullpath", 73 "--fullpath",
78 dest="fullpath",
79 action="store_true", 74 action="store_true",
80 help="display the full work tree path instead of the relative path", 75 help="display the full work tree path instead of the relative path",
81 ) 76 )
diff --git a/subcmds/manifest.py b/subcmds/manifest.py
index 9786580a..548bac0d 100644
--- a/subcmds/manifest.py
+++ b/subcmds/manifest.py
@@ -134,7 +134,6 @@ human-readable variations.
134 p.add_option( 134 p.add_option(
135 "-o", 135 "-o",
136 "--output-file", 136 "--output-file",
137 dest="output_file",
138 default="-", 137 default="-",
139 help="file to save the manifest to. (Filename prefix for " 138 help="file to save the manifest to. (Filename prefix for "
140 "multi-tree.)", 139 "multi-tree.)",
diff --git a/subcmds/overview.py b/subcmds/overview.py
index 8ccad611..dff17623 100644
--- a/subcmds/overview.py
+++ b/subcmds/overview.py
@@ -37,7 +37,6 @@ are displayed.
37 p.add_option( 37 p.add_option(
38 "-c", 38 "-c",
39 "--current-branch", 39 "--current-branch",
40 dest="current_branch",
41 action="store_true", 40 action="store_true",
42 help="consider only checked out branches", 41 help="consider only checked out branches",
43 ) 42 )
diff --git a/subcmds/rebase.py b/subcmds/rebase.py
index db1b387c..d7e769ce 100644
--- a/subcmds/rebase.py
+++ b/subcmds/rebase.py
@@ -47,21 +47,18 @@ branch but need to incorporate new upstream changes "underneath" them.
47 g.add_option( 47 g.add_option(
48 "-i", 48 "-i",
49 "--interactive", 49 "--interactive",
50 dest="interactive",
51 action="store_true", 50 action="store_true",
52 help="interactive rebase (single project only)", 51 help="interactive rebase (single project only)",
53 ) 52 )
54 53
55 p.add_option( 54 p.add_option(
56 "--fail-fast", 55 "--fail-fast",
57 dest="fail_fast",
58 action="store_true", 56 action="store_true",
59 help="stop rebasing after first error is hit", 57 help="stop rebasing after first error is hit",
60 ) 58 )
61 p.add_option( 59 p.add_option(
62 "-f", 60 "-f",
63 "--force-rebase", 61 "--force-rebase",
64 dest="force_rebase",
65 action="store_true", 62 action="store_true",
66 help="pass --force-rebase to git rebase", 63 help="pass --force-rebase to git rebase",
67 ) 64 )
@@ -74,27 +71,23 @@ branch but need to incorporate new upstream changes "underneath" them.
74 ) 71 )
75 p.add_option( 72 p.add_option(
76 "--autosquash", 73 "--autosquash",
77 dest="autosquash",
78 action="store_true", 74 action="store_true",
79 help="pass --autosquash to git rebase", 75 help="pass --autosquash to git rebase",
80 ) 76 )
81 p.add_option( 77 p.add_option(
82 "--whitespace", 78 "--whitespace",
83 dest="whitespace",
84 action="store", 79 action="store",
85 metavar="WS", 80 metavar="WS",
86 help="pass --whitespace to git rebase", 81 help="pass --whitespace to git rebase",
87 ) 82 )
88 p.add_option( 83 p.add_option(
89 "--auto-stash", 84 "--auto-stash",
90 dest="auto_stash",
91 action="store_true", 85 action="store_true",
92 help="stash local modifications before starting", 86 help="stash local modifications before starting",
93 ) 87 )
94 p.add_option( 88 p.add_option(
95 "-m", 89 "-m",
96 "--onto-manifest", 90 "--onto-manifest",
97 dest="onto_manifest",
98 action="store_true", 91 action="store_true",
99 help="rebase onto the manifest version instead of upstream " 92 help="rebase onto the manifest version instead of upstream "
100 "HEAD (this helps to make sure the local tree stays " 93 "HEAD (this helps to make sure the local tree stays "
diff --git a/subcmds/selfupdate.py b/subcmds/selfupdate.py
index 72683097..ed333569 100644
--- a/subcmds/selfupdate.py
+++ b/subcmds/selfupdate.py
@@ -54,7 +54,6 @@ need to be performed by an end-user.
54 ) 54 )
55 g.add_option( 55 g.add_option(
56 "--repo-upgraded", 56 "--repo-upgraded",
57 dest="repo_upgraded",
58 action="store_true", 57 action="store_true",
59 help=optparse.SUPPRESS_HELP, 58 help=optparse.SUPPRESS_HELP,
60 ) 59 )
diff --git a/subcmds/stage.py b/subcmds/stage.py
index 92a00ea0..1c285fc1 100644
--- a/subcmds/stage.py
+++ b/subcmds/stage.py
@@ -46,7 +46,6 @@ The '%prog' command stages files to prepare the next commit.
46 g.add_option( 46 g.add_option(
47 "-i", 47 "-i",
48 "--interactive", 48 "--interactive",
49 dest="interactive",
50 action="store_true", 49 action="store_true",
51 help="use interactive staging", 50 help="use interactive staging",
52 ) 51 )
diff --git a/subcmds/start.py b/subcmds/start.py
index 6dca7e4e..73dddf3f 100644
--- a/subcmds/start.py
+++ b/subcmds/start.py
@@ -51,7 +51,6 @@ revision specified in the manifest.
51 def _Options(self, p): 51 def _Options(self, p):
52 p.add_option( 52 p.add_option(
53 "--all", 53 "--all",
54 dest="all",
55 action="store_true", 54 action="store_true",
56 help="begin branch in all projects", 55 help="begin branch in all projects",
57 ) 56 )
diff --git a/subcmds/status.py b/subcmds/status.py
index cda73627..a9852b3c 100644
--- a/subcmds/status.py
+++ b/subcmds/status.py
@@ -82,7 +82,6 @@ the following meanings:
82 p.add_option( 82 p.add_option(
83 "-o", 83 "-o",
84 "--orphans", 84 "--orphans",
85 dest="orphans",
86 action="store_true", 85 action="store_true",
87 help="include objects in working directory outside of repo " 86 help="include objects in working directory outside of repo "
88 "projects", 87 "projects",
diff --git a/subcmds/sync.py b/subcmds/sync.py
index 3dc74f1f..250925f4 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -13,6 +13,7 @@
13# limitations under the License. 13# limitations under the License.
14 14
15import collections 15import collections
16import contextlib
16import functools 17import functools
17import http.cookiejar as cookielib 18import http.cookiejar as cookielib
18import io 19import io
@@ -25,7 +26,7 @@ from pathlib import Path
25import sys 26import sys
26import tempfile 27import tempfile
27import time 28import time
28from typing import List, NamedTuple, Set, Union 29from typing import List, NamedTuple, Optional, Set, Tuple, Union
29import urllib.error 30import urllib.error
30import urllib.parse 31import urllib.parse
31import urllib.request 32import urllib.request
@@ -67,6 +68,7 @@ from git_config import GetUrlCookieFile
67from git_refs import HEAD 68from git_refs import HEAD
68from git_refs import R_HEADS 69from git_refs import R_HEADS
69import git_superproject 70import git_superproject
71from hooks import RepoHook
70import platform_utils 72import platform_utils
71from progress import elapsed_str 73from progress import elapsed_str
72from progress import jobs_str 74from progress import jobs_str
@@ -194,6 +196,57 @@ class _CheckoutOneResult(NamedTuple):
194 finish: float 196 finish: float
195 197
196 198
199class _SyncResult(NamedTuple):
200 """Individual project sync result for interleaved mode.
201
202 Attributes:
203 project_index (int): The index of the project in the shared list.
204 relpath (str): The project's relative path from the repo client top.
205 remote_fetched (bool): True if the remote was actually queried.
206 fetch_success (bool): True if the fetch operation was successful.
207 fetch_error (Optional[Exception]): The Exception from a failed fetch,
208 or None.
209 fetch_start (Optional[float]): The time.time() when fetch started.
210 fetch_finish (Optional[float]): The time.time() when fetch finished.
211 checkout_success (bool): True if the checkout operation was
212 successful.
213 checkout_error (Optional[Exception]): The Exception from a failed
214 checkout, or None.
215 checkout_start (Optional[float]): The time.time() when checkout
216 started.
217 checkout_finish (Optional[float]): The time.time() when checkout
218 finished.
219 stderr_text (str): The combined output from both fetch and checkout.
220 """
221
222 project_index: int
223 relpath: str
224
225 remote_fetched: bool
226 fetch_success: bool
227 fetch_error: Optional[Exception]
228 fetch_start: Optional[float]
229 fetch_finish: Optional[float]
230
231 checkout_success: bool
232 checkout_error: Optional[Exception]
233 checkout_start: Optional[float]
234 checkout_finish: Optional[float]
235
236 stderr_text: str
237
238
239class _InterleavedSyncResult(NamedTuple):
240 """Result of an interleaved sync.
241
242 Attributes:
243 results (List[_SyncResult]): A list of results, one for each project
244 processed. Empty if the worker failed before creating results.
245 """
246
247 results: List[_SyncResult]
248
249
197class SuperprojectError(SyncError): 250class SuperprojectError(SyncError):
198 """Superproject sync repo.""" 251 """Superproject sync repo."""
199 252
@@ -359,7 +412,7 @@ later is required to fix a server side protocol bug.
359 type=int, 412 type=int,
360 metavar="JOBS", 413 metavar="JOBS",
361 help="number of network jobs to run in parallel (defaults to " 414 help="number of network jobs to run in parallel (defaults to "
362 "--jobs or 1)", 415 "--jobs or 1). Ignored when --interleaved is set",
363 ) 416 )
364 p.add_option( 417 p.add_option(
365 "--jobs-checkout", 418 "--jobs-checkout",
@@ -367,25 +420,23 @@ later is required to fix a server side protocol bug.
367 type=int, 420 type=int,
368 metavar="JOBS", 421 metavar="JOBS",
369 help="number of local checkout jobs to run in parallel (defaults " 422 help="number of local checkout jobs to run in parallel (defaults "
370 f"to --jobs or {DEFAULT_LOCAL_JOBS})", 423 f"to --jobs or {DEFAULT_LOCAL_JOBS}). Ignored when --interleaved "
424 "is set",
371 ) 425 )
372 426
373 p.add_option( 427 p.add_option(
374 "-f", 428 "-f",
375 "--force-broken", 429 "--force-broken",
376 dest="force_broken",
377 action="store_true", 430 action="store_true",
378 help="obsolete option (to be deleted in the future)", 431 help="obsolete option (to be deleted in the future)",
379 ) 432 )
380 p.add_option( 433 p.add_option(
381 "--fail-fast", 434 "--fail-fast",
382 dest="fail_fast",
383 action="store_true", 435 action="store_true",
384 help="stop syncing after first error is hit", 436 help="stop syncing after first error is hit",
385 ) 437 )
386 p.add_option( 438 p.add_option(
387 "--force-sync", 439 "--force-sync",
388 dest="force_sync",
389 action="store_true", 440 action="store_true",
390 help="overwrite an existing git directory if it needs to " 441 help="overwrite an existing git directory if it needs to "
391 "point to a different object directory. WARNING: this " 442 "point to a different object directory. WARNING: this "
@@ -393,7 +444,6 @@ later is required to fix a server side protocol bug.
393 ) 444 )
394 p.add_option( 445 p.add_option(
395 "--force-checkout", 446 "--force-checkout",
396 dest="force_checkout",
397 action="store_true", 447 action="store_true",
398 help="force checkout even if it results in throwing away " 448 help="force checkout even if it results in throwing away "
399 "uncommitted modifications. " 449 "uncommitted modifications. "
@@ -401,7 +451,6 @@ later is required to fix a server side protocol bug.
401 ) 451 )
402 p.add_option( 452 p.add_option(
403 "--force-remove-dirty", 453 "--force-remove-dirty",
404 dest="force_remove_dirty",
405 action="store_true", 454 action="store_true",
406 help="force remove projects with uncommitted modifications if " 455 help="force remove projects with uncommitted modifications if "
407 "projects no longer exist in the manifest. " 456 "projects no longer exist in the manifest. "
@@ -409,7 +458,6 @@ later is required to fix a server side protocol bug.
409 ) 458 )
410 p.add_option( 459 p.add_option(
411 "--rebase", 460 "--rebase",
412 dest="rebase",
413 action="store_true", 461 action="store_true",
414 help="rebase local commits regardless of whether they are " 462 help="rebase local commits regardless of whether they are "
415 "published", 463 "published",
@@ -417,7 +465,6 @@ later is required to fix a server side protocol bug.
417 p.add_option( 465 p.add_option(
418 "-l", 466 "-l",
419 "--local-only", 467 "--local-only",
420 dest="local_only",
421 action="store_true", 468 action="store_true",
422 help="only update working tree, don't fetch", 469 help="only update working tree, don't fetch",
423 ) 470 )
@@ -431,9 +478,13 @@ later is required to fix a server side protocol bug.
431 "(do not update to the latest revision)", 478 "(do not update to the latest revision)",
432 ) 479 )
433 p.add_option( 480 p.add_option(
481 "--interleaved",
482 action="store_true",
483 help="fetch and checkout projects in parallel (experimental)",
484 )
485 p.add_option(
434 "-n", 486 "-n",
435 "--network-only", 487 "--network-only",
436 dest="network_only",
437 action="store_true", 488 action="store_true",
438 help="fetch only, don't update working tree", 489 help="fetch only, don't update working tree",
439 ) 490 )
@@ -460,7 +511,6 @@ later is required to fix a server side protocol bug.
460 p.add_option( 511 p.add_option(
461 "-m", 512 "-m",
462 "--manifest-name", 513 "--manifest-name",
463 dest="manifest_name",
464 help="temporary manifest to use for this sync", 514 help="temporary manifest to use for this sync",
465 metavar="NAME.xml", 515 metavar="NAME.xml",
466 ) 516 )
@@ -479,19 +529,16 @@ later is required to fix a server side protocol bug.
479 "-u", 529 "-u",
480 "--manifest-server-username", 530 "--manifest-server-username",
481 action="store", 531 action="store",
482 dest="manifest_server_username",
483 help="username to authenticate with the manifest server", 532 help="username to authenticate with the manifest server",
484 ) 533 )
485 p.add_option( 534 p.add_option(
486 "-p", 535 "-p",
487 "--manifest-server-password", 536 "--manifest-server-password",
488 action="store", 537 action="store",
489 dest="manifest_server_password",
490 help="password to authenticate with the manifest server", 538 help="password to authenticate with the manifest server",
491 ) 539 )
492 p.add_option( 540 p.add_option(
493 "--fetch-submodules", 541 "--fetch-submodules",
494 dest="fetch_submodules",
495 action="store_true", 542 action="store_true",
496 help="fetch submodules from server", 543 help="fetch submodules from server",
497 ) 544 )
@@ -515,7 +562,6 @@ later is required to fix a server side protocol bug.
515 ) 562 )
516 p.add_option( 563 p.add_option(
517 "--optimized-fetch", 564 "--optimized-fetch",
518 dest="optimized_fetch",
519 action="store_true", 565 action="store_true",
520 help="only fetch projects fixed to sha1 if revision does not exist " 566 help="only fetch projects fixed to sha1 if revision does not exist "
521 "locally", 567 "locally",
@@ -554,7 +600,6 @@ later is required to fix a server side protocol bug.
554 p.add_option( 600 p.add_option(
555 "-s", 601 "-s",
556 "--smart-sync", 602 "--smart-sync",
557 dest="smart_sync",
558 action="store_true", 603 action="store_true",
559 help="smart sync using manifest from the latest known good " 604 help="smart sync using manifest from the latest known good "
560 "build", 605 "build",
@@ -562,7 +607,6 @@ later is required to fix a server side protocol bug.
562 p.add_option( 607 p.add_option(
563 "-t", 608 "-t",
564 "--smart-tag", 609 "--smart-tag",
565 dest="smart_tag",
566 action="store", 610 action="store",
567 help="smart sync using manifest from a known tag", 611 help="smart sync using manifest from a known tag",
568 ) 612 )
@@ -577,10 +621,10 @@ later is required to fix a server side protocol bug.
577 ) 621 )
578 g.add_option( 622 g.add_option(
579 "--repo-upgraded", 623 "--repo-upgraded",
580 dest="repo_upgraded",
581 action="store_true", 624 action="store_true",
582 help=optparse.SUPPRESS_HELP, 625 help=optparse.SUPPRESS_HELP,
583 ) 626 )
627 RepoHook.AddOptionGroup(p, "post-sync")
584 628
585 def _GetBranch(self, manifest_project): 629 def _GetBranch(self, manifest_project):
586 """Returns the branch name for getting the approved smartsync manifest. 630 """Returns the branch name for getting the approved smartsync manifest.
@@ -848,15 +892,7 @@ later is required to fix a server side protocol bug.
848 ) 892 )
849 893
850 sync_event = _threading.Event() 894 sync_event = _threading.Event()
851 895 sync_progress_thread = self._CreateSyncProgressThread(pm, sync_event)
852 def _MonitorSyncLoop():
853 while True:
854 pm.update(inc=0, msg=self._GetSyncProgressMessage())
855 if sync_event.wait(timeout=1):
856 return
857
858 sync_progress_thread = _threading.Thread(target=_MonitorSyncLoop)
859 sync_progress_thread.daemon = True
860 896
861 def _ProcessResults(pool, pm, results_sets): 897 def _ProcessResults(pool, pm, results_sets):
862 ret = True 898 ret = True
@@ -955,25 +991,16 @@ later is required to fix a server side protocol bug.
955 Returns: 991 Returns:
956 List of all projects that should be checked out. 992 List of all projects that should be checked out.
957 """ 993 """
958 rp = manifest.repoProject
959
960 to_fetch = [] 994 to_fetch = []
961 now = time.time()
962 if _ONE_DAY_S <= (now - rp.LastFetch):
963 to_fetch.append(rp)
964 to_fetch.extend(all_projects) 995 to_fetch.extend(all_projects)
965 to_fetch.sort(key=self._fetch_times.Get, reverse=True) 996 to_fetch.sort(key=self._fetch_times.Get, reverse=True)
966 997
967 result = self._Fetch(to_fetch, opt, err_event, ssh_proxy, errors) 998 result = self._Fetch(to_fetch, opt, err_event, ssh_proxy, errors)
968 success = result.success 999 success = result.success
969 fetched = result.projects 1000 fetched = result.projects
970
971 if not success: 1001 if not success:
972 err_event.set() 1002 err_event.set()
973 1003
974 # Call self update, unless requested not to
975 if os.environ.get("REPO_SKIP_SELF_UPDATE", "0") == "0":
976 _PostRepoFetch(rp, opt.repo_verify)
977 if opt.network_only: 1004 if opt.network_only:
978 # Bail out now; the rest touches the working tree. 1005 # Bail out now; the rest touches the working tree.
979 if err_event.is_set(): 1006 if err_event.is_set():
@@ -1152,6 +1179,16 @@ later is required to fix a server side protocol bug.
1152 self._local_sync_state.Save() 1179 self._local_sync_state.Save()
1153 return proc_res and not err_results 1180 return proc_res and not err_results
1154 1181
1182 def _PrintManifestNotices(self, opt):
1183 """Print all manifest notices, but only once."""
1184 printed_notices = set()
1185 # Print all manifest notices, but only once.
1186 # Sort by path_prefix to ensure consistent ordering.
1187 for m in sorted(self.ManifestList(opt), key=lambda x: x.path_prefix):
1188 if m.notice and m.notice not in printed_notices:
1189 print(m.notice)
1190 printed_notices.add(m.notice)
1191
1155 @staticmethod 1192 @staticmethod
1156 def _GetPreciousObjectsState(project: Project, opt): 1193 def _GetPreciousObjectsState(project: Project, opt):
1157 """Get the preciousObjects state for the project. 1194 """Get the preciousObjects state for the project.
@@ -1324,6 +1361,61 @@ later is required to fix a server side protocol bug.
1324 t.join() 1361 t.join()
1325 pm.end() 1362 pm.end()
1326 1363
1364 def _UpdateRepoProject(self, opt, manifest, errors):
1365 """Fetch the repo project and check for updates."""
1366 if opt.local_only:
1367 return
1368
1369 rp = manifest.repoProject
1370 now = time.time()
1371 # If we've fetched in the last day, don't bother fetching again.
1372 if (now - rp.LastFetch) < _ONE_DAY_S:
1373 return
1374
1375 with multiprocessing.Manager() as manager:
1376 with ssh.ProxyManager(manager) as ssh_proxy:
1377 ssh_proxy.sock()
1378 start = time.time()
1379 buf = TeeStringIO(sys.stdout if opt.verbose else None)
1380 sync_result = rp.Sync_NetworkHalf(
1381 quiet=opt.quiet,
1382 verbose=opt.verbose,
1383 output_redir=buf,
1384 current_branch_only=self._GetCurrentBranchOnly(
1385 opt, manifest
1386 ),
1387 force_sync=opt.force_sync,
1388 clone_bundle=opt.clone_bundle,
1389 tags=opt.tags,
1390 archive=manifest.IsArchive,
1391 optimized_fetch=opt.optimized_fetch,
1392 retry_fetches=opt.retry_fetches,
1393 prune=opt.prune,
1394 ssh_proxy=ssh_proxy,
1395 clone_filter=manifest.CloneFilter,
1396 partial_clone_exclude=manifest.PartialCloneExclude,
1397 clone_filter_for_depth=manifest.CloneFilterForDepth,
1398 )
1399 if sync_result.error:
1400 errors.append(sync_result.error)
1401
1402 finish = time.time()
1403 self.event_log.AddSync(
1404 rp,
1405 event_log.TASK_SYNC_NETWORK,
1406 start,
1407 finish,
1408 sync_result.success,
1409 )
1410 if not sync_result.success:
1411 logger.error("error: Cannot fetch repo tool %s", rp.name)
1412 return
1413
1414 # After fetching, check if a new version of repo is available and
1415 # restart. This is only done if the user hasn't explicitly disabled it.
1416 if os.environ.get("REPO_SKIP_SELF_UPDATE", "0") == "0":
1417 _PostRepoFetch(rp, opt.repo_verify)
1418
1327 def _ReloadManifest(self, manifest_name, manifest): 1419 def _ReloadManifest(self, manifest_name, manifest):
1328 """Reload the manfiest from the file specified by the |manifest_name|. 1420 """Reload the manfiest from the file specified by the |manifest_name|.
1329 1421
@@ -1757,6 +1849,21 @@ later is required to fix a server side protocol bug.
1757 except (KeyboardInterrupt, Exception) as e: 1849 except (KeyboardInterrupt, Exception) as e:
1758 raise RepoUnhandledExceptionError(e, aggregate_errors=errors) 1850 raise RepoUnhandledExceptionError(e, aggregate_errors=errors)
1759 1851
1852 # Run post-sync hook only after successful sync
1853 self._RunPostSyncHook(opt)
1854
1855 def _RunPostSyncHook(self, opt):
1856 """Run post-sync hook if configured in manifest <repo-hooks>."""
1857 hook = RepoHook.FromSubcmd(
1858 hook_type="post-sync",
1859 manifest=self.manifest,
1860 opt=opt,
1861 abort_if_user_denies=False,
1862 )
1863 success = hook.Run(repo_topdir=self.client.topdir)
1864 if not success:
1865 print("Warning: post-sync hook reported failure.")
1866
1760 def _ExecuteHelper(self, opt, args, errors): 1867 def _ExecuteHelper(self, opt, args, errors):
1761 manifest = self.outer_manifest 1868 manifest = self.outer_manifest
1762 if not opt.outer_manifest: 1869 if not opt.outer_manifest:
@@ -1788,8 +1895,6 @@ later is required to fix a server side protocol bug.
1788 e, 1895 e,
1789 ) 1896 )
1790 1897
1791 err_event = multiprocessing.Event()
1792
1793 rp = manifest.repoProject 1898 rp = manifest.repoProject
1794 rp.PreSync() 1899 rp.PreSync()
1795 cb = rp.CurrentBranch 1900 cb = rp.CurrentBranch
@@ -1828,6 +1933,9 @@ later is required to fix a server side protocol bug.
1828 # might be in the manifest. 1933 # might be in the manifest.
1829 self._ValidateOptionsWithManifest(opt, mp) 1934 self._ValidateOptionsWithManifest(opt, mp)
1830 1935
1936 # Update the repo project and check for new versions of repo.
1937 self._UpdateRepoProject(opt, manifest, errors)
1938
1831 superproject_logging_data = {} 1939 superproject_logging_data = {}
1832 self._UpdateProjectsRevisionId( 1940 self._UpdateProjectsRevisionId(
1833 opt, args, superproject_logging_data, manifest 1941 opt, args, superproject_logging_data, manifest
@@ -1841,10 +1949,6 @@ later is required to fix a server side protocol bug.
1841 all_manifests=not opt.this_manifest_only, 1949 all_manifests=not opt.this_manifest_only,
1842 ) 1950 )
1843 1951
1844 err_network_sync = False
1845 err_update_projects = False
1846 err_update_linkfiles = False
1847
1848 # Log the repo projects by existing and new. 1952 # Log the repo projects by existing and new.
1849 existing = [x for x in all_projects if x.Exists] 1953 existing = [x for x in all_projects if x.Exists]
1850 mp.config.SetString("repo.existingprojectcount", str(len(existing))) 1954 mp.config.SetString("repo.existingprojectcount", str(len(existing)))
@@ -1854,6 +1958,185 @@ later is required to fix a server side protocol bug.
1854 1958
1855 self._fetch_times = _FetchTimes(manifest) 1959 self._fetch_times = _FetchTimes(manifest)
1856 self._local_sync_state = LocalSyncState(manifest) 1960 self._local_sync_state = LocalSyncState(manifest)
1961
1962 if opt.interleaved:
1963 sync_method = self._SyncInterleaved
1964 else:
1965 sync_method = self._SyncPhased
1966
1967 sync_method(
1968 opt,
1969 args,
1970 errors,
1971 manifest,
1972 mp,
1973 all_projects,
1974 superproject_logging_data,
1975 )
1976
1977 # Log the previous sync analysis state from the config.
1978 self.git_event_log.LogDataConfigEvents(
1979 mp.config.GetSyncAnalysisStateData(), "previous_sync_state"
1980 )
1981
1982 # Update and log with the new sync analysis state.
1983 mp.config.UpdateSyncAnalysisState(opt, superproject_logging_data)
1984 self.git_event_log.LogDataConfigEvents(
1985 mp.config.GetSyncAnalysisStateData(), "current_sync_state"
1986 )
1987
1988 self._local_sync_state.PruneRemovedProjects()
1989 if self._local_sync_state.IsPartiallySynced():
1990 logger.warning(
1991 "warning: Partial syncs are not supported. For the best "
1992 "experience, sync the entire tree."
1993 )
1994
1995 if not opt.quiet:
1996 print("repo sync has finished successfully.")
1997
1998 def _CreateSyncProgressThread(
1999 self, pm: Progress, stop_event: _threading.Event
2000 ) -> _threading.Thread:
2001 """Creates and returns a daemon thread to update a Progress object.
2002
2003 The returned thread is not yet started. The thread will periodically
2004 update the progress bar with information from _GetSyncProgressMessage
2005 until the stop_event is set.
2006
2007 Args:
2008 pm: The Progress object to update.
2009 stop_event: The threading.Event to signal the monitor to stop.
2010
2011 Returns:
2012 The configured _threading.Thread object.
2013 """
2014
2015 def _monitor_loop():
2016 """The target function for the monitor thread."""
2017 while True:
2018 # Update the progress bar with the current status message.
2019 pm.update(inc=0, msg=self._GetSyncProgressMessage())
2020 # Wait for 1 second or until the stop_event is set.
2021 if stop_event.wait(timeout=1):
2022 return
2023
2024 return _threading.Thread(target=_monitor_loop, daemon=True)
2025
2026 def _UpdateManifestLists(
2027 self,
2028 opt: optparse.Values,
2029 err_event: multiprocessing.Event,
2030 errors: List[Exception],
2031 ) -> Tuple[bool, bool]:
2032 """Updates project lists and copy/link files for all manifests.
2033
2034 Args:
2035 opt: Program options from optparse.
2036 err_event: An event to set if any error occurs.
2037 errors: A list to append any encountered exceptions to.
2038
2039 Returns:
2040 A tuple (err_update_projects, err_update_linkfiles) indicating
2041 an error for each task.
2042 """
2043 err_update_projects = False
2044 err_update_linkfiles = False
2045 for m in self.ManifestList(opt):
2046 if m.IsMirror or m.IsArchive:
2047 continue
2048
2049 try:
2050 self.UpdateProjectList(opt, m)
2051 except Exception as e:
2052 err_event.set()
2053 err_update_projects = True
2054 errors.append(e)
2055 if isinstance(e, DeleteWorktreeError):
2056 errors.extend(e.aggregate_errors)
2057 if opt.fail_fast:
2058 logger.error("error: Local checkouts *not* updated.")
2059 raise SyncFailFastError(aggregate_errors=errors)
2060
2061 try:
2062 self.UpdateCopyLinkfileList(m)
2063 except Exception as e:
2064 err_event.set()
2065 err_update_linkfiles = True
2066 errors.append(e)
2067 if opt.fail_fast:
2068 logger.error(
2069 "error: Local update copyfile or linkfile failed."
2070 )
2071 raise SyncFailFastError(aggregate_errors=errors)
2072 return err_update_projects, err_update_linkfiles
2073
2074 def _ReportErrors(
2075 self,
2076 errors,
2077 err_network_sync=False,
2078 failing_network_repos=None,
2079 err_checkout=False,
2080 failing_checkout_repos=None,
2081 err_update_projects=False,
2082 err_update_linkfiles=False,
2083 ):
2084 """Logs detailed error messages and raises a SyncError."""
2085
2086 def print_and_log(err_msg):
2087 self.git_event_log.ErrorEvent(err_msg)
2088 logger.error("%s", err_msg)
2089
2090 print_and_log("error: Unable to fully sync the tree")
2091 if err_network_sync:
2092 print_and_log("error: Downloading network changes failed.")
2093 if failing_network_repos:
2094 logger.error(
2095 "Failing repos (network):\n%s",
2096 "\n".join(sorted(failing_network_repos)),
2097 )
2098 if err_update_projects:
2099 print_and_log("error: Updating local project lists failed.")
2100 if err_update_linkfiles:
2101 print_and_log("error: Updating copyfiles or linkfiles failed.")
2102 if err_checkout:
2103 print_and_log("error: Checking out local projects failed.")
2104 if failing_checkout_repos:
2105 logger.error(
2106 "Failing repos (checkout):\n%s",
2107 "\n".join(sorted(failing_checkout_repos)),
2108 )
2109 logger.error(
2110 'Try re-running with "-j1 --fail-fast" to exit at the first error.'
2111 )
2112 raise SyncError(aggregate_errors=errors)
2113
2114 def _SyncPhased(
2115 self,
2116 opt,
2117 args,
2118 errors,
2119 manifest,
2120 mp,
2121 all_projects,
2122 superproject_logging_data,
2123 ):
2124 """Sync projects by separating network and local operations.
2125
2126 This method performs sync in two distinct, sequential phases:
2127 1. Network Phase: Fetches updates for all projects from their remotes.
2128 2. Local Phase: Checks out the updated revisions into the local
2129 worktrees for all projects.
2130
2131 This approach ensures that the local work-tree is not modified until
2132 all network operations are complete, providing a transactional-like
2133 safety net for the checkout state.
2134 """
2135 err_event = multiprocessing.Event()
2136 err_network_sync = False
2137 err_update_projects = False
2138 err_update_linkfiles = False
2139
1857 if not opt.local_only: 2140 if not opt.local_only:
1858 with multiprocessing.Manager() as manager: 2141 with multiprocessing.Manager() as manager:
1859 with ssh.ProxyManager(manager) as ssh_proxy: 2142 with ssh.ProxyManager(manager) as ssh_proxy:
@@ -1886,34 +2169,11 @@ later is required to fix a server side protocol bug.
1886 ) 2169 )
1887 raise SyncFailFastError(aggregate_errors=errors) 2170 raise SyncFailFastError(aggregate_errors=errors)
1888 2171
1889 for m in self.ManifestList(opt): 2172 err_update_projects, err_update_linkfiles = self._UpdateManifestLists(
1890 if m.IsMirror or m.IsArchive: 2173 opt,
1891 # Bail out now, we have no working tree. 2174 err_event,
1892 continue 2175 errors,
1893 2176 )
1894 try:
1895 self.UpdateProjectList(opt, m)
1896 except Exception as e:
1897 err_event.set()
1898 err_update_projects = True
1899 errors.append(e)
1900 if isinstance(e, DeleteWorktreeError):
1901 errors.extend(e.aggregate_errors)
1902 if opt.fail_fast:
1903 logger.error("error: Local checkouts *not* updated.")
1904 raise SyncFailFastError(aggregate_errors=errors)
1905
1906 try:
1907 self.UpdateCopyLinkfileList(m)
1908 except Exception as e:
1909 err_update_linkfiles = True
1910 errors.append(e)
1911 err_event.set()
1912 if opt.fail_fast:
1913 logger.error(
1914 "error: Local update copyfile or linkfile failed."
1915 )
1916 raise SyncFailFastError(aggregate_errors=errors)
1917 2177
1918 err_results = [] 2178 err_results = []
1919 # NB: We don't exit here because this is the last step. 2179 # NB: We don't exit here because this is the last step.
@@ -1923,61 +2183,416 @@ later is required to fix a server side protocol bug.
1923 if err_checkout: 2183 if err_checkout:
1924 err_event.set() 2184 err_event.set()
1925 2185
1926 printed_notices = set() 2186 self._PrintManifestNotices(opt)
1927 # If there's a notice that's supposed to print at the end of the sync,
1928 # print it now... But avoid printing duplicate messages, and preserve
1929 # order.
1930 for m in sorted(self.ManifestList(opt), key=lambda x: x.path_prefix):
1931 if m.notice and m.notice not in printed_notices:
1932 print(m.notice)
1933 printed_notices.add(m.notice)
1934 2187
1935 # If we saw an error, exit with code 1 so that other scripts can check. 2188 # If we saw an error, exit with code 1 so that other scripts can check.
1936 if err_event.is_set(): 2189 if err_event.is_set():
1937 2190 self._ReportErrors(
1938 def print_and_log(err_msg): 2191 errors,
1939 self.git_event_log.ErrorEvent(err_msg) 2192 err_network_sync=err_network_sync,
1940 logger.error("%s", err_msg) 2193 err_checkout=err_checkout,
1941 2194 failing_checkout_repos=err_results,
1942 print_and_log("error: Unable to fully sync the tree") 2195 err_update_projects=err_update_projects,
1943 if err_network_sync: 2196 err_update_linkfiles=err_update_linkfiles,
1944 print_and_log("error: Downloading network changes failed.")
1945 if err_update_projects:
1946 print_and_log("error: Updating local project lists failed.")
1947 if err_update_linkfiles:
1948 print_and_log("error: Updating copyfiles or linkfiles failed.")
1949 if err_checkout:
1950 print_and_log("error: Checking out local projects failed.")
1951 if err_results:
1952 # Don't log repositories, as it may contain sensitive info.
1953 logger.error("Failing repos:\n%s", "\n".join(err_results))
1954 # Not useful to log.
1955 logger.error(
1956 'Try re-running with "-j1 --fail-fast" to exit at the first '
1957 "error."
1958 ) 2197 )
1959 raise SyncError(aggregate_errors=errors)
1960 2198
1961 # Log the previous sync analysis state from the config. 2199 @classmethod
1962 self.git_event_log.LogDataConfigEvents( 2200 def _SyncOneProject(cls, opt, project_index, project) -> _SyncResult:
1963 mp.config.GetSyncAnalysisStateData(), "previous_sync_state" 2201 """Syncs a single project for interleaved sync."""
2202 fetch_success = False
2203 remote_fetched = False
2204 fetch_error = None
2205 fetch_start = None
2206 fetch_finish = None
2207 network_output = ""
2208
2209 if opt.local_only:
2210 fetch_success = True
2211 else:
2212 fetch_start = time.time()
2213 network_output_capture = io.StringIO()
2214 try:
2215 ssh_proxy = cls.get_parallel_context().get("ssh_proxy")
2216 sync_result = project.Sync_NetworkHalf(
2217 quiet=opt.quiet,
2218 verbose=opt.verbose,
2219 output_redir=network_output_capture,
2220 current_branch_only=cls._GetCurrentBranchOnly(
2221 opt, project.manifest
2222 ),
2223 force_sync=opt.force_sync,
2224 clone_bundle=opt.clone_bundle,
2225 tags=opt.tags,
2226 archive=project.manifest.IsArchive,
2227 optimized_fetch=opt.optimized_fetch,
2228 retry_fetches=opt.retry_fetches,
2229 prune=opt.prune,
2230 ssh_proxy=ssh_proxy,
2231 clone_filter=project.manifest.CloneFilter,
2232 partial_clone_exclude=project.manifest.PartialCloneExclude,
2233 clone_filter_for_depth=project.manifest.CloneFilterForDepth,
2234 )
2235 fetch_success = sync_result.success
2236 remote_fetched = sync_result.remote_fetched
2237 fetch_error = sync_result.error
2238 except KeyboardInterrupt:
2239 logger.error(
2240 "Keyboard interrupt while processing %s", project.name
2241 )
2242 except GitError as e:
2243 fetch_error = e
2244 logger.error("error.GitError: Cannot fetch %s", e)
2245 except Exception as e:
2246 fetch_error = e
2247 logger.error(
2248 "error: Cannot fetch %s (%s: %s)",
2249 project.name,
2250 type(e).__name__,
2251 e,
2252 )
2253 finally:
2254 fetch_finish = time.time()
2255 network_output = network_output_capture.getvalue()
2256
2257 checkout_success = False
2258 checkout_error = None
2259 checkout_start = None
2260 checkout_finish = None
2261 checkout_stderr = ""
2262
2263 if fetch_success and not opt.network_only:
2264 checkout_start = time.time()
2265 stderr_capture = io.StringIO()
2266 try:
2267 with contextlib.redirect_stderr(stderr_capture):
2268 syncbuf = SyncBuffer(
2269 project.manifest.manifestProject.config,
2270 detach_head=opt.detach_head,
2271 )
2272 local_half_errors = []
2273 project.Sync_LocalHalf(
2274 syncbuf,
2275 force_sync=opt.force_sync,
2276 force_checkout=opt.force_checkout,
2277 force_rebase=opt.rebase,
2278 errors=local_half_errors,
2279 verbose=opt.verbose,
2280 )
2281 checkout_success = syncbuf.Finish()
2282 if local_half_errors:
2283 checkout_error = SyncError(
2284 aggregate_errors=local_half_errors
2285 )
2286 except KeyboardInterrupt:
2287 logger.error(
2288 "Keyboard interrupt while processing %s", project.name
2289 )
2290 except GitError as e:
2291 checkout_error = e
2292 logger.error(
2293 "error.GitError: Cannot checkout %s: %s", project.name, e
2294 )
2295 except Exception as e:
2296 checkout_error = e
2297 logger.error(
2298 "error: Cannot checkout %s: %s: %s",
2299 project.name,
2300 type(e).__name__,
2301 e,
2302 )
2303 finally:
2304 checkout_finish = time.time()
2305 checkout_stderr = stderr_capture.getvalue()
2306 elif fetch_success:
2307 checkout_success = True
2308
2309 # Consolidate all captured output.
2310 captured_parts = []
2311 if network_output:
2312 captured_parts.append(network_output)
2313 if checkout_stderr:
2314 captured_parts.append(checkout_stderr)
2315 stderr_text = "\n".join(captured_parts)
2316
2317 return _SyncResult(
2318 project_index=project_index,
2319 relpath=project.relpath,
2320 fetch_success=fetch_success,
2321 remote_fetched=remote_fetched,
2322 checkout_success=checkout_success,
2323 fetch_error=fetch_error,
2324 checkout_error=checkout_error,
2325 stderr_text=stderr_text.strip(),
2326 fetch_start=fetch_start,
2327 fetch_finish=fetch_finish,
2328 checkout_start=checkout_start,
2329 checkout_finish=checkout_finish,
1964 ) 2330 )
1965 2331
1966 # Update and log with the new sync analysis state. 2332 @classmethod
1967 mp.config.UpdateSyncAnalysisState(opt, superproject_logging_data) 2333 def _SyncProjectList(cls, opt, project_indices) -> _InterleavedSyncResult:
1968 self.git_event_log.LogDataConfigEvents( 2334 """Worker for interleaved sync.
1969 mp.config.GetSyncAnalysisStateData(), "current_sync_state" 2335
2336 This function is responsible for syncing a group of projects that share
2337 a git object directory.
2338
2339 Args:
2340 opt: Program options returned from optparse. See _Options().
2341 project_indices: A list of indices into the projects list stored in
2342 the parallel context.
2343
2344 Returns:
2345 An `_InterleavedSyncResult` containing the results for each project.
2346 """
2347 results = []
2348 context = cls.get_parallel_context()
2349 projects = context["projects"]
2350 sync_dict = context["sync_dict"]
2351
2352 assert project_indices, "_SyncProjectList called with no indices."
2353
2354 # Use the first project as the representative for the progress bar.
2355 first_project = projects[project_indices[0]]
2356 key = f"{first_project.name} @ {first_project.relpath}"
2357 sync_dict[key] = time.time()
2358
2359 try:
2360 for idx in project_indices:
2361 project = projects[idx]
2362 results.append(cls._SyncOneProject(opt, idx, project))
2363 finally:
2364 del sync_dict[key]
2365
2366 return _InterleavedSyncResult(results=results)
2367
2368 def _ProcessSyncInterleavedResults(
2369 self,
2370 synced_relpaths: Set[str],
2371 err_event: _threading.Event,
2372 errors: List[Exception],
2373 opt: optparse.Values,
2374 pool: Optional[multiprocessing.Pool],
2375 pm: Progress,
2376 results_sets: List[_InterleavedSyncResult],
2377 ):
2378 """Callback to process results from interleaved sync workers."""
2379 ret = True
2380 projects = self.get_parallel_context()["projects"]
2381 for result_group in results_sets:
2382 for result in result_group.results:
2383 pm.update()
2384 project = projects[result.project_index]
2385
2386 if opt.verbose and result.stderr_text:
2387 pm.display_message(result.stderr_text)
2388
2389 if result.fetch_start:
2390 self._fetch_times.Set(
2391 project,
2392 result.fetch_finish - result.fetch_start,
2393 )
2394 self._local_sync_state.SetFetchTime(project)
2395 self.event_log.AddSync(
2396 project,
2397 event_log.TASK_SYNC_NETWORK,
2398 result.fetch_start,
2399 result.fetch_finish,
2400 result.fetch_success,
2401 )
2402 if result.checkout_start:
2403 if result.checkout_success:
2404 self._local_sync_state.SetCheckoutTime(project)
2405 self.event_log.AddSync(
2406 project,
2407 event_log.TASK_SYNC_LOCAL,
2408 result.checkout_start,
2409 result.checkout_finish,
2410 result.checkout_success,
2411 )
2412
2413 if result.fetch_success and result.checkout_success:
2414 synced_relpaths.add(result.relpath)
2415 else:
2416 ret = False
2417 err_event.set()
2418 if result.fetch_error:
2419 errors.append(result.fetch_error)
2420 self._interleaved_err_network = True
2421 self._interleaved_err_network_results.append(
2422 result.relpath
2423 )
2424 if result.checkout_error:
2425 errors.append(result.checkout_error)
2426 self._interleaved_err_checkout = True
2427 self._interleaved_err_checkout_results.append(
2428 result.relpath
2429 )
2430
2431 if not ret and opt.fail_fast:
2432 if pool:
2433 pool.close()
2434 break
2435 return ret
2436
2437 def _SyncInterleaved(
2438 self,
2439 opt,
2440 args,
2441 errors,
2442 manifest,
2443 mp,
2444 all_projects,
2445 superproject_logging_data,
2446 ):
2447 """Sync projects by performing network and local operations in parallel.
2448
2449 This method processes each project (or group of projects that share git
2450 objects) independently. For each project, it performs the fetch and
2451 checkout operations back-to-back. These independent tasks are run in
2452 parallel.
2453
2454 It respects two constraints for correctness:
2455 1. Projects in nested directories (e.g. 'foo' and 'foo/bar') are
2456 processed in hierarchical order.
2457 2. Projects that share git objects are processed serially to prevent
2458 race conditions.
2459 """
2460 # Temporary state for tracking errors in interleaved mode.
2461 self._interleaved_err_network = False
2462 self._interleaved_err_network_results = []
2463 self._interleaved_err_checkout = False
2464 self._interleaved_err_checkout_results = []
2465
2466 err_event = multiprocessing.Event()
2467 synced_relpaths = set()
2468 project_list = list(all_projects)
2469 pm = Progress(
2470 "Syncing",
2471 len(project_list),
2472 delay=False,
2473 quiet=opt.quiet,
2474 show_elapsed=True,
2475 elide=True,
1970 ) 2476 )
2477 previously_pending_relpaths = set()
1971 2478
1972 self._local_sync_state.PruneRemovedProjects() 2479 sync_event = _threading.Event()
1973 if self._local_sync_state.IsPartiallySynced(): 2480 sync_progress_thread = self._CreateSyncProgressThread(pm, sync_event)
1974 logger.warning(
1975 "warning: Partial syncs are not supported. For the best "
1976 "experience, sync the entire tree."
1977 )
1978 2481
1979 if not opt.quiet: 2482 with multiprocessing.Manager() as manager, ssh.ProxyManager(
1980 print("repo sync has finished successfully.") 2483 manager
2484 ) as ssh_proxy:
2485 ssh_proxy.sock()
2486 with self.ParallelContext():
2487 self.get_parallel_context()["ssh_proxy"] = ssh_proxy
2488 # TODO(gavinmak): Use multiprocessing.Queue instead of dict.
2489 self.get_parallel_context()[
2490 "sync_dict"
2491 ] = multiprocessing.Manager().dict()
2492 sync_progress_thread.start()
2493
2494 try:
2495 # Outer loop for dynamic project discovery. This continues
2496 # until no unsynced projects remain.
2497 while True:
2498 projects_to_sync = [
2499 p
2500 for p in project_list
2501 if p.relpath not in synced_relpaths
2502 ]
2503 if not projects_to_sync:
2504 break
2505
2506 pending_relpaths = {p.relpath for p in projects_to_sync}
2507 if previously_pending_relpaths == pending_relpaths:
2508 logger.error(
2509 "Stall detected in interleaved sync, not all "
2510 "projects could be synced."
2511 )
2512 err_event.set()
2513 break
2514 previously_pending_relpaths = pending_relpaths
2515
2516 self.get_parallel_context()[
2517 "projects"
2518 ] = projects_to_sync
2519 project_index_map = {
2520 p: i for i, p in enumerate(projects_to_sync)
2521 }
2522
2523 # Inner loop to process projects in a hierarchical
2524 # order. This iterates through levels of project
2525 # dependencies (e.g. 'foo' then 'foo/bar'). All projects
2526 # in one level can be processed in parallel, but we must
2527 # wait for a level to complete before starting the next.
2528 for level_projects in _SafeCheckoutOrder(
2529 projects_to_sync
2530 ):
2531 if not level_projects:
2532 continue
2533
2534 objdir_project_map = collections.defaultdict(list)
2535 for p in level_projects:
2536 objdir_project_map[p.objdir].append(
2537 project_index_map[p]
2538 )
2539
2540 work_items = list(objdir_project_map.values())
2541 if not work_items:
2542 continue
2543
2544 jobs = max(1, min(opt.jobs, len(work_items)))
2545 callback = functools.partial(
2546 self._ProcessSyncInterleavedResults,
2547 synced_relpaths,
2548 err_event,
2549 errors,
2550 opt,
2551 )
2552 if not self.ExecuteInParallel(
2553 jobs,
2554 functools.partial(self._SyncProjectList, opt),
2555 work_items,
2556 callback=callback,
2557 output=pm,
2558 chunksize=1,
2559 ):
2560 err_event.set()
2561
2562 if err_event.is_set() and opt.fail_fast:
2563 raise SyncFailFastError(aggregate_errors=errors)
2564
2565 self._ReloadManifest(None, manifest)
2566 project_list = self.GetProjects(
2567 args,
2568 missing_ok=True,
2569 submodules_ok=opt.fetch_submodules,
2570 manifest=manifest,
2571 all_manifests=not opt.this_manifest_only,
2572 )
2573 finally:
2574 sync_event.set()
2575 sync_progress_thread.join()
2576
2577 pm.end()
2578
2579 err_update_projects, err_update_linkfiles = self._UpdateManifestLists(
2580 opt, err_event, errors
2581 )
2582 if not self.outer_client.manifest.IsArchive:
2583 self._GCProjects(project_list, opt, err_event)
2584
2585 self._PrintManifestNotices(opt)
2586 if err_event.is_set():
2587 self._ReportErrors(
2588 errors,
2589 err_network_sync=self._interleaved_err_network,
2590 failing_network_repos=self._interleaved_err_network_results,
2591 err_checkout=self._interleaved_err_checkout,
2592 failing_checkout_repos=self._interleaved_err_checkout_results,
2593 err_update_projects=err_update_projects,
2594 err_update_linkfiles=err_update_linkfiles,
2595 )
1981 2596
1982 2597
1983def _PostRepoUpgrade(manifest, quiet=False): 2598def _PostRepoUpgrade(manifest, quiet=False):
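The interleaved scheduling added above is easiest to see in isolation: `_SafeCheckoutOrder` yields hierarchical levels so nested checkouts such as `foo/bar` never run before `foo`, and within a level, projects that share a git object directory are bundled into one work item so a single worker handles them serially. The sketch below is a hypothetical, simplified illustration of those two constraints only; `checkout_levels` and `group_by_objdir` are made-up helpers, not repo's implementation.

```py
# Hypothetical sketch (not repo code) of the two interleaved-sync constraints:
#   1. nested paths ("foo" before "foo/bar") are split into sequential levels;
#   2. projects sharing an object directory are bundled into one work item so
#      a single worker processes them serially.
import collections


def checkout_levels(relpaths):
    """Split relpaths into levels so no path runs before its parent path."""
    levels = []
    remaining = sorted(set(relpaths))
    while remaining:
        # A path is ready once none of its ancestors is still pending.
        ready = [p for p in remaining
                 if not any(p.startswith(q + "/") for q in remaining if q != p)]
        levels.append(ready)
        remaining = [p for p in remaining if p not in ready]
    return levels


def group_by_objdir(projects):
    """Bundle indices of projects that share an object directory."""
    buckets = collections.defaultdict(list)
    for idx, (relpath, objdir) in enumerate(projects):
        buckets[objdir].append(idx)
    return list(buckets.values())


if __name__ == "__main__":
    print(checkout_levels(["projA", "projB", "projA/sub"]))
    # [['projA', 'projB'], ['projA/sub']]
    print(group_by_objdir([("projA", "shared"), ("projB", "objB"),
                           ("projC", "shared")]))
    # [[0, 2], [1]]
```

This mirrors what `test_interleaved_shared_objdir_serial` asserts later in this change: with two projects on a common objdir, the work items collapse to `{0, 2}` and `{1}`.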
diff --git a/subcmds/upload.py b/subcmds/upload.py
index bac2f8ac..4f817ddf 100644
--- a/subcmds/upload.py
+++ b/subcmds/upload.py
@@ -267,7 +267,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
267 "--cc", 267 "--cc",
268 type="string", 268 type="string",
269 action="append", 269 action="append",
270 dest="cc",
271 help="also send email to these email addresses", 270 help="also send email to these email addresses",
272 ) 271 )
273 p.add_option( 272 p.add_option(
@@ -281,7 +280,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
281 p.add_option( 280 p.add_option(
282 "-c", 281 "-c",
283 "--current-branch", 282 "--current-branch",
284 dest="current_branch",
285 action="store_true", 283 action="store_true",
286 help="upload current git branch", 284 help="upload current git branch",
287 ) 285 )
@@ -310,7 +308,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
310 "-p", 308 "-p",
311 "--private", 309 "--private",
312 action="store_true", 310 action="store_true",
313 dest="private",
314 default=False, 311 default=False,
315 help="upload as a private change (deprecated; use --wip)", 312 help="upload as a private change (deprecated; use --wip)",
316 ) 313 )
@@ -318,7 +315,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
318 "-w", 315 "-w",
319 "--wip", 316 "--wip",
320 action="store_true", 317 action="store_true",
321 dest="wip",
322 default=False, 318 default=False,
323 help="upload as a work-in-progress change", 319 help="upload as a work-in-progress change",
324 ) 320 )
@@ -628,6 +624,16 @@ Gerrit Code Review: https://www.gerritcodereview.com/
628 branch.uploaded = False 624 branch.uploaded = False
629 return 625 return
630 626
627 # If using superproject, add the root repo as a push option.
628 manifest = branch.project.manifest
629 push_options = list(opt.push_options)
630 if manifest.manifestProject.use_superproject:
631 sp = manifest.superproject
632 if sp:
633 r_id = sp.repo_id
634 if r_id:
635 push_options.append(f"custom-keyed-value=rootRepo:{r_id}")
636
631 branch.UploadForReview( 637 branch.UploadForReview(
632 people, 638 people,
633 dryrun=opt.dryrun, 639 dryrun=opt.dryrun,
@@ -640,7 +646,7 @@ Gerrit Code Review: https://www.gerritcodereview.com/
640 ready=opt.ready, 646 ready=opt.ready,
641 dest_branch=destination, 647 dest_branch=destination,
642 validate_certs=opt.validate_certs, 648 validate_certs=opt.validate_certs,
643 push_options=opt.push_options, 649 push_options=push_options,
644 patchset_description=opt.patchset_description, 650 patchset_description=opt.patchset_description,
645 ) 651 )
646 652
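For context on the upload.py hunk above: repo hands push options to Gerrit as `git push -o <key>=<value>` arguments, so the new code simply appends one more option when a superproject repo id is known. The helper below is a hypothetical, standalone sketch of that shape, not repo's `UploadForReview`.

```py
# Illustrative only: assumes push options become "git push -o" arguments.
# build_push_cmd is a hypothetical helper mirroring the diff's behavior of
# appending a rootRepo option when a superproject repo id is available.
def build_push_cmd(remote, refspec, push_options, superproject_repo_id=None):
    opts = list(push_options)
    if superproject_repo_id:
        # Same key/value shape the diff adds.
        opts.append(f"custom-keyed-value=rootRepo:{superproject_repo_id}")
    cmd = ["git", "push"]
    for o in opts:
        cmd += ["-o", o]
    return cmd + [remote, refspec]


# build_push_cmd("origin", "HEAD:refs/for/main", ["topic=demo"], "abc123")
# -> ['git', 'push', '-o', 'topic=demo',
#     '-o', 'custom-keyed-value=rootRepo:abc123',
#     'origin', 'HEAD:refs/for/main']
```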
diff --git a/tests/test_subcmds.py b/tests/test_subcmds.py
index 5ce0776f..2d680fb7 100644
--- a/tests/test_subcmds.py
+++ b/tests/test_subcmds.py
@@ -89,3 +89,44 @@ class AllCommands(unittest.TestCase):
89 msg=f"subcmds/{name}.py: {opt}: only use dashes in " 89 msg=f"subcmds/{name}.py: {opt}: only use dashes in "
90 "options, not underscores", 90 "options, not underscores",
91 ) 91 )
92
93 def test_cli_option_dest(self):
94 """Block redundant dest= arguments."""
95
96 def _check_dest(opt):
97 if opt.dest is None or not opt._long_opts:
98 return
99
100 long = opt._long_opts[0]
101 assert long.startswith("--")
102 # This matches optparse's behavior.
103 implicit_dest = long[2:].replace("-", "_")
104 if implicit_dest == opt.dest:
105 bad_opts.append((str(opt), opt.dest))
106
107 # Hook the option check list.
108 optparse.Option.CHECK_METHODS.insert(0, _check_dest)
109
110 # Gather all the bad options up front so people can see all bad options
111 # instead of failing at the first one.
112 all_bad_opts = {}
113 for name, cls in subcmds.all_commands.items():
114 bad_opts = all_bad_opts[name] = []
115 cmd = cls()
116 # Trigger construction of parser.
117 cmd.OptionParser
118
119 errmsg = None
120 for name, bad_opts in sorted(all_bad_opts.items()):
121 if bad_opts:
122 if not errmsg:
123 errmsg = "Omit redundant dest= when defining options.\n"
124 errmsg += f"\nSubcommand {name} (subcmds/{name}.py):\n"
125 errmsg += "".join(
126 f" {opt}: dest='{dest}'\n" for opt, dest in bad_opts
127 )
128 if errmsg:
129 self.fail(errmsg)
130
131 # Make sure we aren't popping the wrong stuff.
132 assert optparse.Option.CHECK_METHODS.pop(0) is _check_dest
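The `test_cli_option_dest` check above works because optparse derives `dest` from the first long option on its own: it strips the leading `--` and replaces dashes with underscores, so an explicit `dest=` that matches that derivation adds nothing. A short standalone demonstration:

```py
# Demonstrates optparse's implicit dest derivation, which the new test relies
# on to flag redundant dest= arguments.
import optparse

parser = optparse.OptionParser()

opt = parser.add_option("--current-branch", action="store_true")
assert opt.dest == "current_branch"  # derived from the long option name

redundant = parser.add_option("--fail-fast", dest="fail_fast",
                              action="store_true")
assert redundant.dest == "fail_fast"  # the explicit dest repeats the default
```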
diff --git a/tests/test_subcmds_sync.py b/tests/test_subcmds_sync.py
index b871317c..9cd19f10 100644
--- a/tests/test_subcmds_sync.py
+++ b/tests/test_subcmds_sync.py
@@ -305,8 +305,20 @@ class LocalSyncState(unittest.TestCase):
305 305
306 306
307class FakeProject: 307class FakeProject:
308 def __init__(self, relpath): 308 def __init__(self, relpath, name=None, objdir=None):
309 self.relpath = relpath 309 self.relpath = relpath
310 self.name = name or relpath
311 self.objdir = objdir or relpath
312
313 self.use_git_worktrees = False
314 self.UseAlternates = False
315 self.manifest = mock.MagicMock()
316 self.manifest.GetProjectsWithName.return_value = [self]
317 self.config = mock.MagicMock()
318 self.EnableRepositoryExtension = mock.MagicMock()
319
320 def RelPath(self, local=None):
321 return self.relpath
310 322
311 def __str__(self): 323 def __str__(self):
312 return f"project: {self.relpath}" 324 return f"project: {self.relpath}"
@@ -513,3 +525,394 @@ class SyncCommand(unittest.TestCase):
513 self.cmd.Execute(self.opt, []) 525 self.cmd.Execute(self.opt, [])
514 self.assertIn(self.sync_local_half_error, e.aggregate_errors) 526 self.assertIn(self.sync_local_half_error, e.aggregate_errors)
515 self.assertIn(self.sync_network_half_error, e.aggregate_errors) 527 self.assertIn(self.sync_network_half_error, e.aggregate_errors)
528
529
530class SyncUpdateRepoProject(unittest.TestCase):
531 """Tests for Sync._UpdateRepoProject."""
532
533 def setUp(self):
534 """Common setup."""
535 self.repodir = tempfile.mkdtemp(".repo")
536 self.manifest = manifest = mock.MagicMock(repodir=self.repodir)
537 # Create a repoProject with a mock Sync_NetworkHalf.
538 repoProject = mock.MagicMock(name="repo")
539 repoProject.Sync_NetworkHalf = mock.Mock(
540 return_value=SyncNetworkHalfResult(True, None)
541 )
542 manifest.repoProject = repoProject
543 manifest.IsArchive = False
544 manifest.CloneFilter = None
545 manifest.PartialCloneExclude = None
546 manifest.CloneFilterForDepth = None
547
548 git_event_log = mock.MagicMock(ErrorEvent=mock.Mock(return_value=None))
549 self.cmd = sync.Sync(manifest=manifest, git_event_log=git_event_log)
550
551 opt, _ = self.cmd.OptionParser.parse_args([])
552 opt.local_only = False
553 opt.repo_verify = False
554 opt.verbose = False
555 opt.quiet = True
556 opt.force_sync = False
557 opt.clone_bundle = False
558 opt.tags = False
559 opt.optimized_fetch = False
560 opt.retry_fetches = 0
561 opt.prune = False
562 self.opt = opt
563 self.errors = []
564
565 mock.patch.object(sync.Sync, "_GetCurrentBranchOnly").start()
566
567 def tearDown(self):
568 shutil.rmtree(self.repodir)
569 mock.patch.stopall()
570
571 def test_fetches_when_stale(self):
572 """Test it fetches when the repo project is stale."""
573 self.manifest.repoProject.LastFetch = time.time() - (
574 sync._ONE_DAY_S + 1
575 )
576
577 with mock.patch.object(sync, "_PostRepoFetch") as mock_post_fetch:
578 self.cmd._UpdateRepoProject(self.opt, self.manifest, self.errors)
579 self.manifest.repoProject.Sync_NetworkHalf.assert_called_once()
580 mock_post_fetch.assert_called_once()
581 self.assertEqual(self.errors, [])
582
583 def test_skips_when_fresh(self):
584 """Test it skips fetch when repo project is fresh."""
585 self.manifest.repoProject.LastFetch = time.time()
586
587 with mock.patch.object(sync, "_PostRepoFetch") as mock_post_fetch:
588 self.cmd._UpdateRepoProject(self.opt, self.manifest, self.errors)
589 self.manifest.repoProject.Sync_NetworkHalf.assert_not_called()
590 mock_post_fetch.assert_not_called()
591
592 def test_skips_local_only(self):
593 """Test it does nothing with --local-only."""
594 self.opt.local_only = True
595 self.manifest.repoProject.LastFetch = time.time() - (
596 sync._ONE_DAY_S + 1
597 )
598
599 with mock.patch.object(sync, "_PostRepoFetch") as mock_post_fetch:
600 self.cmd._UpdateRepoProject(self.opt, self.manifest, self.errors)
601 self.manifest.repoProject.Sync_NetworkHalf.assert_not_called()
602 mock_post_fetch.assert_not_called()
603
604 def test_post_repo_fetch_skipped_on_env_var(self):
605 """Test _PostRepoFetch is skipped when REPO_SKIP_SELF_UPDATE is set."""
606 self.manifest.repoProject.LastFetch = time.time()
607
608 with mock.patch.dict(os.environ, {"REPO_SKIP_SELF_UPDATE": "1"}):
609 with mock.patch.object(sync, "_PostRepoFetch") as mock_post_fetch:
610 self.cmd._UpdateRepoProject(
611 self.opt, self.manifest, self.errors
612 )
613 mock_post_fetch.assert_not_called()
614
615 def test_fetch_failure_is_handled(self):
616 """Test that a fetch failure is recorded and doesn't crash."""
617 self.manifest.repoProject.LastFetch = time.time() - (
618 sync._ONE_DAY_S + 1
619 )
620 fetch_error = GitError("Fetch failed")
621 self.manifest.repoProject.Sync_NetworkHalf.return_value = (
622 SyncNetworkHalfResult(False, fetch_error)
623 )
624
625 with mock.patch.object(sync, "_PostRepoFetch") as mock_post_fetch:
626 self.cmd._UpdateRepoProject(self.opt, self.manifest, self.errors)
627 self.manifest.repoProject.Sync_NetworkHalf.assert_called_once()
628 mock_post_fetch.assert_not_called()
629 self.assertEqual(self.errors, [fetch_error])
630
631
632class InterleavedSyncTest(unittest.TestCase):
633 """Tests for interleaved sync."""
634
635 def setUp(self):
636 """Set up a sync command with mocks."""
637 self.repodir = tempfile.mkdtemp(".repo")
638 self.manifest = mock.MagicMock(repodir=self.repodir)
639 self.manifest.repoProject.LastFetch = time.time()
640 self.manifest.repoProject.worktree = self.repodir
641 self.manifest.manifestProject.worktree = self.repodir
642 self.manifest.IsArchive = False
643 self.manifest.CloneBundle = False
644 self.manifest.default.sync_j = 1
645
646 self.outer_client = mock.MagicMock()
647 self.outer_client.manifest.IsArchive = False
648 self.cmd = sync.Sync(
649 manifest=self.manifest, outer_client=self.outer_client
650 )
651 self.cmd.outer_manifest = self.manifest
652
653 # Mock projects.
654 self.projA = FakeProject("projA", objdir="objA")
655 self.projB = FakeProject("projB", objdir="objB")
656 self.projA_sub = FakeProject(
657 "projA/sub", name="projA_sub", objdir="objA_sub"
658 )
659 self.projC = FakeProject("projC", objdir="objC")
660
661 # Mock methods that are not part of the core interleaved sync logic.
662 mock.patch.object(self.cmd, "_UpdateAllManifestProjects").start()
663 mock.patch.object(self.cmd, "_UpdateProjectsRevisionId").start()
664 mock.patch.object(self.cmd, "_ValidateOptionsWithManifest").start()
665 mock.patch.object(sync, "_PostRepoUpgrade").start()
666 mock.patch.object(sync, "_PostRepoFetch").start()
667
668 # Mock parallel context for worker tests.
669 self.parallel_context_patcher = mock.patch(
670 "subcmds.sync.Sync.get_parallel_context"
671 )
672 self.mock_get_parallel_context = self.parallel_context_patcher.start()
673 self.sync_dict = {}
674 self.mock_context = {
675 "projects": [],
676 "sync_dict": self.sync_dict,
677 }
678 self.mock_get_parallel_context.return_value = self.mock_context
679
680 # Mock _GetCurrentBranchOnly for worker tests.
681 mock.patch.object(sync.Sync, "_GetCurrentBranchOnly").start()
682
683 def tearDown(self):
684 """Clean up resources."""
685 shutil.rmtree(self.repodir)
686 mock.patch.stopall()
687
688 def test_interleaved_fail_fast(self):
689 """Test that --fail-fast is respected in interleaved mode."""
690 opt, args = self.cmd.OptionParser.parse_args(
691 ["--interleaved", "--fail-fast", "-j2"]
692 )
693 opt.quiet = True
694
695 # With projA/sub, _SafeCheckoutOrder creates two batches:
696 # 1. [projA, projB]
697 # 2. [projA/sub]
698 # We want to fail on the first batch and ensure the second isn't run.
699 all_projects = [self.projA, self.projB, self.projA_sub]
700 mock.patch.object(
701 self.cmd, "GetProjects", return_value=all_projects
702 ).start()
703
704 # Mock ExecuteInParallel to simulate a failed run on the first batch of
705 # projects.
706 execute_mock = mock.patch.object(
707 self.cmd, "ExecuteInParallel", return_value=False
708 ).start()
709
710 with self.assertRaises(sync.SyncFailFastError):
711 self.cmd._SyncInterleaved(
712 opt,
713 args,
714 [],
715 self.manifest,
716 self.manifest.manifestProject,
717 all_projects,
718 {},
719 )
720
721 execute_mock.assert_called_once()
722
723 def test_interleaved_shared_objdir_serial(self):
724 """Test that projects with shared objdir are processed serially."""
725 opt, args = self.cmd.OptionParser.parse_args(["--interleaved", "-j4"])
726 opt.quiet = True
727
728 # Setup projects with a shared objdir.
729 self.projA.objdir = "common_objdir"
730 self.projC.objdir = "common_objdir"
731
732 all_projects = [self.projA, self.projB, self.projC]
733 mock.patch.object(
734 self.cmd, "GetProjects", return_value=all_projects
735 ).start()
736
737 def execute_side_effect(jobs, target, work_items, **kwargs):
738 # The callback is a partial object. The first arg is the set we
739 # need to update to avoid the stall detection.
740 synced_relpaths_set = kwargs["callback"].args[0]
741 projects_in_pass = self.cmd.get_parallel_context()["projects"]
742 for item in work_items:
743 for project_idx in item:
744 synced_relpaths_set.add(
745 projects_in_pass[project_idx].relpath
746 )
747 return True
748
749 execute_mock = mock.patch.object(
750 self.cmd, "ExecuteInParallel", side_effect=execute_side_effect
751 ).start()
752
753 self.cmd._SyncInterleaved(
754 opt,
755 args,
756 [],
757 self.manifest,
758 self.manifest.manifestProject,
759 all_projects,
760 {},
761 )
762
763 execute_mock.assert_called_once()
764 jobs_arg, _, work_items = execute_mock.call_args.args
765 self.assertEqual(jobs_arg, 2)
766 work_items_sets = {frozenset(item) for item in work_items}
767 expected_sets = {frozenset([0, 2]), frozenset([1])}
768 self.assertEqual(work_items_sets, expected_sets)
769
770 def _get_opts(self, args=None):
771 """Helper to get default options for worker tests."""
772 if args is None:
773 args = ["--interleaved"]
774 opt, _ = self.cmd.OptionParser.parse_args(args)
775 # Set defaults for options used by the worker.
776 opt.quiet = True
777 opt.verbose = False
778 opt.force_sync = False
779 opt.clone_bundle = False
780 opt.tags = False
781 opt.optimized_fetch = False
782 opt.retry_fetches = 0
783 opt.prune = False
784 opt.detach_head = False
785 opt.force_checkout = False
786 opt.rebase = False
787 return opt
788
789 def test_worker_successful_sync(self):
790 """Test _SyncProjectList with a successful fetch and checkout."""
791 opt = self._get_opts()
792 project = self.projA
793 project.Sync_NetworkHalf = mock.Mock(
794 return_value=SyncNetworkHalfResult(error=None, remote_fetched=True)
795 )
796 project.Sync_LocalHalf = mock.Mock()
797 project.manifest.manifestProject.config = mock.MagicMock()
798 self.mock_context["projects"] = [project]
799
800 with mock.patch("subcmds.sync.SyncBuffer") as mock_sync_buffer:
801 mock_sync_buf_instance = mock.MagicMock()
802 mock_sync_buf_instance.Finish.return_value = True
803 mock_sync_buffer.return_value = mock_sync_buf_instance
804
805 result_obj = self.cmd._SyncProjectList(opt, [0])
806
807 self.assertEqual(len(result_obj.results), 1)
808 result = result_obj.results[0]
809 self.assertTrue(result.fetch_success)
810 self.assertTrue(result.checkout_success)
811 self.assertIsNone(result.fetch_error)
812 self.assertIsNone(result.checkout_error)
813 project.Sync_NetworkHalf.assert_called_once()
814 project.Sync_LocalHalf.assert_called_once()
815
816 def test_worker_fetch_fails(self):
817 """Test _SyncProjectList with a failed fetch."""
818 opt = self._get_opts()
819 project = self.projA
820 fetch_error = GitError("Fetch failed")
821 project.Sync_NetworkHalf = mock.Mock(
822 return_value=SyncNetworkHalfResult(
823 error=fetch_error, remote_fetched=False
824 )
825 )
826 project.Sync_LocalHalf = mock.Mock()
827 self.mock_context["projects"] = [project]
828
829 result_obj = self.cmd._SyncProjectList(opt, [0])
830 result = result_obj.results[0]
831
832 self.assertFalse(result.fetch_success)
833 self.assertFalse(result.checkout_success)
834 self.assertEqual(result.fetch_error, fetch_error)
835 self.assertIsNone(result.checkout_error)
836 project.Sync_NetworkHalf.assert_called_once()
837 project.Sync_LocalHalf.assert_not_called()
838
839 def test_worker_fetch_fails_exception(self):
840 """Test _SyncProjectList with an exception during fetch."""
841 opt = self._get_opts()
842 project = self.projA
843 fetch_error = GitError("Fetch failed")
844 project.Sync_NetworkHalf = mock.Mock(side_effect=fetch_error)
845 project.Sync_LocalHalf = mock.Mock()
846 self.mock_context["projects"] = [project]
847
848 result_obj = self.cmd._SyncProjectList(opt, [0])
849 result = result_obj.results[0]
850
851 self.assertFalse(result.fetch_success)
852 self.assertFalse(result.checkout_success)
853 self.assertEqual(result.fetch_error, fetch_error)
854 project.Sync_NetworkHalf.assert_called_once()
855 project.Sync_LocalHalf.assert_not_called()
856
857 def test_worker_checkout_fails(self):
858 """Test _SyncProjectList with an exception during checkout."""
859 opt = self._get_opts()
860 project = self.projA
861 project.Sync_NetworkHalf = mock.Mock(
862 return_value=SyncNetworkHalfResult(error=None, remote_fetched=True)
863 )
864 checkout_error = GitError("Checkout failed")
865 project.Sync_LocalHalf = mock.Mock(side_effect=checkout_error)
866 project.manifest.manifestProject.config = mock.MagicMock()
867 self.mock_context["projects"] = [project]
868
869 with mock.patch("subcmds.sync.SyncBuffer"):
870 result_obj = self.cmd._SyncProjectList(opt, [0])
871 result = result_obj.results[0]
872
873 self.assertTrue(result.fetch_success)
874 self.assertFalse(result.checkout_success)
875 self.assertIsNone(result.fetch_error)
876 self.assertEqual(result.checkout_error, checkout_error)
877 project.Sync_NetworkHalf.assert_called_once()
878 project.Sync_LocalHalf.assert_called_once()
879
880 def test_worker_local_only(self):
881 """Test _SyncProjectList with --local-only."""
882 opt = self._get_opts(["--interleaved", "--local-only"])
883 project = self.projA
884 project.Sync_NetworkHalf = mock.Mock()
885 project.Sync_LocalHalf = mock.Mock()
886 project.manifest.manifestProject.config = mock.MagicMock()
887 self.mock_context["projects"] = [project]
888
889 with mock.patch("subcmds.sync.SyncBuffer") as mock_sync_buffer:
890 mock_sync_buf_instance = mock.MagicMock()
891 mock_sync_buf_instance.Finish.return_value = True
892 mock_sync_buffer.return_value = mock_sync_buf_instance
893
894 result_obj = self.cmd._SyncProjectList(opt, [0])
895 result = result_obj.results[0]
896
897 self.assertTrue(result.fetch_success)
898 self.assertTrue(result.checkout_success)
899 project.Sync_NetworkHalf.assert_not_called()
900 project.Sync_LocalHalf.assert_called_once()
901
902 def test_worker_network_only(self):
903 """Test _SyncProjectList with --network-only."""
904 opt = self._get_opts(["--interleaved", "--network-only"])
905 project = self.projA
906 project.Sync_NetworkHalf = mock.Mock(
907 return_value=SyncNetworkHalfResult(error=None, remote_fetched=True)
908 )
909 project.Sync_LocalHalf = mock.Mock()
910 self.mock_context["projects"] = [project]
911
912 result_obj = self.cmd._SyncProjectList(opt, [0])
913 result = result_obj.results[0]
914
915 self.assertTrue(result.fetch_success)
916 self.assertTrue(result.checkout_success)
917 project.Sync_NetworkHalf.assert_called_once()
918 project.Sync_LocalHalf.assert_not_called()
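The `execute_side_effect` helper in `test_interleaved_shared_objdir_serial` marks relpaths as synced precisely because the interleaved outer loop aborts when a pass makes no progress. A simplified, hypothetical sketch of that stall guard follows (`run_until_done` and `sync_one_pass` are made up for illustration, not repo code):

```py
# Hypothetical sketch of the stall guard in the interleaved outer loop: keep
# re-deriving the pending set and error out if a full pass does not shrink it.
def run_until_done(pending, sync_one_pass):
    """sync_one_pass(pending) returns the set of relpaths it synced."""
    pending = set(pending)
    previously_pending = set()
    while pending:
        if pending == previously_pending:
            raise RuntimeError("Stall detected: no progress in the last pass.")
        previously_pending = set(pending)
        pending -= sync_one_pass(pending)
    return True


# run_until_done({"projA", "projB"}, lambda todo: {next(iter(todo))}) syncs one
# project per pass and returns True; a sync_one_pass that returns an empty set
# would trigger the stall error on its second pass.
```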