path: root/subcmds
Diffstat (limited to 'subcmds')
-rw-r--r--   subcmds/abandon.py          1
-rw-r--r--   subcmds/diff.py             1
-rw-r--r--   subcmds/diffmanifests.py    5
-rw-r--r--   subcmds/download.py         1
-rw-r--r--   subcmds/forall.py           7
-rw-r--r--   subcmds/grep.py             1
-rw-r--r--   subcmds/info.py             6
-rw-r--r--   subcmds/init.py             1
-rw-r--r--   subcmds/list.py             5
-rw-r--r--   subcmds/manifest.py         1
-rw-r--r--   subcmds/overview.py         1
-rw-r--r--   subcmds/rebase.py           7
-rw-r--r--   subcmds/selfupdate.py       1
-rw-r--r--   subcmds/stage.py            1
-rw-r--r--   subcmds/start.py            1
-rw-r--r--   subcmds/status.py           1
-rw-r--r--   subcmds/sync.py           847
-rw-r--r--   subcmds/upload.py          16
18 files changed, 751 insertions, 153 deletions
diff --git a/subcmds/abandon.py b/subcmds/abandon.py
index 3208be6b..f1688e7b 100644
--- a/subcmds/abandon.py
+++ b/subcmds/abandon.py
@@ -48,7 +48,6 @@ It is equivalent to "git branch -D <branchname>".
     def _Options(self, p):
         p.add_option(
             "--all",
-            dest="all",
             action="store_true",
             help="delete all branches in all projects",
         )
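
Note on this series of changes: nearly every deletion below is an explicit dest= keyword that merely restated optparse's default. When dest is omitted, optparse derives it from the first long option name, with dashes turned into underscores, so "--all" already stores to opts.all. A minimal standalone sketch of that stock optparse behavior (not repo code):

    import optparse

    p = optparse.OptionParser()
    # No dest= needed: "--all" implies dest="all" for storing actions.
    p.add_option("--all", action="store_true", help="delete all branches")
    opts, _ = p.parse_args(["--all"])
    print(opts.all)  # True
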
diff --git a/subcmds/diff.py b/subcmds/diff.py
index 7bb0cbbd..fe1a5139 100644
--- a/subcmds/diff.py
+++ b/subcmds/diff.py
@@ -35,7 +35,6 @@ to the Unix 'patch' command.
         p.add_option(
             "-u",
             "--absolute",
-            dest="absolute",
             action="store_true",
             help="paths are relative to the repository root",
         )
diff --git a/subcmds/diffmanifests.py b/subcmds/diffmanifests.py
index 3eee3f94..66b3183d 100644
--- a/subcmds/diffmanifests.py
+++ b/subcmds/diffmanifests.py
@@ -67,7 +67,9 @@ synced and their revisions won't be found.
 
     def _Options(self, p):
         p.add_option(
-            "--raw", dest="raw", action="store_true", help="display raw diff"
+            "--raw",
+            action="store_true",
+            help="display raw diff",
         )
         p.add_option(
             "--no-color",
@@ -78,7 +80,6 @@ synced and their revisions won't be found.
         )
         p.add_option(
             "--pretty-format",
-            dest="pretty_format",
             action="store",
             metavar="<FORMAT>",
             help="print the log using a custom git pretty format string",
diff --git a/subcmds/download.py b/subcmds/download.py
index 4396c9e7..1c0bf5ce 100644
--- a/subcmds/download.py
+++ b/subcmds/download.py
@@ -60,7 +60,6 @@ If no project is specified try to use current directory as a project.
         p.add_option(
             "-r",
             "--revert",
-            dest="revert",
             action="store_true",
             help="revert instead of checkout",
         )
diff --git a/subcmds/forall.py b/subcmds/forall.py
index 9da0c96e..4bae46af 100644
--- a/subcmds/forall.py
+++ b/subcmds/forall.py
@@ -133,7 +133,7 @@ without iterating through the remaining projects.
 
     @staticmethod
     def _cmd_option(option, _opt_str, _value, parser):
-        setattr(parser.values, option.dest, list(parser.rargs))
+        setattr(parser.values, option.dest or "command", list(parser.rargs))
         while parser.rargs:
             del parser.rargs[0]
 
@@ -141,7 +141,6 @@ without iterating through the remaining projects.
         p.add_option(
             "-r",
             "--regex",
-            dest="regex",
             action="store_true",
             help="execute the command only on projects matching regex or "
             "wildcard expression",
@@ -149,7 +148,6 @@ without iterating through the remaining projects.
         p.add_option(
             "-i",
             "--inverse-regex",
-            dest="inverse_regex",
             action="store_true",
             help="execute the command only on projects not matching regex or "
             "wildcard expression",
@@ -157,7 +155,6 @@ without iterating through the remaining projects.
         p.add_option(
             "-g",
             "--groups",
-            dest="groups",
             help="execute the command only on projects matching the specified "
             "groups",
         )
@@ -165,14 +162,12 @@ without iterating through the remaining projects.
             "-c",
             "--command",
             help="command (and arguments) to execute",
-            dest="command",
             action="callback",
             callback=self._cmd_option,
         )
         p.add_option(
             "-e",
             "--abort-on-errors",
-            dest="abort_on_errors",
             action="store_true",
             help="abort if a command exits unsuccessfully",
         )
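
The tweak to _cmd_option above compensates for the dropped dest="command": optparse only auto-derives dest for storing actions or options that take a value, so a bare action="callback" option keeps dest=None. A small sketch of that corner case, assuming the same callback shape as forall's _cmd_option:

    import optparse

    def _cmd_option(option, _opt_str, _value, parser):
        # option.dest is None for a typeless callback option, so fall back
        # to an explicit attribute name.
        setattr(parser.values, option.dest or "command", list(parser.rargs))
        del parser.rargs[:]

    p = optparse.OptionParser()
    p.add_option("-c", "--command", action="callback", callback=_cmd_option)
    opts, _ = p.parse_args(["-c", "echo", "hello"])
    print(opts.command)  # ['echo', 'hello']
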
diff --git a/subcmds/grep.py b/subcmds/grep.py
index 918651d9..85977ce8 100644
--- a/subcmds/grep.py
+++ b/subcmds/grep.py
@@ -120,7 +120,6 @@ contain a line that matches both expressions:
         g.add_option(
             "-r",
             "--revision",
-            dest="revision",
             action="append",
             metavar="TREEish",
             help="Search TREEish, instead of the work tree",
diff --git a/subcmds/info.py b/subcmds/info.py
index ab230ddd..2fbdae05 100644
--- a/subcmds/info.py
+++ b/subcmds/info.py
@@ -43,14 +43,12 @@ class Info(PagedCommand):
         p.add_option(
             "-o",
             "--overview",
-            dest="overview",
             action="store_true",
             help="show overview of all local commits",
         )
         p.add_option(
             "-c",
             "--current-branch",
-            dest="current_branch",
             action="store_true",
             help="consider only checked out branches",
         )
@@ -104,6 +102,10 @@ class Info(PagedCommand):
         self.heading("Manifest groups: ")
         self.headtext(manifestGroups)
         self.out.nl()
+        sp = self.manifest.superproject
+        srev = sp.commit_id if sp and sp.commit_id else "None"
+        self.heading("Superproject revision: ")
+        self.headtext(srev)
 
         self.printSeparator()
 
diff --git a/subcmds/init.py b/subcmds/init.py
index fb6d3eb5..f5a3892a 100644
--- a/subcmds/init.py
+++ b/subcmds/init.py
@@ -127,6 +127,7 @@ to update the working directory files.
         return {
             "REPO_MANIFEST_URL": "manifest_url",
             "REPO_MIRROR_LOCATION": "reference",
+            "REPO_GIT_LFS": "git_lfs",
         }
 
     def _SyncManifest(self, opt):
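
For context, _RegisteredEnvironmentOptions maps an environment variable to an option dest, so REPO_GIT_LFS can now pre-seed the git_lfs option. The sketch below is a hypothetical illustration of how such a mapping can feed parser defaults; it is not repo's actual plumbing, which lives elsewhere in the command framework:

    import os
    import optparse

    # Hypothetical helper: seed option defaults from {ENV_VAR: option_dest}.
    ENV_OPTIONS = {
        "REPO_MANIFEST_URL": "manifest_url",
        "REPO_MIRROR_LOCATION": "reference",
        "REPO_GIT_LFS": "git_lfs",
    }

    def apply_env_defaults(parser: optparse.OptionParser) -> None:
        for env_var, dest in ENV_OPTIONS.items():
            value = os.environ.get(env_var)
            if value is not None and parser.defaults.get(dest) is None:
                parser.set_default(dest, value)
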
diff --git a/subcmds/list.py b/subcmds/list.py
index 4338e1c9..df9ce5f6 100644
--- a/subcmds/list.py
+++ b/subcmds/list.py
@@ -40,7 +40,6 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
         p.add_option(
             "-r",
             "--regex",
-            dest="regex",
             action="store_true",
             help="filter the project list based on regex or wildcard matching "
             "of strings",
@@ -48,7 +47,6 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
         p.add_option(
             "-g",
             "--groups",
-            dest="groups",
             help="filter the project list based on the groups the project is "
             "in",
         )
@@ -61,21 +59,18 @@ This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
         p.add_option(
             "-n",
             "--name-only",
-            dest="name_only",
             action="store_true",
             help="display only the name of the repository",
         )
         p.add_option(
             "-p",
             "--path-only",
-            dest="path_only",
             action="store_true",
             help="display only the path of the repository",
         )
         p.add_option(
             "-f",
             "--fullpath",
-            dest="fullpath",
             action="store_true",
             help="display the full work tree path instead of the relative path",
         )
diff --git a/subcmds/manifest.py b/subcmds/manifest.py
index 9786580a..548bac0d 100644
--- a/subcmds/manifest.py
+++ b/subcmds/manifest.py
@@ -134,7 +134,6 @@ human-readable variations.
         p.add_option(
             "-o",
             "--output-file",
-            dest="output_file",
             default="-",
             help="file to save the manifest to. (Filename prefix for "
             "multi-tree.)",
diff --git a/subcmds/overview.py b/subcmds/overview.py
index 8ccad611..dff17623 100644
--- a/subcmds/overview.py
+++ b/subcmds/overview.py
@@ -37,7 +37,6 @@ are displayed.
         p.add_option(
             "-c",
             "--current-branch",
-            dest="current_branch",
             action="store_true",
             help="consider only checked out branches",
         )
diff --git a/subcmds/rebase.py b/subcmds/rebase.py
index db1b387c..d7e769ce 100644
--- a/subcmds/rebase.py
+++ b/subcmds/rebase.py
@@ -47,21 +47,18 @@ branch but need to incorporate new upstream changes "underneath" them.
         g.add_option(
             "-i",
             "--interactive",
-            dest="interactive",
             action="store_true",
             help="interactive rebase (single project only)",
         )
 
         p.add_option(
             "--fail-fast",
-            dest="fail_fast",
             action="store_true",
             help="stop rebasing after first error is hit",
         )
         p.add_option(
             "-f",
             "--force-rebase",
-            dest="force_rebase",
             action="store_true",
             help="pass --force-rebase to git rebase",
         )
@@ -74,27 +71,23 @@ branch but need to incorporate new upstream changes "underneath" them.
         )
         p.add_option(
             "--autosquash",
-            dest="autosquash",
             action="store_true",
             help="pass --autosquash to git rebase",
         )
         p.add_option(
             "--whitespace",
-            dest="whitespace",
             action="store",
             metavar="WS",
             help="pass --whitespace to git rebase",
         )
         p.add_option(
             "--auto-stash",
-            dest="auto_stash",
             action="store_true",
             help="stash local modifications before starting",
         )
         p.add_option(
             "-m",
             "--onto-manifest",
-            dest="onto_manifest",
             action="store_true",
             help="rebase onto the manifest version instead of upstream "
             "HEAD (this helps to make sure the local tree stays "
diff --git a/subcmds/selfupdate.py b/subcmds/selfupdate.py
index 72683097..ed333569 100644
--- a/subcmds/selfupdate.py
+++ b/subcmds/selfupdate.py
@@ -54,7 +54,6 @@ need to be performed by an end-user.
         )
         g.add_option(
             "--repo-upgraded",
-            dest="repo_upgraded",
             action="store_true",
             help=optparse.SUPPRESS_HELP,
         )
diff --git a/subcmds/stage.py b/subcmds/stage.py
index 92a00ea0..1c285fc1 100644
--- a/subcmds/stage.py
+++ b/subcmds/stage.py
@@ -46,7 +46,6 @@ The '%prog' command stages files to prepare the next commit.
         g.add_option(
             "-i",
             "--interactive",
-            dest="interactive",
             action="store_true",
             help="use interactive staging",
         )
diff --git a/subcmds/start.py b/subcmds/start.py
index 6dca7e4e..73dddf3f 100644
--- a/subcmds/start.py
+++ b/subcmds/start.py
@@ -51,7 +51,6 @@ revision specified in the manifest.
     def _Options(self, p):
         p.add_option(
             "--all",
-            dest="all",
             action="store_true",
             help="begin branch in all projects",
         )
diff --git a/subcmds/status.py b/subcmds/status.py
index cda73627..a9852b3c 100644
--- a/subcmds/status.py
+++ b/subcmds/status.py
@@ -82,7 +82,6 @@ the following meanings:
         p.add_option(
             "-o",
             "--orphans",
-            dest="orphans",
             action="store_true",
             help="include objects in working directory outside of repo "
             "projects",
diff --git a/subcmds/sync.py b/subcmds/sync.py
index 3dc74f1f..250925f4 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import collections
+import contextlib
 import functools
 import http.cookiejar as cookielib
 import io
@@ -25,7 +26,7 @@ from pathlib import Path
 import sys
 import tempfile
 import time
-from typing import List, NamedTuple, Set, Union
+from typing import List, NamedTuple, Optional, Set, Tuple, Union
 import urllib.error
 import urllib.parse
 import urllib.request
@@ -67,6 +68,7 @@ from git_config import GetUrlCookieFile
 from git_refs import HEAD
 from git_refs import R_HEADS
 import git_superproject
+from hooks import RepoHook
 import platform_utils
 from progress import elapsed_str
 from progress import jobs_str
@@ -194,6 +196,57 @@ class _CheckoutOneResult(NamedTuple):
     finish: float
 
 
+class _SyncResult(NamedTuple):
+    """Individual project sync result for interleaved mode.
+
+    Attributes:
+        project_index (int): The index of the project in the shared list.
+        relpath (str): The project's relative path from the repo client top.
+        remote_fetched (bool): True if the remote was actually queried.
+        fetch_success (bool): True if the fetch operation was successful.
+        fetch_error (Optional[Exception]): The Exception from a failed fetch,
+            or None.
+        fetch_start (Optional[float]): The time.time() when fetch started.
+        fetch_finish (Optional[float]): The time.time() when fetch finished.
+        checkout_success (bool): True if the checkout operation was
+            successful.
+        checkout_error (Optional[Exception]): The Exception from a failed
+            checkout, or None.
+        checkout_start (Optional[float]): The time.time() when checkout
+            started.
+        checkout_finish (Optional[float]): The time.time() when checkout
+            finished.
+        stderr_text (str): The combined output from both fetch and checkout.
+    """
+
+    project_index: int
+    relpath: str
+
+    remote_fetched: bool
+    fetch_success: bool
+    fetch_error: Optional[Exception]
+    fetch_start: Optional[float]
+    fetch_finish: Optional[float]
+
+    checkout_success: bool
+    checkout_error: Optional[Exception]
+    checkout_start: Optional[float]
+    checkout_finish: Optional[float]
+
+    stderr_text: str
+
+
+class _InterleavedSyncResult(NamedTuple):
+    """Result of an interleaved sync.
+
+    Attributes:
+        results (List[_SyncResult]): A list of results, one for each project
+            processed. Empty if the worker failed before creating results.
+    """
+
+    results: List[_SyncResult]
+
+
 class SuperprojectError(SyncError):
     """Superproject sync repo."""
 
@@ -359,7 +412,7 @@ later is required to fix a server side protocol bug.
             type=int,
             metavar="JOBS",
             help="number of network jobs to run in parallel (defaults to "
-            "--jobs or 1)",
+            "--jobs or 1). Ignored when --interleaved is set",
         )
         p.add_option(
             "--jobs-checkout",
@@ -367,25 +420,23 @@ later is required to fix a server side protocol bug.
             type=int,
             metavar="JOBS",
             help="number of local checkout jobs to run in parallel (defaults "
-            f"to --jobs or {DEFAULT_LOCAL_JOBS})",
+            f"to --jobs or {DEFAULT_LOCAL_JOBS}). Ignored when --interleaved "
+            "is set",
         )
 
         p.add_option(
             "-f",
             "--force-broken",
-            dest="force_broken",
             action="store_true",
             help="obsolete option (to be deleted in the future)",
         )
         p.add_option(
             "--fail-fast",
-            dest="fail_fast",
             action="store_true",
             help="stop syncing after first error is hit",
         )
         p.add_option(
             "--force-sync",
-            dest="force_sync",
             action="store_true",
             help="overwrite an existing git directory if it needs to "
             "point to a different object directory. WARNING: this "
@@ -393,7 +444,6 @@ later is required to fix a server side protocol bug.
         )
         p.add_option(
             "--force-checkout",
-            dest="force_checkout",
             action="store_true",
             help="force checkout even if it results in throwing away "
             "uncommitted modifications. "
@@ -401,7 +451,6 @@ later is required to fix a server side protocol bug.
         )
         p.add_option(
             "--force-remove-dirty",
-            dest="force_remove_dirty",
             action="store_true",
             help="force remove projects with uncommitted modifications if "
             "projects no longer exist in the manifest. "
@@ -409,7 +458,6 @@ later is required to fix a server side protocol bug.
         )
         p.add_option(
             "--rebase",
-            dest="rebase",
             action="store_true",
             help="rebase local commits regardless of whether they are "
             "published",
@@ -417,7 +465,6 @@ later is required to fix a server side protocol bug.
         p.add_option(
             "-l",
             "--local-only",
-            dest="local_only",
             action="store_true",
             help="only update working tree, don't fetch",
         )
@@ -431,9 +478,13 @@ later is required to fix a server side protocol bug.
             "(do not update to the latest revision)",
         )
         p.add_option(
+            "--interleaved",
+            action="store_true",
+            help="fetch and checkout projects in parallel (experimental)",
+        )
+        p.add_option(
             "-n",
             "--network-only",
-            dest="network_only",
             action="store_true",
             help="fetch only, don't update working tree",
         )
@@ -460,7 +511,6 @@ later is required to fix a server side protocol bug.
         p.add_option(
             "-m",
             "--manifest-name",
-            dest="manifest_name",
             help="temporary manifest to use for this sync",
             metavar="NAME.xml",
         )
@@ -479,19 +529,16 @@ later is required to fix a server side protocol bug.
             "-u",
             "--manifest-server-username",
             action="store",
-            dest="manifest_server_username",
             help="username to authenticate with the manifest server",
         )
         p.add_option(
             "-p",
             "--manifest-server-password",
             action="store",
-            dest="manifest_server_password",
             help="password to authenticate with the manifest server",
         )
         p.add_option(
             "--fetch-submodules",
-            dest="fetch_submodules",
             action="store_true",
             help="fetch submodules from server",
         )
@@ -515,7 +562,6 @@ later is required to fix a server side protocol bug.
         )
         p.add_option(
             "--optimized-fetch",
-            dest="optimized_fetch",
             action="store_true",
             help="only fetch projects fixed to sha1 if revision does not exist "
             "locally",
@@ -554,7 +600,6 @@ later is required to fix a server side protocol bug.
         p.add_option(
             "-s",
             "--smart-sync",
-            dest="smart_sync",
             action="store_true",
             help="smart sync using manifest from the latest known good "
             "build",
@@ -562,7 +607,6 @@ later is required to fix a server side protocol bug.
         p.add_option(
             "-t",
             "--smart-tag",
-            dest="smart_tag",
             action="store",
             help="smart sync using manifest from a known tag",
         )
@@ -577,10 +621,10 @@ later is required to fix a server side protocol bug.
         )
         g.add_option(
             "--repo-upgraded",
-            dest="repo_upgraded",
             action="store_true",
             help=optparse.SUPPRESS_HELP,
         )
+        RepoHook.AddOptionGroup(p, "post-sync")
 
     def _GetBranch(self, manifest_project):
         """Returns the branch name for getting the approved smartsync manifest.
@@ -848,15 +892,7 @@ later is required to fix a server side protocol bug.
         )
 
         sync_event = _threading.Event()
-
-        def _MonitorSyncLoop():
-            while True:
-                pm.update(inc=0, msg=self._GetSyncProgressMessage())
-                if sync_event.wait(timeout=1):
-                    return
-
-        sync_progress_thread = _threading.Thread(target=_MonitorSyncLoop)
-        sync_progress_thread.daemon = True
+        sync_progress_thread = self._CreateSyncProgressThread(pm, sync_event)
 
         def _ProcessResults(pool, pm, results_sets):
             ret = True
@@ -955,25 +991,16 @@ later is required to fix a server side protocol bug.
         Returns:
             List of all projects that should be checked out.
         """
-        rp = manifest.repoProject
-
         to_fetch = []
-        now = time.time()
-        if _ONE_DAY_S <= (now - rp.LastFetch):
-            to_fetch.append(rp)
         to_fetch.extend(all_projects)
         to_fetch.sort(key=self._fetch_times.Get, reverse=True)
 
         result = self._Fetch(to_fetch, opt, err_event, ssh_proxy, errors)
         success = result.success
         fetched = result.projects
-
         if not success:
             err_event.set()
 
-        # Call self update, unless requested not to
-        if os.environ.get("REPO_SKIP_SELF_UPDATE", "0") == "0":
-            _PostRepoFetch(rp, opt.repo_verify)
         if opt.network_only:
             # Bail out now; the rest touches the working tree.
             if err_event.is_set():
@@ -1152,6 +1179,16 @@ later is required to fix a server side protocol bug.
         self._local_sync_state.Save()
         return proc_res and not err_results
 
+    def _PrintManifestNotices(self, opt):
+        """Print all manifest notices, but only once."""
+        printed_notices = set()
+        # Print all manifest notices, but only once.
+        # Sort by path_prefix to ensure consistent ordering.
+        for m in sorted(self.ManifestList(opt), key=lambda x: x.path_prefix):
+            if m.notice and m.notice not in printed_notices:
+                print(m.notice)
+                printed_notices.add(m.notice)
+
     @staticmethod
     def _GetPreciousObjectsState(project: Project, opt):
         """Get the preciousObjects state for the project.
@@ -1324,6 +1361,61 @@ later is required to fix a server side protocol bug.
1324 t.join() 1361 t.join()
1325 pm.end() 1362 pm.end()
1326 1363
1364 def _UpdateRepoProject(self, opt, manifest, errors):
1365 """Fetch the repo project and check for updates."""
1366 if opt.local_only:
1367 return
1368
1369 rp = manifest.repoProject
1370 now = time.time()
1371 # If we've fetched in the last day, don't bother fetching again.
1372 if (now - rp.LastFetch) < _ONE_DAY_S:
1373 return
1374
1375 with multiprocessing.Manager() as manager:
1376 with ssh.ProxyManager(manager) as ssh_proxy:
1377 ssh_proxy.sock()
1378 start = time.time()
1379 buf = TeeStringIO(sys.stdout if opt.verbose else None)
1380 sync_result = rp.Sync_NetworkHalf(
1381 quiet=opt.quiet,
1382 verbose=opt.verbose,
1383 output_redir=buf,
1384 current_branch_only=self._GetCurrentBranchOnly(
1385 opt, manifest
1386 ),
1387 force_sync=opt.force_sync,
1388 clone_bundle=opt.clone_bundle,
1389 tags=opt.tags,
1390 archive=manifest.IsArchive,
1391 optimized_fetch=opt.optimized_fetch,
1392 retry_fetches=opt.retry_fetches,
1393 prune=opt.prune,
1394 ssh_proxy=ssh_proxy,
1395 clone_filter=manifest.CloneFilter,
1396 partial_clone_exclude=manifest.PartialCloneExclude,
1397 clone_filter_for_depth=manifest.CloneFilterForDepth,
1398 )
1399 if sync_result.error:
1400 errors.append(sync_result.error)
1401
1402 finish = time.time()
1403 self.event_log.AddSync(
1404 rp,
1405 event_log.TASK_SYNC_NETWORK,
1406 start,
1407 finish,
1408 sync_result.success,
1409 )
1410 if not sync_result.success:
1411 logger.error("error: Cannot fetch repo tool %s", rp.name)
1412 return
1413
1414 # After fetching, check if a new version of repo is available and
1415 # restart. This is only done if the user hasn't explicitly disabled it.
1416 if os.environ.get("REPO_SKIP_SELF_UPDATE", "0") == "0":
1417 _PostRepoFetch(rp, opt.repo_verify)
1418
1327 def _ReloadManifest(self, manifest_name, manifest): 1419 def _ReloadManifest(self, manifest_name, manifest):
1328 """Reload the manfiest from the file specified by the |manifest_name|. 1420 """Reload the manfiest from the file specified by the |manifest_name|.
1329 1421
@@ -1757,6 +1849,21 @@ later is required to fix a server side protocol bug.
         except (KeyboardInterrupt, Exception) as e:
             raise RepoUnhandledExceptionError(e, aggregate_errors=errors)
 
+        # Run post-sync hook only after successful sync
+        self._RunPostSyncHook(opt)
+
+    def _RunPostSyncHook(self, opt):
+        """Run post-sync hook if configured in manifest <repo-hooks>."""
+        hook = RepoHook.FromSubcmd(
+            hook_type="post-sync",
+            manifest=self.manifest,
+            opt=opt,
+            abort_if_user_denies=False,
+        )
+        success = hook.Run(repo_topdir=self.client.topdir)
+        if not success:
+            print("Warning: post-sync hook reported failure.")
+
     def _ExecuteHelper(self, opt, args, errors):
         manifest = self.outer_manifest
         if not opt.outer_manifest:
@@ -1788,8 +1895,6 @@ later is required to fix a server side protocol bug.
                         e,
                     )
 
-        err_event = multiprocessing.Event()
-
         rp = manifest.repoProject
         rp.PreSync()
         cb = rp.CurrentBranch
@@ -1828,6 +1933,9 @@ later is required to fix a server side protocol bug.
         # might be in the manifest.
         self._ValidateOptionsWithManifest(opt, mp)
 
+        # Update the repo project and check for new versions of repo.
+        self._UpdateRepoProject(opt, manifest, errors)
+
         superproject_logging_data = {}
         self._UpdateProjectsRevisionId(
             opt, args, superproject_logging_data, manifest
@@ -1841,10 +1949,6 @@ later is required to fix a server side protocol bug.
             all_manifests=not opt.this_manifest_only,
         )
 
-        err_network_sync = False
-        err_update_projects = False
-        err_update_linkfiles = False
-
         # Log the repo projects by existing and new.
         existing = [x for x in all_projects if x.Exists]
         mp.config.SetString("repo.existingprojectcount", str(len(existing)))
@@ -1854,6 +1958,185 @@ later is required to fix a server side protocol bug.
 
         self._fetch_times = _FetchTimes(manifest)
         self._local_sync_state = LocalSyncState(manifest)
+
+        if opt.interleaved:
+            sync_method = self._SyncInterleaved
+        else:
+            sync_method = self._SyncPhased
+
+        sync_method(
+            opt,
+            args,
+            errors,
+            manifest,
+            mp,
+            all_projects,
+            superproject_logging_data,
+        )
+
+        # Log the previous sync analysis state from the config.
+        self.git_event_log.LogDataConfigEvents(
+            mp.config.GetSyncAnalysisStateData(), "previous_sync_state"
+        )
+
+        # Update and log with the new sync analysis state.
+        mp.config.UpdateSyncAnalysisState(opt, superproject_logging_data)
+        self.git_event_log.LogDataConfigEvents(
+            mp.config.GetSyncAnalysisStateData(), "current_sync_state"
+        )
+
+        self._local_sync_state.PruneRemovedProjects()
+        if self._local_sync_state.IsPartiallySynced():
+            logger.warning(
+                "warning: Partial syncs are not supported. For the best "
+                "experience, sync the entire tree."
+            )
+
+        if not opt.quiet:
+            print("repo sync has finished successfully.")
+
+    def _CreateSyncProgressThread(
+        self, pm: Progress, stop_event: _threading.Event
+    ) -> _threading.Thread:
+        """Creates and returns a daemon thread to update a Progress object.
+
+        The returned thread is not yet started. The thread will periodically
+        update the progress bar with information from _GetSyncProgressMessage
+        until the stop_event is set.
+
+        Args:
+            pm: The Progress object to update.
+            stop_event: The threading.Event to signal the monitor to stop.
+
+        Returns:
+            The configured _threading.Thread object.
+        """
+
+        def _monitor_loop():
+            """The target function for the monitor thread."""
+            while True:
+                # Update the progress bar with the current status message.
+                pm.update(inc=0, msg=self._GetSyncProgressMessage())
+                # Wait for 1 second or until the stop_event is set.
+                if stop_event.wait(timeout=1):
+                    return
+
+        return _threading.Thread(target=_monitor_loop, daemon=True)
+
+    def _UpdateManifestLists(
+        self,
+        opt: optparse.Values,
+        err_event: multiprocessing.Event,
+        errors: List[Exception],
+    ) -> Tuple[bool, bool]:
+        """Updates project lists and copy/link files for all manifests.
+
+        Args:
+            opt: Program options from optparse.
+            err_event: An event to set if any error occurs.
+            errors: A list to append any encountered exceptions to.
+
+        Returns:
+            A tuple (err_update_projects, err_update_linkfiles) indicating
+            an error for each task.
+        """
+        err_update_projects = False
+        err_update_linkfiles = False
+        for m in self.ManifestList(opt):
+            if m.IsMirror or m.IsArchive:
+                continue
+
+            try:
+                self.UpdateProjectList(opt, m)
+            except Exception as e:
+                err_event.set()
+                err_update_projects = True
+                errors.append(e)
+                if isinstance(e, DeleteWorktreeError):
+                    errors.extend(e.aggregate_errors)
+                if opt.fail_fast:
+                    logger.error("error: Local checkouts *not* updated.")
+                    raise SyncFailFastError(aggregate_errors=errors)
+
+            try:
+                self.UpdateCopyLinkfileList(m)
+            except Exception as e:
+                err_event.set()
+                err_update_linkfiles = True
+                errors.append(e)
+                if opt.fail_fast:
+                    logger.error(
+                        "error: Local update copyfile or linkfile failed."
+                    )
+                    raise SyncFailFastError(aggregate_errors=errors)
+        return err_update_projects, err_update_linkfiles
+
+    def _ReportErrors(
+        self,
+        errors,
+        err_network_sync=False,
+        failing_network_repos=None,
+        err_checkout=False,
+        failing_checkout_repos=None,
+        err_update_projects=False,
+        err_update_linkfiles=False,
+    ):
+        """Logs detailed error messages and raises a SyncError."""
+
+        def print_and_log(err_msg):
+            self.git_event_log.ErrorEvent(err_msg)
+            logger.error("%s", err_msg)
+
+        print_and_log("error: Unable to fully sync the tree")
+        if err_network_sync:
+            print_and_log("error: Downloading network changes failed.")
+            if failing_network_repos:
+                logger.error(
+                    "Failing repos (network):\n%s",
+                    "\n".join(sorted(failing_network_repos)),
+                )
+        if err_update_projects:
+            print_and_log("error: Updating local project lists failed.")
+        if err_update_linkfiles:
+            print_and_log("error: Updating copyfiles or linkfiles failed.")
+        if err_checkout:
+            print_and_log("error: Checking out local projects failed.")
+            if failing_checkout_repos:
+                logger.error(
+                    "Failing repos (checkout):\n%s",
+                    "\n".join(sorted(failing_checkout_repos)),
+                )
+        logger.error(
+            'Try re-running with "-j1 --fail-fast" to exit at the first error.'
+        )
+        raise SyncError(aggregate_errors=errors)
+
+    def _SyncPhased(
+        self,
+        opt,
+        args,
+        errors,
+        manifest,
+        mp,
+        all_projects,
+        superproject_logging_data,
+    ):
+        """Sync projects by separating network and local operations.
+
+        This method performs sync in two distinct, sequential phases:
+        1. Network Phase: Fetches updates for all projects from their remotes.
+        2. Local Phase: Checks out the updated revisions into the local
+           worktrees for all projects.
+
+        This approach ensures that the local work-tree is not modified until
+        all network operations are complete, providing a transactional-like
+        safety net for the checkout state.
+        """
+        err_event = multiprocessing.Event()
+        err_network_sync = False
+        err_update_projects = False
+        err_update_linkfiles = False
+
         if not opt.local_only:
             with multiprocessing.Manager() as manager:
                 with ssh.ProxyManager(manager) as ssh_proxy:
@@ -1886,34 +2169,11 @@ later is required to fix a server side protocol bug.
                 )
                 raise SyncFailFastError(aggregate_errors=errors)
 
-        for m in self.ManifestList(opt):
-            if m.IsMirror or m.IsArchive:
-                # Bail out now, we have no working tree.
-                continue
-
-            try:
-                self.UpdateProjectList(opt, m)
-            except Exception as e:
-                err_event.set()
-                err_update_projects = True
-                errors.append(e)
-                if isinstance(e, DeleteWorktreeError):
-                    errors.extend(e.aggregate_errors)
-                if opt.fail_fast:
-                    logger.error("error: Local checkouts *not* updated.")
-                    raise SyncFailFastError(aggregate_errors=errors)
-
-            try:
-                self.UpdateCopyLinkfileList(m)
-            except Exception as e:
-                err_update_linkfiles = True
-                errors.append(e)
-                err_event.set()
-                if opt.fail_fast:
-                    logger.error(
-                        "error: Local update copyfile or linkfile failed."
-                    )
-                    raise SyncFailFastError(aggregate_errors=errors)
+        err_update_projects, err_update_linkfiles = self._UpdateManifestLists(
+            opt,
+            err_event,
+            errors,
+        )
 
         err_results = []
         # NB: We don't exit here because this is the last step.
@@ -1923,61 +2183,416 @@ later is required to fix a server side protocol bug.
         if err_checkout:
             err_event.set()
 
-        printed_notices = set()
-        # If there's a notice that's supposed to print at the end of the sync,
-        # print it now... But avoid printing duplicate messages, and preserve
-        # order.
-        for m in sorted(self.ManifestList(opt), key=lambda x: x.path_prefix):
-            if m.notice and m.notice not in printed_notices:
-                print(m.notice)
-                printed_notices.add(m.notice)
+        self._PrintManifestNotices(opt)
 
         # If we saw an error, exit with code 1 so that other scripts can check.
         if err_event.is_set():
-
-            def print_and_log(err_msg):
-                self.git_event_log.ErrorEvent(err_msg)
-                logger.error("%s", err_msg)
-
-            print_and_log("error: Unable to fully sync the tree")
-            if err_network_sync:
-                print_and_log("error: Downloading network changes failed.")
-            if err_update_projects:
-                print_and_log("error: Updating local project lists failed.")
-            if err_update_linkfiles:
-                print_and_log("error: Updating copyfiles or linkfiles failed.")
-            if err_checkout:
-                print_and_log("error: Checking out local projects failed.")
-                if err_results:
-                    # Don't log repositories, as it may contain sensitive info.
-                    logger.error("Failing repos:\n%s", "\n".join(err_results))
-            # Not useful to log.
-            logger.error(
-                'Try re-running with "-j1 --fail-fast" to exit at the first '
-                "error."
-            )
-            raise SyncError(aggregate_errors=errors)
+            self._ReportErrors(
+                errors,
+                err_network_sync=err_network_sync,
+                err_checkout=err_checkout,
+                failing_checkout_repos=err_results,
+                err_update_projects=err_update_projects,
+                err_update_linkfiles=err_update_linkfiles,
+            )
 
-        # Log the previous sync analysis state from the config.
-        self.git_event_log.LogDataConfigEvents(
-            mp.config.GetSyncAnalysisStateData(), "previous_sync_state"
-        )
-
-        # Update and log with the new sync analysis state.
-        mp.config.UpdateSyncAnalysisState(opt, superproject_logging_data)
-        self.git_event_log.LogDataConfigEvents(
-            mp.config.GetSyncAnalysisStateData(), "current_sync_state"
-        )
-
-        self._local_sync_state.PruneRemovedProjects()
-        if self._local_sync_state.IsPartiallySynced():
-            logger.warning(
-                "warning: Partial syncs are not supported. For the best "
-                "experience, sync the entire tree."
-            )
-
-        if not opt.quiet:
-            print("repo sync has finished successfully.")
+    @classmethod
+    def _SyncOneProject(cls, opt, project_index, project) -> _SyncResult:
+        """Syncs a single project for interleaved sync."""
+        fetch_success = False
+        remote_fetched = False
+        fetch_error = None
+        fetch_start = None
+        fetch_finish = None
+        network_output = ""
+
+        if opt.local_only:
+            fetch_success = True
+        else:
+            fetch_start = time.time()
+            network_output_capture = io.StringIO()
+            try:
+                ssh_proxy = cls.get_parallel_context().get("ssh_proxy")
+                sync_result = project.Sync_NetworkHalf(
+                    quiet=opt.quiet,
+                    verbose=opt.verbose,
+                    output_redir=network_output_capture,
+                    current_branch_only=cls._GetCurrentBranchOnly(
+                        opt, project.manifest
+                    ),
+                    force_sync=opt.force_sync,
+                    clone_bundle=opt.clone_bundle,
+                    tags=opt.tags,
+                    archive=project.manifest.IsArchive,
+                    optimized_fetch=opt.optimized_fetch,
+                    retry_fetches=opt.retry_fetches,
+                    prune=opt.prune,
+                    ssh_proxy=ssh_proxy,
+                    clone_filter=project.manifest.CloneFilter,
+                    partial_clone_exclude=project.manifest.PartialCloneExclude,
+                    clone_filter_for_depth=project.manifest.CloneFilterForDepth,
+                )
+                fetch_success = sync_result.success
+                remote_fetched = sync_result.remote_fetched
+                fetch_error = sync_result.error
+            except KeyboardInterrupt:
+                logger.error(
+                    "Keyboard interrupt while processing %s", project.name
+                )
+            except GitError as e:
+                fetch_error = e
+                logger.error("error.GitError: Cannot fetch %s", e)
+            except Exception as e:
+                fetch_error = e
+                logger.error(
+                    "error: Cannot fetch %s (%s: %s)",
+                    project.name,
+                    type(e).__name__,
+                    e,
+                )
+            finally:
+                fetch_finish = time.time()
+                network_output = network_output_capture.getvalue()
+
+        checkout_success = False
+        checkout_error = None
+        checkout_start = None
+        checkout_finish = None
+        checkout_stderr = ""
+
+        if fetch_success and not opt.network_only:
+            checkout_start = time.time()
+            stderr_capture = io.StringIO()
+            try:
+                with contextlib.redirect_stderr(stderr_capture):
+                    syncbuf = SyncBuffer(
+                        project.manifest.manifestProject.config,
+                        detach_head=opt.detach_head,
+                    )
+                    local_half_errors = []
+                    project.Sync_LocalHalf(
+                        syncbuf,
+                        force_sync=opt.force_sync,
+                        force_checkout=opt.force_checkout,
+                        force_rebase=opt.rebase,
+                        errors=local_half_errors,
+                        verbose=opt.verbose,
+                    )
+                    checkout_success = syncbuf.Finish()
+                    if local_half_errors:
+                        checkout_error = SyncError(
+                            aggregate_errors=local_half_errors
+                        )
+            except KeyboardInterrupt:
+                logger.error(
+                    "Keyboard interrupt while processing %s", project.name
+                )
+            except GitError as e:
+                checkout_error = e
+                logger.error(
+                    "error.GitError: Cannot checkout %s: %s", project.name, e
+                )
+            except Exception as e:
+                checkout_error = e
+                logger.error(
+                    "error: Cannot checkout %s: %s: %s",
+                    project.name,
+                    type(e).__name__,
+                    e,
+                )
+            finally:
+                checkout_finish = time.time()
+                checkout_stderr = stderr_capture.getvalue()
+        elif fetch_success:
+            checkout_success = True
+
+        # Consolidate all captured output.
+        captured_parts = []
+        if network_output:
+            captured_parts.append(network_output)
+        if checkout_stderr:
+            captured_parts.append(checkout_stderr)
+        stderr_text = "\n".join(captured_parts)
+
+        return _SyncResult(
+            project_index=project_index,
+            relpath=project.relpath,
+            fetch_success=fetch_success,
+            remote_fetched=remote_fetched,
+            checkout_success=checkout_success,
+            fetch_error=fetch_error,
+            checkout_error=checkout_error,
+            stderr_text=stderr_text.strip(),
+            fetch_start=fetch_start,
+            fetch_finish=fetch_finish,
+            checkout_start=checkout_start,
+            checkout_finish=checkout_finish,
+        )
+
+    @classmethod
+    def _SyncProjectList(cls, opt, project_indices) -> _InterleavedSyncResult:
+        """Worker for interleaved sync.
+
+        This function is responsible for syncing a group of projects that share
+        a git object directory.
+
+        Args:
+            opt: Program options returned from optparse. See _Options().
+            project_indices: A list of indices into the projects list stored in
+                the parallel context.
+
+        Returns:
+            An `_InterleavedSyncResult` containing the results for each project.
+        """
+        results = []
+        context = cls.get_parallel_context()
+        projects = context["projects"]
+        sync_dict = context["sync_dict"]
+
+        assert project_indices, "_SyncProjectList called with no indices."
+
+        # Use the first project as the representative for the progress bar.
+        first_project = projects[project_indices[0]]
+        key = f"{first_project.name} @ {first_project.relpath}"
+        sync_dict[key] = time.time()
+
+        try:
+            for idx in project_indices:
+                project = projects[idx]
+                results.append(cls._SyncOneProject(opt, idx, project))
+        finally:
+            del sync_dict[key]
+
+        return _InterleavedSyncResult(results=results)
+
+    def _ProcessSyncInterleavedResults(
+        self,
+        synced_relpaths: Set[str],
+        err_event: _threading.Event,
+        errors: List[Exception],
+        opt: optparse.Values,
+        pool: Optional[multiprocessing.Pool],
+        pm: Progress,
+        results_sets: List[_InterleavedSyncResult],
+    ):
+        """Callback to process results from interleaved sync workers."""
+        ret = True
+        projects = self.get_parallel_context()["projects"]
+        for result_group in results_sets:
+            for result in result_group.results:
+                pm.update()
+                project = projects[result.project_index]
+
+                if opt.verbose and result.stderr_text:
+                    pm.display_message(result.stderr_text)
+
+                if result.fetch_start:
+                    self._fetch_times.Set(
+                        project,
+                        result.fetch_finish - result.fetch_start,
+                    )
+                    self._local_sync_state.SetFetchTime(project)
+                    self.event_log.AddSync(
+                        project,
+                        event_log.TASK_SYNC_NETWORK,
+                        result.fetch_start,
+                        result.fetch_finish,
+                        result.fetch_success,
+                    )
+                if result.checkout_start:
+                    if result.checkout_success:
+                        self._local_sync_state.SetCheckoutTime(project)
+                    self.event_log.AddSync(
+                        project,
+                        event_log.TASK_SYNC_LOCAL,
+                        result.checkout_start,
+                        result.checkout_finish,
+                        result.checkout_success,
+                    )
+
+                if result.fetch_success and result.checkout_success:
+                    synced_relpaths.add(result.relpath)
+                else:
+                    ret = False
+                    err_event.set()
+                    if result.fetch_error:
+                        errors.append(result.fetch_error)
+                        self._interleaved_err_network = True
+                        self._interleaved_err_network_results.append(
+                            result.relpath
+                        )
+                    if result.checkout_error:
+                        errors.append(result.checkout_error)
+                        self._interleaved_err_checkout = True
+                        self._interleaved_err_checkout_results.append(
+                            result.relpath
+                        )
+
+            if not ret and opt.fail_fast:
+                if pool:
+                    pool.close()
+                break
+        return ret
+
+    def _SyncInterleaved(
+        self,
+        opt,
+        args,
+        errors,
+        manifest,
+        mp,
+        all_projects,
+        superproject_logging_data,
+    ):
+        """Sync projects by performing network and local operations in parallel.
+
+        This method processes each project (or groups of projects that share git
+        objects) independently. For each project, it performs the fetch and
+        checkout operations back-to-back. These independent tasks are run in
+        parallel.
+
+        It respects two constraints for correctness:
+        1. Projects in nested directories (e.g. 'foo' and 'foo/bar') are
+           processed in hierarchical order.
+        2. Projects that share git objects are processed serially to prevent
+           race conditions.
+        """
+        # Temporary state for tracking errors in interleaved mode.
+        self._interleaved_err_network = False
+        self._interleaved_err_network_results = []
+        self._interleaved_err_checkout = False
+        self._interleaved_err_checkout_results = []
+
+        err_event = multiprocessing.Event()
+        synced_relpaths = set()
+        project_list = list(all_projects)
+        pm = Progress(
+            "Syncing",
+            len(project_list),
+            delay=False,
+            quiet=opt.quiet,
+            show_elapsed=True,
+            elide=True,
+        )
+        previously_pending_relpaths = set()
+
+        sync_event = _threading.Event()
+        sync_progress_thread = self._CreateSyncProgressThread(pm, sync_event)
+
+        with multiprocessing.Manager() as manager, ssh.ProxyManager(
+            manager
+        ) as ssh_proxy:
+            ssh_proxy.sock()
+            with self.ParallelContext():
+                self.get_parallel_context()["ssh_proxy"] = ssh_proxy
+                # TODO(gavinmak): Use multprocessing.Queue instead of dict.
+                self.get_parallel_context()[
+                    "sync_dict"
+                ] = multiprocessing.Manager().dict()
+                sync_progress_thread.start()
+
+                try:
+                    # Outer loop for dynamic project discovery. This continues
+                    # until no unsynced projects remain.
+                    while True:
+                        projects_to_sync = [
+                            p
+                            for p in project_list
+                            if p.relpath not in synced_relpaths
+                        ]
+                        if not projects_to_sync:
+                            break
+
+                        pending_relpaths = {p.relpath for p in projects_to_sync}
+                        if previously_pending_relpaths == pending_relpaths:
+                            logger.error(
+                                "Stall detected in interleaved sync, not all "
+                                "projects could be synced."
+                            )
+                            err_event.set()
+                            break
+                        previously_pending_relpaths = pending_relpaths
+
+                        self.get_parallel_context()[
+                            "projects"
+                        ] = projects_to_sync
+                        project_index_map = {
+                            p: i for i, p in enumerate(projects_to_sync)
+                        }
+
+                        # Inner loop to process projects in a hierarchical
+                        # order. This iterates through levels of project
+                        # dependencies (e.g. 'foo' then 'foo/bar'). All projects
+                        # in one level can be processed in parallel, but we must
+                        # wait for a level to complete before starting the next.
+                        for level_projects in _SafeCheckoutOrder(
+                            projects_to_sync
+                        ):
+                            if not level_projects:
+                                continue
+
+                            objdir_project_map = collections.defaultdict(list)
+                            for p in level_projects:
+                                objdir_project_map[p.objdir].append(
+                                    project_index_map[p]
+                                )
+
+                            work_items = list(objdir_project_map.values())
+                            if not work_items:
+                                continue
+
+                            jobs = max(1, min(opt.jobs, len(work_items)))
+                            callback = functools.partial(
+                                self._ProcessSyncInterleavedResults,
+                                synced_relpaths,
+                                err_event,
+                                errors,
+                                opt,
+                            )
+                            if not self.ExecuteInParallel(
+                                jobs,
+                                functools.partial(self._SyncProjectList, opt),
+                                work_items,
+                                callback=callback,
+                                output=pm,
+                                chunksize=1,
+                            ):
+                                err_event.set()
+
+                            if err_event.is_set() and opt.fail_fast:
+                                raise SyncFailFastError(aggregate_errors=errors)
+
+                        self._ReloadManifest(None, manifest)
+                        project_list = self.GetProjects(
+                            args,
+                            missing_ok=True,
+                            submodules_ok=opt.fetch_submodules,
+                            manifest=manifest,
+                            all_manifests=not opt.this_manifest_only,
+                        )
+                finally:
+                    sync_event.set()
+                    sync_progress_thread.join()
+
+        pm.end()
+
+        err_update_projects, err_update_linkfiles = self._UpdateManifestLists(
+            opt, err_event, errors
+        )
+        if not self.outer_client.manifest.IsArchive:
+            self._GCProjects(project_list, opt, err_event)
+
+        self._PrintManifestNotices(opt)
+        if err_event.is_set():
+            self._ReportErrors(
+                errors,
+                err_network_sync=self._interleaved_err_network,
+                failing_network_repos=self._interleaved_err_network_results,
+                err_checkout=self._interleaved_err_checkout,
+                failing_checkout_repos=self._interleaved_err_checkout_results,
+                err_update_projects=err_update_projects,
+                err_update_linkfiles=err_update_linkfiles,
+            )
 
 
 def _PostRepoUpgrade(manifest, quiet=False):
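
To make the scheduling constraints described in _SyncInterleaved concrete, here is a standalone sketch of how its work items can be grouped: projects are split into parent-before-child levels, and within a level all projects sharing one git object directory stay in a single serial work item. The Proj type and the depth-based leveling are illustrative stand-ins only; in sync.py the real ordering comes from _SafeCheckoutOrder and the grouping key is project.objdir:

    import collections
    from typing import List, NamedTuple

    class Proj(NamedTuple):
        relpath: str
        objdir: str

    def plan_interleaved_work(projects: List[Proj]) -> List[List[List[Proj]]]:
        """Group projects into levels of serial work items (illustrative)."""
        # Level by path depth so 'foo' is handled before 'foo/bar'.
        by_depth = collections.defaultdict(list)
        for p in projects:
            by_depth[p.relpath.count("/")].append(p)

        plan = []
        for depth in sorted(by_depth):
            # Projects sharing an object directory must not fetch in
            # parallel, so each work item is the full list for one objdir.
            by_objdir = collections.defaultdict(list)
            for p in by_depth[depth]:
                by_objdir[p.objdir].append(p)
            plan.append(list(by_objdir.values()))
        return plan

    # Example: 'foo' and 'bar' share an objdir -> one serial work item;
    # 'foo/sub' lands in a later level.
    print(plan_interleaved_work([
        Proj("foo", "shared.git"),
        Proj("bar", "shared.git"),
        Proj("foo/sub", "sub.git"),
    ]))
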
diff --git a/subcmds/upload.py b/subcmds/upload.py
index bac2f8ac..4f817ddf 100644
--- a/subcmds/upload.py
+++ b/subcmds/upload.py
@@ -267,7 +267,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
             "--cc",
             type="string",
             action="append",
-            dest="cc",
             help="also send email to these email addresses",
         )
         p.add_option(
@@ -281,7 +280,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
         p.add_option(
             "-c",
             "--current-branch",
-            dest="current_branch",
             action="store_true",
             help="upload current git branch",
         )
@@ -310,7 +308,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
             "-p",
             "--private",
             action="store_true",
-            dest="private",
             default=False,
             help="upload as a private change (deprecated; use --wip)",
         )
@@ -318,7 +315,6 @@ Gerrit Code Review: https://www.gerritcodereview.com/
             "-w",
             "--wip",
             action="store_true",
-            dest="wip",
             default=False,
             help="upload as a work-in-progress change",
         )
@@ -628,6 +624,16 @@ Gerrit Code Review: https://www.gerritcodereview.com/
                     branch.uploaded = False
                     return
 
+        # If using superproject, add the root repo as a push option.
+        manifest = branch.project.manifest
+        push_options = list(opt.push_options)
+        if manifest.manifestProject.use_superproject:
+            sp = manifest.superproject
+            if sp:
+                r_id = sp.repo_id
+                if r_id:
+                    push_options.append(f"custom-keyed-value=rootRepo:{r_id}")
+
         branch.UploadForReview(
             people,
             dryrun=opt.dryrun,
@@ -640,7 +646,7 @@ Gerrit Code Review: https://www.gerritcodereview.com/
             ready=opt.ready,
             dest_branch=destination,
             validate_certs=opt.validate_certs,
-            push_options=opt.push_options,
+            push_options=push_options,
             patchset_description=opt.patchset_description,
         )
 
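
The push_options list assembled above ultimately travels to Gerrit as push options on the git push invocation. As a rough, hedged illustration of that mechanism (not repo's UploadForReview implementation), each entry becomes a -o key=value argument:

    import subprocess
    from typing import List

    def push_with_options(remote: str, refspec: str, push_options: List[str]) -> None:
        """Illustrative only: forward Gerrit push options via 'git push -o'."""
        cmd = ["git", "push"]
        for opt in push_options:
            cmd.extend(["-o", opt])
        cmd.extend([remote, refspec])
        subprocess.run(cmd, check=True)

    # e.g. push_with_options("origin", "HEAD:refs/for/main",
    #                        ["custom-keyed-value=rootRepo:<repo-id>"])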