path: root/subcmds/sync.py
Diffstat (limited to 'subcmds/sync.py')
 -rw-r--r-- subcmds/sync.py | 3130 ++++++++++++++++++++++++-----------------
 1 file changed, 1730 insertions(+), 1400 deletions(-)
diff --git a/subcmds/sync.py b/subcmds/sync.py
index 9a8ca8f7..eabaa68b 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -33,18 +33,21 @@ import xml.parsers.expat
 import xmlrpc.client
 
 try:
-  import threading as _threading
+    import threading as _threading
 except ImportError:
-  import dummy_threading as _threading
+    import dummy_threading as _threading
 
 try:
-  import resource
+    import resource
+
+    def _rlimit_nofile():
+        return resource.getrlimit(resource.RLIMIT_NOFILE)
 
-  def _rlimit_nofile():
-    return resource.getrlimit(resource.RLIMIT_NOFILE)
 except ImportError:
-  def _rlimit_nofile():
-    return (256, 256)
+
+    def _rlimit_nofile():
+        return (256, 256)
+
 
 import event_log
 from git_command import git_require
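The try/except pattern above probes for the optional resource module and falls back to a fixed file-descriptor limit where it is unavailable. A minimal standalone sketch of the same probe (the (256, 256) fallback mirrors the code above):

    try:
        import resource

        def _rlimit_nofile():
            # Returns the (soft, hard) limit for open file descriptors.
            return resource.getrlimit(resource.RLIMIT_NOFILE)

    except ImportError:
        # Platforms without the resource module (e.g. Windows) get a
        # conservative fixed limit, matching the fallback above.
        def _rlimit_nofile():
            return (256, 256)

    print(_rlimit_nofile())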
@@ -54,7 +57,12 @@ import git_superproject
 import gitc_utils
 from project import Project
 from project import RemoteSpec
-from command import Command, DEFAULT_LOCAL_JOBS, MirrorSafeCommand, WORKER_BATCH_SIZE
+from command import (
+    Command,
+    DEFAULT_LOCAL_JOBS,
+    MirrorSafeCommand,
+    WORKER_BATCH_SIZE,
+)
 from error import RepoChangedException, GitError
 import platform_utils
 from project import SyncBuffer
@@ -68,70 +76,74 @@ _ONE_DAY_S = 24 * 60 * 60
 
 # Env var to implicitly turn auto-gc back on. This was added to allow a user to
 # revert a change in default behavior in v2.29.9. Remove after 2023-04-01.
-_REPO_AUTO_GC = 'REPO_AUTO_GC'
-_AUTO_GC = os.environ.get(_REPO_AUTO_GC) == '1'
+_REPO_AUTO_GC = "REPO_AUTO_GC"
+_AUTO_GC = os.environ.get(_REPO_AUTO_GC) == "1"
 
 
 class _FetchOneResult(NamedTuple):
-  """_FetchOne return value.
+    """_FetchOne return value.
 
-  Attributes:
-    success (bool): True if successful.
-    project (Project): The fetched project.
-    start (float): The starting time.time().
-    finish (float): The ending time.time().
-    remote_fetched (bool): True if the remote was actually queried.
-  """
-  success: bool
-  project: Project
-  start: float
-  finish: float
-  remote_fetched: bool
+    Attributes:
+        success (bool): True if successful.
+        project (Project): The fetched project.
+        start (float): The starting time.time().
+        finish (float): The ending time.time().
+        remote_fetched (bool): True if the remote was actually queried.
+    """
+
+    success: bool
+    project: Project
+    start: float
+    finish: float
+    remote_fetched: bool
 
 
 class _FetchResult(NamedTuple):
-  """_Fetch return value.
+    """_Fetch return value.
+
+    Attributes:
+        success (bool): True if successful.
+        projects (Set[str]): The names of the git directories of fetched projects.
+    """
 
-  Attributes:
-    success (bool): True if successful.
-    projects (Set[str]): The names of the git directories of fetched projects.
-  """
-  success: bool
-  projects: Set[str]
+    success: bool
+    projects: Set[str]
 
 
 class _FetchMainResult(NamedTuple):
-  """_FetchMain return value.
+    """_FetchMain return value.
+
+    Attributes:
+        all_projects (List[Project]): The fetched projects.
+    """
 
-  Attributes:
-    all_projects (List[Project]): The fetched projects.
-  """
-  all_projects: List[Project]
+    all_projects: List[Project]
 
 
 class _CheckoutOneResult(NamedTuple):
-  """_CheckoutOne return value.
+    """_CheckoutOne return value.
+
+    Attributes:
+        success (bool): True if successful.
+        project (Project): The project.
+        start (float): The starting time.time().
+        finish (float): The ending time.time().
+    """
 
-  Attributes:
-    success (bool): True if successful.
-    project (Project): The project.
-    start (float): The starting time.time().
-    finish (float): The ending time.time().
-  """
-  success: bool
-  project: Project
-  start: float
-  finish: float
+    success: bool
+    project: Project
+    start: float
+    finish: float
 
 
 class Sync(Command, MirrorSafeCommand):
-  COMMON = True
-  MULTI_MANIFEST_SUPPORT = True
-  helpSummary = "Update working tree to the latest revision"
-  helpUsage = """
+    COMMON = True
+    MULTI_MANIFEST_SUPPORT = True
+    helpSummary = "Update working tree to the latest revision"
+    helpUsage = """
 %prog [<project>...]
 """
-  helpDescription = """
+    helpDescription = """
 The '%prog' command synchronizes local project directories
 with the remote repositories specified in the manifest. If a local
 project does not yet exist, it will clone a new local directory from
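The `_FetchOneResult`-style classes above are typing.NamedTuple subclasses: each worker returns one immutable record, and callers read named fields instead of tuple indices. A minimal sketch of the same pattern (the field values here are hypothetical; the real code stores a Project object, not a string):

    import time
    from typing import NamedTuple


    class FetchOneResult(NamedTuple):
        success: bool
        project: str
        start: float
        finish: float
        remote_fetched: bool


    start = time.time()
    result = FetchOneResult(True, "platform/build", start, time.time(), True)
    print(result.success, result.finish - result.start)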
@@ -230,1293 +242,1604 @@ If the remote SSH daemon is Gerrit Code Review, version 2.0.10 or
 later is required to fix a server side protocol bug.
 
 """
-  # A value of 0 means we want parallel jobs, but we'll determine the default
-  # value later on.
-  PARALLEL_JOBS = 0
+    # A value of 0 means we want parallel jobs, but we'll determine the default
+    # value later on.
+    PARALLEL_JOBS = 0
 
-  def _Options(self, p, show_smart=True):
-    p.add_option('--jobs-network', default=None, type=int, metavar='JOBS',
-                 help='number of network jobs to run in parallel (defaults to --jobs or 1)')
-    p.add_option('--jobs-checkout', default=None, type=int, metavar='JOBS',
-                 help='number of local checkout jobs to run in parallel (defaults to --jobs or '
-                      f'{DEFAULT_LOCAL_JOBS})')
-
-    p.add_option('-f', '--force-broken',
-                 dest='force_broken', action='store_true',
-                 help='obsolete option (to be deleted in the future)')
-    p.add_option('--fail-fast',
-                 dest='fail_fast', action='store_true',
-                 help='stop syncing after first error is hit')
-    p.add_option('--force-sync',
-                 dest='force_sync', action='store_true',
-                 help="overwrite an existing git directory if it needs to "
-                      "point to a different object directory. WARNING: this "
-                      "may cause loss of data")
-    p.add_option('--force-remove-dirty',
-                 dest='force_remove_dirty', action='store_true',
-                 help="force remove projects with uncommitted modifications if "
-                      "projects no longer exist in the manifest. "
-                      "WARNING: this may cause loss of data")
-    p.add_option('-l', '--local-only',
-                 dest='local_only', action='store_true',
-                 help="only update working tree, don't fetch")
-    p.add_option('--no-manifest-update', '--nmu',
-                 dest='mp_update', action='store_false', default='true',
-                 help='use the existing manifest checkout as-is. '
-                      '(do not update to the latest revision)')
-    p.add_option('-n', '--network-only',
-                 dest='network_only', action='store_true',
-                 help="fetch only, don't update working tree")
-    p.add_option('-d', '--detach',
-                 dest='detach_head', action='store_true',
-                 help='detach projects back to manifest revision')
-    p.add_option('-c', '--current-branch',
-                 dest='current_branch_only', action='store_true',
-                 help='fetch only current branch from server')
-    p.add_option('--no-current-branch',
-                 dest='current_branch_only', action='store_false',
-                 help='fetch all branches from server')
-    p.add_option('-m', '--manifest-name',
-                 dest='manifest_name',
-                 help='temporary manifest to use for this sync', metavar='NAME.xml')
-    p.add_option('--clone-bundle', action='store_true',
-                 help='enable use of /clone.bundle on HTTP/HTTPS')
-    p.add_option('--no-clone-bundle', dest='clone_bundle', action='store_false',
-                 help='disable use of /clone.bundle on HTTP/HTTPS')
-    p.add_option('-u', '--manifest-server-username', action='store',
-                 dest='manifest_server_username',
-                 help='username to authenticate with the manifest server')
-    p.add_option('-p', '--manifest-server-password', action='store',
-                 dest='manifest_server_password',
-                 help='password to authenticate with the manifest server')
-    p.add_option('--fetch-submodules',
-                 dest='fetch_submodules', action='store_true',
-                 help='fetch submodules from server')
-    p.add_option('--use-superproject', action='store_true',
-                 help='use the manifest superproject to sync projects; implies -c')
-    p.add_option('--no-use-superproject', action='store_false',
-                 dest='use_superproject',
-                 help='disable use of manifest superprojects')
-    p.add_option('--tags', action='store_true',
-                 help='fetch tags')
-    p.add_option('--no-tags',
-                 dest='tags', action='store_false',
-                 help="don't fetch tags (default)")
-    p.add_option('--optimized-fetch',
-                 dest='optimized_fetch', action='store_true',
-                 help='only fetch projects fixed to sha1 if revision does not exist locally')
-    p.add_option('--retry-fetches',
-                 default=0, action='store', type='int',
-                 help='number of times to retry fetches on transient errors')
-    p.add_option('--prune', action='store_true',
-                 help='delete refs that no longer exist on the remote (default)')
-    p.add_option('--no-prune', dest='prune', action='store_false',
-                 help='do not delete refs that no longer exist on the remote')
-    p.add_option('--auto-gc', action='store_true', default=None,
-                 help='run garbage collection on all synced projects')
-    p.add_option('--no-auto-gc', dest='auto_gc', action='store_false',
-                 help='do not run garbage collection on any projects (default)')
-    if show_smart:
-      p.add_option('-s', '--smart-sync',
-                   dest='smart_sync', action='store_true',
-                   help='smart sync using manifest from the latest known good build')
-      p.add_option('-t', '--smart-tag',
-                   dest='smart_tag', action='store',
-                   help='smart sync using manifest from a known tag')
-
-    g = p.add_option_group('repo Version options')
-    g.add_option('--no-repo-verify',
-                 dest='repo_verify', default=True, action='store_false',
-                 help='do not verify repo source code')
-    g.add_option('--repo-upgraded',
-                 dest='repo_upgraded', action='store_true',
-                 help=SUPPRESS_HELP)
-
-  def _GetBranch(self, manifest_project):
-    """Returns the branch name for getting the approved smartsync manifest.
-
-    Args:
-      manifest_project: the manifestProject to query.
-    """
-    b = manifest_project.GetBranch(manifest_project.CurrentBranch)
-    branch = b.merge
-    if branch.startswith(R_HEADS):
-      branch = branch[len(R_HEADS):]
-    return branch
-
-  def _GetCurrentBranchOnly(self, opt, manifest):
-    """Returns whether current-branch or use-superproject options are enabled.
-
-    Args:
-      opt: Program options returned from optparse. See _Options().
-      manifest: The manifest to use.
-
-    Returns:
-      True if a superproject is requested, otherwise the value of the
-      current_branch option (True, False or None).
-    """
-    return git_superproject.UseSuperproject(opt.use_superproject, manifest) or opt.current_branch_only
-
-  def _UpdateProjectsRevisionId(self, opt, args, superproject_logging_data,
-                                manifest):
-    """Update revisionId of projects with the commit hash from the superproject.
-
-    This function updates each project's revisionId with the commit hash from
-    the superproject. It writes the updated manifest into a file and reloads
-    the manifest from it. When appropriate, sub manifests are also processed.
-
-    Args:
-      opt: Program options returned from optparse. See _Options().
-      args: Arguments to pass to GetProjects. See the GetProjects
-            docstring for details.
-      superproject_logging_data: A dictionary of superproject data to log.
-      manifest: The manifest to use.
-    """
-    have_superproject = manifest.superproject or any(
-        m.superproject for m in manifest.all_children)
-    if not have_superproject:
-      return
-
-    if opt.local_only and manifest.superproject:
-      manifest_path = manifest.superproject.manifest_path
-      if manifest_path:
-        self._ReloadManifest(manifest_path, manifest)
-      return
-
-    all_projects = self.GetProjects(args,
-                                    missing_ok=True,
-                                    submodules_ok=opt.fetch_submodules,
-                                    manifest=manifest,
-                                    all_manifests=not opt.this_manifest_only)
-
-    per_manifest = collections.defaultdict(list)
-    manifest_paths = {}
-    if opt.this_manifest_only:
-      per_manifest[manifest.path_prefix] = all_projects
-    else:
-      for p in all_projects:
-        per_manifest[p.manifest.path_prefix].append(p)
-
-    superproject_logging_data = {}
-    need_unload = False
-    for m in self.ManifestList(opt):
-      if not m.path_prefix in per_manifest:
-        continue
-      use_super = git_superproject.UseSuperproject(opt.use_superproject, m)
-      if superproject_logging_data:
-        superproject_logging_data['multimanifest'] = True
-      superproject_logging_data.update(
-          superproject=use_super,
-          haslocalmanifests=bool(m.HasLocalManifests),
-          hassuperprojecttag=bool(m.superproject),
-      )
-      if use_super and (m.IsMirror or m.IsArchive):
-        # Don't use superproject, because we have no working tree.
-        use_super = False
-        superproject_logging_data['superproject'] = False
-        superproject_logging_data['noworktree'] = True
-        if opt.use_superproject is not False:
-          print(f'{m.path_prefix}: not using superproject because there is no '
-                'working tree.')
-
-      if not use_super:
-        continue
-      m.superproject.SetQuiet(opt.quiet)
-      print_messages = git_superproject.PrintMessages(opt.use_superproject, m)
-      m.superproject.SetPrintMessages(print_messages)
-      update_result = m.superproject.UpdateProjectsRevisionId(
-          per_manifest[m.path_prefix], git_event_log=self.git_event_log)
-      manifest_path = update_result.manifest_path
-      superproject_logging_data['updatedrevisionid'] = bool(manifest_path)
-      if manifest_path:
-        m.SetManifestOverride(manifest_path)
-        need_unload = True
-      else:
-        if print_messages:
-          print(f'{m.path_prefix}: warning: Update of revisionId from '
-                'superproject has failed, repo sync will not use superproject '
-                'to fetch the source. ',
-                'Please resync with the --no-use-superproject option to avoid '
-                'this repo warning.',
-                file=sys.stderr)
-        if update_result.fatal and opt.use_superproject is not None:
-          sys.exit(1)
-    if need_unload:
-      m.outer_client.manifest.Unload()
-
-  def _FetchProjectList(self, opt, projects):
-    """Main function of the fetch worker.
-
-    The projects we're given share the same underlying git object store, so we
-    have to fetch them in serial.
-
-    Delegates most of the work to _FetchHelper.
-
-    Args:
-      opt: Program options returned from optparse. See _Options().
-      projects: Projects to fetch.
-    """
-    return [self._FetchOne(opt, x) for x in projects]
+    def _Options(self, p, show_smart=True):
+        p.add_option(
+            "--jobs-network",
+            default=None,
+            type=int,
+            metavar="JOBS",
+            help="number of network jobs to run in parallel (defaults to "
+            "--jobs or 1)",
+        )
+        p.add_option(
+            "--jobs-checkout",
+            default=None,
+            type=int,
+            metavar="JOBS",
+            help="number of local checkout jobs to run in parallel (defaults "
+            f"to --jobs or {DEFAULT_LOCAL_JOBS})",
+        )
+
+        p.add_option(
+            "-f",
+            "--force-broken",
+            dest="force_broken",
+            action="store_true",
+            help="obsolete option (to be deleted in the future)",
+        )
+        p.add_option(
+            "--fail-fast",
+            dest="fail_fast",
+            action="store_true",
+            help="stop syncing after first error is hit",
+        )
+        p.add_option(
+            "--force-sync",
+            dest="force_sync",
+            action="store_true",
+            help="overwrite an existing git directory if it needs to "
+            "point to a different object directory. WARNING: this "
+            "may cause loss of data",
+        )
+        p.add_option(
+            "--force-remove-dirty",
+            dest="force_remove_dirty",
+            action="store_true",
+            help="force remove projects with uncommitted modifications if "
+            "projects no longer exist in the manifest. "
+            "WARNING: this may cause loss of data",
+        )
+        p.add_option(
+            "-l",
+            "--local-only",
+            dest="local_only",
+            action="store_true",
+            help="only update working tree, don't fetch",
+        )
+        p.add_option(
+            "--no-manifest-update",
+            "--nmu",
+            dest="mp_update",
+            action="store_false",
+            default="true",
+            help="use the existing manifest checkout as-is. "
+            "(do not update to the latest revision)",
+        )
+        p.add_option(
+            "-n",
+            "--network-only",
+            dest="network_only",
+            action="store_true",
+            help="fetch only, don't update working tree",
+        )
+        p.add_option(
+            "-d",
+            "--detach",
+            dest="detach_head",
+            action="store_true",
+            help="detach projects back to manifest revision",
+        )
+        p.add_option(
+            "-c",
+            "--current-branch",
+            dest="current_branch_only",
+            action="store_true",
+            help="fetch only current branch from server",
+        )
+        p.add_option(
+            "--no-current-branch",
+            dest="current_branch_only",
+            action="store_false",
+            help="fetch all branches from server",
+        )
+        p.add_option(
+            "-m",
+            "--manifest-name",
+            dest="manifest_name",
+            help="temporary manifest to use for this sync",
+            metavar="NAME.xml",
+        )
+        p.add_option(
+            "--clone-bundle",
+            action="store_true",
+            help="enable use of /clone.bundle on HTTP/HTTPS",
+        )
+        p.add_option(
+            "--no-clone-bundle",
+            dest="clone_bundle",
+            action="store_false",
+            help="disable use of /clone.bundle on HTTP/HTTPS",
+        )
+        p.add_option(
+            "-u",
+            "--manifest-server-username",
+            action="store",
+            dest="manifest_server_username",
+            help="username to authenticate with the manifest server",
+        )
+        p.add_option(
+            "-p",
+            "--manifest-server-password",
+            action="store",
+            dest="manifest_server_password",
+            help="password to authenticate with the manifest server",
+        )
+        p.add_option(
+            "--fetch-submodules",
+            dest="fetch_submodules",
+            action="store_true",
+            help="fetch submodules from server",
+        )
+        p.add_option(
+            "--use-superproject",
+            action="store_true",
+            help="use the manifest superproject to sync projects; implies -c",
+        )
+        p.add_option(
+            "--no-use-superproject",
+            action="store_false",
+            dest="use_superproject",
+            help="disable use of manifest superprojects",
+        )
+        p.add_option("--tags", action="store_true", help="fetch tags")
+        p.add_option(
+            "--no-tags",
+            dest="tags",
+            action="store_false",
+            help="don't fetch tags (default)",
+        )
+        p.add_option(
+            "--optimized-fetch",
+            dest="optimized_fetch",
+            action="store_true",
+            help="only fetch projects fixed to sha1 if revision does not exist "
+            "locally",
+        )
+        p.add_option(
+            "--retry-fetches",
+            default=0,
+            action="store",
+            type="int",
+            help="number of times to retry fetches on transient errors",
+        )
+        p.add_option(
+            "--prune",
+            action="store_true",
+            help="delete refs that no longer exist on the remote (default)",
+        )
+        p.add_option(
+            "--no-prune",
+            dest="prune",
+            action="store_false",
+            help="do not delete refs that no longer exist on the remote",
+        )
+        p.add_option(
+            "--auto-gc",
+            action="store_true",
+            default=None,
+            help="run garbage collection on all synced projects",
+        )
+        p.add_option(
+            "--no-auto-gc",
+            dest="auto_gc",
+            action="store_false",
+            help="do not run garbage collection on any projects (default)",
+        )
+        if show_smart:
+            p.add_option(
+                "-s",
+                "--smart-sync",
+                dest="smart_sync",
+                action="store_true",
+                help="smart sync using manifest from the latest known good "
+                "build",
+            )
+            p.add_option(
+                "-t",
+                "--smart-tag",
+                dest="smart_tag",
+                action="store",
+                help="smart sync using manifest from a known tag",
+            )
+
+        g = p.add_option_group("repo Version options")
+        g.add_option(
+            "--no-repo-verify",
+            dest="repo_verify",
+            default=True,
+            action="store_false",
+            help="do not verify repo source code",
+        )
+        g.add_option(
+            "--repo-upgraded",
+            dest="repo_upgraded",
+            action="store_true",
+            help=SUPPRESS_HELP,
+        )
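Several of the options above come in enable/disable pairs (--prune/--no-prune, --tags/--no-tags, --auto-gc/--no-auto-gc) that write True or False into a single optparse destination, leaving it None when neither flag is given so the real default can be decided later. A minimal sketch of that pattern, using only stdlib optparse:

    from optparse import OptionParser

    p = OptionParser()
    p.add_option("--prune", action="store_true",
                 help="delete refs that no longer exist on the remote (default)")
    p.add_option("--no-prune", dest="prune", action="store_false",
                 help="do not delete refs that no longer exist on the remote")

    opts, _ = p.parse_args(["--no-prune"])
    print(opts.prune)  # False

    opts, _ = p.parse_args([])
    print(opts.prune)  # None -> the caller picks the actual default later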
 
-  def _FetchOne(self, opt, project):
-    """Fetch git objects for a single project.
+    def _GetBranch(self, manifest_project):
+        """Returns the branch name for getting the approved smartsync manifest.
+
+        Args:
+            manifest_project: The manifestProject to query.
+        """
+        b = manifest_project.GetBranch(manifest_project.CurrentBranch)
+        branch = b.merge
+        if branch.startswith(R_HEADS):
+            branch = branch[len(R_HEADS) :]
+        return branch
+
+    def _GetCurrentBranchOnly(self, opt, manifest):
+        """Returns whether current-branch or use-superproject options are
+        enabled.
+
+        Args:
+            opt: Program options returned from optparse. See _Options().
+            manifest: The manifest to use.
+
+        Returns:
+            True if a superproject is requested, otherwise the value of the
+            current_branch option (True, False or None).
+        """
+        return (
+            git_superproject.UseSuperproject(opt.use_superproject, manifest)
+            or opt.current_branch_only
+        )
 
-    Args:
-      opt: Program options returned from optparse. See _Options().
-      project: Project object for the project to fetch.
+    def _UpdateProjectsRevisionId(
+        self, opt, args, superproject_logging_data, manifest
+    ):
+        """Update revisionId of projects with the commit from the superproject.
+
+        This function updates each project's revisionId with the commit hash
+        from the superproject. It writes the updated manifest into a file and
+        reloads the manifest from it. When appropriate, sub manifests are also
+        processed.
+
+        Args:
+            opt: Program options returned from optparse. See _Options().
+            args: Arguments to pass to GetProjects. See the GetProjects
+                docstring for details.
+            superproject_logging_data: A dictionary of superproject data to log.
+            manifest: The manifest to use.
+        """
+        have_superproject = manifest.superproject or any(
+            m.superproject for m in manifest.all_children
+        )
+        if not have_superproject:
+            return
+
+        if opt.local_only and manifest.superproject:
+            manifest_path = manifest.superproject.manifest_path
+            if manifest_path:
+                self._ReloadManifest(manifest_path, manifest)
+            return
+
+        all_projects = self.GetProjects(
+            args,
+            missing_ok=True,
+            submodules_ok=opt.fetch_submodules,
+            manifest=manifest,
+            all_manifests=not opt.this_manifest_only,
+        )
 
-    Returns:
-      Whether the fetch was successful.
-    """
-    start = time.time()
-    success = False
-    remote_fetched = False
-    buf = io.StringIO()
-    try:
-      sync_result = project.Sync_NetworkHalf(
-          quiet=opt.quiet,
-          verbose=opt.verbose,
-          output_redir=buf,
-          current_branch_only=self._GetCurrentBranchOnly(opt, project.manifest),
-          force_sync=opt.force_sync,
-          clone_bundle=opt.clone_bundle,
-          tags=opt.tags, archive=project.manifest.IsArchive,
-          optimized_fetch=opt.optimized_fetch,
-          retry_fetches=opt.retry_fetches,
-          prune=opt.prune,
-          ssh_proxy=self.ssh_proxy,
-          clone_filter=project.manifest.CloneFilter,
-          partial_clone_exclude=project.manifest.PartialCloneExclude)
-      success = sync_result.success
-      remote_fetched = sync_result.remote_fetched
-
-      output = buf.getvalue()
-      if (opt.verbose or not success) and output:
-        print('\n' + output.rstrip())
-
-      if not success:
-        print('error: Cannot fetch %s from %s'
-              % (project.name, project.remote.url),
-              file=sys.stderr)
-    except KeyboardInterrupt:
-      print(f'Keyboard interrupt while processing {project.name}')
-    except GitError as e:
-      print('error.GitError: Cannot fetch %s' % str(e), file=sys.stderr)
-    except Exception as e:
-      print('error: Cannot fetch %s (%s: %s)'
-            % (project.name, type(e).__name__, str(e)), file=sys.stderr)
-      raise
-
-    finish = time.time()
-    return _FetchOneResult(success, project, start, finish, remote_fetched)
-
-  @classmethod
-  def _FetchInitChild(cls, ssh_proxy):
-    cls.ssh_proxy = ssh_proxy
-
-  def _Fetch(self, projects, opt, err_event, ssh_proxy):
-    ret = True
-
-    jobs = opt.jobs_network
-    fetched = set()
-    remote_fetched = set()
-    pm = Progress('Fetching', len(projects), delay=False, quiet=opt.quiet)
-
-    objdir_project_map = dict()
-    for project in projects:
-      objdir_project_map.setdefault(project.objdir, []).append(project)
-    projects_list = list(objdir_project_map.values())
-
-    def _ProcessResults(results_sets):
-      ret = True
-      for results in results_sets:
-        for result in results:
-          success = result.success
-          project = result.project
-          start = result.start
-          finish = result.finish
-          self._fetch_times.Set(project, finish - start)
-          self.event_log.AddSync(project, event_log.TASK_SYNC_NETWORK,
-                                 start, finish, success)
-          if result.remote_fetched:
-            remote_fetched.add(project)
-          # Check for any errors before running any more tasks.
-          # ...we'll let existing jobs finish, though.
-          if not success:
-            ret = False
-          else:
-            fetched.add(project.gitdir)
-          pm.update(msg=f'Last synced: {project.name}')
-        if not ret and opt.fail_fast:
-          break
-      return ret
-
-    # We pass the ssh proxy settings via the class. This allows multiprocessing
-    # to pickle it up when spawning children. We can't pass it as an argument
-    # to _FetchProjectList below as multiprocessing is unable to pickle those.
-    Sync.ssh_proxy = None
-
-    # NB: Multiprocessing is heavy, so don't spin it up for one job.
-    if len(projects_list) == 1 or jobs == 1:
-      self._FetchInitChild(ssh_proxy)
-      if not _ProcessResults(self._FetchProjectList(opt, x) for x in projects_list):
-        ret = False
-    else:
-      # Favor throughput over responsiveness when quiet. It seems that imap()
-      # will yield results in batches relative to chunksize, so even as the
-      # children finish a sync, we won't see the result until one child finishes
-      # ~chunksize jobs. When using a large --jobs with large chunksize, this
-      # can be jarring as there will be a large initial delay where repo looks
-      # like it isn't doing anything and sits at 0%, but then suddenly completes
-      # a lot of jobs all at once. Since this code is more network bound, we
-      # can accept a bit more CPU overhead with a smaller chunksize so that the
-      # user sees more immediate & continuous feedback.
-      if opt.quiet:
-        chunksize = WORKER_BATCH_SIZE
-      else:
-        pm.update(inc=0, msg='warming up')
-        chunksize = 4
-      with multiprocessing.Pool(jobs, initializer=self._FetchInitChild,
-                                initargs=(ssh_proxy,)) as pool:
-        results = pool.imap_unordered(
-            functools.partial(self._FetchProjectList, opt),
-            projects_list,
-            chunksize=chunksize)
-        if not _ProcessResults(results):
-          ret = False
-      pool.close()
-
-    # Cleanup the reference now that we're done with it, and we're going to
-    # release any resources it points to. If we don't, later multiprocessing
-    # usage (e.g. checkouts) will try to pickle and then crash.
-    del Sync.ssh_proxy
-
-    pm.end()
-    self._fetch_times.Save()
-
-    if not self.outer_client.manifest.IsArchive:
-      self._GCProjects(projects, opt, err_event)
-
-    return _FetchResult(ret, fetched)
-
-  def _FetchMain(self, opt, args, all_projects, err_event,
-                 ssh_proxy, manifest):
-    """The main network fetch loop.
-
-    Args:
-      opt: Program options returned from optparse. See _Options().
-      args: Command line args used to filter out projects.
-      all_projects: List of all projects that should be fetched.
-      err_event: Whether an error was hit while processing.
-      ssh_proxy: SSH manager for clients & masters.
-      manifest: The manifest to use.
-
-    Returns:
-      List of all projects that should be checked out.
-    """
-    rp = manifest.repoProject
-
-    to_fetch = []
-    now = time.time()
-    if _ONE_DAY_S <= (now - rp.LastFetch):
-      to_fetch.append(rp)
-    to_fetch.extend(all_projects)
-    to_fetch.sort(key=self._fetch_times.Get, reverse=True)
-
-    result = self._Fetch(to_fetch, opt, err_event, ssh_proxy)
-    success = result.success
-    fetched = result.projects
-    if not success:
-      err_event.set()
-
-    _PostRepoFetch(rp, opt.repo_verify)
-    if opt.network_only:
-      # bail out now; the rest touches the working tree
-      if err_event.is_set():
-        print('\nerror: Exited sync due to fetch errors.\n', file=sys.stderr)
-        sys.exit(1)
-      return _FetchMainResult([])
-
-    # Iteratively fetch missing and/or nested unregistered submodules
-    previously_missing_set = set()
-    while True:
-      self._ReloadManifest(None, manifest)
-      all_projects = self.GetProjects(args,
-                                      missing_ok=True,
-                                      submodules_ok=opt.fetch_submodules,
-                                      manifest=manifest,
-                                      all_manifests=not opt.this_manifest_only)
-      missing = []
-      for project in all_projects:
-        if project.gitdir not in fetched:
-          missing.append(project)
-      if not missing:
-        break
-      # Stop us from non-stopped fetching actually-missing repos: If set of
-      # missing repos has not been changed from last fetch, we break.
-      missing_set = set(p.name for p in missing)
-      if previously_missing_set == missing_set:
-        break
-      previously_missing_set = missing_set
-      result = self._Fetch(missing, opt, err_event, ssh_proxy)
-      success = result.success
-      new_fetched = result.projects
-      if not success:
-        err_event.set()
-      fetched.update(new_fetched)
-
-    return _FetchMainResult(all_projects)
-
-  def _CheckoutOne(self, detach_head, force_sync, project):
-    """Checkout work tree for one project
-
-    Args:
-      detach_head: Whether to leave a detached HEAD.
-      force_sync: Force checking out of the repo.
-      project: Project object for the project to checkout.
-
-    Returns:
-      Whether the fetch was successful.
-    """
-    start = time.time()
-    syncbuf = SyncBuffer(project.manifest.manifestProject.config,
-                         detach_head=detach_head)
-    success = False
-    try:
-      project.Sync_LocalHalf(syncbuf, force_sync=force_sync)
-      success = syncbuf.Finish()
-    except GitError as e:
-      print('error.GitError: Cannot checkout %s: %s' %
-            (project.name, str(e)), file=sys.stderr)
-    except Exception as e:
-      print('error: Cannot checkout %s: %s: %s' %
-            (project.name, type(e).__name__, str(e)),
-            file=sys.stderr)
-      raise
-
-    if not success:
-      print('error: Cannot checkout %s' % (project.name), file=sys.stderr)
-    finish = time.time()
-    return _CheckoutOneResult(success, project, start, finish)
-
-  def _Checkout(self, all_projects, opt, err_results):
-    """Checkout projects listed in all_projects
-
-    Args:
-      all_projects: List of all projects that should be checked out.
-      opt: Program options returned from optparse. See _Options().
-      err_results: A list of strings, paths to git repos where checkout failed.
-    """
-    # Only checkout projects with worktrees.
-    all_projects = [x for x in all_projects if x.worktree]
+        per_manifest = collections.defaultdict(list)
+        if opt.this_manifest_only:
+            per_manifest[manifest.path_prefix] = all_projects
+        else:
+            for p in all_projects:
+                per_manifest[p.manifest.path_prefix].append(p)
+
+        superproject_logging_data = {}
+        need_unload = False
+        for m in self.ManifestList(opt):
+            if m.path_prefix not in per_manifest:
+                continue
+            use_super = git_superproject.UseSuperproject(
+                opt.use_superproject, m
+            )
+            if superproject_logging_data:
+                superproject_logging_data["multimanifest"] = True
+            superproject_logging_data.update(
+                superproject=use_super,
+                haslocalmanifests=bool(m.HasLocalManifests),
+                hassuperprojecttag=bool(m.superproject),
+            )
+            if use_super and (m.IsMirror or m.IsArchive):
+                # Don't use superproject, because we have no working tree.
+                use_super = False
+                superproject_logging_data["superproject"] = False
+                superproject_logging_data["noworktree"] = True
+                if opt.use_superproject is not False:
+                    print(
+                        f"{m.path_prefix}: not using superproject because "
+                        "there is no working tree."
+                    )
+
+            if not use_super:
+                continue
+            m.superproject.SetQuiet(opt.quiet)
+            print_messages = git_superproject.PrintMessages(
+                opt.use_superproject, m
+            )
+            m.superproject.SetPrintMessages(print_messages)
+            update_result = m.superproject.UpdateProjectsRevisionId(
+                per_manifest[m.path_prefix], git_event_log=self.git_event_log
+            )
+            manifest_path = update_result.manifest_path
+            superproject_logging_data["updatedrevisionid"] = bool(manifest_path)
+            if manifest_path:
+                m.SetManifestOverride(manifest_path)
+                need_unload = True
+            else:
+                if print_messages:
+                    print(
+                        f"{m.path_prefix}: warning: Update of revisionId from "
+                        "superproject has failed, repo sync will not use "
+                        "superproject to fetch the source. ",
+                        "Please resync with the --no-use-superproject option "
+                        "to avoid this repo warning.",
+                        file=sys.stderr,
+                    )
+                if update_result.fatal and opt.use_superproject is not None:
+                    sys.exit(1)
+        if need_unload:
+            m.outer_client.manifest.Unload()
+
+    def _FetchProjectList(self, opt, projects):
+        """Main function of the fetch worker.
+
+        The projects we're given share the same underlying git object store, so
+        we have to fetch them in serial.
+
+        Delegates most of the work to _FetchHelper.
+
+        Args:
+            opt: Program options returned from optparse. See _Options().
+            projects: Projects to fetch.
+        """
+        return [self._FetchOne(opt, x) for x in projects]
+
+    def _FetchOne(self, opt, project):
+        """Fetch git objects for a single project.
+
+        Args:
+            opt: Program options returned from optparse. See _Options().
+            project: Project object for the project to fetch.
+
+        Returns:
+            Whether the fetch was successful.
+        """
+        start = time.time()
+        success = False
+        remote_fetched = False
+        buf = io.StringIO()
+        try:
+            sync_result = project.Sync_NetworkHalf(
+                quiet=opt.quiet,
+                verbose=opt.verbose,
+                output_redir=buf,
+                current_branch_only=self._GetCurrentBranchOnly(
+                    opt, project.manifest
+                ),
+                force_sync=opt.force_sync,
+                clone_bundle=opt.clone_bundle,
+                tags=opt.tags,
+                archive=project.manifest.IsArchive,
+                optimized_fetch=opt.optimized_fetch,
+                retry_fetches=opt.retry_fetches,
+                prune=opt.prune,
+                ssh_proxy=self.ssh_proxy,
+                clone_filter=project.manifest.CloneFilter,
+                partial_clone_exclude=project.manifest.PartialCloneExclude,
+            )
+            success = sync_result.success
+            remote_fetched = sync_result.remote_fetched
+
+            output = buf.getvalue()
+            if (opt.verbose or not success) and output:
+                print("\n" + output.rstrip())
+
+            if not success:
+                print(
+                    "error: Cannot fetch %s from %s"
+                    % (project.name, project.remote.url),
+                    file=sys.stderr,
+                )
+        except KeyboardInterrupt:
+            print(f"Keyboard interrupt while processing {project.name}")
+        except GitError as e:
+            print("error.GitError: Cannot fetch %s" % str(e), file=sys.stderr)
+        except Exception as e:
+            print(
+                "error: Cannot fetch %s (%s: %s)"
+                % (project.name, type(e).__name__, str(e)),
+                file=sys.stderr,
+            )
+            raise
+
+        finish = time.time()
+        return _FetchOneResult(success, project, start, finish, remote_fetched)
+
+    @classmethod
+    def _FetchInitChild(cls, ssh_proxy):
+        cls.ssh_proxy = ssh_proxy
+
+    def _Fetch(self, projects, opt, err_event, ssh_proxy):
+        ret = True
+
+        jobs = opt.jobs_network
+        fetched = set()
+        remote_fetched = set()
+        pm = Progress("Fetching", len(projects), delay=False, quiet=opt.quiet)
+
+        objdir_project_map = dict()
+        for project in projects:
+            objdir_project_map.setdefault(project.objdir, []).append(project)
+        projects_list = list(objdir_project_map.values())
+
+        def _ProcessResults(results_sets):
+            ret = True
+            for results in results_sets:
+                for result in results:
+                    success = result.success
+                    project = result.project
+                    start = result.start
+                    finish = result.finish
+                    self._fetch_times.Set(project, finish - start)
+                    self.event_log.AddSync(
+                        project,
+                        event_log.TASK_SYNC_NETWORK,
+                        start,
+                        finish,
+                        success,
+                    )
+                    if result.remote_fetched:
+                        remote_fetched.add(project)
+                    # Check for any errors before running any more tasks.
+                    # ...we'll let existing jobs finish, though.
+                    if not success:
+                        ret = False
+                    else:
+                        fetched.add(project.gitdir)
+                    pm.update(msg=f"Last synced: {project.name}")
+                if not ret and opt.fail_fast:
+                    break
+            return ret
 
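Because several projects can share one object directory, `_Fetch` groups them with dict.setdefault keyed by objdir and hands each group to a worker as a unit, so writers to the same store never run in parallel. A standalone sketch of that grouping (project names and paths are made up):

    projects = [
        ("platform/foo", "/repo/project-objects/foo.git"),
        ("platform/foo-mirror", "/repo/project-objects/foo.git"),
        ("platform/bar", "/repo/project-objects/bar.git"),
    ]

    objdir_project_map = {}
    for name, objdir in projects:
        objdir_project_map.setdefault(objdir, []).append(name)

    # Each value is a batch that one worker fetches serially.
    projects_list = list(objdir_project_map.values())
    print(projects_list)
    # [['platform/foo', 'platform/foo-mirror'], ['platform/bar']]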
-    def _ProcessResults(pool, pm, results):
-      ret = True
-      for result in results:
+        # We pass the ssh proxy settings via the class. This allows
+        # multiprocessing to pickle it up when spawning children. We can't pass
+        # it as an argument to _FetchProjectList below as multiprocessing is
+        # unable to pickle those.
+        Sync.ssh_proxy = None
+
+        # NB: Multiprocessing is heavy, so don't spin it up for one job.
+        if len(projects_list) == 1 or jobs == 1:
+            self._FetchInitChild(ssh_proxy)
+            if not _ProcessResults(
+                self._FetchProjectList(opt, x) for x in projects_list
+            ):
+                ret = False
+        else:
+            # Favor throughput over responsiveness when quiet. It seems that
+            # imap() will yield results in batches relative to chunksize, so
+            # even as the children finish a sync, we won't see the result until
+            # one child finishes ~chunksize jobs. When using a large --jobs
+            # with large chunksize, this can be jarring as there will be a large
+            # initial delay where repo looks like it isn't doing anything and
+            # sits at 0%, but then suddenly completes a lot of jobs all at once.
+            # Since this code is more network bound, we can accept a bit more
+            # CPU overhead with a smaller chunksize so that the user sees more
+            # immediate & continuous feedback.
+            if opt.quiet:
+                chunksize = WORKER_BATCH_SIZE
+            else:
+                pm.update(inc=0, msg="warming up")
+                chunksize = 4
+            with multiprocessing.Pool(
+                jobs, initializer=self._FetchInitChild, initargs=(ssh_proxy,)
+            ) as pool:
+                results = pool.imap_unordered(
+                    functools.partial(self._FetchProjectList, opt),
+                    projects_list,
+                    chunksize=chunksize,
+                )
+                if not _ProcessResults(results):
+                    ret = False
+                pool.close()
+
+        # Cleanup the reference now that we're done with it, and we're going to
+        # release any resources it points to. If we don't, later
+        # multiprocessing usage (e.g. checkouts) will try to pickle and then
+        # crash.
+        del Sync.ssh_proxy
+
+        pm.end()
+        self._fetch_times.Save()
+
+        if not self.outer_client.manifest.IsArchive:
+            self._GCProjects(projects, opt, err_event)
+
+        return _FetchResult(ret, fetched)
+
+    def _FetchMain(
+        self, opt, args, all_projects, err_event, ssh_proxy, manifest
+    ):
+        """The main network fetch loop.
+
+        Args:
+            opt: Program options returned from optparse. See _Options().
+            args: Command line args used to filter out projects.
+            all_projects: List of all projects that should be fetched.
+            err_event: Whether an error was hit while processing.
+            ssh_proxy: SSH manager for clients & masters.
+            manifest: The manifest to use.
+
+        Returns:
+            List of all projects that should be checked out.
+        """
+        rp = manifest.repoProject
+
+        to_fetch = []
+        now = time.time()
+        if _ONE_DAY_S <= (now - rp.LastFetch):
+            to_fetch.append(rp)
+        to_fetch.extend(all_projects)
+        to_fetch.sort(key=self._fetch_times.Get, reverse=True)
+
+        result = self._Fetch(to_fetch, opt, err_event, ssh_proxy)
         success = result.success
-        project = result.project
-        start = result.start
-        finish = result.finish
-        self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL,
-                               start, finish, success)
-        # Check for any errors before running any more tasks.
-        # ...we'll let existing jobs finish, though.
+        fetched = result.projects
         if not success:
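The chunksize comment in `_Fetch` above explains the tradeoff: imap_unordered() hands work to children in chunks and yields results roughly chunk by chunk, so a large chunksize delays the first visible progress while a small one costs a little more IPC. A toy demonstration of that behavior, assuming nothing about repo itself:

    import multiprocessing
    import time


    def work(x):
        time.sleep(0.1)  # stand-in for one network fetch
        return x


    if __name__ == "__main__":
        with multiprocessing.Pool(4) as pool:
            t0 = time.time()
            # With chunksize=4 the first results arrive noticeably later than
            # with chunksize=1, but per-task overhead is lower.
            for r in pool.imap_unordered(work, range(16), chunksize=4):
                print(f"{time.time() - t0:.2f}s: {r}")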
-          ret = False
-          err_results.append(project.RelPath(local=opt.this_manifest_only))
-          if opt.fail_fast:
-            if pool:
-              pool.close()
+            err_event.set()
+
+        _PostRepoFetch(rp, opt.repo_verify)
+        if opt.network_only:
+            # Bail out now; the rest touches the working tree.
+            if err_event.is_set():
+                print(
+                    "\nerror: Exited sync due to fetch errors.\n",
+                    file=sys.stderr,
+                )
+                sys.exit(1)
+            return _FetchMainResult([])
+
+        # Iteratively fetch missing and/or nested unregistered submodules.
+        previously_missing_set = set()
+        while True:
+            self._ReloadManifest(None, manifest)
+            all_projects = self.GetProjects(
+                args,
+                missing_ok=True,
+                submodules_ok=opt.fetch_submodules,
+                manifest=manifest,
+                all_manifests=not opt.this_manifest_only,
+            )
+            missing = []
+            for project in all_projects:
+                if project.gitdir not in fetched:
+                    missing.append(project)
+            if not missing:
+                break
+            # Stop us from non-stopped fetching actually-missing repos: If set
+            # of missing repos has not been changed from last fetch, we break.
+            missing_set = set(p.name for p in missing)
+            if previously_missing_set == missing_set:
+                break
+            previously_missing_set = missing_set
+            result = self._Fetch(missing, opt, err_event, ssh_proxy)
+            success = result.success
+            new_fetched = result.projects
+            if not success:
+                err_event.set()
+            fetched.update(new_fetched)
+
+        return _FetchMainResult(all_projects)
+
+    def _CheckoutOne(self, detach_head, force_sync, project):
+        """Checkout work tree for one project
+
+        Args:
+            detach_head: Whether to leave a detached HEAD.
+            force_sync: Force checking out of the repo.
+            project: Project object for the project to checkout.
+
+        Returns:
+            Whether the fetch was successful.
+        """
+        start = time.time()
+        syncbuf = SyncBuffer(
+            project.manifest.manifestProject.config, detach_head=detach_head
+        )
+        success = False
+        try:
+            project.Sync_LocalHalf(syncbuf, force_sync=force_sync)
+            success = syncbuf.Finish()
+        except GitError as e:
+            print(
+                "error.GitError: Cannot checkout %s: %s"
+                % (project.name, str(e)),
+                file=sys.stderr,
+            )
+        except Exception as e:
+            print(
+                "error: Cannot checkout %s: %s: %s"
+                % (project.name, type(e).__name__, str(e)),
+                file=sys.stderr,
+            )
+            raise
+
+        if not success:
+            print("error: Cannot checkout %s" % (project.name), file=sys.stderr)
+        finish = time.time()
+        return _CheckoutOneResult(success, project, start, finish)
+
+    def _Checkout(self, all_projects, opt, err_results):
+        """Checkout projects listed in all_projects
+
+        Args:
+            all_projects: List of all projects that should be checked out.
+            opt: Program options returned from optparse. See _Options().
+            err_results: A list of strings, paths to git repos where checkout
+                failed.
+        """
+        # Only checkout projects with worktrees.
+        all_projects = [x for x in all_projects if x.worktree]
+
+        def _ProcessResults(pool, pm, results):
+            ret = True
+            for result in results:
+                success = result.success
+                project = result.project
+                start = result.start
+                finish = result.finish
+                self.event_log.AddSync(
+                    project, event_log.TASK_SYNC_LOCAL, start, finish, success
+                )
+                # Check for any errors before running any more tasks.
+                # ...we'll let existing jobs finish, though.
+                if not success:
+                    ret = False
+                    err_results.append(
+                        project.RelPath(local=opt.this_manifest_only)
+                    )
+                    if opt.fail_fast:
+                        if pool:
+                            pool.close()
+                        return ret
+                pm.update(msg=project.name)
             return ret
-        pm.update(msg=project.name)
-      return ret
-
-    return self.ExecuteInParallel(
-        opt.jobs_checkout,
-        functools.partial(self._CheckoutOne, opt.detach_head, opt.force_sync),
-        all_projects,
-        callback=_ProcessResults,
-        output=Progress('Checking out', len(all_projects), quiet=opt.quiet)) and not err_results
-
-  @staticmethod
-  def _GetPreciousObjectsState(project: Project, opt):
-    """Get the preciousObjects state for the project.
-
-    Args:
-      project (Project): the project to examine, and possibly correct.
-      opt (optparse.Values): options given to sync.
-
-    Returns:
-      Expected state of extensions.preciousObjects:
-        False: Should be disabled. (not present)
-        True: Should be enabled.
-    """
-    if project.use_git_worktrees:
-      return False
-    projects = project.manifest.GetProjectsWithName(project.name,
-                                                    all_manifests=True)
-    if len(projects) == 1:
-      return False
-    relpath = project.RelPath(local=opt.this_manifest_only)
-    if len(projects) > 1:
-      # Objects are potentially shared with another project.
-      # See the logic in Project.Sync_NetworkHalf regarding UseAlternates.
-      # - When False, shared projects share (via symlink)
-      #   .repo/project-objects/{PROJECT_NAME}.git as the one-and-only objects
-      #   directory. All objects are precious, since there is no project with a
-      #   complete set of refs.
-      # - When True, shared projects share (via info/alternates)
-      #   .repo/project-objects/{PROJECT_NAME}.git as an alternate object store,
-      #   which is written only on the first clone of the project, and is not
-      #   written subsequently. (When Sync_NetworkHalf sees that it exists, it
-      #   makes sure that the alternates file points there, and uses a
-      #   project-local .git/objects directory for all syncs going forward.
-      # We do not support switching between the options. The environment
-      # variable is present for testing and migration only.
-      return not project.UseAlternates
-
-    return False
-
-  def _SetPreciousObjectsState(self, project: Project, opt):
-    """Correct the preciousObjects state for the project.
-
-    Args:
-      project: the project to examine, and possibly correct.
-      opt: options given to sync.
-    """
-    expected = self._GetPreciousObjectsState(project, opt)
-    actual = project.config.GetBoolean('extensions.preciousObjects') or False
-    relpath = project.RelPath(local=opt.this_manifest_only)
-
-    if expected != actual:
-      # If this is unexpected, log it and repair.
-      Trace(f'{relpath} expected preciousObjects={expected}, got {actual}')
-      if expected:
-        if not opt.quiet:
-          print('\r%s: Shared project %s found, disabling pruning.' %
-                (relpath, project.name))
-        if git_require((2, 7, 0)):
-          project.EnableRepositoryExtension('preciousObjects')
-        else:
-          # This isn't perfect, but it's the best we can do with old git.
-          print('\r%s: WARNING: shared projects are unreliable when using '
-                'old versions of git; please upgrade to git-2.7.0+.'
-                % (relpath,),
-                file=sys.stderr)
-          project.config.SetString('gc.pruneExpire', 'never')
-      else:
-        if not opt.quiet:
-          print(f'\r{relpath}: not shared, disabling pruning.')
-        project.config.SetString('extensions.preciousObjects', None)
-        project.config.SetString('gc.pruneExpire', None)
 
-  def _GCProjects(self, projects, opt, err_event):
-    """Perform garbage collection.
+        return (
+            self.ExecuteInParallel(
+                opt.jobs_checkout,
+                functools.partial(
+                    self._CheckoutOne, opt.detach_head, opt.force_sync
+                ),
+                all_projects,
+                callback=_ProcessResults,
+                output=Progress(
+                    "Checking out", len(all_projects), quiet=opt.quiet
+                ),
+            )
+            and not err_results
+        )
 
-    If We are skipping garbage collection (opt.auto_gc not set), we still want
-    to potentially mark objects precious, so that `git gc` does not discard
-    shared objects.
-    """
-    if not opt.auto_gc:
-      # Just repair preciousObjects state, and return.
-      for project in projects:
-        self._SetPreciousObjectsState(project, opt)
-      return
-
-    pm = Progress('Garbage collecting', len(projects), delay=False,
-                  quiet=opt.quiet)
-    pm.update(inc=0, msg='prescan')
-
-    tidy_dirs = {}
-    for project in projects:
-      self._SetPreciousObjectsState(project, opt)
-
-      project.config.SetString('gc.autoDetach', 'false')
-      # Only call git gc once per objdir, but call pack-refs for the remainder.
-      if project.objdir not in tidy_dirs:
-        tidy_dirs[project.objdir] = (
-            True,  # Run a full gc.
-            project.bare_git,
+    @staticmethod
+    def _GetPreciousObjectsState(project: Project, opt):
+        """Get the preciousObjects state for the project.
+
+        Args:
+            project (Project): the project to examine, and possibly correct.
+            opt (optparse.Values): options given to sync.
+
+        Returns:
+            Expected state of extensions.preciousObjects:
+                False: Should be disabled. (not present)
+                True: Should be enabled.
+        """
+        if project.use_git_worktrees:
+            return False
+        projects = project.manifest.GetProjectsWithName(
+            project.name, all_manifests=True
         )
-      elif project.gitdir not in tidy_dirs:
-        tidy_dirs[project.gitdir] = (
-            False,  # Do not run a full gc; just run pack-refs.
-            project.bare_git,
+        if len(projects) == 1:
+            return False
+        if len(projects) > 1:
+            # Objects are potentially shared with another project.
+            # See the logic in Project.Sync_NetworkHalf regarding UseAlternates.
+            # - When False, shared projects share (via symlink)
+            #   .repo/project-objects/{PROJECT_NAME}.git as the one-and-only
+            #   objects directory. All objects are precious, since there is no
+            #   project with a complete set of refs.
+            # - When True, shared projects share (via info/alternates)
+            #   .repo/project-objects/{PROJECT_NAME}.git as an alternate object
+            #   store, which is written only on the first clone of the project,
+            #   and is not written subsequently. (When Sync_NetworkHalf sees
+            #   that it exists, it makes sure that the alternates file points
+            #   there, and uses a project-local .git/objects directory for all
+            #   syncs going forward.
+            # We do not support switching between the options. The environment
+            # variable is present for testing and migration only.
+            return not project.UseAlternates
+
+        return False
+
+    def _SetPreciousObjectsState(self, project: Project, opt):
+        """Correct the preciousObjects state for the project.
+
+        Args:
+            project: the project to examine, and possibly correct.
+            opt: options given to sync.
+        """
+        expected = self._GetPreciousObjectsState(project, opt)
+        actual = (
+            project.config.GetBoolean("extensions.preciousObjects") or False
         )
-
-    jobs = opt.jobs
-
-    if jobs < 2:
-      for (run_gc, bare_git) in tidy_dirs.values():
-        pm.update(msg=bare_git._project.name)
-
-        if run_gc:
-          bare_git.gc('--auto')
+        relpath = project.RelPath(local=opt.this_manifest_only)
+
+        if expected != actual:
+            # If this is unexpected, log it and repair.
+            Trace(
+                f"{relpath} expected preciousObjects={expected}, got {actual}"
+            )
+            if expected:
+                if not opt.quiet:
+                    print(
+                        "\r%s: Shared project %s found, disabling pruning."
+                        % (relpath, project.name)
+                    )
+                if git_require((2, 7, 0)):
+                    project.EnableRepositoryExtension("preciousObjects")
+                else:
+                    # This isn't perfect, but it's the best we can do with old
+                    # git.
+                    print(
+                        "\r%s: WARNING: shared projects are unreliable when "
+                        "using old versions of git; please upgrade to "
+                        "git-2.7.0+." % (relpath,),
+                        file=sys.stderr,
+                    )
+                    project.config.SetString("gc.pruneExpire", "never")
+            else:
+                if not opt.quiet:
+                    print(f"\r{relpath}: not shared, disabling pruning.")
+                project.config.SetString("extensions.preciousObjects", None)
+                project.config.SetString("gc.pruneExpire", None)
+
+    def _GCProjects(self, projects, opt, err_event):
+        """Perform garbage collection.
+
+        If We are skipping garbage collection (opt.auto_gc not set), we still
+        want to potentially mark objects precious, so that `git gc` does not
+        discard shared objects.
+        """
+        if not opt.auto_gc:
+            # Just repair preciousObjects state, and return.
+            for project in projects:
+                self._SetPreciousObjectsState(project, opt)
+            return
+
+        pm = Progress(
+            "Garbage collecting", len(projects), delay=False, quiet=opt.quiet
+        )
+        pm.update(inc=0, msg="prescan")
+
+        tidy_dirs = {}
+        for project in projects:
+            self._SetPreciousObjectsState(project, opt)
+
+            project.config.SetString("gc.autoDetach", "false")
+            # Only call git gc once per objdir, but call pack-refs for the
+            # remainder.
+            if project.objdir not in tidy_dirs:
+                tidy_dirs[project.objdir] = (
+                    True,  # Run a full gc.
+                    project.bare_git,
+                )
+            elif project.gitdir not in tidy_dirs:
+                tidy_dirs[project.gitdir] = (
+                    False,  # Do not run a full gc; just run pack-refs.
+                    project.bare_git,
+                )
+
+        jobs = opt.jobs
+
+        if jobs < 2:
+            for run_gc, bare_git in tidy_dirs.values():
+                pm.update(msg=bare_git._project.name)
+
+                if run_gc:
+                    bare_git.gc("--auto")
+                else:
+                    bare_git.pack_refs()
+            pm.end()
+            return
+
+        cpu_count = os.cpu_count()
+        config = {"pack.threads": cpu_count // jobs if cpu_count > jobs else 1}
+
+        threads = set()
+        sem = _threading.Semaphore(jobs)
+
+        def tidy_up(run_gc, bare_git):
+            pm.start(bare_git._project.name)
+            try:
+                try:
+                    if run_gc:
+                        bare_git.gc("--auto", config=config)
+                    else:
+                        bare_git.pack_refs(config=config)
+                except GitError:
+                    err_event.set()
+                except Exception:
+                    err_event.set()
+                    raise
+            finally:
+                pm.finish(bare_git._project.name)
+                sem.release()
+
+        for run_gc, bare_git in tidy_dirs.values():
+            if err_event.is_set() and opt.fail_fast:
+                break
+            sem.acquire()
+            t = _threading.Thread(
+                target=tidy_up,
+                args=(
+                    run_gc,
+                    bare_git,
+                ),
+            )
+            t.daemon = True
+            threads.add(t)
+            t.start()
+
+        for t in threads:
+            t.join()
+        pm.end()
+
+    def _ReloadManifest(self, manifest_name, manifest):
+        """Reload the manfiest from the file specified by the |manifest_name|.
+
+        It unloads the manifest if |manifest_name| is None.
+
+        Args:
+            manifest_name: Manifest file to be reloaded.
+            manifest: The manifest to use.
+        """
+        if manifest_name:
+            # Override calls Unload already.
+            manifest.Override(manifest_name)
         else:
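The new `_GCProjects` bounds its thread count with a semaphore rather than a pool: the main loop acquires a slot before spawning and each worker releases it in a finally block, so at most `jobs` gc invocations run at once. A self-contained sketch of that throttling pattern (the sleep stands in for bare_git.gc):

    import threading
    import time

    jobs = 2
    sem = threading.Semaphore(jobs)
    threads = set()


    def tidy_up(name):
        try:
            time.sleep(0.1)  # stand-in for bare_git.gc("--auto")
            print(f"done: {name}")
        finally:
            sem.release()  # always free the slot, even on error


    for name in ["proj-a", "proj-b", "proj-c", "proj-d"]:
        sem.acquire()  # blocks while `jobs` workers are already running
        t = threading.Thread(target=tidy_up, args=(name,))
        t.daemon = True
        threads.add(t)
        t.start()

    for t in threads:
        t.join()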
-          bare_git.pack_refs()
-      pm.end()
-      return
-
-    cpu_count = os.cpu_count()
-    config = {'pack.threads': cpu_count // jobs if cpu_count > jobs else 1}
+            manifest.Unload()
+
+    def UpdateProjectList(self, opt, manifest):
+        """Update the cached projects list for |manifest|
+
+        In a multi-manifest checkout, each manifest has its own project.list.
 
863 threads = set() 1125 Args:
864 sem = _threading.Semaphore(jobs) 1126 opt: Program options returned from optparse. See _Options().
1127 manifest: The manifest to use.
1128
1129 Returns:
1130 0: success
1131 1: failure
1132 """
1133 new_project_paths = []
1134 for project in self.GetProjects(
1135 None, missing_ok=True, manifest=manifest, all_manifests=False
1136 ):
1137 if project.relpath:
1138 new_project_paths.append(project.relpath)
1139 file_name = "project.list"
1140 file_path = os.path.join(manifest.subdir, file_name)
1141 old_project_paths = []
1142
1143 if os.path.exists(file_path):
1144 with open(file_path, "r") as fd:
1145 old_project_paths = fd.read().split("\n")
1146 # In reversed order, so subfolders are deleted before parent folder.
1147 for path in sorted(old_project_paths, reverse=True):
1148 if not path:
1149 continue
1150 if path not in new_project_paths:
1151 # If the path has already been deleted, we don't need to do
1152 # it.
1153 gitdir = os.path.join(manifest.topdir, path, ".git")
1154 if os.path.exists(gitdir):
1155 project = Project(
1156 manifest=manifest,
1157 name=path,
1158 remote=RemoteSpec("origin"),
1159 gitdir=gitdir,
1160 objdir=gitdir,
1161 use_git_worktrees=os.path.isfile(gitdir),
1162 worktree=os.path.join(manifest.topdir, path),
1163 relpath=path,
1164 revisionExpr="HEAD",
1165 revisionId=None,
1166 groups=None,
1167 )
1168 if not project.DeleteWorktree(
1169 quiet=opt.quiet, force=opt.force_remove_dirty
1170 ):
1171 return 1
1172
1173 new_project_paths.sort()
1174 with open(file_path, "w") as fd:
1175 fd.write("\n".join(new_project_paths))
1176 fd.write("\n")
1177 return 0
1178
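UpdateProjectList removes obsolete checkouts in reverse-sorted path order so a subdirectory is always handled before its parent; a small self-contained illustration with hypothetical paths:

old_project_paths = ["vendor", "vendor/lib", "vendor/lib/sub", "tools"]
new_project_paths = ["tools"]
for path in sorted(old_project_paths, reverse=True):
    if path and path not in new_project_paths:
        print("would remove", path)
# would remove vendor/lib/sub
# would remove vendor/lib
# would remove vendor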
1179 def UpdateCopyLinkfileList(self, manifest):
1180 """Save all dests of copyfile and linkfile, and update them if needed.
1181
1182 Returns:
1183 Whether update was successful.
1184 """
1185 new_paths = {}
1186 new_linkfile_paths = []
1187 new_copyfile_paths = []
1188 for project in self.GetProjects(
1189 None, missing_ok=True, manifest=manifest, all_manifests=False
1190 ):
1191 new_linkfile_paths.extend(x.dest for x in project.linkfiles)
1192 new_copyfile_paths.extend(x.dest for x in project.copyfiles)
1193
1194 new_paths = {
1195 "linkfile": new_linkfile_paths,
1196 "copyfile": new_copyfile_paths,
1197 }
1198
1199 copylinkfile_name = "copy-link-files.json"
1200 copylinkfile_path = os.path.join(manifest.subdir, copylinkfile_name)
1201 old_copylinkfile_paths = {}
1202
1203 if os.path.exists(copylinkfile_path):
1204 with open(copylinkfile_path, "rb") as fp:
1205 try:
1206 old_copylinkfile_paths = json.load(fp)
1207 except Exception:
1208 print(
1209 "error: %s is not a json formatted file."
1210 % copylinkfile_path,
1211 file=sys.stderr,
1212 )
1213 platform_utils.remove(copylinkfile_path)
1214 return False
1215
1216 need_remove_files = []
1217 need_remove_files.extend(
1218 set(old_copylinkfile_paths.get("linkfile", []))
1219 - set(new_linkfile_paths)
1220 )
1221 need_remove_files.extend(
1222 set(old_copylinkfile_paths.get("copyfile", []))
1223 - set(new_copyfile_paths)
1224 )
1225
1226 for need_remove_file in need_remove_files:
1227 # Try to remove the stale copyfile or linkfile.
1228 # If the file no longer exists, there is nothing to do.
1229 platform_utils.remove(need_remove_file, missing_ok=True)
1230
1231 # Create copy-link-files.json, saving the dest path of each
1232 # "copyfile" and "linkfile".
1233 with open(copylinkfile_path, "w", encoding="utf-8") as fp:
1234 json.dump(new_paths, fp)
1235 return True
1236
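The stale-file computation above is a pair of set differences between the dests recorded by the previous sync and the dests the current manifest produces; a sketch with hypothetical data:

import json

old = {"linkfile": ["a.txt", "b.txt"], "copyfile": ["c.cfg"]}  # prior sync
new_linkfile_paths = ["b.txt"]
new_copyfile_paths = ["c.cfg", "d.cfg"]

need_remove = []
need_remove.extend(set(old.get("linkfile", [])) - set(new_linkfile_paths))
need_remove.extend(set(old.get("copyfile", [])) - set(new_copyfile_paths))
print(need_remove)  # ['a.txt'] -- only dests no longer generated are removed
print(json.dumps({"linkfile": new_linkfile_paths, "copyfile": new_copyfile_paths}))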
1237 def _SmartSyncSetup(self, opt, smart_sync_manifest_path, manifest):
1238 if not manifest.manifest_server:
1239 print(
1240 "error: cannot smart sync: no manifest server defined in "
1241 "manifest",
1242 file=sys.stderr,
1243 )
1244 sys.exit(1)
1245
1246 manifest_server = manifest.manifest_server
1247 if not opt.quiet:
1248 print("Using manifest server %s" % manifest_server)
1249
1250 if "@" not in manifest_server:
1251 username = None
1252 password = None
1253 if opt.manifest_server_username and opt.manifest_server_password:
1254 username = opt.manifest_server_username
1255 password = opt.manifest_server_password
1256 else:
1257 try:
1258 info = netrc.netrc()
1259 except IOError:
1260 # .netrc file does not exist or could not be opened.
1261 pass
1262 else:
1263 try:
1264 parse_result = urllib.parse.urlparse(manifest_server)
1265 if parse_result.hostname:
1266 auth = info.authenticators(parse_result.hostname)
1267 if auth:
1268 username, _account, password = auth
1269 else:
1270 print(
1271 "No credentials found for %s in .netrc"
1272 % parse_result.hostname,
1273 file=sys.stderr,
1274 )
1275 except netrc.NetrcParseError as e:
1276 print(
1277 "Error parsing .netrc file: %s" % e, file=sys.stderr
1278 )
1279
1280 if username and password:
1281 manifest_server = manifest_server.replace(
1282 "://", "://%s:%s@" % (username, password), 1
1283 )
1284
1285 transport = PersistentTransport(manifest_server)
1286 if manifest_server.startswith("persistent-"):
1287 manifest_server = manifest_server[len("persistent-") :]
1288
1289 try:
1290 server = xmlrpc.client.Server(manifest_server, transport=transport)
1291 if opt.smart_sync:
1292 branch = self._GetBranch(manifest.manifestProject)
1293
1294 if "SYNC_TARGET" in os.environ:
1295 target = os.environ["SYNC_TARGET"]
1296 [success, manifest_str] = server.GetApprovedManifest(
1297 branch, target
1298 )
1299 elif (
1300 "TARGET_PRODUCT" in os.environ
1301 and "TARGET_BUILD_VARIANT" in os.environ
1302 ):
1303 target = "%s-%s" % (
1304 os.environ["TARGET_PRODUCT"],
1305 os.environ["TARGET_BUILD_VARIANT"],
1306 )
1307 [success, manifest_str] = server.GetApprovedManifest(
1308 branch, target
1309 )
1310 else:
1311 [success, manifest_str] = server.GetApprovedManifest(branch)
1312 else:
1313 assert opt.smart_tag
1314 [success, manifest_str] = server.GetManifest(opt.smart_tag)
1315
1316 if success:
1317 manifest_name = os.path.basename(smart_sync_manifest_path)
1318 try:
1319 with open(smart_sync_manifest_path, "w") as f:
1320 f.write(manifest_str)
1321 except IOError as e:
1322 print(
1323 "error: cannot write manifest to %s:\n%s"
1324 % (smart_sync_manifest_path, e),
1325 file=sys.stderr,
1326 )
1327 sys.exit(1)
1328 self._ReloadManifest(manifest_name, manifest)
1329 else:
1330 print(
1331 "error: manifest server RPC call failed: %s" % manifest_str,
1332 file=sys.stderr,
1333 )
1334 sys.exit(1)
1335 except (socket.error, IOError, xmlrpc.client.Fault) as e:
1336 print(
1337 "error: cannot connect to manifest server %s:\n%s"
1338 % (manifest.manifest_server, e),
1339 file=sys.stderr,
1340 )
1341 sys.exit(1)
1342 except xmlrpc.client.ProtocolError as e:
1343 print(
1344 "error: cannot connect to manifest server %s:\n%d %s"
1345 % (manifest.manifest_server, e.errcode, e.errmsg),
1346 file=sys.stderr,
1347 )
1348 sys.exit(1)
1349
1350 return manifest_name
1351
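When credentials come from the -u/-p flags or ~/.netrc, _SmartSyncSetup splices them into the URL authority before building the transport; a standalone sketch with hypothetical values:

manifest_server = "https://example.com/manifest-server"
username, password = "alice", "s3cret"  # hypothetical credentials
if "@" not in manifest_server and username and password:
    manifest_server = manifest_server.replace(
        "://", "://%s:%s@" % (username, password), 1
    )
print(manifest_server)  # https://alice:s3cret@example.com/manifest-server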
1352 def _UpdateAllManifestProjects(self, opt, mp, manifest_name):
1353 """Fetch & update the local manifest project.
1354
1355 After syncing the manifest project, if the manifest has any sub
1356 manifests, those are recursively processed.
1357
1358 Args:
1359 opt: Program options returned from optparse. See _Options().
1360 mp: the manifestProject to query.
1361 manifest_name: Manifest file to be reloaded.
1362 """
1363 if not mp.standalone_manifest_url:
1364 self._UpdateManifestProject(opt, mp, manifest_name)
1365
1366 if mp.manifest.submanifests:
1367 for submanifest in mp.manifest.submanifests.values():
1368 child = submanifest.repo_client.manifest
1369 child.manifestProject.SyncWithPossibleInit(
1370 submanifest,
1371 current_branch_only=self._GetCurrentBranchOnly(opt, child),
1372 verbose=opt.verbose,
1373 tags=opt.tags,
1374 git_event_log=self.git_event_log,
1375 )
1376 self._UpdateAllManifestProjects(
1377 opt, child.manifestProject, None
1378 )
1379
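_UpdateAllManifestProjects descends depth-first through submanifests after syncing each parent; the traversal shape, reduced to plain dicts (illustrative only, not the real manifest objects):

def walk(manifest, depth=0):
    # Visit the parent first, then recurse into each submanifest,
    # mirroring the recursion in _UpdateAllManifestProjects.
    print("  " * depth + manifest["name"])
    for child in manifest.get("submanifests", []):
        walk(child, depth + 1)

walk({"name": "outer", "submanifests": [{"name": "inner"}]})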
1380 def _UpdateManifestProject(self, opt, mp, manifest_name):
1381 """Fetch & update the local manifest project.
1382
1383 Args:
1384 opt: Program options returned from optparse. See _Options().
1385 mp: the manifestProject to query.
1386 manifest_name: Manifest file to be reloaded.
1387 """
1388 if not opt.local_only:
1389 start = time.time()
1390 success = mp.Sync_NetworkHalf(
1391 quiet=opt.quiet,
1392 verbose=opt.verbose,
1393 current_branch_only=self._GetCurrentBranchOnly(
1394 opt, mp.manifest
1395 ),
1396 force_sync=opt.force_sync,
1397 tags=opt.tags,
1398 optimized_fetch=opt.optimized_fetch,
1399 retry_fetches=opt.retry_fetches,
1400 submodules=mp.manifest.HasSubmodules,
1401 clone_filter=mp.manifest.CloneFilter,
1402 partial_clone_exclude=mp.manifest.PartialCloneExclude,
1403 )
1404 finish = time.time()
1405 self.event_log.AddSync(
1406 mp, event_log.TASK_SYNC_NETWORK, start, finish, success
1407 )
1408
1409 if mp.HasChanges:
1410 syncbuf = SyncBuffer(mp.config)
1411 start = time.time()
1412 mp.Sync_LocalHalf(syncbuf, submodules=mp.manifest.HasSubmodules)
1413 clean = syncbuf.Finish()
1414 self.event_log.AddSync(
1415 mp, event_log.TASK_SYNC_LOCAL, start, time.time(), clean
1416 )
1417 if not clean:
1418 sys.exit(1)
1419 self._ReloadManifest(manifest_name, mp.manifest)
1420
1421 def ValidateOptions(self, opt, args):
1422 if opt.force_broken:
1423 print(
1424 "warning: -f/--force-broken is now the default behavior, and "
1425 "the options are deprecated",
1426 file=sys.stderr,
1427 )
1428 if opt.network_only and opt.detach_head:
1429 self.OptionParser.error("cannot combine -n and -d")
1430 if opt.network_only and opt.local_only:
1431 self.OptionParser.error("cannot combine -n and -l")
1432 if opt.manifest_name and opt.smart_sync:
1433 self.OptionParser.error("cannot combine -m and -s")
1434 if opt.manifest_name and opt.smart_tag:
1435 self.OptionParser.error("cannot combine -m and -t")
1436 if opt.manifest_server_username or opt.manifest_server_password:
1437 if not (opt.smart_sync or opt.smart_tag):
1438 self.OptionParser.error(
1439 "-u and -p may only be combined with -s or -t"
1440 )
1441 if None in [
1442 opt.manifest_server_username,
1443 opt.manifest_server_password,
1444 ]:
1445 self.OptionParser.error("both -u and -p must be given")
1446
1447 if opt.prune is None:
1448 opt.prune = True
1449
1450 if opt.auto_gc is None and _AUTO_GC:
1451 print(
1452 f"Will run `git gc --auto` because {_REPO_AUTO_GC} is set.",
1453 f"{_REPO_AUTO_GC} is deprecated and will be removed in a ",
1454 "future release. Use `--auto-gc` instead.",
1455 file=sys.stderr,
1456 )
1457 opt.auto_gc = True
1458
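The _REPO_AUTO_GC escape hatch above only fires when --auto-gc was left unset; the gating logic in isolation (illustrative stand-in for opt.auto_gc):

import os

_AUTO_GC = os.environ.get("REPO_AUTO_GC") == "1"
auto_gc = None  # stand-in for opt.auto_gc when the flag is absent
if auto_gc is None and _AUTO_GC:
    auto_gc = True
print(auto_gc)  # True only if REPO_AUTO_GC=1 is exported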
1459 def _ValidateOptionsWithManifest(self, opt, mp):
1460 """Like ValidateOptions, but after we've updated the manifest.
1461
1462 Needed to handle sync-xxx option defaults in the manifest.
1463
1464 Args:
1465 opt: The options to process.
1466 mp: The manifest project to pull defaults from.
1467 """
1468 if not opt.jobs:
1469 # If the user hasn't made a choice, use the manifest value.
1470 opt.jobs = mp.manifest.default.sync_j
1471 if opt.jobs:
1472 # If --jobs has a non-default value, propagate it as the default for
1473 # --jobs-xxx flags too.
1474 if not opt.jobs_network:
1475 opt.jobs_network = opt.jobs
1476 if not opt.jobs_checkout:
1477 opt.jobs_checkout = opt.jobs
1478 else:
1479 # Neither the user nor the manifest has made a choice, so set up defaults.
1480 if not opt.jobs_network:
1481 opt.jobs_network = 1
1482 if not opt.jobs_checkout:
1483 opt.jobs_checkout = DEFAULT_LOCAL_JOBS
1484 opt.jobs = os.cpu_count()
1485
1486 # Try to stay under user rlimit settings.
1487 #
1488 # Since each worker requires at least 3 file descriptors to run `git fetch`,
1489 # use that to scale down the number of jobs. Unfortunately there isn't
1490 # an easy way to determine this reliably as systems change, but it was
1491 # last measured by hand in 2011.
1492 soft_limit, _ = _rlimit_nofile()
1493 jobs_soft_limit = max(1, (soft_limit - 5) // 3)
1494 opt.jobs = min(opt.jobs, jobs_soft_limit)
1495 opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
1496 opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
1497
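A worked instance of the descriptor-based clamp above: with the common soft limit of 1024 open files, reserving 5 descriptors and budgeting 3 per `git fetch` worker caps every jobs knob at 339.

soft_limit = 1024  # e.g. from resource.getrlimit(resource.RLIMIT_NOFILE)[0]
jobs_soft_limit = max(1, (soft_limit - 5) // 3)
print(jobs_soft_limit)  # 339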
1498 def Execute(self, opt, args):
1499 manifest = self.outer_manifest
1500 if not opt.outer_manifest:
1501 manifest = self.manifest
1502
1503 if opt.manifest_name:
1504 manifest.Override(opt.manifest_name)
1505
1506 manifest_name = opt.manifest_name
1507 smart_sync_manifest_path = os.path.join(
1508 manifest.manifestProject.worktree, "smart_sync_override.xml"
1509 )
1510
1511 if opt.clone_bundle is None:
1512 opt.clone_bundle = manifest.CloneBundle
1513
1514 if opt.smart_sync or opt.smart_tag:
1515 manifest_name = self._SmartSyncSetup(
1516 opt, smart_sync_manifest_path, manifest
1517 )
1518 else:
1519 if os.path.isfile(smart_sync_manifest_path):
1520 try:
1521 platform_utils.remove(smart_sync_manifest_path)
1522 except OSError as e:
1523 print(
1524 "error: failed to remove existing smart sync override "
1525 "manifest: %s" % e,
1526 file=sys.stderr,
1527 )
1528
1529 err_event = multiprocessing.Event()
1530
1531 rp = manifest.repoProject
1532 rp.PreSync()
1533 cb = rp.CurrentBranch
1534 if cb:
1535 base = rp.GetBranch(cb).merge
1536 if not base or not base.startswith("refs/heads/"):
1537 print(
1538 "warning: repo is not tracking a remote branch, so it will "
1539 "not receive updates; run `repo init --repo-rev=stable` to "
1540 "fix.",
1541 file=sys.stderr,
1542 )
1543
1544 for m in self.ManifestList(opt):
1545 if not m.manifestProject.standalone_manifest_url:
1546 m.manifestProject.PreSync()
1547
1548 if opt.repo_upgraded:
1549 _PostRepoUpgrade(manifest, quiet=opt.quiet)
1550
1551 mp = manifest.manifestProject
1552 if opt.mp_update:
1553 self._UpdateAllManifestProjects(opt, mp, manifest_name)
1554 else:
1555 print("Skipping update of local manifest project.")
1556
1557 # Now that the manifests are up-to-date, setup options whose defaults
1558 # might be in the manifest.
1559 self._ValidateOptionsWithManifest(opt, mp)
1560
1561 superproject_logging_data = {}
1562 self._UpdateProjectsRevisionId(
1563 opt, args, superproject_logging_data, manifest
1564 )
1565
1566 if self.gitc_manifest:
1567 gitc_manifest_projects = self.GetProjects(args, missing_ok=True)
1568 gitc_projects = []
1569 opened_projects = []
1570 for project in gitc_manifest_projects:
1571 if (
1572 project.relpath in self.gitc_manifest.paths
1573 and self.gitc_manifest.paths[project.relpath].old_revision
1574 ):
1575 opened_projects.append(project.relpath)
1576 else:
1577 gitc_projects.append(project.relpath)
1578
1579 if not args:
1580 gitc_projects = None
1581
1582 if gitc_projects != [] and not opt.local_only:
1583 print(
1584 "Updating GITC client: %s"
1585 % self.gitc_manifest.gitc_client_name
1586 )
1587 manifest = GitcManifest(
1588 self.repodir, self.gitc_manifest.gitc_client_name
1589 )
1590 if manifest_name:
1591 manifest.Override(manifest_name)
1592 else:
1593 manifest.Override(manifest.manifestFile)
1594 gitc_utils.generate_gitc_manifest(
1595 self.gitc_manifest, manifest, gitc_projects
1596 )
1597 print("GITC client successfully synced.")
1598
1599 # The opened projects need to be synced as normal, therefore we
1600 # generate a new args list to represent the opened projects.
1601 # TODO: make this more reliable -- if there's a project name/path
1602 # overlap, this may choose the wrong project.
1603 args = [
1604 os.path.relpath(manifest.paths[path].worktree, os.getcwd())
1605 for path in opened_projects
1606 ]
1607 if not args:
1608 return
1609
1610 all_projects = self.GetProjects(
1611 args,
1612 missing_ok=True,
1613 submodules_ok=opt.fetch_submodules,
1614 manifest=manifest,
1615 all_manifests=not opt.this_manifest_only,
1616 )
1617
1618 err_network_sync = False
1619 err_update_projects = False
1620 err_update_linkfiles = False
1621
1622 self._fetch_times = _FetchTimes(manifest)
1623 if not opt.local_only:
1624 with multiprocessing.Manager() as manager:
1625 with ssh.ProxyManager(manager) as ssh_proxy:
1626 # Initialize the socket dir once in the parent.
1627 ssh_proxy.sock()
1628 result = self._FetchMain(
1629 opt, args, all_projects, err_event, ssh_proxy, manifest
1630 )
1631 all_projects = result.all_projects
1632
1633 if opt.network_only:
1634 return
1635
1636 # If we saw an error, exit with code 1 so that other scripts can
1637 # check.
1638 if err_event.is_set():
1639 err_network_sync = True
1640 if opt.fail_fast:
1641 print(
1642 "\nerror: Exited sync due to fetch errors.\n"
1643 "Local checkouts *not* updated. Resolve network issues "
1644 "& retry.\n"
1645 "`repo sync -l` will update some local checkouts.",
1646 file=sys.stderr,
1647 )
1648 sys.exit(1)
1649
1650 for m in self.ManifestList(opt):
1651 if m.IsMirror or m.IsArchive:
1652 # Bail out now, we have no working tree.
1653 continue
1654
1655 if self.UpdateProjectList(opt, m):
1656 err_event.set()
1657 err_update_projects = True
1658 if opt.fail_fast:
1659 print(
1660 "\nerror: Local checkouts *not* updated.",
1661 file=sys.stderr,
1662 )
1663 sys.exit(1)
1664
1665 err_update_linkfiles = not self.UpdateCopyLinkfileList(m)
1666 if err_update_linkfiles:
1667 err_event.set()
1668 if opt.fail_fast:
1669 print(
1670 "\nerror: Local update copyfile or linkfile failed.",
1671 file=sys.stderr,
1672 )
1673 sys.exit(1)
1674
1675 err_results = []
1676 # NB: We don't exit here because this is the last step.
1677 err_checkout = not self._Checkout(all_projects, opt, err_results)
1678 if err_checkout:
1679 err_event.set()
1680
1681 printed_notices = set()
1682 # If there's a notice that's supposed to print at the end of the sync,
1683 # print it now... But avoid printing duplicate messages, and preserve
1684 # order.
1685 for m in sorted(self.ManifestList(opt), key=lambda x: x.path_prefix):
1686 if m.notice and m.notice not in printed_notices:
1687 print(m.notice)
1688 printed_notices.add(m.notice)
1689
1690 # If we saw an error, exit with code 1 so that other scripts can check.
1691 if err_event.is_set():
1692 print("\nerror: Unable to fully sync the tree.", file=sys.stderr)
1693 if err_network_sync:
1694 print(
1695 "error: Downloading network changes failed.",
1696 file=sys.stderr,
1697 )
1698 if err_update_projects:
1699 print(
1700 "error: Updating local project lists failed.",
1701 file=sys.stderr,
1702 )
1703 if err_update_linkfiles:
1704 print(
1705 "error: Updating copyfiles or linkfiles failed.",
1706 file=sys.stderr,
1707 )
1708 if err_checkout:
1709 print(
1710 "error: Checking out local projects failed.",
1711 file=sys.stderr,
1712 )
1713 if err_results:
1714 print(
1715 "Failing repos:\n%s" % "\n".join(err_results),
1716 file=sys.stderr,
1717 )
1718 print(
1719 'Try re-running with "-j1 --fail-fast" to exit at the first '
1720 "error.",
1721 file=sys.stderr,
1722 )
1723 sys.exit(1)
1724
1725 # Log the previous sync analysis state from the config.
1726 self.git_event_log.LogDataConfigEvents(
1727 mp.config.GetSyncAnalysisStateData(), "previous_sync_state"
1728 )
1729
1730 # Update and log with the new sync analysis state.
1731 mp.config.UpdateSyncAnalysisState(opt, superproject_logging_data)
1732 self.git_event_log.LogDataConfigEvents(
1733 mp.config.GetSyncAnalysisStateData(), "current_sync_state"
1734 )
1735
1736 if not opt.quiet:
1737 print("repo sync has finished successfully.")
1738
1739
1740 def _PostRepoUpgrade(manifest, quiet=False):
1741 # Link the docs for the internal .repo/ layout for people.
1742 link = os.path.join(manifest.repodir, "internal-fs-layout.md")
1743 if not platform_utils.islink(link):
1744 target = os.path.join("repo", "docs", "internal-fs-layout.md")
1745 try:
1746 platform_utils.symlink(target, link)
1747 except Exception:
1748 pass
1749
1750 wrapper = Wrapper()
1751 if wrapper.NeedSetupGnuPG():
1752 wrapper.SetupGnuPG(quiet)
1753 for project in manifest.projects:
1754 if project.Exists:
1755 project.PostRepoUpgrade()
1756
1757
1758 def _PostRepoFetch(rp, repo_verify=True, verbose=False):
1759 if rp.HasChanges:
1760 print("info: A new version of repo is available", file=sys.stderr)
1761 wrapper = Wrapper()
1762 try:
1763 rev = rp.bare_git.describe(rp.GetRevisionId())
1764 except GitError:
1765 rev = None
1766 _, new_rev = wrapper.check_repo_rev(
1767 rp.gitdir, rev, repo_verify=repo_verify
1768 )
1769 # See if we're held back due to missing signed tag.
1770 current_revid = rp.bare_git.rev_parse("HEAD")
1771 new_revid = rp.bare_git.rev_parse("--verify", new_rev)
1772 if current_revid != new_revid:
1773 # We want to switch to the new rev, but also not trash any
1774 # uncommitted changes. This helps with local testing/hacking.
1775 # If a local change has been made, we will throw that away.
1776 # We also have to make sure this will switch to an older commit if
1777 # that's the latest tag in order to support release rollback.
1778 try:
1779 rp.work_git.reset("--keep", new_rev)
1780 except GitError as e:
1781 sys.exit(str(e))
1782 print("info: Restarting repo with latest version", file=sys.stderr)
1783 raise RepoChangedException(["--repo-upgraded"])
1784 else:
1785 print(
1786 "warning: Skipped upgrade to unverified version",
1787 file=sys.stderr,
1788 )
1789 else:
1790 if verbose:
1791 print(
1792 "repo version %s is current" % rp.work_git.describe(HEAD),
1793 file=sys.stderr,
1794 )
1795
1796
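The upgrade path above relies on `git reset --keep`, which switches revisions without discarding local edits; a minimal subprocess sketch under that assumption (hypothetical helper, needs a real checkout to actually run):

import subprocess

def switch_keeping_local_changes(workdir, new_rev):
    # --keep moves HEAD and the worktree but aborts rather than
    # clobbering files that differ locally between the two revisions.
    subprocess.run(
        ["git", "-C", workdir, "reset", "--keep", new_rev], check=True
    )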
1797 class _FetchTimes(object):
1798 _ALPHA = 0.5
1799
1800 def __init__(self, manifest):
1801 self._path = os.path.join(manifest.repodir, ".repo_fetchtimes.json")
1802 self._times = None
1803 self._seen = set()
1804
1805 def Get(self, project):
1806 self._Load()
1807 return self._times.get(project.name, _ONE_DAY_S)
1808
1809 def Set(self, project, t):
1810 self._Load()
1811 name = project.name
1812 old = self._times.get(name, t)
1813 self._seen.add(name)
1814 a = self._ALPHA
1815 self._times[name] = (a * t) + ((1 - a) * old)
1816
1817 def _Load(self):
1818 if self._times is None:
1819 try:
1820 with open(self._path) as f:
1821 self._times = json.load(f)
1822 except (IOError, ValueError):
1823 platform_utils.remove(self._path, missing_ok=True)
1824 self._times = {}
1825
1826 def Save(self):
1827 if self._times is None:
1828 return
1829
1830 to_delete = []
1831 for name in self._times:
1832 if name not in self._seen:
1833 to_delete.append(name)
1834 for name in to_delete:
1835 del self._times[name]
1836
1837 try:
1838 with open(self._path, "w") as f:
1839 json.dump(self._times, f, indent=2)
1840 except (IOError, TypeError):
1841 platform_utils.remove(self._path, missing_ok=True)
1842
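_FetchTimes.Set smooths each project's fetch duration with an exponential moving average (alpha = 0.5); one update step worked by hand:

_ALPHA = 0.5
old, t = 120.0, 40.0  # historical estimate vs. latest fetch, in seconds
print((_ALPHA * t) + ((1 - _ALPHA) * old))  # 80.0 -- halfway toward the new sample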
1843
1844 # This is a replacement for xmlrpc.client.Transport using urllib2
1845 # and supporting persistent-http[s]. It cannot change hosts from
@@ -1525,98 +1848,105 @@ class _FetchTimes(object):
1848
1849
1850 class PersistentTransport(xmlrpc.client.Transport):
1851 def __init__(self, orig_host):
1852 self.orig_host = orig_host
1853
1854 def request(self, host, handler, request_body, verbose=False):
1855 with GetUrlCookieFile(self.orig_host, not verbose) as (
1856 cookiefile,
1857 proxy,
1858 ):
1859 # Python doesn't understand cookies with the #HttpOnly_ prefix
1860 # Since we're only using them for HTTP, copy the file temporarily,
1861 # stripping those prefixes away.
1862 if cookiefile:
1863 tmpcookiefile = tempfile.NamedTemporaryFile(mode="w")
1864 tmpcookiefile.write("# HTTP Cookie File")
1865 try:
1866 with open(cookiefile) as f:
1867 for line in f:
1868 if line.startswith("#HttpOnly_"):
1869 line = line[len("#HttpOnly_") :]
1870 tmpcookiefile.write(line)
1871 tmpcookiefile.flush()
1872
1873 cookiejar = cookielib.MozillaCookieJar(tmpcookiefile.name)
1874 try:
1875 cookiejar.load()
1876 except cookielib.LoadError:
1877 cookiejar = cookielib.CookieJar()
1878 finally:
1879 tmpcookiefile.close()
1880 else:
1881 cookiejar = cookielib.CookieJar()
1882
1883 proxyhandler = urllib.request.ProxyHandler
1884 if proxy:
1885 proxyhandler = urllib.request.ProxyHandler(
1886 {"http": proxy, "https": proxy}
1887 )
1888
1889 opener = urllib.request.build_opener(
1890 urllib.request.HTTPCookieProcessor(cookiejar), proxyhandler
1891 )
1892
1893 url = urllib.parse.urljoin(self.orig_host, handler)
1894 parse_results = urllib.parse.urlparse(url)
1895
1896 scheme = parse_results.scheme
1897 if scheme == "persistent-http":
1898 scheme = "http"
1899 if scheme == "persistent-https":
1900 # If we're proxying through persistent-https, use http. The
1901 # proxy itself will do the https.
1902 if proxy:
1903 scheme = "http"
1904 else:
1905 scheme = "https"
1906
1907 # Parse out any authentication information using the base class.
1908 host, extra_headers, _ = self.get_host_info(parse_results.netloc)
1909
1910 url = urllib.parse.urlunparse(
1911 (
1912 scheme,
1913 host,
1914 parse_results.path,
1915 parse_results.params,
1916 parse_results.query,
1917 parse_results.fragment,
1918 )
1919 )
1920
1921 request = urllib.request.Request(url, request_body)
1922 if extra_headers is not None:
1923 for name, header in extra_headers:
1924 request.add_header(name, header)
1925 request.add_header("Content-Type", "text/xml")
1926 try:
1927 response = opener.open(request)
1928 except urllib.error.HTTPError as e:
1929 if e.code == 501:
1930 # We may have been redirected through a login process
1931 # but our POST turned into a GET. Retry.
1932 response = opener.open(request)
1933 else:
1934 raise
1935
1936 p, u = xmlrpc.client.getparser()
1937 # Response should be fairly small, so read it all at once.
1938 # This way we can show it to the user in case of error (e.g. HTML).
1939 data = response.read()
1940 try:
1941 p.feed(data)
1942 except xml.parsers.expat.ExpatError as e:
1943 raise IOError(
1944 f"Parsing the manifest failed: {e}\n"
1945 f"Please report this to your manifest server admin.\n"
1946 f'Here is the full response:\n{data.decode("utf-8")}'
1947 )
1948 p.close()
1949 return u.close()
1950
1951 def close(self):
1952 pass
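The cookie handling in request() strips the #HttpOnly_ marker that Python's MozillaCookieJar cannot parse; the transformation on a single illustrative cookie line:

line = "#HttpOnly_.example.com\tTRUE\t/\tTRUE\t0\tsid\tabc"  # illustrative
if line.startswith("#HttpOnly_"):
    line = line[len("#HttpOnly_"):]
print(line.split("\t")[0])  # .example.com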