Diffstat (limited to 'subcmds/sync.py')
-rw-r--r--  subcmds/sync.py  1054
1 file changed, 559 insertions(+), 495 deletions(-)
diff --git a/subcmds/sync.py b/subcmds/sync.py
index 2973a16e..3211cbb1 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -1,5 +1,3 @@
-# -*- coding:utf-8 -*-
-#
 # Copyright (C) 2008 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,37 +12,23 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from __future__ import print_function
+import errno
+import functools
+import http.cookiejar as cookielib
+import io
 import json
+import multiprocessing
 import netrc
 from optparse import SUPPRESS_HELP
 import os
-import re
 import socket
-import subprocess
 import sys
 import tempfile
 import time
-
-from pyversion import is_python3
-if is_python3():
-  import http.cookiejar as cookielib
-  import urllib.error
-  import urllib.parse
-  import urllib.request
-  import xmlrpc.client
-else:
-  import cookielib
-  import imp
-  import urllib2
-  import urlparse
-  import xmlrpclib
-  urllib = imp.new_module('urllib')
-  urllib.error = urllib2
-  urllib.parse = urlparse
-  urllib.request = urllib2
-  xmlrpc = imp.new_module('xmlrpc')
-  xmlrpc.client = xmlrpclib
+import urllib.error
+import urllib.parse
+import urllib.request
+import xmlrpc.client
 
 try:
   import threading as _threading
@@ -53,44 +37,36 @@ except ImportError:
 
 try:
   import resource
+
   def _rlimit_nofile():
     return resource.getrlimit(resource.RLIMIT_NOFILE)
 except ImportError:
   def _rlimit_nofile():
     return (256, 256)
 
-try:
-  import multiprocessing
-except ImportError:
-  multiprocessing = None
-
 import event_log
-from git_command import GIT, git_require
+from git_command import git_require
 from git_config import GetUrlCookieFile
 from git_refs import R_HEADS, HEAD
+import git_superproject
 import gitc_utils
 from project import Project
 from project import RemoteSpec
-from command import Command, MirrorSafeCommand
+from command import Command, MirrorSafeCommand, WORKER_BATCH_SIZE
 from error import RepoChangedException, GitError, ManifestParseError
 import platform_utils
 from project import SyncBuffer
 from progress import Progress
+import ssh
 from wrapper import Wrapper
 from manifest_xml import GitcManifest
 
 _ONE_DAY_S = 24 * 60 * 60
 
-class _FetchError(Exception):
-  """Internal error thrown in _FetchHelper() when we don't want stack trace."""
-  pass
-
-class _CheckoutError(Exception):
-  """Internal error thrown in _CheckoutOne() when we don't want stack trace."""
 
 class Sync(Command, MirrorSafeCommand):
   jobs = 1
-  common = True
+  COMMON = True
   helpSummary = "Update working tree to the latest revision"
   helpUsage = """
 %prog [<project>...]
@@ -133,11 +109,11 @@ if the manifest server specified in the manifest file already includes
 credentials.
 
 By default, all projects will be synced. The --fail-fast option can be used
-to halt syncing as soon as possible when the the first project fails to sync.
+to halt syncing as soon as possible when the first project fails to sync.
 
 The --force-sync option can be used to overwrite existing git
 directories if they have previously been linked to a different
-object direcotry. WARNING: This may cause data to be lost since
+object directory. WARNING: This may cause data to be lost since
 refs may be removed when overwriting.
 
 The --force-remove-dirty option can be used to remove previously used
@@ -191,12 +167,21 @@ If the remote SSH daemon is Gerrit Code Review, version 2.0.10 or
 later is required to fix a server side protocol bug.
 
 """
+  PARALLEL_JOBS = 1
+
+  def _CommonOptions(self, p):
+    if self.manifest:
+      try:
+        self.PARALLEL_JOBS = self.manifest.default.sync_j
+      except ManifestParseError:
+        pass
+    super()._CommonOptions(p)
 
   def _Options(self, p, show_smart=True):
-    try:
-      self.jobs = self.manifest.default.sync_j
-    except ManifestParseError:
-      self.jobs = 1
+    p.add_option('--jobs-network', default=None, type=int, metavar='JOBS',
+                 help='number of network jobs to run in parallel (defaults to --jobs)')
+    p.add_option('--jobs-checkout', default=None, type=int, metavar='JOBS',
+                 help='number of local checkout jobs to run in parallel (defaults to --jobs)')
 
     p.add_option('-f', '--force-broken',
                  dest='force_broken', action='store_true',
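
For context on the split job controls introduced above: both new options default to None and fall back to the general --jobs value at the point of use, as later hunks show (`opt.jobs_network if opt.jobs_network else self.jobs`). A minimal, self-contained optparse sketch of that fallback pattern — the parser setup here is illustrative, not repo's actual Command plumbing:

# Sketch of the --jobs-network/--jobs-checkout fallback; names match the
# diff, but the bare OptionParser is a stand-in for repo's option wiring.
import optparse

p = optparse.OptionParser()
p.add_option('-j', '--jobs', default=8, type=int)
p.add_option('--jobs-network', default=None, type=int)
p.add_option('--jobs-checkout', default=None, type=int)
opts, _ = p.parse_args(['--jobs-network', '16'])

# Each specialized knob falls back to the general --jobs value.
jobs_network = opts.jobs_network if opts.jobs_network else opts.jobs
jobs_checkout = opts.jobs_checkout if opts.jobs_checkout else opts.jobs
assert (jobs_network, jobs_checkout) == (16, 8)
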
@@ -217,6 +202,10 @@ later is required to fix a server side protocol bug.
     p.add_option('-l', '--local-only',
                  dest='local_only', action='store_true',
                  help="only update working tree, don't fetch")
+    p.add_option('--no-manifest-update', '--nmu',
+                 dest='mp_update', action='store_false', default='true',
+                 help='use the existing manifest checkout as-is. '
+                      '(do not update to the latest revision)')
     p.add_option('-n', '--network-only',
                  dest='network_only', action='store_true',
                  help="fetch only, don't update working tree")
@@ -226,17 +215,15 @@ later is required to fix a server side protocol bug.
     p.add_option('-c', '--current-branch',
                  dest='current_branch_only', action='store_true',
                  help='fetch only current branch from server')
-    p.add_option('-q', '--quiet',
-                 dest='quiet', action='store_true',
-                 help='be more quiet')
-    p.add_option('-j', '--jobs',
-                 dest='jobs', action='store', type='int',
-                 help="projects to fetch simultaneously (default %d)" % self.jobs)
+    p.add_option('--no-current-branch',
+                 dest='current_branch_only', action='store_false',
+                 help='fetch all branches from server')
     p.add_option('-m', '--manifest-name',
                  dest='manifest_name',
                  help='temporary manifest to use for this sync', metavar='NAME.xml')
-    p.add_option('--no-clone-bundle',
-                 dest='no_clone_bundle', action='store_true',
+    p.add_option('--clone-bundle', action='store_true',
+                 help='enable use of /clone.bundle on HTTP/HTTPS')
+    p.add_option('--no-clone-bundle', dest='clone_bundle', action='store_false',
                  help='disable use of /clone.bundle on HTTP/HTTPS')
     p.add_option('-u', '--manifest-server-username', action='store',
                  dest='manifest_server_username',
@@ -247,12 +234,23 @@ later is required to fix a server side protocol bug.
     p.add_option('--fetch-submodules',
                  dest='fetch_submodules', action='store_true',
                  help='fetch submodules from server')
+    p.add_option('--use-superproject', action='store_true',
+                 help='use the manifest superproject to sync projects')
+    p.add_option('--no-use-superproject', action='store_false',
+                 dest='use_superproject',
+                 help='disable use of manifest superprojects')
+    p.add_option('--tags',
+                 action='store_false',
+                 help='fetch tags')
     p.add_option('--no-tags',
-                 dest='no_tags', action='store_true',
+                 dest='tags', action='store_false',
                  help="don't fetch tags")
     p.add_option('--optimized-fetch',
                  dest='optimized_fetch', action='store_true',
                  help='only fetch projects fixed to sha1 if revision does not exist locally')
+    p.add_option('--retry-fetches',
+                 default=0, action='store', type='int',
+                 help='number of times to retry fetches on transient errors')
     p.add_option('--prune', dest='prune', action='store_true',
                  help='delete refs that no longer exist on the remote')
     if show_smart:
@@ -265,345 +263,400 @@ later is required to fix a server side protocol bug.
 
     g = p.add_option_group('repo Version options')
     g.add_option('--no-repo-verify',
-                 dest='no_repo_verify', action='store_true',
+                 dest='repo_verify', default=True, action='store_false',
                  help='do not verify repo source code')
     g.add_option('--repo-upgraded',
                  dest='repo_upgraded', action='store_true',
                  help=SUPPRESS_HELP)
 
-  def _FetchProjectList(self, opt, projects, sem, *args, **kwargs):
-    """Main function of the fetch threads.
+  def _GetBranch(self):
+    """Returns the branch name for getting the approved manifest."""
+    p = self.manifest.manifestProject
+    b = p.GetBranch(p.CurrentBranch)
+    branch = b.merge
+    if branch.startswith(R_HEADS):
+      branch = branch[len(R_HEADS):]
+    return branch
+
+  def _GetCurrentBranchOnly(self, opt):
+    """Returns True if current-branch or use-superproject options are enabled."""
+    return opt.current_branch_only or git_superproject.UseSuperproject(opt, self.manifest)
+
+  def _UpdateProjectsRevisionId(self, opt, args, load_local_manifests, superproject_logging_data):
+    """Update revisionId of every project with the SHA from superproject.
+
+    This function updates each project's revisionId with SHA from superproject.
+    It writes the updated manifest into a file and reloads the manifest from it.
+
+    Args:
+      opt: Program options returned from optparse. See _Options().
+      args: Arguments to pass to GetProjects. See the GetProjects
+          docstring for details.
+      load_local_manifests: Whether to load local manifests.
+      superproject_logging_data: A dictionary of superproject data that is to be logged.
+
+    Returns:
+      Returns path to the overriding manifest file instead of None.
+    """
+    print_messages = git_superproject.PrintMessages(opt, self.manifest)
+    superproject = git_superproject.Superproject(self.manifest,
+                                                 self.repodir,
+                                                 self.git_event_log,
+                                                 quiet=opt.quiet,
+                                                 print_messages=print_messages)
+    if opt.local_only:
+      manifest_path = superproject.manifest_path
+      if manifest_path:
+        self._ReloadManifest(manifest_path, load_local_manifests)
+      return manifest_path
+
+    all_projects = self.GetProjects(args,
+                                    missing_ok=True,
+                                    submodules_ok=opt.fetch_submodules)
+    update_result = superproject.UpdateProjectsRevisionId(all_projects)
+    manifest_path = update_result.manifest_path
+    superproject_logging_data['updatedrevisionid'] = bool(manifest_path)
+    if manifest_path:
+      self._ReloadManifest(manifest_path, load_local_manifests)
+    else:
+      if print_messages:
+        print('warning: Update of revisionId from superproject has failed, '
+              'repo sync will not use superproject to fetch the source. ',
+              'Please resync with the --no-use-superproject option to avoid this repo warning.',
+              file=sys.stderr)
+      if update_result.fatal and opt.use_superproject is not None:
+        sys.exit(1)
+    return manifest_path
+
+  def _FetchProjectList(self, opt, projects):
+    """Main function of the fetch worker.
+
+    The projects we're given share the same underlying git object store, so we
+    have to fetch them in serial.
 
     Delegates most of the work to _FetchHelper.
 
     Args:
       opt: Program options returned from optparse. See _Options().
       projects: Projects to fetch.
-      sem: We'll release() this semaphore when we exit so that another thread
-          can be started up.
-      *args, **kwargs: Remaining arguments to pass to _FetchHelper. See the
-          _FetchHelper docstring for details.
     """
-    try:
-      for project in projects:
-        success = self._FetchHelper(opt, project, *args, **kwargs)
-        if not success and opt.fail_fast:
-          break
-    finally:
-      sem.release()
+    return [self._FetchOne(opt, x) for x in projects]
 
-  def _FetchHelper(self, opt, project, lock, fetched, pm, err_event,
-                   clone_filter):
+  def _FetchOne(self, opt, project):
     """Fetch git objects for a single project.
 
     Args:
       opt: Program options returned from optparse. See _Options().
       project: Project object for the project to fetch.
-      lock: Lock for accessing objects that are shared amongst multiple
-          _FetchHelper() threads.
-      fetched: set object that we will add project.gitdir to when we're done
-          (with our lock held).
-      pm: Instance of a Project object. We will call pm.update() (with our
-          lock held).
-      err_event: We'll set this event in the case of an error (after printing
-          out info about the error).
-      clone_filter: Filter for use in a partial clone.
 
     Returns:
       Whether the fetch was successful.
     """
-    # We'll set to true once we've locked the lock.
-    did_lock = False
-
-    # Encapsulate everything in a try/except/finally so that:
-    # - We always set err_event in the case of an exception.
-    # - We always make sure we unlock the lock if we locked it.
     start = time.time()
     success = False
+    buf = io.StringIO()
     try:
-      try:
-        success = project.Sync_NetworkHalf(
+      success = project.Sync_NetworkHalf(
           quiet=opt.quiet,
-          current_branch_only=opt.current_branch_only,
+          verbose=opt.verbose,
+          output_redir=buf,
+          current_branch_only=self._GetCurrentBranchOnly(opt),
           force_sync=opt.force_sync,
-          clone_bundle=not opt.no_clone_bundle,
-          no_tags=opt.no_tags, archive=self.manifest.IsArchive,
+          clone_bundle=opt.clone_bundle,
+          tags=opt.tags, archive=self.manifest.IsArchive,
           optimized_fetch=opt.optimized_fetch,
+          retry_fetches=opt.retry_fetches,
           prune=opt.prune,
-          clone_filter=clone_filter)
-        self._fetch_times.Set(project, time.time() - start)
-
-        # Lock around all the rest of the code, since printing, updating a set
-        # and Progress.update() are not thread safe.
-        lock.acquire()
-        did_lock = True
+          ssh_proxy=self.ssh_proxy,
+          clone_filter=self.manifest.CloneFilter,
+          partial_clone_exclude=self.manifest.PartialCloneExclude)
+
+      output = buf.getvalue()
+      if (opt.verbose or not success) and output:
+        print('\n' + output.rstrip())
 
       if not success:
-        err_event.set()
         print('error: Cannot fetch %s from %s'
               % (project.name, project.remote.url),
               file=sys.stderr)
-        if opt.fail_fast:
-          raise _FetchError()
-
-      fetched.add(project.gitdir)
-      pm.update(msg=project.name)
-    except _FetchError:
-      pass
+    except GitError as e:
+      print('error.GitError: Cannot fetch %s' % str(e), file=sys.stderr)
     except Exception as e:
-      print('error: Cannot fetch %s (%s: %s)' \
+      print('error: Cannot fetch %s (%s: %s)'
             % (project.name, type(e).__name__, str(e)), file=sys.stderr)
-      err_event.set()
       raise
-    finally:
-      if did_lock:
-        lock.release()
-      finish = time.time()
-      self.event_log.AddSync(project, event_log.TASK_SYNC_NETWORK,
-                             start, finish, success)
+
+    finish = time.time()
+    return (success, project, start, finish)
 
-    return success
+  @classmethod
+  def _FetchInitChild(cls, ssh_proxy):
+    cls.ssh_proxy = ssh_proxy
 
-  def _Fetch(self, projects, opt):
+  def _Fetch(self, projects, opt, err_event, ssh_proxy):
+    ret = True
+
+    jobs = opt.jobs_network if opt.jobs_network else self.jobs
     fetched = set()
-    lock = _threading.Lock()
-    pm = Progress('Fetching projects', len(projects),
-                  always_print_percentage=opt.quiet)
+    pm = Progress('Fetching', len(projects), delay=False, quiet=opt.quiet)
 
     objdir_project_map = dict()
     for project in projects:
       objdir_project_map.setdefault(project.objdir, []).append(project)
+    projects_list = list(objdir_project_map.values())
 
-    threads = set()
-    sem = _threading.Semaphore(self.jobs)
-    err_event = _threading.Event()
-    for project_list in objdir_project_map.values():
-      # Check for any errors before running any more tasks.
-      # ...we'll let existing threads finish, though.
-      if err_event.isSet() and opt.fail_fast:
-        break
-
-      sem.acquire()
-      kwargs = dict(opt=opt,
-                    projects=project_list,
-                    sem=sem,
-                    lock=lock,
-                    fetched=fetched,
-                    pm=pm,
-                    err_event=err_event,
-                    clone_filter=self.manifest.CloneFilter)
-      if self.jobs > 1:
-        t = _threading.Thread(target = self._FetchProjectList,
-                              kwargs = kwargs)
-        # Ensure that Ctrl-C will not freeze the repo process.
-        t.daemon = True
-        threads.add(t)
-        t.start()
+    def _ProcessResults(results_sets):
+      ret = True
+      for results in results_sets:
+        for (success, project, start, finish) in results:
+          self._fetch_times.Set(project, finish - start)
+          self.event_log.AddSync(project, event_log.TASK_SYNC_NETWORK,
+                                 start, finish, success)
+          # Check for any errors before running any more tasks.
+          # ...we'll let existing jobs finish, though.
+          if not success:
+            ret = False
+          else:
+            fetched.add(project.gitdir)
+          pm.update(msg=project.name)
+        if not ret and opt.fail_fast:
+          break
+      return ret
+
+    # We pass the ssh proxy settings via the class.  This allows multiprocessing
+    # to pickle it up when spawning children.  We can't pass it as an argument
+    # to _FetchProjectList below as multiprocessing is unable to pickle those.
+    Sync.ssh_proxy = None
+
+    # NB: Multiprocessing is heavy, so don't spin it up for one job.
+    if len(projects_list) == 1 or jobs == 1:
+      self._FetchInitChild(ssh_proxy)
+      if not _ProcessResults(self._FetchProjectList(opt, x) for x in projects_list):
+        ret = False
+    else:
+      # Favor throughput over responsiveness when quiet.  It seems that imap()
+      # will yield results in batches relative to chunksize, so even as the
+      # children finish a sync, we won't see the result until one child finishes
+      # ~chunksize jobs.  When using a large --jobs with large chunksize, this
+      # can be jarring as there will be a large initial delay where repo looks
+      # like it isn't doing anything and sits at 0%, but then suddenly completes
+      # a lot of jobs all at once.  Since this code is more network bound, we
+      # can accept a bit more CPU overhead with a smaller chunksize so that the
+      # user sees more immediate & continuous feedback.
+      if opt.quiet:
+        chunksize = WORKER_BATCH_SIZE
       else:
-        self._FetchProjectList(**kwargs)
-
-    for t in threads:
-      t.join()
-
-    # If we saw an error, exit with code 1 so that other scripts can check.
-    if err_event.isSet() and opt.fail_fast:
-      print('\nerror: Exited sync due to fetch errors', file=sys.stderr)
-      sys.exit(1)
+        pm.update(inc=0, msg='warming up')
+        chunksize = 4
+      with multiprocessing.Pool(
+          jobs, initializer=self._FetchInitChild, initargs=(ssh_proxy,)) as pool:
+        results = pool.imap_unordered(
+            functools.partial(self._FetchProjectList, opt),
+            projects_list,
+            chunksize=chunksize)
+        if not _ProcessResults(results):
+          ret = False
+        pool.close()
+
+    # Cleanup the reference now that we're done with it, and we're going to
+    # release any resources it points to.  If we don't, later multiprocessing
+    # usage (e.g. checkouts) will try to pickle and then crash.
+    del Sync.ssh_proxy
 
     pm.end()
     self._fetch_times.Save()
 
     if not self.manifest.IsArchive:
-      self._GCProjects(projects)
+      self._GCProjects(projects, opt, err_event)
 
-    return fetched
+    return (ret, fetched)
 
-  def _CheckoutWorker(self, opt, sem, project, *args, **kwargs):
-    """Main function of the fetch threads.
-
-    Delegates most of the work to _CheckoutOne.
+  def _FetchMain(self, opt, args, all_projects, err_event, manifest_name,
+                 load_local_manifests, ssh_proxy):
+    """The main network fetch loop.
 
     Args:
       opt: Program options returned from optparse. See _Options().
-      projects: Projects to fetch.
-      sem: We'll release() this semaphore when we exit so that another thread
-          can be started up.
-      *args, **kwargs: Remaining arguments to pass to _CheckoutOne. See the
-          _CheckoutOne docstring for details.
+      args: Command line args used to filter out projects.
+      all_projects: List of all projects that should be fetched.
+      err_event: Whether an error was hit while processing.
+      manifest_name: Manifest file to be reloaded.
+      load_local_manifests: Whether to load local manifests.
+      ssh_proxy: SSH manager for clients & masters.
+
+    Returns:
+      List of all projects that should be checked out.
     """
-    try:
-      return self._CheckoutOne(opt, project, *args, **kwargs)
-    finally:
-      sem.release()
+    rp = self.manifest.repoProject
 
-  def _CheckoutOne(self, opt, project, lock, pm, err_event, err_results):
+    to_fetch = []
+    now = time.time()
+    if _ONE_DAY_S <= (now - rp.LastFetch):
+      to_fetch.append(rp)
+    to_fetch.extend(all_projects)
+    to_fetch.sort(key=self._fetch_times.Get, reverse=True)
+
+    success, fetched = self._Fetch(to_fetch, opt, err_event, ssh_proxy)
+    if not success:
+      err_event.set()
+
+    _PostRepoFetch(rp, opt.repo_verify)
+    if opt.network_only:
+      # bail out now; the rest touches the working tree
+      if err_event.is_set():
+        print('\nerror: Exited sync due to fetch errors.\n', file=sys.stderr)
+        sys.exit(1)
+      return
+
+    # Iteratively fetch missing and/or nested unregistered submodules
+    previously_missing_set = set()
+    while True:
+      self._ReloadManifest(manifest_name, load_local_manifests)
+      all_projects = self.GetProjects(args,
+                                      missing_ok=True,
+                                      submodules_ok=opt.fetch_submodules)
+      missing = []
+      for project in all_projects:
+        if project.gitdir not in fetched:
+          missing.append(project)
+      if not missing:
+        break
+      # Stop us from non-stopped fetching actually-missing repos: If set of
+      # missing repos has not been changed from last fetch, we break.
+      missing_set = set(p.name for p in missing)
+      if previously_missing_set == missing_set:
+        break
+      previously_missing_set = missing_set
+      success, new_fetched = self._Fetch(missing, opt, err_event, ssh_proxy)
+      if not success:
+        err_event.set()
+      fetched.update(new_fetched)
+
+    return all_projects
+
+  def _CheckoutOne(self, detach_head, force_sync, project):
     """Checkout work tree for one project
 
     Args:
-      opt: Program options returned from optparse. See _Options().
+      detach_head: Whether to leave a detached HEAD.
+      force_sync: Force checking out of the repo.
       project: Project object for the project to checkout.
-      lock: Lock for accessing objects that are shared amongst multiple
-          _CheckoutWorker() threads.
-      pm: Instance of a Project object. We will call pm.update() (with our
-          lock held).
-      err_event: We'll set this event in the case of an error (after printing
-          out info about the error).
-      err_results: A list of strings, paths to git repos where checkout
-          failed.
 
     Returns:
       Whether the fetch was successful.
     """
-    # We'll set to true once we've locked the lock.
-    did_lock = False
-
-    # Encapsulate everything in a try/except/finally so that:
-    # - We always set err_event in the case of an exception.
-    # - We always make sure we unlock the lock if we locked it.
     start = time.time()
     syncbuf = SyncBuffer(self.manifest.manifestProject.config,
-                         detach_head=opt.detach_head)
+                         detach_head=detach_head)
     success = False
     try:
-      try:
-        project.Sync_LocalHalf(syncbuf, force_sync=opt.force_sync)
-
-        # Lock around all the rest of the code, since printing, updating a set
-        # and Progress.update() are not thread safe.
-        lock.acquire()
-        success = syncbuf.Finish()
-        did_lock = True
-
-        if not success:
-          err_event.set()
-          print('error: Cannot checkout %s' % (project.name),
-                file=sys.stderr)
-          raise _CheckoutError()
-
-        pm.update(msg=project.name)
-      except _CheckoutError:
-        pass
-      except Exception as e:
-        print('error: Cannot checkout %s: %s: %s' %
-              (project.name, type(e).__name__, str(e)),
-              file=sys.stderr)
-        err_event.set()
-        raise
-    finally:
-      if did_lock:
-        if not success:
-          err_results.append(project.relpath)
-        lock.release()
-      finish = time.time()
-      self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL,
-                             start, finish, success)
+      project.Sync_LocalHalf(syncbuf, force_sync=force_sync)
+      success = syncbuf.Finish()
+    except GitError as e:
+      print('error.GitError: Cannot checkout %s: %s' %
+            (project.name, str(e)), file=sys.stderr)
+    except Exception as e:
+      print('error: Cannot checkout %s: %s: %s' %
+            (project.name, type(e).__name__, str(e)),
+            file=sys.stderr)
+      raise
 
-    return success
+    if not success:
+      print('error: Cannot checkout %s' % (project.name), file=sys.stderr)
+    finish = time.time()
+    return (success, project, start, finish)
 
-  def _Checkout(self, all_projects, opt):
+  def _Checkout(self, all_projects, opt, err_results):
     """Checkout projects listed in all_projects
 
     Args:
       all_projects: List of all projects that should be checked out.
       opt: Program options returned from optparse. See _Options().
+      err_results: A list of strings, paths to git repos where checkout failed.
     """
+    # Only checkout projects with worktrees.
+    all_projects = [x for x in all_projects if x.worktree]
+
+    def _ProcessResults(pool, pm, results):
+      ret = True
+      for (success, project, start, finish) in results:
+        self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL,
+                               start, finish, success)
+        # Check for any errors before running any more tasks.
+        # ...we'll let existing jobs finish, though.
+        if not success:
+          ret = False
+          err_results.append(project.relpath)
+          if opt.fail_fast:
+            if pool:
+              pool.close()
+            return ret
+        pm.update(msg=project.name)
+      return ret
 
-    # Perform checkouts in multiple threads when we are using partial clone.
-    # Without partial clone, all needed git objects are already downloaded,
-    # in this situation it's better to use only one process because the checkout
-    # would be mostly disk I/O; with partial clone, the objects are only
-    # downloaded when demanded (at checkout time), which is similar to the
-    # Sync_NetworkHalf case and parallelism would be helpful.
-    if self.manifest.CloneFilter:
-      syncjobs = self.jobs
-    else:
-      syncjobs = 1
-
-    lock = _threading.Lock()
-    pm = Progress('Checking out projects', len(all_projects))
-
-    threads = set()
-    sem = _threading.Semaphore(syncjobs)
-    err_event = _threading.Event()
-    err_results = []
-
-    for project in all_projects:
-      # Check for any errors before running any more tasks.
-      # ...we'll let existing threads finish, though.
-      if err_event.isSet() and opt.fail_fast:
-        break
-
-      sem.acquire()
-      if project.worktree:
-        kwargs = dict(opt=opt,
-                      sem=sem,
-                      project=project,
-                      lock=lock,
-                      pm=pm,
-                      err_event=err_event,
-                      err_results=err_results)
-        if syncjobs > 1:
-          t = _threading.Thread(target=self._CheckoutWorker,
-                                kwargs=kwargs)
-          # Ensure that Ctrl-C will not freeze the repo process.
-          t.daemon = True
-          threads.add(t)
-          t.start()
-        else:
-          self._CheckoutWorker(**kwargs)
-
-    for t in threads:
-      t.join()
+    return self.ExecuteInParallel(
+        opt.jobs_checkout if opt.jobs_checkout else self.jobs,
+        functools.partial(self._CheckoutOne, opt.detach_head, opt.force_sync),
+        all_projects,
+        callback=_ProcessResults,
+        output=Progress('Checking out', len(all_projects), quiet=opt.quiet)) and not err_results
 
-    pm.end()
-    # If we saw an error, exit with code 1 so that other scripts can check.
-    if err_event.isSet():
-      print('\nerror: Exited sync due to checkout errors', file=sys.stderr)
-      if err_results:
-        print('Failing repos:\n%s' % '\n'.join(err_results),
-              file=sys.stderr)
-      sys.exit(1)
+  def _GCProjects(self, projects, opt, err_event):
+    pm = Progress('Garbage collecting', len(projects), delay=False, quiet=opt.quiet)
+    pm.update(inc=0, msg='prescan')
 
-  def _GCProjects(self, projects):
     gc_gitdirs = {}
     for project in projects:
-      if len(project.manifest.GetProjectsWithName(project.name)) > 1:
-        print('Shared project %s found, disabling pruning.' % project.name)
-        project.bare_git.config('--replace-all', 'gc.pruneExpire', 'never')
+      # Make sure pruning never kicks in with shared projects.
+      if (not project.use_git_worktrees and
+              len(project.manifest.GetProjectsWithName(project.name)) > 1):
+        if not opt.quiet:
+          print('\r%s: Shared project %s found, disabling pruning.' %
+                (project.relpath, project.name))
+        if git_require((2, 7, 0)):
+          project.EnableRepositoryExtension('preciousObjects')
+        else:
+          # This isn't perfect, but it's the best we can do with old git.
+          print('\r%s: WARNING: shared projects are unreliable when using old '
+                'versions of git; please upgrade to git-2.7.0+.'
+                % (project.relpath,),
+                file=sys.stderr)
+          project.config.SetString('gc.pruneExpire', 'never')
       gc_gitdirs[project.gitdir] = project.bare_git
 
-    has_dash_c = git_require((1, 7, 2))
-    if multiprocessing and has_dash_c:
-      cpu_count = multiprocessing.cpu_count()
-    else:
-      cpu_count = 1
+    pm.update(inc=len(projects) - len(gc_gitdirs), msg='warming up')
+
+    cpu_count = os.cpu_count()
     jobs = min(self.jobs, cpu_count)
 
     if jobs < 2:
       for bare_git in gc_gitdirs.values():
+        pm.update(msg=bare_git._project.name)
         bare_git.gc('--auto')
+      pm.end()
       return
 
     config = {'pack.threads': cpu_count // jobs if cpu_count > jobs else 1}
 
     threads = set()
     sem = _threading.Semaphore(jobs)
-    err_event = _threading.Event()
 
     def GC(bare_git):
+      pm.start(bare_git._project.name)
       try:
         try:
           bare_git.gc('--auto', config=config)
         except GitError:
           err_event.set()
-      except:
+      except Exception:
         err_event.set()
         raise
       finally:
+        pm.finish(bare_git._project.name)
         sem.release()
 
     for bare_git in gc_gitdirs.values():
-      if err_event.isSet():
+      if err_event.is_set() and opt.fail_fast:
        break
      sem.acquire()
      t = _threading.Thread(target=GC, args=(bare_git,))
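
The long comment in the new `_Fetch` above explains the chunksize trade-off with imap(). A stripped-down, runnable sketch of the same worker pattern — an `initializer` seeding per-child state and `imap_unordered` with a small chunksize for steady feedback — with all names hypothetical stand-ins for repo's internals:

# Illustrative only: mirrors the Pool usage in _Fetch, assuming a picklable
# stand-in for the ssh proxy and a trivial per-group job.
import functools
import multiprocessing

_proxy = None  # per-child state, seeded by the initializer

def _init_child(proxy):
    global _proxy
    _proxy = proxy

def _fetch_group(opts, group):
    # Each group shares an object store, so it is processed serially.
    return [(name, _proxy, opts) for name in group]

if __name__ == '__main__':
    groups = [['a', 'b'], ['c'], ['d', 'e']]
    with multiprocessing.Pool(2, initializer=_init_child,
                              initargs=('ssh-proxy',)) as pool:
        results = pool.imap_unordered(
            functools.partial(_fetch_group, {'quiet': True}),
            groups, chunksize=1)  # small chunksize => more immediate feedback
        for res in results:
            print(res)
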
@@ -613,84 +666,30 @@ later is required to fix a server side protocol bug.
 
     for t in threads:
       t.join()
+    pm.end()
 
-    if err_event.isSet():
-      print('\nerror: Exited sync due to gc errors', file=sys.stderr)
-      sys.exit(1)
+  def _ReloadManifest(self, manifest_name=None, load_local_manifests=True):
+    """Reload the manfiest from the file specified by the |manifest_name|.
+
+    It unloads the manifest if |manifest_name| is None.
 
-  def _ReloadManifest(self, manifest_name=None):
+    Args:
+      manifest_name: Manifest file to be reloaded.
+      load_local_manifests: Whether to load local manifests.
+    """
     if manifest_name:
       # Override calls _Unload already
-      self.manifest.Override(manifest_name)
+      self.manifest.Override(manifest_name, load_local_manifests=load_local_manifests)
     else:
       self.manifest._Unload()
 
-  def _DeleteProject(self, path):
-    print('Deleting obsolete path %s' % path, file=sys.stderr)
-
-    # Delete the .git directory first, so we're less likely to have a partially
-    # working git repository around. There shouldn't be any git projects here,
-    # so rmtree works.
-    try:
-      platform_utils.rmtree(os.path.join(path, '.git'))
-    except OSError as e:
-      print('Failed to remove %s (%s)' % (os.path.join(path, '.git'), str(e)), file=sys.stderr)
-      print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
-      print('       remove manually, then run sync again', file=sys.stderr)
-      return 1
-
-    # Delete everything under the worktree, except for directories that contain
-    # another git project
-    dirs_to_remove = []
-    failed = False
-    for root, dirs, files in platform_utils.walk(path):
-      for f in files:
-        try:
-          platform_utils.remove(os.path.join(root, f))
-        except OSError as e:
-          print('Failed to remove %s (%s)' % (os.path.join(root, f), str(e)), file=sys.stderr)
-          failed = True
-      dirs[:] = [d for d in dirs
-                 if not os.path.lexists(os.path.join(root, d, '.git'))]
-      dirs_to_remove += [os.path.join(root, d) for d in dirs
-                         if os.path.join(root, d) not in dirs_to_remove]
-    for d in reversed(dirs_to_remove):
-      if platform_utils.islink(d):
-        try:
-          platform_utils.remove(d)
-        except OSError as e:
-          print('Failed to remove %s (%s)' % (os.path.join(root, d), str(e)), file=sys.stderr)
-          failed = True
-      elif len(platform_utils.listdir(d)) == 0:
-        try:
-          platform_utils.rmdir(d)
-        except OSError as e:
-          print('Failed to remove %s (%s)' % (os.path.join(root, d), str(e)), file=sys.stderr)
-          failed = True
-        continue
-    if failed:
-      print('error: Failed to delete obsolete path %s' % path, file=sys.stderr)
-      print('       remove manually, then run sync again', file=sys.stderr)
-      return 1
-
-    # Try deleting parent dirs if they are empty
-    project_dir = path
-    while project_dir != self.manifest.topdir:
-      if len(platform_utils.listdir(project_dir)) == 0:
-        platform_utils.rmdir(project_dir)
-      else:
-        break
-      project_dir = os.path.dirname(project_dir)
-
-    return 0
-
   def UpdateProjectList(self, opt):
     new_project_paths = []
     for project in self.GetProjects(None, missing_ok=True):
       if project.relpath:
         new_project_paths.append(project.relpath)
     file_name = 'project.list'
-    file_path = os.path.join(self.manifest.repodir, file_name)
+    file_path = os.path.join(self.repodir, file_name)
     old_project_paths = []
 
     if os.path.exists(file_path):
@@ -705,28 +704,20 @@ later is required to fix a server side protocol bug.
         gitdir = os.path.join(self.manifest.topdir, path, '.git')
         if os.path.exists(gitdir):
           project = Project(
-              manifest = self.manifest,
-              name = path,
-              remote = RemoteSpec('origin'),
-              gitdir = gitdir,
-              objdir = gitdir,
-              worktree = os.path.join(self.manifest.topdir, path),
-              relpath = path,
-              revisionExpr = 'HEAD',
-              revisionId = None,
-              groups = None)
-
-          if project.IsDirty() and opt.force_remove_dirty:
-            print('WARNING: Removing dirty project "%s": uncommitted changes '
-                  'erased' % project.relpath, file=sys.stderr)
-            self._DeleteProject(project.worktree)
-          elif project.IsDirty():
-            print('error: Cannot remove project "%s": uncommitted changes '
-                  'are present' % project.relpath, file=sys.stderr)
-            print('       commit changes, then run sync again',
-                  file=sys.stderr)
-            return 1
-          elif self._DeleteProject(project.worktree):
+              manifest=self.manifest,
+              name=path,
+              remote=RemoteSpec('origin'),
+              gitdir=gitdir,
+              objdir=gitdir,
+              use_git_worktrees=os.path.isfile(gitdir),
+              worktree=os.path.join(self.manifest.topdir, path),
+              relpath=path,
+              revisionExpr='HEAD',
+              revisionId=None,
+              groups=None)
+          if not project.DeleteWorktree(
+              quiet=opt.quiet,
+              force=opt.force_remove_dirty):
             return 1
 
     new_project_paths.sort()
735 fd.write('\n') 726 fd.write('\n')
736 return 0 727 return 0
737 728
729 def UpdateCopyLinkfileList(self):
730 """Save all dests of copyfile and linkfile, and update them if needed.
731
732 Returns:
733 Whether update was successful.
734 """
735 new_paths = {}
736 new_linkfile_paths = []
737 new_copyfile_paths = []
738 for project in self.GetProjects(None, missing_ok=True):
739 new_linkfile_paths.extend(x.dest for x in project.linkfiles)
740 new_copyfile_paths.extend(x.dest for x in project.copyfiles)
741
742 new_paths = {
743 'linkfile': new_linkfile_paths,
744 'copyfile': new_copyfile_paths,
745 }
746
747 copylinkfile_name = 'copy-link-files.json'
748 copylinkfile_path = os.path.join(self.manifest.repodir, copylinkfile_name)
749 old_copylinkfile_paths = {}
750
751 if os.path.exists(copylinkfile_path):
752 with open(copylinkfile_path, 'rb') as fp:
753 try:
754 old_copylinkfile_paths = json.load(fp)
755 except:
756 print('error: %s is not a json formatted file.' %
757 copylinkfile_path, file=sys.stderr)
758 platform_utils.remove(copylinkfile_path)
759 return False
760
761 need_remove_files = []
762 need_remove_files.extend(
763 set(old_copylinkfile_paths.get('linkfile', [])) -
764 set(new_linkfile_paths))
765 need_remove_files.extend(
766 set(old_copylinkfile_paths.get('copyfile', [])) -
767 set(new_copyfile_paths))
768
769 for need_remove_file in need_remove_files:
770 # Try to remove the updated copyfile or linkfile.
771 # So, if the file is not exist, nothing need to do.
772 platform_utils.remove(need_remove_file, missing_ok=True)
773
774 # Create copy-link-files.json, save dest path of "copyfile" and "linkfile".
775 with open(copylinkfile_path, 'w', encoding='utf-8') as fp:
776 json.dump(new_paths, fp)
777 return True
778
738 def _SmartSyncSetup(self, opt, smart_sync_manifest_path): 779 def _SmartSyncSetup(self, opt, smart_sync_manifest_path):
739 if not self.manifest.manifest_server: 780 if not self.manifest.manifest_server:
740 print('error: cannot smart sync: no manifest server defined in ' 781 print('error: cannot smart sync: no manifest server defined in '
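
A small worked example of the stale-path bookkeeping that `UpdateCopyLinkfileList` introduces above: destinations recorded in the previous copy-link-files.json but absent from the newly computed lists are the ones scheduled for removal (the paths below are made up):

# Illustrative data; the real dest lists come from project.copyfiles/linkfiles.
old_paths = {'linkfile': ['out/link-a', 'out/link-b'], 'copyfile': ['out/copy-a']}
new_linkfile_paths = ['out/link-b']
new_copyfile_paths = ['out/copy-a', 'out/copy-b']

need_remove_files = []
need_remove_files.extend(
    set(old_paths.get('linkfile', [])) - set(new_linkfile_paths))
need_remove_files.extend(
    set(old_paths.get('copyfile', [])) - set(new_copyfile_paths))

# Only the dest that disappeared from the manifest gets cleaned up.
assert need_remove_files == ['out/link-a']
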
@@ -745,7 +786,7 @@ later is required to fix a server side protocol bug.
     if not opt.quiet:
       print('Using manifest server %s' % manifest_server)
 
-    if not '@' in manifest_server:
+    if '@' not in manifest_server:
       username = None
       password = None
       if opt.manifest_server_username and opt.manifest_server_password:
@@ -782,19 +823,15 @@ later is required to fix a server side protocol bug.
     try:
       server = xmlrpc.client.Server(manifest_server, transport=transport)
       if opt.smart_sync:
-        p = self.manifest.manifestProject
-        b = p.GetBranch(p.CurrentBranch)
-        branch = b.merge
-        if branch.startswith(R_HEADS):
-          branch = branch[len(R_HEADS):]
-
-        env = os.environ.copy()
-        if 'SYNC_TARGET' in env:
-          target = env['SYNC_TARGET']
+        branch = self._GetBranch()
+
+        if 'SYNC_TARGET' in os.environ:
+          target = os.environ['SYNC_TARGET']
           [success, manifest_str] = server.GetApprovedManifest(branch, target)
-        elif 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env:
-          target = '%s-%s' % (env['TARGET_PRODUCT'],
-                              env['TARGET_BUILD_VARIANT'])
+        elif ('TARGET_PRODUCT' in os.environ and
+              'TARGET_BUILD_VARIANT' in os.environ):
+          target = '%s-%s' % (os.environ['TARGET_PRODUCT'],
+                              os.environ['TARGET_BUILD_VARIANT'])
           [success, manifest_str] = server.GetApprovedManifest(branch, target)
         else:
           [success, manifest_str] = server.GetApprovedManifest(branch)
@@ -833,12 +870,15 @@ later is required to fix a server side protocol bug.
833 """Fetch & update the local manifest project.""" 870 """Fetch & update the local manifest project."""
834 if not opt.local_only: 871 if not opt.local_only:
835 start = time.time() 872 start = time.time()
836 success = mp.Sync_NetworkHalf(quiet=opt.quiet, 873 success = mp.Sync_NetworkHalf(quiet=opt.quiet, verbose=opt.verbose,
837 current_branch_only=opt.current_branch_only, 874 current_branch_only=self._GetCurrentBranchOnly(opt),
838 no_tags=opt.no_tags, 875 force_sync=opt.force_sync,
876 tags=opt.tags,
839 optimized_fetch=opt.optimized_fetch, 877 optimized_fetch=opt.optimized_fetch,
878 retry_fetches=opt.retry_fetches,
840 submodules=self.manifest.HasSubmodules, 879 submodules=self.manifest.HasSubmodules,
841 clone_filter=self.manifest.CloneFilter) 880 clone_filter=self.manifest.CloneFilter,
881 partial_clone_exclude=self.manifest.PartialCloneExclude)
842 finish = time.time() 882 finish = time.time()
843 self.event_log.AddSync(mp, event_log.TASK_SYNC_NETWORK, 883 self.event_log.AddSync(mp, event_log.TASK_SYNC_NETWORK,
844 start, finish, success) 884 start, finish, success)
@@ -852,7 +892,7 @@ later is required to fix a server side protocol bug.
                              start, time.time(), clean)
     if not clean:
       sys.exit(1)
-    self._ReloadManifest(opt.manifest_name)
+    self._ReloadManifest(manifest_name)
     if opt.jobs is None:
       self.jobs = self.manifest.default.sync_j
 
@@ -886,7 +926,10 @@ later is required to fix a server side protocol bug.
 
     manifest_name = opt.manifest_name
     smart_sync_manifest_path = os.path.join(
         self.manifest.manifestProject.worktree, 'smart_sync_override.xml')
+
+    if opt.clone_bundle is None:
+      opt.clone_bundle = self.manifest.CloneBundle
 
     if opt.smart_sync or opt.smart_tag:
       manifest_name = self._SmartSyncSetup(opt, smart_sync_manifest_path)
@@ -898,8 +941,17 @@ later is required to fix a server side protocol bug.
         print('error: failed to remove existing smart sync override manifest: %s' %
               e, file=sys.stderr)
 
+    err_event = multiprocessing.Event()
+
     rp = self.manifest.repoProject
     rp.PreSync()
+    cb = rp.CurrentBranch
+    if cb:
+      base = rp.GetBranch(cb).merge
+      if not base or not base.startswith('refs/heads/'):
+        print('warning: repo is not tracking a remote branch, so it will not '
+              'receive updates; run `repo init --repo-rev=stable` to fix.',
+              file=sys.stderr)
 
     mp = self.manifest.manifestProject
     mp.PreSync()
@@ -907,7 +959,21 @@ later is required to fix a server side protocol bug.
     if opt.repo_upgraded:
       _PostRepoUpgrade(self.manifest, quiet=opt.quiet)
 
-    self._UpdateManifestProject(opt, mp, manifest_name)
+    if not opt.mp_update:
+      print('Skipping update of local manifest project.')
+    else:
+      self._UpdateManifestProject(opt, mp, manifest_name)
+
+    load_local_manifests = not self.manifest.HasLocalManifests
+    use_superproject = git_superproject.UseSuperproject(opt, self.manifest)
+    superproject_logging_data = {
+        'superproject': use_superproject,
+        'haslocalmanifests': bool(self.manifest.HasLocalManifests),
+        'hassuperprojecttag': bool(self.manifest.superproject),
+    }
+    if use_superproject:
+      manifest_name = self._UpdateProjectsRevisionId(
+          opt, args, load_local_manifests, superproject_logging_data) or opt.manifest_name
 
     if self.gitc_manifest:
       gitc_manifest_projects = self.GetProjects(args,
@@ -948,56 +1014,92 @@ later is required to fix a server side protocol bug.
                                     missing_ok=True,
                                     submodules_ok=opt.fetch_submodules)
 
+    err_network_sync = False
+    err_update_projects = False
+
     self._fetch_times = _FetchTimes(self.manifest)
     if not opt.local_only:
-      to_fetch = []
-      now = time.time()
-      if _ONE_DAY_S <= (now - rp.LastFetch):
-        to_fetch.append(rp)
-      to_fetch.extend(all_projects)
-      to_fetch.sort(key=self._fetch_times.Get, reverse=True)
-
-      fetched = self._Fetch(to_fetch, opt)
-      _PostRepoFetch(rp, opt.no_repo_verify)
+      with multiprocessing.Manager() as manager:
+        with ssh.ProxyManager(manager) as ssh_proxy:
+          # Initialize the socket dir once in the parent.
+          ssh_proxy.sock()
+          all_projects = self._FetchMain(opt, args, all_projects, err_event,
+                                         manifest_name, load_local_manifests,
+                                         ssh_proxy)
+
       if opt.network_only:
-        # bail out now; the rest touches the working tree
         return
 
-      # Iteratively fetch missing and/or nested unregistered submodules
-      previously_missing_set = set()
-      while True:
-        self._ReloadManifest(manifest_name)
-        all_projects = self.GetProjects(args,
-                                        missing_ok=True,
-                                        submodules_ok=opt.fetch_submodules)
-        missing = []
-        for project in all_projects:
-          if project.gitdir not in fetched:
-            missing.append(project)
-        if not missing:
-          break
-        # Stop us from non-stopped fetching actually-missing repos: If set of
-        # missing repos has not been changed from last fetch, we break.
-        missing_set = set(p.name for p in missing)
-        if previously_missing_set == missing_set:
-          break
-        previously_missing_set = missing_set
-        fetched.update(self._Fetch(missing, opt))
+      # If we saw an error, exit with code 1 so that other scripts can check.
+      if err_event.is_set():
+        err_network_sync = True
+        if opt.fail_fast:
+          print('\nerror: Exited sync due to fetch errors.\n'
+                'Local checkouts *not* updated. Resolve network issues & '
+                'retry.\n'
+                '`repo sync -l` will update some local checkouts.',
+                file=sys.stderr)
+          sys.exit(1)
 
     if self.manifest.IsMirror or self.manifest.IsArchive:
       # bail out now, we have no working tree
       return
 
     if self.UpdateProjectList(opt):
-      sys.exit(1)
+      err_event.set()
+      err_update_projects = True
+      if opt.fail_fast:
+        print('\nerror: Local checkouts *not* updated.', file=sys.stderr)
+        sys.exit(1)
 
-    self._Checkout(all_projects, opt)
+    err_update_linkfiles = not self.UpdateCopyLinkfileList()
+    if err_update_linkfiles:
+      err_event.set()
+      if opt.fail_fast:
+        print('\nerror: Local update copyfile or linkfile failed.', file=sys.stderr)
+        sys.exit(1)
+
+    err_results = []
+    # NB: We don't exit here because this is the last step.
+    err_checkout = not self._Checkout(all_projects, opt, err_results)
+    if err_checkout:
+      err_event.set()
 
     # If there's a notice that's supposed to print at the end of the sync, print
     # it now...
     if self.manifest.notice:
       print(self.manifest.notice)
 
+    # If we saw an error, exit with code 1 so that other scripts can check.
+    if err_event.is_set():
+      print('\nerror: Unable to fully sync the tree.', file=sys.stderr)
+      if err_network_sync:
+        print('error: Downloading network changes failed.', file=sys.stderr)
+      if err_update_projects:
+        print('error: Updating local project lists failed.', file=sys.stderr)
+      if err_update_linkfiles:
+        print('error: Updating copyfiles or linkfiles failed.', file=sys.stderr)
+      if err_checkout:
+        print('error: Checking out local projects failed.', file=sys.stderr)
+      if err_results:
+        print('Failing repos:\n%s' % '\n'.join(err_results), file=sys.stderr)
+      print('Try re-running with "-j1 --fail-fast" to exit at the first error.',
+            file=sys.stderr)
+      sys.exit(1)
+
+    # Log the previous sync analysis state from the config.
+    self.git_event_log.LogDataConfigEvents(mp.config.GetSyncAnalysisStateData(),
+                                           'previous_sync_state')
+
+    # Update and log with the new sync analysis state.
+    mp.config.UpdateSyncAnalysisState(opt, superproject_logging_data)
+    self.git_event_log.LogDataConfigEvents(mp.config.GetSyncAnalysisStateData(),
+                                           'current_sync_state')
+
+    if not opt.quiet:
+      print('repo sync has finished successfully.')
+
+
 def _PostRepoUpgrade(manifest, quiet=False):
   wrapper = Wrapper()
   if wrapper.NeedSetupGnuPG():
@@ -1006,15 +1108,29 @@ def _PostRepoUpgrade(manifest, quiet=False):
     if project.Exists:
       project.PostRepoUpgrade()
 
-def _PostRepoFetch(rp, no_repo_verify=False, verbose=False):
+
+def _PostRepoFetch(rp, repo_verify=True, verbose=False):
   if rp.HasChanges:
     print('info: A new version of repo is available', file=sys.stderr)
-    print(file=sys.stderr)
-    if no_repo_verify or _VerifyTag(rp):
-      syncbuf = SyncBuffer(rp.config)
-      rp.Sync_LocalHalf(syncbuf)
-      if not syncbuf.Finish():
-        sys.exit(1)
+    wrapper = Wrapper()
+    try:
+      rev = rp.bare_git.describe(rp.GetRevisionId())
+    except GitError:
+      rev = None
+    _, new_rev = wrapper.check_repo_rev(rp.gitdir, rev, repo_verify=repo_verify)
+    # See if we're held back due to missing signed tag.
+    current_revid = rp.bare_git.rev_parse('HEAD')
+    new_revid = rp.bare_git.rev_parse('--verify', new_rev)
+    if current_revid != new_revid:
+      # We want to switch to the new rev, but also not trash any uncommitted
+      # changes.  This helps with local testing/hacking.
+      # If a local change has been made, we will throw that away.
+      # We also have to make sure this will switch to an older commit if that's
+      # the latest tag in order to support release rollback.
+      try:
+        rp.work_git.reset('--keep', new_rev)
+      except GitError as e:
+        sys.exit(str(e))
     print('info: Restarting repo with latest version', file=sys.stderr)
     raise RepoChangedException(['--repo-upgraded'])
   else:
@@ -1024,53 +1140,6 @@ def _PostRepoFetch(rp, no_repo_verify=False, verbose=False):
     print('repo version %s is current' % rp.work_git.describe(HEAD),
           file=sys.stderr)
 
-def _VerifyTag(project):
-  gpg_dir = os.path.expanduser('~/.repoconfig/gnupg')
-  if not os.path.exists(gpg_dir):
-    print('warning: GnuPG was not available during last "repo init"\n'
-          'warning: Cannot automatically authenticate repo."""',
-          file=sys.stderr)
-    return True
-
-  try:
-    cur = project.bare_git.describe(project.GetRevisionId())
-  except GitError:
-    cur = None
-
-  if not cur \
-     or re.compile(r'^.*-[0-9]{1,}-g[0-9a-f]{1,}$').match(cur):
-    rev = project.revisionExpr
-    if rev.startswith(R_HEADS):
-      rev = rev[len(R_HEADS):]
-
-    print(file=sys.stderr)
-    print("warning: project '%s' branch '%s' is not signed"
-          % (project.name, rev), file=sys.stderr)
-    return False
-
-  env = os.environ.copy()
-  env['GIT_DIR'] = project.gitdir.encode()
-  env['GNUPGHOME'] = gpg_dir.encode()
-
-  cmd = [GIT, 'tag', '-v', cur]
-  proc = subprocess.Popen(cmd,
-                          stdout = subprocess.PIPE,
-                          stderr = subprocess.PIPE,
-                          env = env)
-  out = proc.stdout.read()
-  proc.stdout.close()
-
-  err = proc.stderr.read()
-  proc.stderr.close()
-
-  if proc.wait() != 0:
-    print(file=sys.stderr)
-    print(out, file=sys.stderr)
-    print(err, file=sys.stderr)
-    print(file=sys.stderr)
-    return False
-  return True
-
 
 class _FetchTimes(object):
   _ALPHA = 0.5
@@ -1090,7 +1159,7 @@ class _FetchTimes(object):
     old = self._times.get(name, t)
     self._seen.add(name)
     a = self._ALPHA
-    self._times[name] = (a*t) + ((1-a) * old)
+    self._times[name] = (a * t) + ((1 - a) * old)
 
   def _Load(self):
     if self._times is None:
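
The reformatted line above is an exponential moving average: with _ALPHA = 0.5, each new fetch duration is blended half-and-half with the stored estimate, which is what lets `to_fetch.sort(key=self._fetch_times.Get, reverse=True)` schedule historically slow projects first. A tiny standalone illustration of the same arithmetic:

# Exponential moving average as used by _FetchTimes.Set (alpha from the diff).
ALPHA = 0.5

def update(old, t, a=ALPHA):
    return (a * t) + ((1 - a) * old)

est = 10.0                 # previous estimate, in seconds
est = update(est, 30.0)    # one slow fetch pulls the estimate up...
assert est == 20.0
est = update(est, 10.0)    # ...and it decays back just as quickly.
assert est == 15.0
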
@@ -1098,10 +1167,7 @@ class _FetchTimes(object):
       with open(self._path) as f:
         self._times = json.load(f)
     except (IOError, ValueError):
-      try:
-        platform_utils.remove(self._path)
-      except OSError:
-        pass
+      platform_utils.remove(self._path, missing_ok=True)
       self._times = {}
 
   def Save(self):
@@ -1119,15 +1185,14 @@ class _FetchTimes(object):
       with open(self._path, 'w') as f:
         json.dump(self._times, f, indent=2)
     except (IOError, TypeError):
-      try:
-        platform_utils.remove(self._path)
-      except OSError:
-        pass
+      platform_utils.remove(self._path, missing_ok=True)
 
 # This is a replacement for xmlrpc.client.Transport using urllib2
 # and supporting persistent-http[s]. It cannot change hosts from
 # request to request like the normal transport, the real url
 # is passed during initialization.
+
+
 class PersistentTransport(xmlrpc.client.Transport):
   def __init__(self, orig_host):
     self.orig_host = orig_host
@@ -1138,7 +1203,7 @@ class PersistentTransport(xmlrpc.client.Transport):
     # Since we're only using them for HTTP, copy the file temporarily,
     # stripping those prefixes away.
     if cookiefile:
-      tmpcookiefile = tempfile.NamedTemporaryFile()
+      tmpcookiefile = tempfile.NamedTemporaryFile(mode='w')
       tmpcookiefile.write("# HTTP Cookie File")
       try:
         with open(cookiefile) as f:
@@ -1162,7 +1227,7 @@ class PersistentTransport(xmlrpc.client.Transport):
       if proxy:
         proxyhandler = urllib.request.ProxyHandler({
             "http": proxy,
-            "https": proxy })
+            "https": proxy})
 
       opener = urllib.request.build_opener(
           urllib.request.HTTPCookieProcessor(cookiejar),
@@ -1219,4 +1284,3 @@ class PersistentTransport(xmlrpc.client.Transport):
 
   def close(self):
     pass
-
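
PersistentTransport above plugs into the standard xmlrpc.client entry point that the smart-sync code already uses (`xmlrpc.client.Server(manifest_server, transport=transport)`). A minimal sketch of wiring any custom transport in — the server URL, class name, and the commented method call are hypothetical stand-ins, not repo's API:

# Illustrative wiring only; 'https://example.com/manifestserver' and
# GetApprovedManifest stand in for a real smart-sync endpoint.
import xmlrpc.client

class CountingTransport(xmlrpc.client.Transport):
    """Hypothetical transport that counts outgoing requests."""
    def __init__(self):
        super().__init__()
        self.requests = 0

    def request(self, host, handler, request_body, verbose=False):
        self.requests += 1
        return super().request(host, handler, request_body, verbose)

transport = CountingTransport()
server = xmlrpc.client.Server('https://example.com/manifestserver',
                              transport=transport)
# [success, manifest_str] = server.GetApprovedManifest(branch)
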