Diffstat (limited to 'project.py')
-rw-r--r-- | project.py | 1441 |
1 files changed, 747 insertions, 694 deletions
@@ -1,5 +1,3 @@ | |||
1 | # -*- coding:utf-8 -*- | ||
2 | # | ||
3 | # Copyright (C) 2008 The Android Open Source Project | 1 | # Copyright (C) 2008 The Android Open Source Project |
4 | # | 2 | # |
5 | # Licensed under the Apache License, Version 2.0 (the "License"); | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); |
@@ -14,11 +12,9 @@ | |||
14 | # See the License for the specific language governing permissions and | 12 | # See the License for the specific language governing permissions and |
15 | # limitations under the License. | 13 | # limitations under the License. |
16 | 14 | ||
17 | from __future__ import print_function | ||
18 | import errno | 15 | import errno |
19 | import filecmp | 16 | import filecmp |
20 | import glob | 17 | import glob |
21 | import json | ||
22 | import os | 18 | import os |
23 | import random | 19 | import random |
24 | import re | 20 | import re |
@@ -29,36 +25,33 @@ import sys | |||
29 | import tarfile | 25 | import tarfile |
30 | import tempfile | 26 | import tempfile |
31 | import time | 27 | import time |
32 | import traceback | 28 | import urllib.parse |
33 | 29 | ||
34 | from color import Coloring | 30 | from color import Coloring |
35 | from git_command import GitCommand, git_require | 31 | from git_command import GitCommand, git_require |
36 | from git_config import GitConfig, IsId, GetSchemeFromUrl, GetUrlCookieFile, \ | 32 | from git_config import GitConfig, IsId, GetSchemeFromUrl, GetUrlCookieFile, \ |
37 | ID_RE | 33 | ID_RE |
38 | from error import GitError, HookError, UploadError, DownloadError | 34 | from error import GitError, UploadError, DownloadError |
39 | from error import ManifestInvalidRevisionError | 35 | from error import ManifestInvalidRevisionError, ManifestInvalidPathError |
40 | from error import NoManifestException | 36 | from error import NoManifestException |
41 | import platform_utils | 37 | import platform_utils |
42 | import progress | 38 | import progress |
43 | from repo_trace import IsTrace, Trace | 39 | from repo_trace import IsTrace, Trace |
44 | 40 | ||
45 | from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M | 41 | from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M, R_WORKTREE_M |
42 | |||
46 | 43 | ||
47 | from pyversion import is_python3 | 44 | # Maximum sleep time allowed during retries. |
48 | if is_python3(): | 45 | MAXIMUM_RETRY_SLEEP_SEC = 3600.0 |
49 | import urllib.parse | 46 | # +-10% random jitter is added to each Fetches retry sleep duration. |
50 | else: | 47 | RETRY_JITTER_PERCENT = 0.1 |
51 | import imp | ||
52 | import urlparse | ||
53 | urllib = imp.new_module('urllib') | ||
54 | urllib.parse = urlparse | ||
55 | input = raw_input | ||
56 | 48 | ||
57 | 49 | ||
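A minimal sketch of how a capped, jittered retry sleep could be derived from the two new constants above (the helper name and base delay are illustrative assumptions, not part of this change):

import random

def _next_retry_sleep(attempt, base_sec=2.0):
  # Exponential backoff, capped at the maximum allowed sleep.
  delay = min(base_sec * (2 ** attempt), MAXIMUM_RETRY_SLEEP_SEC)
  # Add +/-10% random jitter so parallel fetches don't retry in lockstep.
  jitter = delay * RETRY_JITTER_PERCENT
  return delay + random.uniform(-jitter, jitter)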
58 | def _lwrite(path, content): | 50 | def _lwrite(path, content): |
59 | lock = '%s.lock' % path | 51 | lock = '%s.lock' % path |
60 | 52 | ||
61 | with open(lock, 'w') as fd: | 53 | # Maintain Unix line endings on all OS's to match git behavior. |
54 | with open(lock, 'w', newline='\n') as fd: | ||
62 | fd.write(content) | 55 | fd.write(content) |
63 | 56 | ||
64 | try: | 57 | try: |
@@ -85,6 +78,7 @@ def not_rev(r): | |||
85 | def sq(r): | 78 | def sq(r): |
86 | return "'" + r.replace("'", "'\''") + "'" | 79 | return "'" + r.replace("'", "'\''") + "'" |
87 | 80 | ||
81 | |||
88 | _project_hook_list = None | 82 | _project_hook_list = None |
89 | 83 | ||
90 | 84 | ||
@@ -197,18 +191,22 @@ class ReviewableBranch(object): | |||
197 | return self._base_exists | 191 | return self._base_exists |
198 | 192 | ||
199 | def UploadForReview(self, people, | 193 | def UploadForReview(self, people, |
194 | dryrun=False, | ||
200 | auto_topic=False, | 195 | auto_topic=False, |
201 | draft=False, | 196 | hashtags=(), |
197 | labels=(), | ||
202 | private=False, | 198 | private=False, |
203 | notify=None, | 199 | notify=None, |
204 | wip=False, | 200 | wip=False, |
205 | dest_branch=None, | 201 | dest_branch=None, |
206 | validate_certs=True, | 202 | validate_certs=True, |
207 | push_options=None): | 203 | push_options=None): |
208 | self.project.UploadForReview(self.name, | 204 | self.project.UploadForReview(branch=self.name, |
209 | people, | 205 | people=people, |
206 | dryrun=dryrun, | ||
210 | auto_topic=auto_topic, | 207 | auto_topic=auto_topic, |
211 | draft=draft, | 208 | hashtags=hashtags, |
209 | labels=labels, | ||
212 | private=private, | 210 | private=private, |
213 | notify=notify, | 211 | notify=notify, |
214 | wip=wip, | 212 | wip=wip, |
@@ -234,7 +232,7 @@ class ReviewableBranch(object): | |||
234 | class StatusColoring(Coloring): | 232 | class StatusColoring(Coloring): |
235 | 233 | ||
236 | def __init__(self, config): | 234 | def __init__(self, config): |
237 | Coloring.__init__(self, config, 'status') | 235 | super().__init__(config, 'status') |
238 | self.project = self.printer('header', attr='bold') | 236 | self.project = self.printer('header', attr='bold') |
239 | self.branch = self.printer('header', attr='bold') | 237 | self.branch = self.printer('header', attr='bold') |
240 | self.nobranch = self.printer('nobranch', fg='red') | 238 | self.nobranch = self.printer('nobranch', fg='red') |
@@ -248,30 +246,104 @@ class StatusColoring(Coloring): | |||
248 | class DiffColoring(Coloring): | 246 | class DiffColoring(Coloring): |
249 | 247 | ||
250 | def __init__(self, config): | 248 | def __init__(self, config): |
251 | Coloring.__init__(self, config, 'diff') | 249 | super().__init__(config, 'diff') |
252 | self.project = self.printer('header', attr='bold') | 250 | self.project = self.printer('header', attr='bold') |
253 | self.fail = self.printer('fail', fg='red') | 251 | self.fail = self.printer('fail', fg='red') |
254 | 252 | ||
255 | 253 | ||
256 | class _Annotation(object): | 254 | class Annotation(object): |
257 | 255 | ||
258 | def __init__(self, name, value, keep): | 256 | def __init__(self, name, value, keep): |
259 | self.name = name | 257 | self.name = name |
260 | self.value = value | 258 | self.value = value |
261 | self.keep = keep | 259 | self.keep = keep |
262 | 260 | ||
261 | def __eq__(self, other): | ||
262 | if not isinstance(other, Annotation): | ||
263 | return False | ||
264 | return self.__dict__ == other.__dict__ | ||
265 | |||
266 | def __lt__(self, other): | ||
267 | # This exists just so that lists of Annotation objects can be sorted, for | ||
268 | # use in comparisons. | ||
269 | if not isinstance(other, Annotation): | ||
270 | raise ValueError('comparison is not between two Annotation objects') | ||
271 | if self.name == other.name: | ||
272 | if self.value == other.value: | ||
273 | return self.keep < other.keep | ||
274 | return self.value < other.value | ||
275 | return self.name < other.name | ||
276 | |||
277 | |||
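Because Annotation now defines __eq__ and __lt__, lists of annotations can be sorted and compared order-insensitively; a small illustrative use (values are made up):

a = [Annotation('rev', '2', 'true'), Annotation('rev', '1', 'true')]
b = [Annotation('rev', '1', 'true'), Annotation('rev', '2', 'true')]
assert sorted(a) == sorted(b)  # same annotations, different original order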
278 | def _SafeExpandPath(base, subpath, skipfinal=False): | ||
279 | """Make sure |subpath| is completely safe under |base|. | ||
280 | |||
281 | We make sure no intermediate symlinks are traversed, and that the final path | ||
282 | is not a special file (e.g. not a socket or fifo). | ||
283 | |||
284 | NB: We rely on a number of paths already being filtered out while parsing the | ||
285 | manifest. See the validation logic in manifest_xml.py for more details. | ||
286 | """ | ||
287 | # Split up the path by its components. We can't use os.path.sep exclusively | ||
288 | # as some platforms (like Windows) will convert / to \ and that bypasses all | ||
289 | # our constructed logic here. Especially since manifest authors only use | ||
290 | # / in their paths. | ||
291 | resep = re.compile(r'[/%s]' % re.escape(os.path.sep)) | ||
292 | components = resep.split(subpath) | ||
293 | if skipfinal: | ||
294 | # Whether the caller handles the final component itself. | ||
295 | finalpart = components.pop() | ||
296 | |||
297 | path = base | ||
298 | for part in components: | ||
299 | if part in {'.', '..'}: | ||
300 | raise ManifestInvalidPathError( | ||
301 | '%s: "%s" not allowed in paths' % (subpath, part)) | ||
302 | |||
303 | path = os.path.join(path, part) | ||
304 | if platform_utils.islink(path): | ||
305 | raise ManifestInvalidPathError( | ||
306 | '%s: traversing symlinks not allowed' % (path,)) | ||
307 | |||
308 | if os.path.exists(path): | ||
309 | if not os.path.isfile(path) and not platform_utils.isdir(path): | ||
310 | raise ManifestInvalidPathError( | ||
311 | '%s: only regular files & directories allowed' % (path,)) | ||
312 | |||
313 | if skipfinal: | ||
314 | path = os.path.join(path, finalpart) | ||
315 | |||
316 | return path | ||
317 | |||
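A hedged sketch of how _SafeExpandPath behaves for a few inputs (all paths are hypothetical):

base = '/client/src/project'
_SafeExpandPath(base, 'docs/README.md')
# -> '/client/src/project/docs/README.md', each component validated in turn
_SafeExpandPath(base, '../escape')
# raises ManifestInvalidPathError: '.' and '..' are never allowed
_SafeExpandPath(base, 'out/new.txt', skipfinal=True)
# validates 'out' but leaves the final 'new.txt' for the caller to handle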
263 | 318 | ||
264 | class _CopyFile(object): | 319 | class _CopyFile(object): |
320 | """Container for <copyfile> manifest element.""" | ||
321 | |||
322 | def __init__(self, git_worktree, src, topdir, dest): | ||
323 | """Register a <copyfile> request. | ||
265 | 324 | ||
266 | def __init__(self, src, dest, abssrc, absdest): | 325 | Args: |
326 | git_worktree: Absolute path to the git project checkout. | ||
327 | src: Relative path under |git_worktree| of file to read. | ||
328 | topdir: Absolute path to the top of the repo client checkout. | ||
329 | dest: Relative path under |topdir| of file to write. | ||
330 | """ | ||
331 | self.git_worktree = git_worktree | ||
332 | self.topdir = topdir | ||
267 | self.src = src | 333 | self.src = src |
268 | self.dest = dest | 334 | self.dest = dest |
269 | self.abs_src = abssrc | ||
270 | self.abs_dest = absdest | ||
271 | 335 | ||
272 | def _Copy(self): | 336 | def _Copy(self): |
273 | src = self.abs_src | 337 | src = _SafeExpandPath(self.git_worktree, self.src) |
274 | dest = self.abs_dest | 338 | dest = _SafeExpandPath(self.topdir, self.dest) |
339 | |||
340 | if platform_utils.isdir(src): | ||
341 | raise ManifestInvalidPathError( | ||
342 | '%s: copying from directory not supported' % (self.src,)) | ||
343 | if platform_utils.isdir(dest): | ||
344 | raise ManifestInvalidPathError( | ||
345 | '%s: copying to directory not allowed' % (self.dest,)) | ||
346 | |||
275 | # copy file if it does not exist or is out of date | 347 | # copy file if it does not exist or is out of date |
276 | if not os.path.exists(dest) or not filecmp.cmp(src, dest): | 348 | if not os.path.exists(dest) or not filecmp.cmp(src, dest): |
277 | try: | 349 | try: |
@@ -292,13 +364,21 @@ class _CopyFile(object): | |||
292 | 364 | ||
293 | 365 | ||
294 | class _LinkFile(object): | 366 | class _LinkFile(object): |
367 | """Container for <linkfile> manifest element.""" | ||
295 | 368 | ||
296 | def __init__(self, git_worktree, src, dest, relsrc, absdest): | 369 | def __init__(self, git_worktree, src, topdir, dest): |
370 | """Register a <linkfile> request. | ||
371 | |||
372 | Args: | ||
373 | git_worktree: Absolute path to the git project checkout. | ||
374 | src: Target of symlink relative to path under |git_worktree|. | ||
375 | topdir: Absolute path to the top of the repo client checkout. | ||
376 | dest: Relative path under |topdir| of symlink to create. | ||
377 | """ | ||
297 | self.git_worktree = git_worktree | 378 | self.git_worktree = git_worktree |
379 | self.topdir = topdir | ||
298 | self.src = src | 380 | self.src = src |
299 | self.dest = dest | 381 | self.dest = dest |
300 | self.src_rel_to_dest = relsrc | ||
301 | self.abs_dest = absdest | ||
302 | 382 | ||
303 | def __linkIt(self, relSrc, absDest): | 383 | def __linkIt(self, relSrc, absDest): |
304 | # link file if it does not exist or is out of date | 384 | # link file if it does not exist or is out of date |
@@ -316,35 +396,42 @@ class _LinkFile(object): | |||
316 | _error('Cannot link file %s to %s', relSrc, absDest) | 396 | _error('Cannot link file %s to %s', relSrc, absDest) |
317 | 397 | ||
318 | def _Link(self): | 398 | def _Link(self): |
319 | """Link the self.rel_src_to_dest and self.abs_dest. Handles wild cards | 399 | """Link the self.src & self.dest paths. |
320 | on the src linking all of the files in the source in to the destination | 400 | |
321 | directory. | 401 | Handles wild cards on the src linking all of the files in the source in to |
402 | the destination directory. | ||
322 | """ | 403 | """ |
323 | # We use the absSrc to handle the situation where the current directory | 404 | # Some people use src="." to create stable links to projects. Let's allow |
324 | # is not the root of the repo | 405 | # that but reject all other uses of "." to keep things simple. |
325 | absSrc = os.path.join(self.git_worktree, self.src) | 406 | if self.src == '.': |
326 | if os.path.exists(absSrc): | 407 | src = self.git_worktree |
327 | # Entity exists so just a simple one to one link operation | 408 | else: |
328 | self.__linkIt(self.src_rel_to_dest, self.abs_dest) | 409 | src = _SafeExpandPath(self.git_worktree, self.src) |
410 | |||
411 | if not glob.has_magic(src): | ||
412 | # Entity does not contain a wild card, so this is a simple one-to-one link operation. | ||
413 | dest = _SafeExpandPath(self.topdir, self.dest, skipfinal=True) | ||
414 | # dest & src are absolute paths at this point. Make sure the target of | ||
415 | # the symlink is relative in the context of the repo client checkout. | ||
416 | relpath = os.path.relpath(src, os.path.dirname(dest)) | ||
417 | self.__linkIt(relpath, dest) | ||
329 | else: | 418 | else: |
330 | # Entity doesn't exist assume there is a wild card | 419 | dest = _SafeExpandPath(self.topdir, self.dest) |
331 | absDestDir = self.abs_dest | 420 | # Entity contains a wild card. |
332 | if os.path.exists(absDestDir) and not platform_utils.isdir(absDestDir): | 421 | if os.path.exists(dest) and not platform_utils.isdir(dest): |
333 | _error('Link error: src with wildcard, %s must be a directory', | 422 | _error('Link error: src with wildcard, %s must be a directory', dest) |
334 | absDestDir) | ||
335 | else: | 423 | else: |
336 | absSrcFiles = glob.glob(absSrc) | 424 | for absSrcFile in glob.glob(src): |
337 | for absSrcFile in absSrcFiles: | ||
338 | # Create a relative path from source dir to destination dir | 425 | # Create a relative path from source dir to destination dir |
339 | absSrcDir = os.path.dirname(absSrcFile) | 426 | absSrcDir = os.path.dirname(absSrcFile) |
340 | relSrcDir = os.path.relpath(absSrcDir, absDestDir) | 427 | relSrcDir = os.path.relpath(absSrcDir, dest) |
341 | 428 | ||
342 | # Get the source file name | 429 | # Get the source file name |
343 | srcFile = os.path.basename(absSrcFile) | 430 | srcFile = os.path.basename(absSrcFile) |
344 | 431 | ||
345 | # Now form the final full paths to srcFile. They will be | 432 | # Now form the final full paths to srcFile. They will be |
346 | # absolute for the destination and relative for the source. | 433 | # absolute for the destination and relative for the source. |
347 | absDest = os.path.join(absDestDir, srcFile) | 434 | absDest = os.path.join(dest, srcFile) |
348 | relSrc = os.path.join(relSrcDir, srcFile) | 435 | relSrc = os.path.join(relSrcDir, srcFile) |
349 | self.__linkIt(relSrc, absDest) | 436 | self.__linkIt(relSrc, absDest) |
350 | 437 | ||
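For the wildcard branch above, each glob match gets a symlink in the destination directory whose target is expressed relative to that directory; a small sketch of the path math (paths are hypothetical):

import os

dest = '/client/out/include'               # expanded <linkfile> dest directory
absSrcFile = '/client/proj/headers/foo.h'  # one glob match under the worktree
relSrcDir = os.path.relpath(os.path.dirname(absSrcFile), dest)   # '../../proj/headers'
relSrc = os.path.join(relSrcDir, os.path.basename(absSrcFile))   # '../../proj/headers/foo.h'
absDest = os.path.join(dest, 'foo.h')      # where the symlink gets created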
@@ -368,405 +455,6 @@ class RemoteSpec(object): | |||
368 | self.fetchUrl = fetchUrl | 455 | self.fetchUrl = fetchUrl |
369 | 456 | ||
370 | 457 | ||
371 | class RepoHook(object): | ||
372 | |||
373 | """A RepoHook contains information about a script to run as a hook. | ||
374 | |||
375 | Hooks are used to run a python script before running an upload (for instance, | ||
376 | to run presubmit checks). Eventually, we may have hooks for other actions. | ||
377 | |||
378 | This shouldn't be confused with files in the 'repo/hooks' directory. Those | ||
379 | files are copied into each '.git/hooks' folder for each project. Repo-level | ||
380 | hooks are associated instead with repo actions. | ||
381 | |||
382 | Hooks are always python. When a hook is run, we will load the hook into the | ||
383 | interpreter and execute its main() function. | ||
384 | """ | ||
385 | |||
386 | def __init__(self, | ||
387 | hook_type, | ||
388 | hooks_project, | ||
389 | topdir, | ||
390 | manifest_url, | ||
391 | abort_if_user_denies=False): | ||
392 | """RepoHook constructor. | ||
393 | |||
394 | Params: | ||
395 | hook_type: A string representing the type of hook. This is also used | ||
396 | to figure out the name of the file containing the hook. For | ||
397 | example: 'pre-upload'. | ||
398 | hooks_project: The project containing the repo hooks. If you have a | ||
399 | manifest, this is manifest.repo_hooks_project. OK if this is None, | ||
400 | which will make the hook a no-op. | ||
401 | topdir: Repo's top directory (the one containing the .repo directory). | ||
402 | Scripts will run with CWD as this directory. If you have a manifest, | ||
403 | this is manifest.topdir | ||
404 | manifest_url: The URL to the manifest git repo. | ||
405 | abort_if_user_denies: If True, we'll throw a HookError() if the user | ||
406 | doesn't allow us to run the hook. | ||
407 | """ | ||
408 | self._hook_type = hook_type | ||
409 | self._hooks_project = hooks_project | ||
410 | self._manifest_url = manifest_url | ||
411 | self._topdir = topdir | ||
412 | self._abort_if_user_denies = abort_if_user_denies | ||
413 | |||
414 | # Store the full path to the script for convenience. | ||
415 | if self._hooks_project: | ||
416 | self._script_fullpath = os.path.join(self._hooks_project.worktree, | ||
417 | self._hook_type + '.py') | ||
418 | else: | ||
419 | self._script_fullpath = None | ||
420 | |||
421 | def _GetHash(self): | ||
422 | """Return a hash of the contents of the hooks directory. | ||
423 | |||
424 | We'll just use git to do this. This hash has the property that if anything | ||
425 | # changes in the directory, we will return a different hash. | ||
426 | |||
427 | SECURITY CONSIDERATION: | ||
428 | This hash only represents the contents of files in the hook directory, not | ||
429 | any other files imported or called by hooks. Changes to imported files | ||
430 | can change the script behavior without affecting the hash. | ||
431 | |||
432 | Returns: | ||
433 | A string representing the hash. This will always be ASCII so that it can | ||
434 | be printed to the user easily. | ||
435 | """ | ||
436 | assert self._hooks_project, "Must have hooks to calculate their hash." | ||
437 | |||
438 | # We will use the work_git object rather than just calling GetRevisionId(). | ||
439 | # That gives us a hash of the latest checked in version of the files that | ||
440 | # the user will actually be executing. Specifically, GetRevisionId() | ||
441 | # doesn't appear to change even if a user checks out a different version | ||
442 | # of the hooks repo (via git checkout) nor if a user commits their own revs. | ||
443 | # | ||
444 | # NOTE: Local (non-committed) changes will not be factored into this hash. | ||
445 | # I think this is OK, since we're really only worried about warning the user | ||
446 | # about upstream changes. | ||
447 | return self._hooks_project.work_git.rev_parse('HEAD') | ||
448 | |||
449 | def _GetMustVerb(self): | ||
450 | """Return 'must' if the hook is required; 'should' if not.""" | ||
451 | if self._abort_if_user_denies: | ||
452 | return 'must' | ||
453 | else: | ||
454 | return 'should' | ||
455 | |||
456 | def _CheckForHookApproval(self): | ||
457 | """Check to see whether this hook has been approved. | ||
458 | |||
459 | We'll accept approval of manifest URLs if they're using secure transports. | ||
460 | This way the user can say they trust the manifest hoster. For insecure | ||
461 | hosts, we fall back to checking the hash of the hooks repo. | ||
462 | |||
463 | Note that we ask permission for each individual hook even though we use | ||
464 | the hash of all hooks when detecting changes. We'd like the user to be | ||
465 | able to approve / deny each hook individually. We only use the hash of all | ||
466 | hooks because there is no other easy way to detect changes to local imports. | ||
467 | |||
468 | Returns: | ||
469 | True if this hook is approved to run; False otherwise. | ||
470 | |||
471 | Raises: | ||
472 | HookError: Raised if the user doesn't approve and abort_if_user_denies | ||
473 | was passed to the constructor. | ||
474 | """ | ||
475 | if self._ManifestUrlHasSecureScheme(): | ||
476 | return self._CheckForHookApprovalManifest() | ||
477 | else: | ||
478 | return self._CheckForHookApprovalHash() | ||
479 | |||
480 | def _CheckForHookApprovalHelper(self, subkey, new_val, main_prompt, | ||
481 | changed_prompt): | ||
482 | """Check for approval for a particular attribute and hook. | ||
483 | |||
484 | Args: | ||
485 | subkey: The git config key under [repo.hooks.<hook_type>] to store the | ||
486 | last approved string. | ||
487 | new_val: The new value to compare against the last approved one. | ||
488 | main_prompt: Message to display to the user to ask for approval. | ||
489 | changed_prompt: Message explaining why we're re-asking for approval. | ||
490 | |||
491 | Returns: | ||
492 | True if this hook is approved to run; False otherwise. | ||
493 | |||
494 | Raises: | ||
495 | HookError: Raised if the user doesn't approve and abort_if_user_denies | ||
496 | was passed to the constructor. | ||
497 | """ | ||
498 | hooks_config = self._hooks_project.config | ||
499 | git_approval_key = 'repo.hooks.%s.%s' % (self._hook_type, subkey) | ||
500 | |||
501 | # Get the last value that the user approved for this hook; may be None. | ||
502 | old_val = hooks_config.GetString(git_approval_key) | ||
503 | |||
504 | if old_val is not None: | ||
505 | # User previously approved hook and asked not to be prompted again. | ||
506 | if new_val == old_val: | ||
507 | # Approval matched. We're done. | ||
508 | return True | ||
509 | else: | ||
510 | # Give the user a reason why we're prompting, since they last told | ||
511 | # us to "never ask again". | ||
512 | prompt = 'WARNING: %s\n\n' % (changed_prompt,) | ||
513 | else: | ||
514 | prompt = '' | ||
515 | |||
516 | # Prompt the user if we're on a tty; otherwise we'll assume "no". | ||
517 | if sys.stdout.isatty(): | ||
518 | prompt += main_prompt + ' (yes/always/NO)? ' | ||
519 | response = input(prompt).lower() | ||
520 | print() | ||
521 | |||
522 | # User is doing a one-time approval. | ||
523 | if response in ('y', 'yes'): | ||
524 | return True | ||
525 | elif response == 'always': | ||
526 | hooks_config.SetString(git_approval_key, new_val) | ||
527 | return True | ||
528 | |||
529 | # For anything else, we'll assume no approval. | ||
530 | if self._abort_if_user_denies: | ||
531 | raise HookError('You must allow the %s hook or use --no-verify.' % | ||
532 | self._hook_type) | ||
533 | |||
534 | return False | ||
535 | |||
536 | def _ManifestUrlHasSecureScheme(self): | ||
537 | """Check if the URI for the manifest is a secure transport.""" | ||
538 | secure_schemes = ('file', 'https', 'ssh', 'persistent-https', 'sso', 'rpc') | ||
539 | parse_results = urllib.parse.urlparse(self._manifest_url) | ||
540 | return parse_results.scheme in secure_schemes | ||
541 | |||
542 | def _CheckForHookApprovalManifest(self): | ||
543 | """Check whether the user has approved this manifest host. | ||
544 | |||
545 | Returns: | ||
546 | True if this hook is approved to run; False otherwise. | ||
547 | """ | ||
548 | return self._CheckForHookApprovalHelper( | ||
549 | 'approvedmanifest', | ||
550 | self._manifest_url, | ||
551 | 'Run hook scripts from %s' % (self._manifest_url,), | ||
552 | 'Manifest URL has changed since %s was allowed.' % (self._hook_type,)) | ||
553 | |||
554 | def _CheckForHookApprovalHash(self): | ||
555 | """Check whether the user has approved the hooks repo. | ||
556 | |||
557 | Returns: | ||
558 | True if this hook is approved to run; False otherwise. | ||
559 | """ | ||
560 | prompt = ('Repo %s run the script:\n' | ||
561 | ' %s\n' | ||
562 | '\n' | ||
563 | 'Do you want to allow this script to run') | ||
564 | return self._CheckForHookApprovalHelper( | ||
565 | 'approvedhash', | ||
566 | self._GetHash(), | ||
567 | prompt % (self._GetMustVerb(), self._script_fullpath), | ||
568 | 'Scripts have changed since %s was allowed.' % (self._hook_type,)) | ||
569 | |||
570 | @staticmethod | ||
571 | def _ExtractInterpFromShebang(data): | ||
572 | """Extract the interpreter used in the shebang. | ||
573 | |||
574 | Try to locate the interpreter the script is using (ignoring `env`). | ||
575 | |||
576 | Args: | ||
577 | data: The file content of the script. | ||
578 | |||
579 | Returns: | ||
580 | The interpreter named in the shebang (the program after `env` when `env` | ||
581 | is used), or None if a shebang is not used or could not be parsed out. | ||
582 | """ | ||
583 | firstline = data.splitlines()[:1] | ||
584 | if not firstline: | ||
585 | return None | ||
586 | |||
587 | # The format here can be tricky. | ||
588 | shebang = firstline[0].strip() | ||
589 | m = re.match(r'^#!\s*([^\s]+)(?:\s+([^\s]+))?', shebang) | ||
590 | if not m: | ||
591 | return None | ||
592 | |||
593 | # If using `env`, find the target program. | ||
594 | interp = m.group(1) | ||
595 | if os.path.basename(interp) == 'env': | ||
596 | interp = m.group(2) | ||
597 | |||
598 | return interp | ||
599 | |||
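For reference, a few illustrative inputs for the removed shebang helper above (results assume the regex shown):

RepoHook._ExtractInterpFromShebang('#!/usr/bin/python2.7\n...')    # -> '/usr/bin/python2.7'
RepoHook._ExtractInterpFromShebang('#!/usr/bin/env python3\n...')  # -> 'python3'
RepoHook._ExtractInterpFromShebang('print("no shebang")\n')        # -> None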
600 | def _ExecuteHookViaReexec(self, interp, context, **kwargs): | ||
601 | """Execute the hook script through |interp|. | ||
602 | |||
603 | Note: Support for this feature should be dropped ~Jun 2021. | ||
604 | |||
605 | Args: | ||
606 | interp: The Python program to run. | ||
607 | context: Basic Python context to execute the hook inside. | ||
608 | kwargs: Arbitrary arguments to pass to the hook script. | ||
609 | |||
610 | Raises: | ||
611 | HookError: When the hooks failed for any reason. | ||
612 | """ | ||
613 | # This logic needs to be kept in sync with _ExecuteHookViaImport below. | ||
614 | script = """ | ||
615 | import json, os, sys | ||
616 | path = '''%(path)s''' | ||
617 | kwargs = json.loads('''%(kwargs)s''') | ||
618 | context = json.loads('''%(context)s''') | ||
619 | sys.path.insert(0, os.path.dirname(path)) | ||
620 | data = open(path).read() | ||
621 | exec(compile(data, path, 'exec'), context) | ||
622 | context['main'](**kwargs) | ||
623 | """ % { | ||
624 | 'path': self._script_fullpath, | ||
625 | 'kwargs': json.dumps(kwargs), | ||
626 | 'context': json.dumps(context), | ||
627 | } | ||
628 | |||
629 | # We pass the script via stdin to avoid OS argv limits. It also makes | ||
630 | # unhandled exception tracebacks less verbose/confusing for users. | ||
631 | cmd = [interp, '-c', 'import sys; exec(sys.stdin.read())'] | ||
632 | proc = subprocess.Popen(cmd, stdin=subprocess.PIPE) | ||
633 | proc.communicate(input=script.encode('utf-8')) | ||
634 | if proc.returncode: | ||
635 | raise HookError('Failed to run %s hook.' % (self._hook_type,)) | ||
636 | |||
637 | def _ExecuteHookViaImport(self, data, context, **kwargs): | ||
638 | """Execute the hook code in |data| directly. | ||
639 | |||
640 | Args: | ||
641 | data: The code of the hook to execute. | ||
642 | context: Basic Python context to execute the hook inside. | ||
643 | kwargs: Arbitrary arguments to pass to the hook script. | ||
644 | |||
645 | Raises: | ||
646 | HookError: When the hooks failed for any reason. | ||
647 | """ | ||
648 | # Exec, storing global context in the context dict. We catch exceptions | ||
649 | # and convert to a HookError w/ just the failing traceback. | ||
650 | try: | ||
651 | exec(compile(data, self._script_fullpath, 'exec'), context) | ||
652 | except Exception: | ||
653 | raise HookError('%s\nFailed to import %s hook; see traceback above.' % | ||
654 | (traceback.format_exc(), self._hook_type)) | ||
655 | |||
656 | # Running the script should have defined a main() function. | ||
657 | if 'main' not in context: | ||
658 | raise HookError('Missing main() in: "%s"' % self._script_fullpath) | ||
659 | |||
660 | # Call the main function in the hook. If the hook should cause the | ||
661 | # build to fail, it will raise an Exception. We'll catch that and convert | ||
662 | # to a HookError w/ just the failing traceback. | ||
663 | try: | ||
664 | context['main'](**kwargs) | ||
665 | except Exception: | ||
666 | raise HookError('%s\nFailed to run main() for %s hook; see traceback ' | ||
667 | 'above.' % (traceback.format_exc(), self._hook_type)) | ||
668 | |||
669 | def _ExecuteHook(self, **kwargs): | ||
670 | """Actually execute the given hook. | ||
671 | |||
672 | This will run the hook's 'main' function in our python interpreter. | ||
673 | |||
674 | Args: | ||
675 | kwargs: Keyword arguments to pass to the hook. These are often specific | ||
676 | to the hook type. For instance, pre-upload hooks will contain | ||
677 | a project_list. | ||
678 | """ | ||
679 | # Keep sys.path and CWD stashed away so that we can always restore them | ||
680 | # upon function exit. | ||
681 | orig_path = os.getcwd() | ||
682 | orig_syspath = sys.path | ||
683 | |||
684 | try: | ||
685 | # Always run hooks with CWD as topdir. | ||
686 | os.chdir(self._topdir) | ||
687 | |||
688 | # Put the hook dir as the first item of sys.path so hooks can do | ||
689 | # relative imports. We want to replace the repo dir as [0] so | ||
690 | # hooks can't import repo files. | ||
691 | sys.path = [os.path.dirname(self._script_fullpath)] + sys.path[1:] | ||
692 | |||
693 | # Initial global context for the hook to run within. | ||
694 | context = {'__file__': self._script_fullpath} | ||
695 | |||
696 | # Add 'hook_should_take_kwargs' to the arguments to be passed to main. | ||
697 | # We don't actually want hooks to define their main with this argument-- | ||
698 | # it's there to remind them that their hook should always take **kwargs. | ||
699 | # For instance, a pre-upload hook should be defined like: | ||
700 | # def main(project_list, **kwargs): | ||
701 | # | ||
702 | # This allows us to later expand the API without breaking old hooks. | ||
703 | kwargs = kwargs.copy() | ||
704 | kwargs['hook_should_take_kwargs'] = True | ||
705 | |||
706 | # See what version of python the hook has been written against. | ||
707 | data = open(self._script_fullpath).read() | ||
708 | interp = self._ExtractInterpFromShebang(data) | ||
709 | reexec = False | ||
710 | if interp: | ||
711 | prog = os.path.basename(interp) | ||
712 | if prog.startswith('python2') and sys.version_info.major != 2: | ||
713 | reexec = True | ||
714 | elif prog.startswith('python3') and sys.version_info.major == 2: | ||
715 | reexec = True | ||
716 | |||
717 | # Attempt to execute the hooks through the requested version of Python. | ||
718 | if reexec: | ||
719 | try: | ||
720 | self._ExecuteHookViaReexec(interp, context, **kwargs) | ||
721 | except OSError as e: | ||
722 | if e.errno == errno.ENOENT: | ||
723 | # We couldn't find the interpreter, so fall back to importing. | ||
724 | reexec = False | ||
725 | else: | ||
726 | raise | ||
727 | |||
728 | # Run the hook by importing directly. | ||
729 | if not reexec: | ||
730 | self._ExecuteHookViaImport(data, context, **kwargs) | ||
731 | finally: | ||
732 | # Restore sys.path and CWD. | ||
733 | sys.path = orig_syspath | ||
734 | os.chdir(orig_path) | ||
735 | |||
736 | def Run(self, user_allows_all_hooks, **kwargs): | ||
737 | """Run the hook. | ||
738 | |||
739 | If the hook doesn't exist (because there is no hooks project or because | ||
740 | this particular hook is not enabled), this is a no-op. | ||
741 | |||
742 | Args: | ||
743 | user_allows_all_hooks: If True, we will never prompt about running the | ||
744 | hook--we'll just assume it's OK to run it. | ||
745 | kwargs: Keyword arguments to pass to the hook. These are often specific | ||
746 | to the hook type. For instance, pre-upload hooks will contain | ||
747 | a project_list. | ||
748 | |||
749 | Raises: | ||
750 | HookError: If there was a problem finding the hook or the user declined | ||
751 | to run a required hook (from _CheckForHookApproval). | ||
752 | """ | ||
753 | # No-op if there is no hooks project or if hook is disabled. | ||
754 | if ((not self._hooks_project) or (self._hook_type not in | ||
755 | self._hooks_project.enabled_repo_hooks)): | ||
756 | return | ||
757 | |||
758 | # Bail with a nice error if we can't find the hook. | ||
759 | if not os.path.isfile(self._script_fullpath): | ||
760 | raise HookError('Couldn\'t find repo hook: "%s"' % self._script_fullpath) | ||
761 | |||
762 | # Make sure the user is OK with running the hook. | ||
763 | if (not user_allows_all_hooks) and (not self._CheckForHookApproval()): | ||
764 | return | ||
765 | |||
766 | # Run the hook with the same version of python we're using. | ||
767 | self._ExecuteHook(**kwargs) | ||
768 | |||
769 | |||
770 | class Project(object): | 458 | class Project(object): |
771 | # These objects can be shared between several working trees. | 459 | # These objects can be shared between several working trees. |
772 | shareable_files = ['description', 'info'] | 460 | shareable_files = ['description', 'info'] |
@@ -793,9 +481,11 @@ class Project(object): | |||
793 | clone_depth=None, | 481 | clone_depth=None, |
794 | upstream=None, | 482 | upstream=None, |
795 | parent=None, | 483 | parent=None, |
484 | use_git_worktrees=False, | ||
796 | is_derived=False, | 485 | is_derived=False, |
797 | dest_branch=None, | 486 | dest_branch=None, |
798 | optimized_fetch=False, | 487 | optimized_fetch=False, |
488 | retry_fetches=0, | ||
799 | old_revision=None): | 489 | old_revision=None): |
800 | """Init a Project object. | 490 | """Init a Project object. |
801 | 491 | ||
@@ -816,31 +506,21 @@ class Project(object): | |||
816 | sync_tags: The `sync-tags` attribute of manifest.xml's project element. | 506 | sync_tags: The `sync-tags` attribute of manifest.xml's project element. |
817 | upstream: The `upstream` attribute of manifest.xml's project element. | 507 | upstream: The `upstream` attribute of manifest.xml's project element. |
818 | parent: The parent Project object. | 508 | parent: The parent Project object. |
509 | use_git_worktrees: Whether to use `git worktree` for this project. | ||
819 | is_derived: False if the project was explicitly defined in the manifest; | 510 | is_derived: False if the project was explicitly defined in the manifest; |
820 | True if the project is a discovered submodule. | 511 | True if the project is a discovered submodule. |
821 | dest_branch: The branch to which to push changes for review by default. | 512 | dest_branch: The branch to which to push changes for review by default. |
822 | optimized_fetch: If True, when a project is set to a sha1 revision, only | 513 | optimized_fetch: If True, when a project is set to a sha1 revision, only |
823 | fetch from the remote if the sha1 is not present locally. | 514 | fetch from the remote if the sha1 is not present locally. |
515 | retry_fetches: Retry remote fetches n times upon receiving a transient error |
516 | with exponential backoff and jitter. | ||
824 | old_revision: saved git commit id for open GITC projects. | 517 | old_revision: saved git commit id for open GITC projects. |
825 | """ | 518 | """ |
826 | self.manifest = manifest | 519 | self.client = self.manifest = manifest |
827 | self.name = name | 520 | self.name = name |
828 | self.remote = remote | 521 | self.remote = remote |
829 | self.gitdir = gitdir.replace('\\', '/') | 522 | self.UpdatePaths(relpath, worktree, gitdir, objdir) |
830 | self.objdir = objdir.replace('\\', '/') | 523 | self.SetRevision(revisionExpr, revisionId=revisionId) |
831 | if worktree: | ||
832 | self.worktree = os.path.normpath(worktree).replace('\\', '/') | ||
833 | else: | ||
834 | self.worktree = None | ||
835 | self.relpath = relpath | ||
836 | self.revisionExpr = revisionExpr | ||
837 | |||
838 | if revisionId is None \ | ||
839 | and revisionExpr \ | ||
840 | and IsId(revisionExpr): | ||
841 | self.revisionId = revisionExpr | ||
842 | else: | ||
843 | self.revisionId = revisionId | ||
844 | 524 | ||
845 | self.rebase = rebase | 525 | self.rebase = rebase |
846 | self.groups = groups | 526 | self.groups = groups |
@@ -850,24 +530,19 @@ class Project(object): | |||
850 | self.clone_depth = clone_depth | 530 | self.clone_depth = clone_depth |
851 | self.upstream = upstream | 531 | self.upstream = upstream |
852 | self.parent = parent | 532 | self.parent = parent |
533 | # NB: Do not use this setting in __init__ to change behavior so that the | ||
534 | # manifest.git checkout can inspect & change it after instantiating. See | ||
535 | # the XmlManifest init code for more info. | ||
536 | self.use_git_worktrees = use_git_worktrees | ||
853 | self.is_derived = is_derived | 537 | self.is_derived = is_derived |
854 | self.optimized_fetch = optimized_fetch | 538 | self.optimized_fetch = optimized_fetch |
539 | self.retry_fetches = max(0, retry_fetches) | ||
855 | self.subprojects = [] | 540 | self.subprojects = [] |
856 | 541 | ||
857 | self.snapshots = {} | 542 | self.snapshots = {} |
858 | self.copyfiles = [] | 543 | self.copyfiles = [] |
859 | self.linkfiles = [] | 544 | self.linkfiles = [] |
860 | self.annotations = [] | 545 | self.annotations = [] |
861 | self.config = GitConfig.ForRepository(gitdir=self.gitdir, | ||
862 | defaults=self.manifest.globalConfig) | ||
863 | |||
864 | if self.worktree: | ||
865 | self.work_git = self._GitGetByExec(self, bare=False, gitdir=gitdir) | ||
866 | else: | ||
867 | self.work_git = None | ||
868 | self.bare_git = self._GitGetByExec(self, bare=True, gitdir=gitdir) | ||
869 | self.bare_ref = GitRefs(gitdir) | ||
870 | self.bare_objdir = self._GitGetByExec(self, bare=True, gitdir=objdir) | ||
871 | self.dest_branch = dest_branch | 546 | self.dest_branch = dest_branch |
872 | self.old_revision = old_revision | 547 | self.old_revision = old_revision |
873 | 548 | ||
@@ -875,6 +550,35 @@ class Project(object): | |||
875 | # project containing repo hooks. | 550 | # project containing repo hooks. |
876 | self.enabled_repo_hooks = [] | 551 | self.enabled_repo_hooks = [] |
877 | 552 | ||
553 | def SetRevision(self, revisionExpr, revisionId=None): | ||
554 | """Set revisionId based on revision expression and id""" | ||
555 | self.revisionExpr = revisionExpr | ||
556 | if revisionId is None and revisionExpr and IsId(revisionExpr): | ||
557 | self.revisionId = self.revisionExpr | ||
558 | else: | ||
559 | self.revisionId = revisionId | ||
560 | |||
561 | def UpdatePaths(self, relpath, worktree, gitdir, objdir): | ||
562 | """Update paths used by this project""" | ||
563 | self.gitdir = gitdir.replace('\\', '/') | ||
564 | self.objdir = objdir.replace('\\', '/') | ||
565 | if worktree: | ||
566 | self.worktree = os.path.normpath(worktree).replace('\\', '/') | ||
567 | else: | ||
568 | self.worktree = None | ||
569 | self.relpath = relpath | ||
570 | |||
571 | self.config = GitConfig.ForRepository(gitdir=self.gitdir, | ||
572 | defaults=self.manifest.globalConfig) | ||
573 | |||
574 | if self.worktree: | ||
575 | self.work_git = self._GitGetByExec(self, bare=False, gitdir=self.gitdir) | ||
576 | else: | ||
577 | self.work_git = None | ||
578 | self.bare_git = self._GitGetByExec(self, bare=True, gitdir=self.gitdir) | ||
579 | self.bare_ref = GitRefs(self.gitdir) | ||
580 | self.bare_objdir = self._GitGetByExec(self, bare=True, gitdir=self.objdir) | ||
581 | |||
878 | @property | 582 | @property |
879 | def Derived(self): | 583 | def Derived(self): |
880 | return self.is_derived | 584 | return self.is_derived |
@@ -902,11 +606,9 @@ class Project(object): | |||
902 | return None | 606 | return None |
903 | 607 | ||
904 | def IsRebaseInProgress(self): | 608 | def IsRebaseInProgress(self): |
905 | w = self.worktree | 609 | return (os.path.exists(self.work_git.GetDotgitPath('rebase-apply')) or |
906 | g = os.path.join(w, '.git') | 610 | os.path.exists(self.work_git.GetDotgitPath('rebase-merge')) or |
907 | return os.path.exists(os.path.join(g, 'rebase-apply')) \ | 611 | os.path.exists(os.path.join(self.worktree, '.dotest'))) |
908 | or os.path.exists(os.path.join(g, 'rebase-merge')) \ | ||
909 | or os.path.exists(os.path.join(w, '.dotest')) | ||
910 | 612 | ||
911 | def IsDirty(self, consider_untracked=True): | 613 | def IsDirty(self, consider_untracked=True): |
912 | """Is the working directory modified in some way? | 614 | """Is the working directory modified in some way? |
@@ -1152,10 +854,12 @@ class Project(object): | |||
1152 | 854 | ||
1153 | return 'DIRTY' | 855 | return 'DIRTY' |
1154 | 856 | ||
1155 | def PrintWorkTreeDiff(self, absolute_paths=False): | 857 | def PrintWorkTreeDiff(self, absolute_paths=False, output_redir=None): |
1156 | """Prints the status of the repository to stdout. | 858 | """Prints the status of the repository to stdout. |
1157 | """ | 859 | """ |
1158 | out = DiffColoring(self.config) | 860 | out = DiffColoring(self.config) |
861 | if output_redir: | ||
862 | out.redirect(output_redir) | ||
1159 | cmd = ['diff'] | 863 | cmd = ['diff'] |
1160 | if out.is_on: | 864 | if out.is_on: |
1161 | cmd.append('--color') | 865 | cmd.append('--color') |
@@ -1169,6 +873,7 @@ class Project(object): | |||
1169 | cmd, | 873 | cmd, |
1170 | capture_stdout=True, | 874 | capture_stdout=True, |
1171 | capture_stderr=True) | 875 | capture_stderr=True) |
876 | p.Wait() | ||
1172 | except GitError as e: | 877 | except GitError as e: |
1173 | out.nl() | 878 | out.nl() |
1174 | out.project('project %s/' % self.relpath) | 879 | out.project('project %s/' % self.relpath) |
@@ -1176,21 +881,14 @@ class Project(object): | |||
1176 | out.fail('%s', str(e)) | 881 | out.fail('%s', str(e)) |
1177 | out.nl() | 882 | out.nl() |
1178 | return False | 883 | return False |
1179 | has_diff = False | 884 | if p.stdout: |
1180 | for line in p.process.stdout: | 885 | out.nl() |
1181 | if not hasattr(line, 'encode'): | 886 | out.project('project %s/' % self.relpath) |
1182 | line = line.decode() | 887 | out.nl() |
1183 | if not has_diff: | 888 | out.write('%s', p.stdout) |
1184 | out.nl() | ||
1185 | out.project('project %s/' % self.relpath) | ||
1186 | out.nl() | ||
1187 | has_diff = True | ||
1188 | print(line[:-1]) | ||
1189 | return p.Wait() == 0 | 889 | return p.Wait() == 0 |
1190 | 890 | ||
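The new output_redir parameter lets callers capture the diff output instead of writing straight to stdout; a minimal usage sketch (the StringIO buffer is an assumption for illustration):

import io

buf = io.StringIO()
ok = project.PrintWorkTreeDiff(output_redir=buf)
text = buf.getvalue()  # the `git diff` output; empty when the worktree is clean
# ok is True when the underlying `git diff` command itself succeeded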
1191 | |||
1192 | # Publish / Upload ## | 891 | # Publish / Upload ## |
1193 | |||
1194 | def WasPublished(self, branch, all_refs=None): | 892 | def WasPublished(self, branch, all_refs=None): |
1195 | """Was the branch published (uploaded) for code review? | 893 | """Was the branch published (uploaded) for code review? |
1196 | If so, returns the SHA-1 hash of the last published | 894 | If so, returns the SHA-1 hash of the last published |
@@ -1263,8 +961,10 @@ class Project(object): | |||
1263 | 961 | ||
1264 | def UploadForReview(self, branch=None, | 962 | def UploadForReview(self, branch=None, |
1265 | people=([], []), | 963 | people=([], []), |
964 | dryrun=False, | ||
1266 | auto_topic=False, | 965 | auto_topic=False, |
1267 | draft=False, | 966 | hashtags=(), |
967 | labels=(), | ||
1268 | private=False, | 968 | private=False, |
1269 | notify=None, | 969 | notify=None, |
1270 | wip=False, | 970 | wip=False, |
@@ -1299,6 +999,8 @@ class Project(object): | |||
1299 | if url is None: | 999 | if url is None: |
1300 | raise UploadError('review not configured') | 1000 | raise UploadError('review not configured') |
1301 | cmd = ['push'] | 1001 | cmd = ['push'] |
1002 | if dryrun: | ||
1003 | cmd.append('-n') | ||
1302 | 1004 | ||
1303 | if url.startswith('ssh://'): | 1005 | if url.startswith('ssh://'): |
1304 | cmd.append('--receive-pack=gerrit receive-pack') | 1006 | cmd.append('--receive-pack=gerrit receive-pack') |
@@ -1312,15 +1014,12 @@ class Project(object): | |||
1312 | if dest_branch.startswith(R_HEADS): | 1014 | if dest_branch.startswith(R_HEADS): |
1313 | dest_branch = dest_branch[len(R_HEADS):] | 1015 | dest_branch = dest_branch[len(R_HEADS):] |
1314 | 1016 | ||
1315 | upload_type = 'for' | 1017 | ref_spec = '%s:refs/for/%s' % (R_HEADS + branch.name, dest_branch) |
1316 | if draft: | ||
1317 | upload_type = 'drafts' | ||
1318 | |||
1319 | ref_spec = '%s:refs/%s/%s' % (R_HEADS + branch.name, upload_type, | ||
1320 | dest_branch) | ||
1321 | opts = [] | 1018 | opts = [] |
1322 | if auto_topic: | 1019 | if auto_topic: |
1323 | opts += ['topic=' + branch.name] | 1020 | opts += ['topic=' + branch.name] |
1021 | opts += ['t=%s' % p for p in hashtags] | ||
1022 | opts += ['l=%s' % p for p in labels] | ||
1324 | 1023 | ||
1325 | opts += ['r=%s' % p for p in people[0]] | 1024 | opts += ['r=%s' % p for p in people[0]] |
1326 | opts += ['cc=%s' % p for p in people[1]] | 1025 | opts += ['cc=%s' % p for p in people[1]] |
@@ -1337,14 +1036,13 @@ class Project(object): | |||
1337 | if GitCommand(self, cmd, bare=True).Wait() != 0: | 1036 | if GitCommand(self, cmd, bare=True).Wait() != 0: |
1338 | raise UploadError('Upload failed') | 1037 | raise UploadError('Upload failed') |
1339 | 1038 | ||
1340 | msg = "posted to %s for %s" % (branch.remote.review, dest_branch) | 1039 | if not dryrun: |
1341 | self.bare_git.UpdateRef(R_PUB + branch.name, | 1040 | msg = "posted to %s for %s" % (branch.remote.review, dest_branch) |
1342 | R_HEADS + branch.name, | 1041 | self.bare_git.UpdateRef(R_PUB + branch.name, |
1343 | message=msg) | 1042 | R_HEADS + branch.name, |
1344 | 1043 | message=msg) | |
1345 | 1044 | ||
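With the draft ref gone, every upload targets refs/for/. The options gathered above are joined and appended to that refspec as Gerrit push options further down in the function (outside this hunk); an illustrative final refspec:

ref_spec = 'refs/heads/my-work:refs/for/master'
opts = ['topic=my-work', 't=perf', 'l=Code-Review+1', 'r=alice@example.com']
if opts:
  ref_spec += '%' + ','.join(opts)
# -> 'refs/heads/my-work:refs/for/master%topic=my-work,t=perf,l=Code-Review+1,r=alice@example.com'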
1346 | # Sync ## | 1045 | # Sync ## |
1347 | |||
1348 | def _ExtractArchive(self, tarpath, path=None): | 1046 | def _ExtractArchive(self, tarpath, path=None): |
1349 | """Extract the given tar on its current location | 1047 | """Extract the given tar on its current location |
1350 | 1048 | ||
@@ -1362,16 +1060,21 @@ class Project(object): | |||
1362 | 1060 | ||
1363 | def Sync_NetworkHalf(self, | 1061 | def Sync_NetworkHalf(self, |
1364 | quiet=False, | 1062 | quiet=False, |
1063 | verbose=False, | ||
1064 | output_redir=None, | ||
1365 | is_new=None, | 1065 | is_new=None, |
1366 | current_branch_only=False, | 1066 | current_branch_only=None, |
1367 | force_sync=False, | 1067 | force_sync=False, |
1368 | clone_bundle=True, | 1068 | clone_bundle=True, |
1369 | no_tags=False, | 1069 | tags=None, |
1370 | archive=False, | 1070 | archive=False, |
1371 | optimized_fetch=False, | 1071 | optimized_fetch=False, |
1072 | retry_fetches=0, | ||
1372 | prune=False, | 1073 | prune=False, |
1373 | submodules=False, | 1074 | submodules=False, |
1374 | clone_filter=None): | 1075 | ssh_proxy=None, |
1076 | clone_filter=None, | ||
1077 | partial_clone_exclude=set()): | ||
1375 | """Perform only the network IO portion of the sync process. | 1078 | """Perform only the network IO portion of the sync process. |
1376 | Local working directory/branch state is not affected. | 1079 | Local working directory/branch state is not affected. |
1377 | """ | 1080 | """ |
@@ -1402,12 +1105,22 @@ class Project(object): | |||
1402 | _warn("Cannot remove archive %s: %s", tarpath, str(e)) | 1105 | _warn("Cannot remove archive %s: %s", tarpath, str(e)) |
1403 | self._CopyAndLinkFiles() | 1106 | self._CopyAndLinkFiles() |
1404 | return True | 1107 | return True |
1108 | |||
1109 | # If the shared object dir already exists, don't try to rebootstrap with a | ||
1110 | # clone bundle download. We should have the majority of objects already. | ||
1111 | if clone_bundle and os.path.exists(self.objdir): | ||
1112 | clone_bundle = False | ||
1113 | |||
1114 | if self.name in partial_clone_exclude: | ||
1115 | clone_bundle = True | ||
1116 | clone_filter = None | ||
1117 | |||
1405 | if is_new is None: | 1118 | if is_new is None: |
1406 | is_new = not self.Exists | 1119 | is_new = not self.Exists |
1407 | if is_new: | 1120 | if is_new: |
1408 | self._InitGitDir(force_sync=force_sync) | 1121 | self._InitGitDir(force_sync=force_sync, quiet=quiet) |
1409 | else: | 1122 | else: |
1410 | self._UpdateHooks() | 1123 | self._UpdateHooks(quiet=quiet) |
1411 | self._InitRemote() | 1124 | self._InitRemote() |
1412 | 1125 | ||
1413 | if is_new: | 1126 | if is_new: |
@@ -1421,12 +1134,12 @@ class Project(object): | |||
1421 | else: | 1134 | else: |
1422 | alt_dir = None | 1135 | alt_dir = None |
1423 | 1136 | ||
1424 | if clone_bundle \ | 1137 | if (clone_bundle |
1425 | and alt_dir is None \ | 1138 | and alt_dir is None |
1426 | and self._ApplyCloneBundle(initial=is_new, quiet=quiet): | 1139 | and self._ApplyCloneBundle(initial=is_new, quiet=quiet, verbose=verbose)): |
1427 | is_new = False | 1140 | is_new = False |
1428 | 1141 | ||
1429 | if not current_branch_only: | 1142 | if current_branch_only is None: |
1430 | if self.sync_c: | 1143 | if self.sync_c: |
1431 | current_branch_only = True | 1144 | current_branch_only = True |
1432 | elif not self.manifest._loaded: | 1145 | elif not self.manifest._loaded: |
@@ -1435,25 +1148,27 @@ class Project(object): | |||
1435 | elif self.manifest.default.sync_c: | 1148 | elif self.manifest.default.sync_c: |
1436 | current_branch_only = True | 1149 | current_branch_only = True |
1437 | 1150 | ||
1438 | if not no_tags: | 1151 | if tags is None: |
1439 | if not self.sync_tags: | 1152 | tags = self.sync_tags |
1440 | no_tags = True | ||
1441 | 1153 | ||
1442 | if self.clone_depth: | 1154 | if self.clone_depth: |
1443 | depth = self.clone_depth | 1155 | depth = self.clone_depth |
1444 | else: | 1156 | else: |
1445 | depth = self.manifest.manifestProject.config.GetString('repo.depth') | 1157 | depth = self.manifest.manifestProject.config.GetString('repo.depth') |
1446 | 1158 | ||
1447 | need_to_fetch = not (optimized_fetch and | 1159 | # See if we can skip the network fetch entirely. |
1448 | (ID_RE.match(self.revisionExpr) and | 1160 | if not (optimized_fetch and |
1449 | self._CheckForImmutableRevision())) | 1161 | (ID_RE.match(self.revisionExpr) and |
1450 | if (need_to_fetch and | 1162 | self._CheckForImmutableRevision())): |
1451 | not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir, | 1163 | if not self._RemoteFetch( |
1452 | current_branch_only=current_branch_only, | 1164 | initial=is_new, |
1453 | no_tags=no_tags, prune=prune, depth=depth, | 1165 | quiet=quiet, verbose=verbose, output_redir=output_redir, |
1454 | submodules=submodules, force_sync=force_sync, | 1166 | alt_dir=alt_dir, current_branch_only=current_branch_only, |
1455 | clone_filter=clone_filter)): | 1167 | tags=tags, prune=prune, depth=depth, |
1456 | return False | 1168 | submodules=submodules, force_sync=force_sync, |
1169 | ssh_proxy=ssh_proxy, | ||
1170 | clone_filter=clone_filter, retry_fetches=retry_fetches): | ||
1171 | return False | ||
1457 | 1172 | ||
1458 | mp = self.manifest.manifestProject | 1173 | mp = self.manifest.manifestProject |
1459 | dissociate = mp.config.GetBoolean('repo.dissociate') | 1174 | dissociate = mp.config.GetBoolean('repo.dissociate') |
@@ -1461,7 +1176,11 @@ class Project(object): | |||
1461 | alternates_file = os.path.join(self.gitdir, 'objects/info/alternates') | 1176 | alternates_file = os.path.join(self.gitdir, 'objects/info/alternates') |
1462 | if os.path.exists(alternates_file): | 1177 | if os.path.exists(alternates_file): |
1463 | cmd = ['repack', '-a', '-d'] | 1178 | cmd = ['repack', '-a', '-d'] |
1464 | if GitCommand(self, cmd, bare=True).Wait() != 0: | 1179 | p = GitCommand(self, cmd, bare=True, capture_stdout=bool(output_redir), |
1180 | merge_output=bool(output_redir)) | ||
1181 | if p.stdout and output_redir: | ||
1182 | output_redir.write(p.stdout) | ||
1183 | if p.Wait() != 0: | ||
1465 | return False | 1184 | return False |
1466 | platform_utils.remove(alternates_file) | 1185 | platform_utils.remove(alternates_file) |
1467 | 1186 | ||
@@ -1469,17 +1188,15 @@ class Project(object): | |||
1469 | self._InitMRef() | 1188 | self._InitMRef() |
1470 | else: | 1189 | else: |
1471 | self._InitMirrorHead() | 1190 | self._InitMirrorHead() |
1472 | try: | 1191 | platform_utils.remove(os.path.join(self.gitdir, 'FETCH_HEAD'), |
1473 | platform_utils.remove(os.path.join(self.gitdir, 'FETCH_HEAD')) | 1192 | missing_ok=True) |
1474 | except OSError: | ||
1475 | pass | ||
1476 | return True | 1193 | return True |
1477 | 1194 | ||
1478 | def PostRepoUpgrade(self): | 1195 | def PostRepoUpgrade(self): |
1479 | self._InitHooks() | 1196 | self._InitHooks() |
1480 | 1197 | ||
1481 | def _CopyAndLinkFiles(self): | 1198 | def _CopyAndLinkFiles(self): |
1482 | if self.manifest.isGitcClient: | 1199 | if self.client.isGitcClient: |
1483 | return | 1200 | return |
1484 | for copyfile in self.copyfiles: | 1201 | for copyfile in self.copyfiles: |
1485 | copyfile._Copy() | 1202 | copyfile._Copy() |
@@ -1518,6 +1235,12 @@ class Project(object): | |||
1518 | raise ManifestInvalidRevisionError('revision %s in %s not found' % | 1235 | raise ManifestInvalidRevisionError('revision %s in %s not found' % |
1519 | (self.revisionExpr, self.name)) | 1236 | (self.revisionExpr, self.name)) |
1520 | 1237 | ||
1238 | def SetRevisionId(self, revisionId): | ||
1239 | if self.revisionExpr: | ||
1240 | self.upstream = self.revisionExpr | ||
1241 | |||
1242 | self.revisionId = revisionId | ||
1243 | |||
1521 | def Sync_LocalHalf(self, syncbuf, force_sync=False, submodules=False): | 1244 | def Sync_LocalHalf(self, syncbuf, force_sync=False, submodules=False): |
1522 | """Perform only the local IO portion of the sync process. | 1245 | """Perform only the local IO portion of the sync process. |
1523 | Network access is not required. | 1246 | Network access is not required. |
@@ -1534,6 +1257,18 @@ class Project(object): | |||
1534 | self.CleanPublishedCache(all_refs) | 1257 | self.CleanPublishedCache(all_refs) |
1535 | revid = self.GetRevisionId(all_refs) | 1258 | revid = self.GetRevisionId(all_refs) |
1536 | 1259 | ||
1260 | # Special case the root of the repo client checkout. Make sure it doesn't | ||
1261 | # contain files being checked out to dirs we don't allow. | ||
1262 | if self.relpath == '.': | ||
1263 | PROTECTED_PATHS = {'.repo'} | ||
1264 | paths = set(self.work_git.ls_tree('-z', '--name-only', '--', revid).split('\0')) | ||
1265 | bad_paths = paths & PROTECTED_PATHS | ||
1266 | if bad_paths: | ||
1267 | syncbuf.fail(self, | ||
1268 | 'Refusing to checkout project that writes to protected ' | ||
1269 | 'paths: %s' % (', '.join(bad_paths),)) | ||
1270 | return | ||
1271 | |||
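To illustrate the protected-path guard above: `git ls-tree -z --name-only` lists the revision's top-level entries NUL-separated, and any overlap with the protected set fails the sync (sample output is hypothetical):

raw = 'Android.bp\0.repo\0build\0'   # hypothetical ls-tree -z output
paths = set(raw.split('\0'))
bad_paths = paths & {'.repo'}
# bad_paths == {'.repo'}: checking this out would clobber repo's own state,
# so Sync_LocalHalf reports a failure instead of proceeding.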
1537 | def _doff(): | 1272 | def _doff(): |
1538 | self._FastForward(revid) | 1273 | self._FastForward(revid) |
1539 | self._CopyAndLinkFiles() | 1274 | self._CopyAndLinkFiles() |
@@ -1712,21 +1447,28 @@ class Project(object): | |||
1712 | if submodules: | 1447 | if submodules: |
1713 | syncbuf.later1(self, _dosubmodules) | 1448 | syncbuf.later1(self, _dosubmodules) |
1714 | 1449 | ||
1715 | def AddCopyFile(self, src, dest, absdest): | 1450 | def AddCopyFile(self, src, dest, topdir): |
1716 | # dest should already be an absolute path, but src is project relative | 1451 | """Mark |src| for copying to |dest| (relative to |topdir|). |
1717 | # make src an absolute path | 1452 | |
1718 | abssrc = os.path.join(self.worktree, src) | 1453 | No filesystem changes occur here. Actual copying happens later on. |
1719 | self.copyfiles.append(_CopyFile(src, dest, abssrc, absdest)) | 1454 | |
1455 | Paths should have basic validation run on them before being queued. | ||
1456 | Further checking will be handled when the actual copy happens. | ||
1457 | """ | ||
1458 | self.copyfiles.append(_CopyFile(self.worktree, src, topdir, dest)) | ||
1720 | 1459 | ||
1721 | def AddLinkFile(self, src, dest, absdest): | 1460 | def AddLinkFile(self, src, dest, topdir): |
1722 | # dest should already be an absolute path, but src is project relative | 1461 | """Mark |dest| to create a symlink (relative to |topdir|) pointing to |src|. |
1723 | # make src relative path to dest | 1462 | |
1724 | absdestdir = os.path.dirname(absdest) | 1463 | No filesystem changes occur here. Actual linking happens later on. |
1725 | relsrc = os.path.relpath(os.path.join(self.worktree, src), absdestdir) | 1464 | |
1726 | self.linkfiles.append(_LinkFile(self.worktree, src, dest, relsrc, absdest)) | 1465 | Paths should have basic validation run on them before being queued. |
1466 | Further checking will be handled when the actual link happens. | ||
1467 | """ | ||
1468 | self.linkfiles.append(_LinkFile(self.worktree, src, topdir, dest)) | ||
1727 | 1469 | ||
1728 | def AddAnnotation(self, name, value, keep): | 1470 | def AddAnnotation(self, name, value, keep): |
1729 | self.annotations.append(_Annotation(name, value, keep)) | 1471 | self.annotations.append(Annotation(name, value, keep)) |
1730 | 1472 | ||
1731 | def DownloadPatchSet(self, change_id, patch_id): | 1473 | def DownloadPatchSet(self, change_id, patch_id): |
1732 | """Download a single patch set of a single change to FETCH_HEAD. | 1474 | """Download a single patch set of a single change to FETCH_HEAD. |
@@ -1744,9 +1486,123 @@ class Project(object): | |||
1744 | patch_id, | 1486 | patch_id, |
1745 | self.bare_git.rev_parse('FETCH_HEAD')) | 1487 | self.bare_git.rev_parse('FETCH_HEAD')) |
1746 | 1488 | ||
1489 | def DeleteWorktree(self, quiet=False, force=False): | ||
1490 | """Delete the source checkout and any other housekeeping tasks. | ||
1747 | 1491 | ||
1748 | # Branch Management ## | 1492 | This currently leaves behind the internal .repo/ cache state. This helps |
1493 | when switching branches or when manifest changes get reverted, as we don't | ||
1494 | have to redownload all the git objects. But we should do some GC at some point. | ||
1495 | |||
1496 | Args: | ||
1497 | quiet: Whether to hide normal messages. | ||
1498 | force: Always delete tree even if dirty. | ||
1499 | |||
1500 | Returns: | ||
1501 | True if the worktree was completely cleaned out. | ||
1502 | """ | ||
1503 | if self.IsDirty(): | ||
1504 | if force: | ||
1505 | print('warning: %s: Removing dirty project: uncommitted changes lost.' % | ||
1506 | (self.relpath,), file=sys.stderr) | ||
1507 | else: | ||
1508 | print('error: %s: Cannot remove project: uncommitted changes are ' | ||
1509 | 'present.\n' % (self.relpath,), file=sys.stderr) | ||
1510 | return False | ||
1511 | |||
1512 | if not quiet: | ||
1513 | print('%s: Deleting obsolete checkout.' % (self.relpath,)) | ||
1514 | |||
1515 | # Unlock and delink from the main worktree. We don't use git's worktree | ||
1516 | # remove because it will recursively delete projects -- we handle that | ||
1517 | # ourselves below. https://crbug.com/git/48 | ||
1518 | if self.use_git_worktrees: | ||
1519 | needle = platform_utils.realpath(self.gitdir) | ||
1520 | # Find the git worktree commondir under .repo/worktrees/. | ||
1521 | output = self.bare_git.worktree('list', '--porcelain').splitlines()[0] | ||
1522 | assert output.startswith('worktree '), output | ||
1523 | commondir = output[9:] | ||
1524 | # Walk each of the git worktrees to see where they point. | ||
1525 | configs = os.path.join(commondir, 'worktrees') | ||
1526 | for name in os.listdir(configs): | ||
1527 | gitdir = os.path.join(configs, name, 'gitdir') | ||
1528 | with open(gitdir) as fp: | ||
1529 | relpath = fp.read().strip() | ||
1530 | # Resolve the checkout path and see if it matches this project. | ||
1531 | fullpath = platform_utils.realpath(os.path.join(configs, name, relpath)) | ||
1532 | if fullpath == needle: | ||
1533 | platform_utils.rmtree(os.path.join(configs, name)) | ||
1534 | |||
1535 | # Delete the .git directory first, so we're less likely to have a partially | ||
1536 | # working git repository around. There shouldn't be any git projects here, | ||
1537 | # so rmtree works. | ||
1538 | |||
1539 | # Try to remove plain files first in case of git worktrees. If this fails | ||
1540 | # for any reason, we'll fall back to rmtree, and that'll display errors if | ||
1541 | # it can't remove things either. | ||
1542 | try: | ||
1543 | platform_utils.remove(self.gitdir) | ||
1544 | except OSError: | ||
1545 | pass | ||
1546 | try: | ||
1547 | platform_utils.rmtree(self.gitdir) | ||
1548 | except OSError as e: | ||
1549 | if e.errno != errno.ENOENT: | ||
1550 | print('error: %s: %s' % (self.gitdir, e), file=sys.stderr) | ||
1551 | print('error: %s: Failed to delete obsolete checkout; remove manually, ' | ||
1552 | 'then run `repo sync -l`.' % (self.relpath,), file=sys.stderr) | ||
1553 | return False | ||
1554 | |||
1555 | # Delete everything under the worktree, except for directories that contain | ||
1556 | # another git project. | ||
1557 | dirs_to_remove = [] | ||
1558 | failed = False | ||
1559 | for root, dirs, files in platform_utils.walk(self.worktree): | ||
1560 | for f in files: | ||
1561 | path = os.path.join(root, f) | ||
1562 | try: | ||
1563 | platform_utils.remove(path) | ||
1564 | except OSError as e: | ||
1565 | if e.errno != errno.ENOENT: | ||
1566 | print('error: %s: Failed to remove: %s' % (path, e), file=sys.stderr) | ||
1567 | failed = True | ||
1568 | dirs[:] = [d for d in dirs | ||
1569 | if not os.path.lexists(os.path.join(root, d, '.git'))] | ||
1570 | dirs_to_remove += [os.path.join(root, d) for d in dirs | ||
1571 | if os.path.join(root, d) not in dirs_to_remove] | ||
1572 | for d in reversed(dirs_to_remove): | ||
1573 | if platform_utils.islink(d): | ||
1574 | try: | ||
1575 | platform_utils.remove(d) | ||
1576 | except OSError as e: | ||
1577 | if e.errno != errno.ENOENT: | ||
1578 | print('error: %s: Failed to remove: %s' % (d, e), file=sys.stderr) | ||
1579 | failed = True | ||
1580 | elif not platform_utils.listdir(d): | ||
1581 | try: | ||
1582 | platform_utils.rmdir(d) | ||
1583 | except OSError as e: | ||
1584 | if e.errno != errno.ENOENT: | ||
1585 | print('error: %s: Failed to remove: %s' % (d, e), file=sys.stderr) | ||
1586 | failed = True | ||
1587 | if failed: | ||
1588 | print('error: %s: Failed to delete obsolete checkout.' % (self.relpath,), | ||
1589 | file=sys.stderr) | ||
1590 | print(' Remove manually, then run `repo sync -l`.', file=sys.stderr) | ||
1591 | return False | ||
1749 | 1592 | ||
1593 | # Try deleting parent dirs if they are empty. | ||
1594 | path = self.worktree | ||
1595 | while path != self.manifest.topdir: | ||
1596 | try: | ||
1597 | platform_utils.rmdir(path) | ||
1598 | except OSError as e: | ||
1599 | if e.errno != errno.ENOENT: | ||
1600 | break | ||
1601 | path = os.path.dirname(path) | ||
1602 | |||
1603 | return True | ||
1604 | |||
1605 | # Branch Management ## | ||
1750 | def StartBranch(self, name, branch_merge='', revision=None): | 1606 | def StartBranch(self, name, branch_merge='', revision=None): |
1751 | """Create a new branch off the manifest's revision. | 1607 | """Create a new branch off the manifest's revision. |
1752 | """ | 1608 | """ |
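DeleteWorktree() removes the checkout while deliberately leaving nested project checkouts in place: any directory that carries its own .git is pruned from the walk before descending, and only the remaining (now empty) directories are removed deepest-first. A condensed sketch of that pruning pattern, using os.walk directly and omitting the error handling and symlink cases of the real code:

    import os

    def remove_tree_keeping_projects(worktree):
        to_rmdir = []
        for root, dirs, files in os.walk(worktree):
            for f in files:
                os.remove(os.path.join(root, f))
            # Don't descend into directories that are themselves git projects.
            dirs[:] = [d for d in dirs
                       if not os.path.lexists(os.path.join(root, d, '.git'))]
            to_rmdir.extend(os.path.join(root, d) for d in dirs)
        # Remove the now-empty directories deepest-first.
        for d in reversed(to_rmdir):
            if not os.listdir(d):
                os.rmdir(d)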
@@ -1780,14 +1636,9 @@ class Project(object): | |||
1780 | except KeyError: | 1636 | except KeyError: |
1781 | head = None | 1637 | head = None |
1782 | if revid and head and revid == head: | 1638 | if revid and head and revid == head: |
1783 | ref = os.path.join(self.gitdir, R_HEADS + name) | 1639 | ref = R_HEADS + name |
1784 | try: | 1640 | self.work_git.update_ref(ref, revid) |
1785 | os.makedirs(os.path.dirname(ref)) | 1641 | self.work_git.symbolic_ref(HEAD, ref) |
1786 | except OSError: | ||
1787 | pass | ||
1788 | _lwrite(ref, '%s\n' % revid) | ||
1789 | _lwrite(os.path.join(self.worktree, '.git', HEAD), | ||
1790 | 'ref: %s%s\n' % (R_HEADS, name)) | ||
1791 | branch.Save() | 1642 | branch.Save() |
1792 | return True | 1643 | return True |
1793 | 1644 | ||
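When the requested branch starts at the commit HEAD already points to, StartBranch() now goes through git plumbing instead of writing ref files by hand: update-ref creates refs/heads/<name> at the revision and symbolic-ref repoints HEAD. The same two steps as standalone commands, shown here with subprocess purely for illustration:

    import subprocess

    def start_branch_fast_path(worktree, name, revid):
        # Create refs/heads/<name> at the already-checked-out revision...
        subprocess.run(['git', '-C', worktree, 'update-ref',
                        'refs/heads/' + name, revid], check=True)
        # ...then point HEAD at it without touching the working tree.
        subprocess.run(['git', '-C', worktree, 'symbolic-ref',
                        'HEAD', 'refs/heads/' + name], check=True)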
@@ -1834,7 +1685,7 @@ class Project(object): | |||
1834 | # Same revision; just update HEAD to point to the new | 1685 | # Same revision; just update HEAD to point to the new |
1835 | # target branch, but otherwise take no other action. | 1686 | # target branch, but otherwise take no other action. |
1836 | # | 1687 | # |
1837 | _lwrite(os.path.join(self.worktree, '.git', HEAD), | 1688 | _lwrite(self.work_git.GetDotgitPath(subpath=HEAD), |
1838 | 'ref: %s%s\n' % (R_HEADS, name)) | 1689 | 'ref: %s%s\n' % (R_HEADS, name)) |
1839 | return True | 1690 | return True |
1840 | 1691 | ||
@@ -1868,8 +1719,7 @@ class Project(object): | |||
1868 | 1719 | ||
1869 | revid = self.GetRevisionId(all_refs) | 1720 | revid = self.GetRevisionId(all_refs) |
1870 | if head == revid: | 1721 | if head == revid: |
1871 | _lwrite(os.path.join(self.worktree, '.git', HEAD), | 1722 | _lwrite(self.work_git.GetDotgitPath(subpath=HEAD), '%s\n' % revid) |
1872 | '%s\n' % revid) | ||
1873 | else: | 1723 | else: |
1874 | self._Checkout(revid, quiet=True) | 1724 | self._Checkout(revid, quiet=True) |
1875 | 1725 | ||
@@ -1890,6 +1740,11 @@ class Project(object): | |||
1890 | if cb is None or name != cb: | 1740 | if cb is None or name != cb: |
1891 | kill.append(name) | 1741 | kill.append(name) |
1892 | 1742 | ||
1743 | # Minor optimization: If there's nothing to prune, then don't try to read | ||
1744 | # any project state. | ||
1745 | if not kill and not cb: | ||
1746 | return [] | ||
1747 | |||
1893 | rev = self.GetRevisionId(left) | 1748 | rev = self.GetRevisionId(left) |
1894 | if cb is not None \ | 1749 | if cb is not None \ |
1895 | and not self._revlist(HEAD + '...' + rev) \ | 1750 | and not self._revlist(HEAD + '...' + rev) \ |
@@ -1935,9 +1790,7 @@ class Project(object): | |||
1935 | kept.append(ReviewableBranch(self, branch, base)) | 1790 | kept.append(ReviewableBranch(self, branch, base)) |
1936 | return kept | 1791 | return kept |
1937 | 1792 | ||
1938 | |||
1939 | # Submodule Management ## | 1793 | # Submodule Management ## |
1940 | |||
1941 | def GetRegisteredSubprojects(self): | 1794 | def GetRegisteredSubprojects(self): |
1942 | result = [] | 1795 | result = [] |
1943 | 1796 | ||
@@ -2088,13 +1941,57 @@ class Project(object): | |||
2088 | result.extend(subproject.GetDerivedSubprojects()) | 1941 | result.extend(subproject.GetDerivedSubprojects()) |
2089 | return result | 1942 | return result |
2090 | 1943 | ||
2091 | |||
2092 | # Direct Git Commands ## | 1944 | # Direct Git Commands ## |
1945 | def EnableRepositoryExtension(self, key, value='true', version=1): | ||
1946 | """Enable git repository extension |key| with |value|. | ||
1947 | |||
1948 | Args: | ||
1949 | key: The extension to enable. Omit the "extensions." prefix. | ||
1950 | value: The value to use for the extension. | ||
1951 | version: The minimum git repository version needed. | ||
1952 | """ | ||
1953 | # Make sure the git repo version is new enough already. | ||
1954 | found_version = self.config.GetInt('core.repositoryFormatVersion') | ||
1955 | if found_version is None: | ||
1956 | found_version = 0 | ||
1957 | if found_version < version: | ||
1958 | self.config.SetString('core.repositoryFormatVersion', str(version)) | ||
1959 | |||
1960 | # Enable the extension! | ||
1961 | self.config.SetString('extensions.%s' % (key,), value) | ||
1962 | |||
1963 | def ResolveRemoteHead(self, name=None): | ||
1964 | """Find out what the default branch (HEAD) points to. | ||
1965 | |||
1966 | Normally this points to refs/heads/master, but projects are moving to main. | ||
1967 | Support whatever the server uses rather than hardcoding "master" ourselves. | ||
1968 | """ | ||
1969 | if name is None: | ||
1970 | name = self.remote.name | ||
1971 | |||
1972 | # The output will look like (NB: tabs are separators): | ||
1973 | # ref: refs/heads/master HEAD | ||
1974 | # 5f6803b100bb3cd0f534e96e88c91373e8ed1c44 HEAD | ||
1975 | output = self.bare_git.ls_remote('-q', '--symref', '--exit-code', name, 'HEAD') | ||
1976 | |||
1977 | for line in output.splitlines(): | ||
1978 | lhs, rhs = line.split('\t', 1) | ||
1979 | if rhs == 'HEAD' and lhs.startswith('ref:'): | ||
1980 | return lhs[4:].strip() | ||
1981 | |||
1982 | return None | ||
1983 | |||
2093 | def _CheckForImmutableRevision(self): | 1984 | def _CheckForImmutableRevision(self): |
2094 | try: | 1985 | try: |
2095 | # if revision (sha or tag) is not present then following function | 1986 | # if revision (sha or tag) is not present then following function |
2096 | # throws an error. | 1987 | # throws an error. |
2097 | self.bare_git.rev_parse('--verify', '%s^0' % self.revisionExpr) | 1988 | self.bare_git.rev_list('-1', '--missing=allow-any', |
1989 | '%s^0' % self.revisionExpr, '--') | ||
1990 | if self.upstream: | ||
1991 | rev = self.GetRemote(self.remote.name).ToLocal(self.upstream) | ||
1992 | self.bare_git.rev_list('-1', '--missing=allow-any', | ||
1993 | '%s^0' % rev, '--') | ||
1994 | self.bare_git.merge_base('--is-ancestor', self.revisionExpr, rev) | ||
2098 | return True | 1995 | return True |
2099 | except GitError: | 1996 | except GitError: |
2100 | # There is no such persistent revision. We have to fetch it. | 1997 | # There is no such persistent revision. We have to fetch it. |
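ResolveRemoteHead() leans on the tab-separated output of `ls-remote --symref` described in the comment: the symref line pairs `ref: refs/heads/<branch>` with `HEAD`. A small parser for that format, assuming the output text is already in hand (the function name is illustrative):

    def parse_remote_head(ls_remote_output):
        # Return the branch ref the remote HEAD points to, or None.
        for line in ls_remote_output.splitlines():
            lhs, _, rhs = line.partition('\t')
            if rhs == 'HEAD' and lhs.startswith('ref:'):
                return lhs[len('ref:'):].strip()
        return None

    # parse_remote_head('ref: refs/heads/main\tHEAD\n5f6803b...\tHEAD\n')
    # -> 'refs/heads/main'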
@@ -2117,14 +2014,19 @@ class Project(object): | |||
2117 | current_branch_only=False, | 2014 | current_branch_only=False, |
2118 | initial=False, | 2015 | initial=False, |
2119 | quiet=False, | 2016 | quiet=False, |
2017 | verbose=False, | ||
2018 | output_redir=None, | ||
2120 | alt_dir=None, | 2019 | alt_dir=None, |
2121 | no_tags=False, | 2020 | tags=True, |
2122 | prune=False, | 2021 | prune=False, |
2123 | depth=None, | 2022 | depth=None, |
2124 | submodules=False, | 2023 | submodules=False, |
2024 | ssh_proxy=None, | ||
2125 | force_sync=False, | 2025 | force_sync=False, |
2126 | clone_filter=None): | 2026 | clone_filter=None, |
2127 | 2027 | retry_fetches=2, | |
2028 | retry_sleep_initial_sec=4.0, | ||
2029 | retry_exp_factor=2.0): | ||
2128 | is_sha1 = False | 2030 | is_sha1 = False |
2129 | tag_name = None | 2031 | tag_name = None |
2130 | # The depth should not be used when fetching to a mirror because | 2032 | # The depth should not be used when fetching to a mirror because |
@@ -2147,7 +2049,7 @@ class Project(object): | |||
2147 | 2049 | ||
2148 | if is_sha1 or tag_name is not None: | 2050 | if is_sha1 or tag_name is not None: |
2149 | if self._CheckForImmutableRevision(): | 2051 | if self._CheckForImmutableRevision(): |
2150 | if not quiet: | 2052 | if verbose: |
2151 | print('Skipped fetching project %s (already have persistent ref)' | 2053 | print('Skipped fetching project %s (already have persistent ref)' |
2152 | % self.name) | 2054 | % self.name) |
2153 | return True | 2055 | return True |
@@ -2167,16 +2069,14 @@ class Project(object): | |||
2167 | if not name: | 2069 | if not name: |
2168 | name = self.remote.name | 2070 | name = self.remote.name |
2169 | 2071 | ||
2170 | ssh_proxy = False | ||
2171 | remote = self.GetRemote(name) | 2072 | remote = self.GetRemote(name) |
2172 | if remote.PreConnectFetch(): | 2073 | if not remote.PreConnectFetch(ssh_proxy): |
2173 | ssh_proxy = True | 2074 | ssh_proxy = None |
2174 | 2075 | ||
2175 | if initial: | 2076 | if initial: |
2176 | if alt_dir and 'objects' == os.path.basename(alt_dir): | 2077 | if alt_dir and 'objects' == os.path.basename(alt_dir): |
2177 | ref_dir = os.path.dirname(alt_dir) | 2078 | ref_dir = os.path.dirname(alt_dir) |
2178 | packed_refs = os.path.join(self.gitdir, 'packed-refs') | 2079 | packed_refs = os.path.join(self.gitdir, 'packed-refs') |
2179 | remote = self.GetRemote(name) | ||
2180 | 2080 | ||
2181 | all_refs = self.bare_ref.all | 2081 | all_refs = self.bare_ref.all |
2182 | ids = set(all_refs.values()) | 2082 | ids = set(all_refs.values()) |
@@ -2217,7 +2117,7 @@ class Project(object): | |||
2217 | if clone_filter: | 2117 | if clone_filter: |
2218 | git_require((2, 19, 0), fail=True, msg='partial clones') | 2118 | git_require((2, 19, 0), fail=True, msg='partial clones') |
2219 | cmd.append('--filter=%s' % clone_filter) | 2119 | cmd.append('--filter=%s' % clone_filter) |
2220 | self.config.SetString('extensions.partialclone', self.remote.name) | 2120 | self.EnableRepositoryExtension('partialclone', self.remote.name) |
2221 | 2121 | ||
2222 | if depth: | 2122 | if depth: |
2223 | cmd.append('--depth=%s' % depth) | 2123 | cmd.append('--depth=%s' % depth) |
@@ -2229,8 +2129,10 @@ class Project(object): | |||
2229 | if os.path.exists(os.path.join(self.gitdir, 'shallow')): | 2129 | if os.path.exists(os.path.join(self.gitdir, 'shallow')): |
2230 | cmd.append('--depth=2147483647') | 2130 | cmd.append('--depth=2147483647') |
2231 | 2131 | ||
2232 | if quiet: | 2132 | if not verbose: |
2233 | cmd.append('--quiet') | 2133 | cmd.append('--quiet') |
2134 | if not quiet and sys.stdout.isatty(): | ||
2135 | cmd.append('--progress') | ||
2234 | if not self.worktree: | 2136 | if not self.worktree: |
2235 | cmd.append('--update-head-ok') | 2137 | cmd.append('--update-head-ok') |
2236 | cmd.append(name) | 2138 | cmd.append(name) |
@@ -2257,10 +2159,12 @@ class Project(object): | |||
2257 | else: | 2159 | else: |
2258 | branch = self.revisionExpr | 2160 | branch = self.revisionExpr |
2259 | if (not self.manifest.IsMirror and is_sha1 and depth | 2161 | if (not self.manifest.IsMirror and is_sha1 and depth |
2260 | and git_require((1, 8, 3))): | 2162 | and git_require((1, 8, 3))): |
2261 | # Shallow checkout of a specific commit, fetch from that commit and not | 2163 | # Shallow checkout of a specific commit, fetch from that commit and not |
2262 | # the heads only as the commit might be deeper in the history. | 2164 | # the heads only as the commit might be deeper in the history. |
2263 | spec.append(branch) | 2165 | spec.append(branch) |
2166 | if self.upstream: | ||
2167 | spec.append(self.upstream) | ||
2264 | else: | 2168 | else: |
2265 | if is_sha1: | 2169 | if is_sha1: |
2266 | branch = self.upstream | 2170 | branch = self.upstream |
@@ -2276,7 +2180,7 @@ class Project(object): | |||
2276 | 2180 | ||
2277 | # If using depth then we should not get all the tags since they may | 2181 | # If using depth then we should not get all the tags since they may |
2278 | # be outside of the depth. | 2182 | # be outside of the depth. |
2279 | if no_tags or depth: | 2183 | if not tags or depth: |
2280 | cmd.append('--no-tags') | 2184 | cmd.append('--no-tags') |
2281 | else: | 2185 | else: |
2282 | cmd.append('--tags') | 2186 | cmd.append('--tags') |
@@ -2284,22 +2188,42 @@ class Project(object): | |||
2284 | 2188 | ||
2285 | cmd.extend(spec) | 2189 | cmd.extend(spec) |
2286 | 2190 | ||
2287 | ok = False | 2191 | # At least one retry minimum due to git remote prune. |
2288 | for _i in range(2): | 2192 | retry_fetches = max(retry_fetches, 2) |
2289 | gitcmd = GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy) | 2193 | retry_cur_sleep = retry_sleep_initial_sec |
2194 | ok = prune_tried = False | ||
2195 | for try_n in range(retry_fetches): | ||
2196 | gitcmd = GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy, | ||
2197 | merge_output=True, capture_stdout=quiet or bool(output_redir)) | ||
2198 | if gitcmd.stdout and not quiet and output_redir: | ||
2199 | output_redir.write(gitcmd.stdout) | ||
2290 | ret = gitcmd.Wait() | 2200 | ret = gitcmd.Wait() |
2291 | if ret == 0: | 2201 | if ret == 0: |
2292 | ok = True | 2202 | ok = True |
2293 | break | 2203 | break |
2294 | # If needed, run the 'git remote prune' the first time through the loop | 2204 | |
2295 | elif (not _i and | 2205 | # Retry later due to HTTP 429 Too Many Requests. |
2296 | "error:" in gitcmd.stderr and | 2206 | elif (gitcmd.stdout and |
2297 | "git remote prune" in gitcmd.stderr): | 2207 | 'error:' in gitcmd.stdout and |
2208 | 'HTTP 429' in gitcmd.stdout): | ||
2209 | # Fallthru to sleep+retry logic at the bottom. | ||
2210 | pass | ||
2211 | |||
2212 | # Try to prune remote branches once in case there are conflicts. | ||
2213 | # For example, if the remote had refs/heads/upstream, but deleted that and | ||
2214 | # now has refs/heads/upstream/foo. | ||
2215 | elif (gitcmd.stdout and | ||
2216 | 'error:' in gitcmd.stdout and | ||
2217 | 'git remote prune' in gitcmd.stdout and | ||
2218 | not prune_tried): | ||
2219 | prune_tried = True | ||
2298 | prunecmd = GitCommand(self, ['remote', 'prune', name], bare=True, | 2220 | prunecmd = GitCommand(self, ['remote', 'prune', name], bare=True, |
2299 | ssh_proxy=ssh_proxy) | 2221 | ssh_proxy=ssh_proxy) |
2300 | ret = prunecmd.Wait() | 2222 | ret = prunecmd.Wait() |
2301 | if ret: | 2223 | if ret: |
2302 | break | 2224 | break |
2225 | print('retrying fetch after pruning remote branches', file=output_redir) | ||
2226 | # Continue right away so we don't sleep as we shouldn't need to. | ||
2303 | continue | 2227 | continue |
2304 | elif current_branch_only and is_sha1 and ret == 128: | 2228 | elif current_branch_only and is_sha1 and ret == 128: |
2305 | # Exit code 128 means "couldn't find the ref you asked for"; if we're | 2229 | # Exit code 128 means "couldn't find the ref you asked for"; if we're |
@@ -2309,7 +2233,18 @@ class Project(object): | |||
2309 | elif ret < 0: | 2233 | elif ret < 0: |
2310 | # Git died with a signal, exit immediately | 2234 | # Git died with a signal, exit immediately |
2311 | break | 2235 | break |
2312 | time.sleep(random.randint(30, 45)) | 2236 | |
2237 | # Figure out how long to sleep before the next attempt, if there is one. | ||
2238 | if not verbose and gitcmd.stdout: | ||
2239 | print('\n%s:\n%s' % (self.name, gitcmd.stdout), end='', file=output_redir) | ||
2240 | if try_n < retry_fetches - 1: | ||
2241 | print('%s: sleeping %s seconds before retrying' % (self.name, retry_cur_sleep), | ||
2242 | file=output_redir) | ||
2243 | time.sleep(retry_cur_sleep) | ||
2244 | retry_cur_sleep = min(retry_exp_factor * retry_cur_sleep, | ||
2245 | MAXIMUM_RETRY_SLEEP_SEC) | ||
2246 | retry_cur_sleep *= (1 - random.uniform(-RETRY_JITTER_PERCENT, | ||
2247 | RETRY_JITTER_PERCENT)) | ||
2313 | 2248 | ||
2314 | if initial: | 2249 | if initial: |
2315 | if alt_dir: | 2250 | if alt_dir: |
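The rewritten retry loop replaces the old fixed 30-45 second sleep with exponential backoff: the first retry waits retry_sleep_initial_sec, each further retry multiplies the delay by retry_exp_factor up to MAXIMUM_RETRY_SLEEP_SEC, and +/-RETRY_JITTER_PERCENT of random jitter is applied so parallel fetches don't retry in lockstep. The schedule in isolation looks roughly like this (a sketch of the same arithmetic, not the fetch loop itself):

    import random

    MAXIMUM_RETRY_SLEEP_SEC = 3600.0
    RETRY_JITTER_PERCENT = 0.1

    def backoff_schedule(retries, initial=4.0, factor=2.0):
        # Yield the sleep duration to use before each successive retry.
        sleep = initial
        for _ in range(retries):
            yield sleep
            sleep = min(factor * sleep, MAXIMUM_RETRY_SLEEP_SEC)
            sleep *= 1 - random.uniform(-RETRY_JITTER_PERCENT,
                                        RETRY_JITTER_PERCENT)

    # list(backoff_schedule(4)) -> roughly [4.0, 8.x, 16.x, 32.x]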
@@ -2324,21 +2259,17 @@ class Project(object): | |||
2324 | # got what we wanted, else trigger a second run of all | 2259 | # got what we wanted, else trigger a second run of all |
2325 | # refs. | 2260 | # refs. |
2326 | if not self._CheckForImmutableRevision(): | 2261 | if not self._CheckForImmutableRevision(): |
2327 | if current_branch_only and depth: | 2262 | # Sync the current branch only with depth set to None. |
2328 | # Sync the current branch only with depth set to None | 2263 | # We always pass depth=None down to avoid infinite recursion. |
2329 | return self._RemoteFetch(name=name, | 2264 | return self._RemoteFetch( |
2330 | current_branch_only=current_branch_only, | 2265 | name=name, quiet=quiet, verbose=verbose, output_redir=output_redir, |
2331 | initial=False, quiet=quiet, alt_dir=alt_dir, | 2266 | current_branch_only=current_branch_only and depth, |
2332 | depth=None, clone_filter=clone_filter) | 2267 | initial=False, alt_dir=alt_dir, |
2333 | else: | 2268 | depth=None, ssh_proxy=ssh_proxy, clone_filter=clone_filter) |
2334 | # Avoid infinite recursion: sync all branches with depth set to None | ||
2335 | return self._RemoteFetch(name=name, current_branch_only=False, | ||
2336 | initial=False, quiet=quiet, alt_dir=alt_dir, | ||
2337 | depth=None, clone_filter=clone_filter) | ||
2338 | 2269 | ||
2339 | return ok | 2270 | return ok |
2340 | 2271 | ||
2341 | def _ApplyCloneBundle(self, initial=False, quiet=False): | 2272 | def _ApplyCloneBundle(self, initial=False, quiet=False, verbose=False): |
2342 | if initial and \ | 2273 | if initial and \ |
2343 | (self.manifest.manifestProject.config.GetString('repo.depth') or | 2274 | (self.manifest.manifestProject.config.GetString('repo.depth') or |
2344 | self.clone_depth): | 2275 | self.clone_depth): |
@@ -2362,13 +2293,16 @@ class Project(object): | |||
2362 | return False | 2293 | return False |
2363 | 2294 | ||
2364 | if not exist_dst: | 2295 | if not exist_dst: |
2365 | exist_dst = self._FetchBundle(bundle_url, bundle_tmp, bundle_dst, quiet) | 2296 | exist_dst = self._FetchBundle(bundle_url, bundle_tmp, bundle_dst, quiet, |
2297 | verbose) | ||
2366 | if not exist_dst: | 2298 | if not exist_dst: |
2367 | return False | 2299 | return False |
2368 | 2300 | ||
2369 | cmd = ['fetch'] | 2301 | cmd = ['fetch'] |
2370 | if quiet: | 2302 | if not verbose: |
2371 | cmd.append('--quiet') | 2303 | cmd.append('--quiet') |
2304 | if not quiet and sys.stdout.isatty(): | ||
2305 | cmd.append('--progress') | ||
2372 | if not self.worktree: | 2306 | if not self.worktree: |
2373 | cmd.append('--update-head-ok') | 2307 | cmd.append('--update-head-ok') |
2374 | cmd.append(bundle_dst) | 2308 | cmd.append(bundle_dst) |
@@ -2377,19 +2311,16 @@ class Project(object): | |||
2377 | cmd.append('+refs/tags/*:refs/tags/*') | 2311 | cmd.append('+refs/tags/*:refs/tags/*') |
2378 | 2312 | ||
2379 | ok = GitCommand(self, cmd, bare=True).Wait() == 0 | 2313 | ok = GitCommand(self, cmd, bare=True).Wait() == 0 |
2380 | if os.path.exists(bundle_dst): | 2314 | platform_utils.remove(bundle_dst, missing_ok=True) |
2381 | platform_utils.remove(bundle_dst) | 2315 | platform_utils.remove(bundle_tmp, missing_ok=True) |
2382 | if os.path.exists(bundle_tmp): | ||
2383 | platform_utils.remove(bundle_tmp) | ||
2384 | return ok | 2316 | return ok |
2385 | 2317 | ||
2386 | def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet): | 2318 | def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet, verbose): |
2387 | if os.path.exists(dstPath): | 2319 | platform_utils.remove(dstPath, missing_ok=True) |
2388 | platform_utils.remove(dstPath) | ||
2389 | 2320 | ||
2390 | cmd = ['curl', '--fail', '--output', tmpPath, '--netrc', '--location'] | 2321 | cmd = ['curl', '--fail', '--output', tmpPath, '--netrc', '--location'] |
2391 | if quiet: | 2322 | if quiet: |
2392 | cmd += ['--silent'] | 2323 | cmd += ['--silent', '--show-error'] |
2393 | if os.path.exists(tmpPath): | 2324 | if os.path.exists(tmpPath): |
2394 | size = os.stat(tmpPath).st_size | 2325 | size = os.stat(tmpPath).st_size |
2395 | if size >= 1024: | 2326 | if size >= 1024: |
@@ -2411,22 +2342,30 @@ class Project(object): | |||
2411 | 2342 | ||
2412 | if IsTrace(): | 2343 | if IsTrace(): |
2413 | Trace('%s', ' '.join(cmd)) | 2344 | Trace('%s', ' '.join(cmd)) |
2345 | if verbose: | ||
2346 | print('%s: Downloading bundle: %s' % (self.name, srcUrl)) | ||
2347 | stdout = None if verbose else subprocess.PIPE | ||
2348 | stderr = None if verbose else subprocess.STDOUT | ||
2414 | try: | 2349 | try: |
2415 | proc = subprocess.Popen(cmd) | 2350 | proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr) |
2416 | except OSError: | 2351 | except OSError: |
2417 | return False | 2352 | return False |
2418 | 2353 | ||
2419 | curlret = proc.wait() | 2354 | (output, _) = proc.communicate() |
2355 | curlret = proc.returncode | ||
2420 | 2356 | ||
2421 | if curlret == 22: | 2357 | if curlret == 22: |
2422 | # From curl man page: | 2358 | # From curl man page: |
2423 | # 22: HTTP page not retrieved. The requested url was not found or | 2359 | # 22: HTTP page not retrieved. The requested url was not found or |
2424 | # returned another error with the HTTP error code being 400 or above. | 2360 | # returned another error with the HTTP error code being 400 or above. |
2425 | # This return code only appears if -f, --fail is used. | 2361 | # This return code only appears if -f, --fail is used. |
2426 | if not quiet: | 2362 | if verbose: |
2427 | print("Server does not provide clone.bundle; ignoring.", | 2363 | print('%s: Unable to retrieve clone.bundle; ignoring.' % self.name) |
2428 | file=sys.stderr) | 2364 | if output: |
2365 | print('Curl output:\n%s' % output) | ||
2429 | return False | 2366 | return False |
2367 | elif curlret and not verbose and output: | ||
2368 | print('%s' % output, file=sys.stderr) | ||
2430 | 2369 | ||
2431 | if os.path.exists(tmpPath): | 2370 | if os.path.exists(tmpPath): |
2432 | if curlret == 0 and self._IsValidBundle(tmpPath, quiet): | 2371 | if curlret == 0 and self._IsValidBundle(tmpPath, quiet): |
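_FetchBundle() keys its behavior off curl's exit status: because the command runs with --fail, exit code 22 means the server answered with an HTTP error (usually that /clone.bundle simply doesn't exist), which is treated as "no bundle, carry on" rather than a sync failure. A minimal sketch of that interpretation, assuming curl on PATH and placeholder paths:

    import subprocess

    def try_download_bundle(url, tmp_path):
        cmd = ['curl', '--fail', '--silent', '--show-error',
               '--netrc', '--location', '--output', tmp_path, url]
        ret = subprocess.run(cmd).returncode
        if ret == 22:
            # HTTP 4xx/5xx with --fail: the server has no bundle; not an error.
            return False
        return ret == 0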
@@ -2460,8 +2399,12 @@ class Project(object): | |||
2460 | if self._allrefs: | 2399 | if self._allrefs: |
2461 | raise GitError('%s checkout %s ' % (self.name, rev)) | 2400 | raise GitError('%s checkout %s ' % (self.name, rev)) |
2462 | 2401 | ||
2463 | def _CherryPick(self, rev): | 2402 | def _CherryPick(self, rev, ffonly=False, record_origin=False): |
2464 | cmd = ['cherry-pick'] | 2403 | cmd = ['cherry-pick'] |
2404 | if ffonly: | ||
2405 | cmd.append('--ff') | ||
2406 | if record_origin: | ||
2407 | cmd.append('-x') | ||
2465 | cmd.append(rev) | 2408 | cmd.append(rev) |
2466 | cmd.append('--') | 2409 | cmd.append('--') |
2467 | if GitCommand(self, cmd).Wait() != 0: | 2410 | if GitCommand(self, cmd).Wait() != 0: |
@@ -2508,13 +2451,13 @@ class Project(object): | |||
2508 | raise GitError('%s rebase %s ' % (self.name, upstream)) | 2451 | raise GitError('%s rebase %s ' % (self.name, upstream)) |
2509 | 2452 | ||
2510 | def _FastForward(self, head, ffonly=False): | 2453 | def _FastForward(self, head, ffonly=False): |
2511 | cmd = ['merge', head] | 2454 | cmd = ['merge', '--no-stat', head] |
2512 | if ffonly: | 2455 | if ffonly: |
2513 | cmd.append("--ff-only") | 2456 | cmd.append("--ff-only") |
2514 | if GitCommand(self, cmd).Wait() != 0: | 2457 | if GitCommand(self, cmd).Wait() != 0: |
2515 | raise GitError('%s merge %s ' % (self.name, head)) | 2458 | raise GitError('%s merge %s ' % (self.name, head)) |
2516 | 2459 | ||
2517 | def _InitGitDir(self, mirror_git=None, force_sync=False): | 2460 | def _InitGitDir(self, mirror_git=None, force_sync=False, quiet=False): |
2518 | init_git_dir = not os.path.exists(self.gitdir) | 2461 | init_git_dir = not os.path.exists(self.gitdir) |
2519 | init_obj_dir = not os.path.exists(self.objdir) | 2462 | init_obj_dir = not os.path.exists(self.objdir) |
2520 | try: | 2463 | try: |
@@ -2523,6 +2466,12 @@ class Project(object): | |||
2523 | os.makedirs(self.objdir) | 2466 | os.makedirs(self.objdir) |
2524 | self.bare_objdir.init() | 2467 | self.bare_objdir.init() |
2525 | 2468 | ||
2469 | if self.use_git_worktrees: | ||
2470 | # Enable per-worktree config file support if possible. This is more a | ||
2471 | # nice-to-have feature for users rather than a hard requirement. | ||
2472 | if git_require((2, 20, 0)): | ||
2473 | self.EnableRepositoryExtension('worktreeConfig') | ||
2474 | |||
2526 | # If we have a separate directory to hold refs, initialize it as well. | 2475 | # If we have a separate directory to hold refs, initialize it as well. |
2527 | if self.objdir != self.gitdir: | 2476 | if self.objdir != self.gitdir: |
2528 | if init_git_dir: | 2477 | if init_git_dir: |
@@ -2542,8 +2491,9 @@ class Project(object): | |||
2542 | if self.worktree and os.path.exists(platform_utils.realpath | 2491 | if self.worktree and os.path.exists(platform_utils.realpath |
2543 | (self.worktree)): | 2492 | (self.worktree)): |
2544 | platform_utils.rmtree(platform_utils.realpath(self.worktree)) | 2493 | platform_utils.rmtree(platform_utils.realpath(self.worktree)) |
2545 | return self._InitGitDir(mirror_git=mirror_git, force_sync=False) | 2494 | return self._InitGitDir(mirror_git=mirror_git, force_sync=False, |
2546 | except: | 2495 | quiet=quiet) |
2496 | except Exception: | ||
2547 | raise e | 2497 | raise e |
2548 | raise e | 2498 | raise e |
2549 | 2499 | ||
@@ -2556,13 +2506,15 @@ class Project(object): | |||
2556 | mirror_git = os.path.join(ref_dir, self.name + '.git') | 2506 | mirror_git = os.path.join(ref_dir, self.name + '.git') |
2557 | repo_git = os.path.join(ref_dir, '.repo', 'projects', | 2507 | repo_git = os.path.join(ref_dir, '.repo', 'projects', |
2558 | self.relpath + '.git') | 2508 | self.relpath + '.git') |
2509 | worktrees_git = os.path.join(ref_dir, '.repo', 'worktrees', | ||
2510 | self.name + '.git') | ||
2559 | 2511 | ||
2560 | if os.path.exists(mirror_git): | 2512 | if os.path.exists(mirror_git): |
2561 | ref_dir = mirror_git | 2513 | ref_dir = mirror_git |
2562 | |||
2563 | elif os.path.exists(repo_git): | 2514 | elif os.path.exists(repo_git): |
2564 | ref_dir = repo_git | 2515 | ref_dir = repo_git |
2565 | 2516 | elif os.path.exists(worktrees_git): | |
2517 | ref_dir = worktrees_git | ||
2566 | else: | 2518 | else: |
2567 | ref_dir = None | 2519 | ref_dir = None |
2568 | 2520 | ||
@@ -2574,7 +2526,7 @@ class Project(object): | |||
2574 | _lwrite(os.path.join(self.gitdir, 'objects/info/alternates'), | 2526 | _lwrite(os.path.join(self.gitdir, 'objects/info/alternates'), |
2575 | os.path.join(ref_dir, 'objects') + '\n') | 2527 | os.path.join(ref_dir, 'objects') + '\n') |
2576 | 2528 | ||
2577 | self._UpdateHooks() | 2529 | self._UpdateHooks(quiet=quiet) |
2578 | 2530 | ||
2579 | m = self.manifest.manifestProject.config | 2531 | m = self.manifest.manifestProject.config |
2580 | for key in ['user.name', 'user.email']: | 2532 | for key in ['user.name', 'user.email']: |
@@ -2582,10 +2534,7 @@ class Project(object): | |||
2582 | self.config.SetString(key, m.GetString(key)) | 2534 | self.config.SetString(key, m.GetString(key)) |
2583 | self.config.SetString('filter.lfs.smudge', 'git-lfs smudge --skip -- %f') | 2535 | self.config.SetString('filter.lfs.smudge', 'git-lfs smudge --skip -- %f') |
2584 | self.config.SetString('filter.lfs.process', 'git-lfs filter-process --skip') | 2536 | self.config.SetString('filter.lfs.process', 'git-lfs filter-process --skip') |
2585 | if self.manifest.IsMirror: | 2537 | self.config.SetBoolean('core.bare', True if self.manifest.IsMirror else None) |
2586 | self.config.SetString('core.bare', 'true') | ||
2587 | else: | ||
2588 | self.config.SetString('core.bare', None) | ||
2589 | except Exception: | 2538 | except Exception: |
2590 | if init_obj_dir and os.path.exists(self.objdir): | 2539 | if init_obj_dir and os.path.exists(self.objdir): |
2591 | platform_utils.rmtree(self.objdir) | 2540 | platform_utils.rmtree(self.objdir) |
@@ -2593,11 +2542,11 @@ class Project(object): | |||
2593 | platform_utils.rmtree(self.gitdir) | 2542 | platform_utils.rmtree(self.gitdir) |
2594 | raise | 2543 | raise |
2595 | 2544 | ||
2596 | def _UpdateHooks(self): | 2545 | def _UpdateHooks(self, quiet=False): |
2597 | if os.path.exists(self.gitdir): | 2546 | if os.path.exists(self.gitdir): |
2598 | self._InitHooks() | 2547 | self._InitHooks(quiet=quiet) |
2599 | 2548 | ||
2600 | def _InitHooks(self): | 2549 | def _InitHooks(self, quiet=False): |
2601 | hooks = platform_utils.realpath(self._gitdir_path('hooks')) | 2550 | hooks = platform_utils.realpath(self._gitdir_path('hooks')) |
2602 | if not os.path.exists(hooks): | 2551 | if not os.path.exists(hooks): |
2603 | os.makedirs(hooks) | 2552 | os.makedirs(hooks) |
@@ -2617,18 +2566,23 @@ class Project(object): | |||
2617 | if platform_utils.islink(dst): | 2566 | if platform_utils.islink(dst): |
2618 | continue | 2567 | continue |
2619 | if os.path.exists(dst): | 2568 | if os.path.exists(dst): |
2620 | if filecmp.cmp(stock_hook, dst, shallow=False): | 2569 | # If the files are the same, we'll leave it alone. We create symlinks |
2621 | platform_utils.remove(dst) | 2570 | # below by default but fall back to hardlinks if the OS blocks them. |
2622 | else: | 2571 | # So if we're here, it's probably because we made a hardlink below. |
2623 | _warn("%s: Not replacing locally modified %s hook", | 2572 | if not filecmp.cmp(stock_hook, dst, shallow=False): |
2624 | self.relpath, name) | 2573 | if not quiet: |
2625 | continue | 2574 | _warn("%s: Not replacing locally modified %s hook", |
2575 | self.relpath, name) | ||
2576 | continue | ||
2626 | try: | 2577 | try: |
2627 | platform_utils.symlink( | 2578 | platform_utils.symlink( |
2628 | os.path.relpath(stock_hook, os.path.dirname(dst)), dst) | 2579 | os.path.relpath(stock_hook, os.path.dirname(dst)), dst) |
2629 | except OSError as e: | 2580 | except OSError as e: |
2630 | if e.errno == errno.EPERM: | 2581 | if e.errno == errno.EPERM: |
2631 | raise GitError(self._get_symlink_error_message()) | 2582 | try: |
2583 | os.link(stock_hook, dst) | ||
2584 | except OSError: | ||
2585 | raise GitError(self._get_symlink_error_message()) | ||
2632 | else: | 2586 | else: |
2633 | raise | 2587 | raise |
2634 | 2588 | ||
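Hook installation now tolerates filesystems that refuse symlinks: on EPERM it retries with a hard link before raising the usual GitError. The fallback pattern on its own (a sketch using os directly; the real code goes through platform_utils):

    import errno
    import os

    def install_hook(stock_hook, dst):
        rel = os.path.relpath(stock_hook, os.path.dirname(dst))
        try:
            os.symlink(rel, dst)
        except OSError as e:
            if e.errno != errno.EPERM:
                raise
            # Symlinks blocked (e.g. some Windows setups); fall back to a hard link.
            os.link(stock_hook, dst)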
@@ -2648,27 +2602,56 @@ class Project(object): | |||
2648 | 2602 | ||
2649 | def _InitMRef(self): | 2603 | def _InitMRef(self): |
2650 | if self.manifest.branch: | 2604 | if self.manifest.branch: |
2651 | self._InitAnyMRef(R_M + self.manifest.branch) | 2605 | if self.use_git_worktrees: |
2606 | # Set up the m/ space to point to the worktree-specific ref space. | ||
2607 | # We'll update the worktree-specific ref space on each checkout. | ||
2608 | ref = R_M + self.manifest.branch | ||
2609 | if not self.bare_ref.symref(ref): | ||
2610 | self.bare_git.symbolic_ref( | ||
2611 | '-m', 'redirecting to worktree scope', | ||
2612 | ref, R_WORKTREE_M + self.manifest.branch) | ||
2613 | |||
2614 | # We can't update this ref with git worktrees until it exists. | ||
2615 | # We'll wait until the initial checkout to set it. | ||
2616 | if not os.path.exists(self.worktree): | ||
2617 | return | ||
2618 | |||
2619 | base = R_WORKTREE_M | ||
2620 | active_git = self.work_git | ||
2621 | |||
2622 | self._InitAnyMRef(HEAD, self.bare_git, detach=True) | ||
2623 | else: | ||
2624 | base = R_M | ||
2625 | active_git = self.bare_git | ||
2626 | |||
2627 | self._InitAnyMRef(base + self.manifest.branch, active_git) | ||
2652 | 2628 | ||
2653 | def _InitMirrorHead(self): | 2629 | def _InitMirrorHead(self): |
2654 | self._InitAnyMRef(HEAD) | 2630 | self._InitAnyMRef(HEAD, self.bare_git) |
2655 | 2631 | ||
2656 | def _InitAnyMRef(self, ref): | 2632 | def _InitAnyMRef(self, ref, active_git, detach=False): |
2657 | cur = self.bare_ref.symref(ref) | 2633 | cur = self.bare_ref.symref(ref) |
2658 | 2634 | ||
2659 | if self.revisionId: | 2635 | if self.revisionId: |
2660 | if cur != '' or self.bare_ref.get(ref) != self.revisionId: | 2636 | if cur != '' or self.bare_ref.get(ref) != self.revisionId: |
2661 | msg = 'manifest set to %s' % self.revisionId | 2637 | msg = 'manifest set to %s' % self.revisionId |
2662 | dst = self.revisionId + '^0' | 2638 | dst = self.revisionId + '^0' |
2663 | self.bare_git.UpdateRef(ref, dst, message=msg, detach=True) | 2639 | active_git.UpdateRef(ref, dst, message=msg, detach=True) |
2664 | else: | 2640 | else: |
2665 | remote = self.GetRemote(self.remote.name) | 2641 | remote = self.GetRemote(self.remote.name) |
2666 | dst = remote.ToLocal(self.revisionExpr) | 2642 | dst = remote.ToLocal(self.revisionExpr) |
2667 | if cur != dst: | 2643 | if cur != dst: |
2668 | msg = 'manifest set to %s' % self.revisionExpr | 2644 | msg = 'manifest set to %s' % self.revisionExpr |
2669 | self.bare_git.symbolic_ref('-m', msg, ref, dst) | 2645 | if detach: |
2646 | active_git.UpdateRef(ref, dst, message=msg, detach=True) | ||
2647 | else: | ||
2648 | active_git.symbolic_ref('-m', msg, ref, dst) | ||
2670 | 2649 | ||
2671 | def _CheckDirReference(self, srcdir, destdir, share_refs): | 2650 | def _CheckDirReference(self, srcdir, destdir, share_refs): |
2651 | # Git worktrees don't use symlinks to share at all. | ||
2652 | if self.use_git_worktrees: | ||
2653 | return | ||
2654 | |||
2672 | symlink_files = self.shareable_files[:] | 2655 | symlink_files = self.shareable_files[:] |
2673 | symlink_dirs = self.shareable_dirs[:] | 2656 | symlink_dirs = self.shareable_dirs[:] |
2674 | if share_refs: | 2657 | if share_refs: |
@@ -2676,9 +2659,31 @@ class Project(object): | |||
2676 | symlink_dirs += self.working_tree_dirs | 2659 | symlink_dirs += self.working_tree_dirs |
2677 | to_symlink = symlink_files + symlink_dirs | 2660 | to_symlink = symlink_files + symlink_dirs |
2678 | for name in set(to_symlink): | 2661 | for name in set(to_symlink): |
2679 | dst = platform_utils.realpath(os.path.join(destdir, name)) | 2662 | # Try to self-heal a bit in simple cases. |
2663 | dst_path = os.path.join(destdir, name) | ||
2664 | src_path = os.path.join(srcdir, name) | ||
2665 | |||
2666 | if name in self.working_tree_dirs: | ||
2667 | # If the dir is missing under .repo/projects/, create it. | ||
2668 | if not os.path.exists(src_path): | ||
2669 | os.makedirs(src_path) | ||
2670 | |||
2671 | elif name in self.working_tree_files: | ||
2672 | # If it's a file under the checkout .git/ and the .repo/projects/ has | ||
2673 | # nothing, move the file under the .repo/projects/ tree. | ||
2674 | if not os.path.exists(src_path) and os.path.isfile(dst_path): | ||
2675 | platform_utils.rename(dst_path, src_path) | ||
2676 | |||
2677 | # If the path exists under the .repo/projects/ and there's no symlink | ||
2678 | # under the checkout .git/, recreate the symlink. | ||
2679 | if name in self.working_tree_dirs or name in self.working_tree_files: | ||
2680 | if os.path.exists(src_path) and not os.path.exists(dst_path): | ||
2681 | platform_utils.symlink( | ||
2682 | os.path.relpath(src_path, os.path.dirname(dst_path)), dst_path) | ||
2683 | |||
2684 | dst = platform_utils.realpath(dst_path) | ||
2680 | if os.path.lexists(dst): | 2685 | if os.path.lexists(dst): |
2681 | src = platform_utils.realpath(os.path.join(srcdir, name)) | 2686 | src = platform_utils.realpath(src_path) |
2682 | # Fail if the links are pointing to the wrong place | 2687 | # Fail if the links are pointing to the wrong place |
2683 | if src != dst: | 2688 | if src != dst: |
2684 | _error('%s is different in %s vs %s', name, destdir, srcdir) | 2689 | _error('%s is different in %s vs %s', name, destdir, srcdir) |
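The self-healing block in _CheckDirReference() repairs the common breakages one-sidedly: missing directories are recreated under .repo/projects/, stray files are moved back there from the checkout's .git/, and a lost symlink is rebuilt with a relative target before the usual consistency check runs. The symlink-repair step alone looks roughly like this (illustrative; platform_utils handles the OS differences in the real code):

    import os

    def reheal_symlink(src_path, dst_path):
        # The data lives under .repo/ but the checkout lost its link to it:
        # recreate the relative symlink pointing back at the source.
        if os.path.exists(src_path) and not os.path.exists(dst_path):
            os.symlink(os.path.relpath(src_path, os.path.dirname(dst_path)),
                       dst_path)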
@@ -2735,10 +2740,7 @@ class Project(object): | |||
2735 | # If the source file doesn't exist, ensure the destination | 2740 | # If the source file doesn't exist, ensure the destination |
2736 | # file doesn't either. | 2741 | # file doesn't either. |
2737 | if name in symlink_files and not os.path.lexists(src): | 2742 | if name in symlink_files and not os.path.lexists(src): |
2738 | try: | 2743 | platform_utils.remove(dst, missing_ok=True) |
2739 | platform_utils.remove(dst) | ||
2740 | except OSError: | ||
2741 | pass | ||
2742 | 2744 | ||
2743 | except OSError as e: | 2745 | except OSError as e: |
2744 | if e.errno == errno.EPERM: | 2746 | if e.errno == errno.EPERM: |
@@ -2746,11 +2748,45 @@ class Project(object): | |||
2746 | else: | 2748 | else: |
2747 | raise | 2749 | raise |
2748 | 2750 | ||
2751 | def _InitGitWorktree(self): | ||
2752 | """Init the project using git worktrees.""" | ||
2753 | self.bare_git.worktree('prune') | ||
2754 | self.bare_git.worktree('add', '-ff', '--checkout', '--detach', '--lock', | ||
2755 | self.worktree, self.GetRevisionId()) | ||
2756 | |||
2757 | # Rewrite the internal state files to use relative paths between the | ||
2758 | # checkouts & worktrees. | ||
2759 | dotgit = os.path.join(self.worktree, '.git') | ||
2760 | with open(dotgit, 'r') as fp: | ||
2761 | # Figure out the checkout->worktree path. | ||
2762 | setting = fp.read() | ||
2763 | assert setting.startswith('gitdir:') | ||
2764 | git_worktree_path = setting.split(':', 1)[1].strip() | ||
2765 | # Some platforms (e.g. Windows) won't let us update dotgit in situ because | ||
2766 | # of file permissions. Delete it and recreate it from scratch to avoid that. | ||
2767 | platform_utils.remove(dotgit) | ||
2768 | # Use relative path from checkout->worktree & maintain Unix line endings | ||
2769 | # on all OS's to match git behavior. | ||
2770 | with open(dotgit, 'w', newline='\n') as fp: | ||
2771 | print('gitdir:', os.path.relpath(git_worktree_path, self.worktree), | ||
2772 | file=fp) | ||
2773 | # Use relative path from worktree->checkout & maintain Unix line endings | ||
2774 | # on all OS's to match git behavior. | ||
2775 | with open(os.path.join(git_worktree_path, 'gitdir'), 'w', newline='\n') as fp: | ||
2776 | print(os.path.relpath(dotgit, git_worktree_path), file=fp) | ||
2777 | |||
2778 | self._InitMRef() | ||
2779 | |||
2749 | def _InitWorkTree(self, force_sync=False, submodules=False): | 2780 | def _InitWorkTree(self, force_sync=False, submodules=False): |
2750 | realdotgit = os.path.join(self.worktree, '.git') | 2781 | realdotgit = os.path.join(self.worktree, '.git') |
2751 | tmpdotgit = realdotgit + '.tmp' | 2782 | tmpdotgit = realdotgit + '.tmp' |
2752 | init_dotgit = not os.path.exists(realdotgit) | 2783 | init_dotgit = not os.path.exists(realdotgit) |
2753 | if init_dotgit: | 2784 | if init_dotgit: |
2785 | if self.use_git_worktrees: | ||
2786 | self._InitGitWorktree() | ||
2787 | self._CopyAndLinkFiles() | ||
2788 | return | ||
2789 | |||
2754 | dotgit = tmpdotgit | 2790 | dotgit = tmpdotgit |
2755 | platform_utils.rmtree(tmpdotgit, ignore_errors=True) | 2791 | platform_utils.rmtree(tmpdotgit, ignore_errors=True) |
2756 | os.makedirs(tmpdotgit) | 2792 | os.makedirs(tmpdotgit) |
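Earlier in this hunk, _InitGitWorktree() rewrites both ends of the worktree link so the checkout's .git file and the worktree's gitdir file point at each other with relative paths and Unix line endings, which keeps the pair relocatable and consistent across platforms. The two writes in isolation (a sketch with plain open(); paths are placeholders):

    import os

    def relink_worktree(dotgit, git_worktree_path):
        # checkout/.git -> the per-worktree scratch space under .repo/worktrees/.
        with open(dotgit, 'w', newline='\n') as fp:
            print('gitdir:', os.path.relpath(git_worktree_path,
                                             os.path.dirname(dotgit)), file=fp)
        # ...and the reverse pointer back to the checkout's .git file.
        with open(os.path.join(git_worktree_path, 'gitdir'),
                  'w', newline='\n') as fp:
            print(os.path.relpath(dotgit, git_worktree_path), file=fp)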
@@ -2766,7 +2802,7 @@ class Project(object): | |||
2766 | try: | 2802 | try: |
2767 | platform_utils.rmtree(dotgit) | 2803 | platform_utils.rmtree(dotgit) |
2768 | return self._InitWorkTree(force_sync=False, submodules=submodules) | 2804 | return self._InitWorkTree(force_sync=False, submodules=submodules) |
2769 | except: | 2805 | except Exception: |
2770 | raise e | 2806 | raise e |
2771 | raise e | 2807 | raise e |
2772 | 2808 | ||
@@ -2857,6 +2893,13 @@ class Project(object): | |||
2857 | self._bare = bare | 2893 | self._bare = bare |
2858 | self._gitdir = gitdir | 2894 | self._gitdir = gitdir |
2859 | 2895 | ||
2896 | # __getstate__ and __setstate__ are required for pickling because __getattr__ exists. | ||
2897 | def __getstate__(self): | ||
2898 | return (self._project, self._bare, self._gitdir) | ||
2899 | |||
2900 | def __setstate__(self, state): | ||
2901 | self._project, self._bare, self._gitdir = state | ||
2902 | |||
2860 | def LsOthers(self): | 2903 | def LsOthers(self): |
2861 | p = GitCommand(self._project, | 2904 | p = GitCommand(self._project, |
2862 | ['ls-files', | 2905 | ['ls-files', |
@@ -2885,54 +2928,67 @@ class Project(object): | |||
2885 | bare=False, | 2928 | bare=False, |
2886 | capture_stdout=True, | 2929 | capture_stdout=True, |
2887 | capture_stderr=True) | 2930 | capture_stderr=True) |
2888 | try: | 2931 | p.Wait() |
2889 | out = p.process.stdout.read() | 2932 | r = {} |
2890 | if not hasattr(out, 'encode'): | 2933 | out = p.stdout |
2891 | out = out.decode() | 2934 | if out: |
2892 | r = {} | 2935 | out = iter(out[:-1].split('\0')) |
2893 | if out: | 2936 | while out: |
2894 | out = iter(out[:-1].split('\0')) | 2937 | try: |
2895 | while out: | 2938 | info = next(out) |
2896 | try: | 2939 | path = next(out) |
2897 | info = next(out) | 2940 | except StopIteration: |
2898 | path = next(out) | 2941 | break |
2899 | except StopIteration: | 2942 | |
2900 | break | 2943 | class _Info(object): |
2901 | 2944 | ||
2902 | class _Info(object): | 2945 | def __init__(self, path, omode, nmode, oid, nid, state): |
2903 | 2946 | self.path = path | |
2904 | def __init__(self, path, omode, nmode, oid, nid, state): | 2947 | self.src_path = None |
2905 | self.path = path | 2948 | self.old_mode = omode |
2906 | self.src_path = None | 2949 | self.new_mode = nmode |
2907 | self.old_mode = omode | 2950 | self.old_id = oid |
2908 | self.new_mode = nmode | 2951 | self.new_id = nid |
2909 | self.old_id = oid | 2952 | |
2910 | self.new_id = nid | 2953 | if len(state) == 1: |
2911 | 2954 | self.status = state | |
2912 | if len(state) == 1: | 2955 | self.level = None |
2913 | self.status = state | 2956 | else: |
2914 | self.level = None | 2957 | self.status = state[:1] |
2915 | else: | 2958 | self.level = state[1:] |
2916 | self.status = state[:1] | 2959 | while self.level.startswith('0'): |
2917 | self.level = state[1:] | 2960 | self.level = self.level[1:] |
2918 | while self.level.startswith('0'): | 2961 | |
2919 | self.level = self.level[1:] | 2962 | info = info[1:].split(' ') |
2920 | 2963 | info = _Info(path, *info) | |
2921 | info = info[1:].split(' ') | 2964 | if info.status in ('R', 'C'): |
2922 | info = _Info(path, *info) | 2965 | info.src_path = info.path |
2923 | if info.status in ('R', 'C'): | 2966 | info.path = next(out) |
2924 | info.src_path = info.path | 2967 | r[info.path] = info |
2925 | info.path = next(out) | 2968 | return r |
2926 | r[info.path] = info | 2969 | |
2927 | return r | 2970 | def GetDotgitPath(self, subpath=None): |
2928 | finally: | 2971 | """Return the full path to the .git dir. |
2929 | p.Wait() | 2972 | |
2930 | 2973 | As a convenience, append |subpath| if provided. | |
2931 | def GetHead(self): | 2974 | """ |
2932 | if self._bare: | 2975 | if self._bare: |
2933 | path = os.path.join(self._project.gitdir, HEAD) | 2976 | dotgit = self._gitdir |
2934 | else: | 2977 | else: |
2935 | path = os.path.join(self._project.worktree, '.git', HEAD) | 2978 | dotgit = os.path.join(self._project.worktree, '.git') |
2979 | if os.path.isfile(dotgit): | ||
2980 | # Git worktrees use a "gitdir:" syntax to point to the scratch space. | ||
2981 | with open(dotgit) as fp: | ||
2982 | setting = fp.read() | ||
2983 | assert setting.startswith('gitdir:') | ||
2984 | gitdir = setting.split(':', 1)[1].strip() | ||
2985 | dotgit = os.path.normpath(os.path.join(self._project.worktree, gitdir)) | ||
2986 | |||
2987 | return dotgit if subpath is None else os.path.join(dotgit, subpath) | ||
2988 | |||
2989 | def GetHead(self): | ||
2990 | """Return the ref that HEAD points to.""" | ||
2991 | path = self.GetDotgitPath(subpath=HEAD) | ||
2936 | try: | 2992 | try: |
2937 | with open(path) as fd: | 2993 | with open(path) as fd: |
2938 | line = fd.readline() | 2994 | line = fd.readline() |
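GetDotgitPath() has to cope with git-worktree checkouts where .git is a one-line file holding a `gitdir:` pointer instead of a directory; GetHead() then reads HEAD from wherever that resolves to. The resolution step by itself (a sketch, not the _GitGetByExec wrapper):

    import os

    def resolve_dotgit(worktree):
        dotgit = os.path.join(worktree, '.git')
        if os.path.isfile(dotgit):
            # Worktrees store a "gitdir: <path>" pointer here.
            with open(dotgit) as fp:
                gitdir = fp.read().split(':', 1)[1].strip()
            dotgit = os.path.normpath(os.path.join(worktree, gitdir))
        return dotgit

    # HEAD then lives at os.path.join(resolve_dotgit(worktree), 'HEAD').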
@@ -3027,9 +3083,6 @@ class Project(object): | |||
3027 | raise TypeError('%s() got an unexpected keyword argument %r' | 3083 | raise TypeError('%s() got an unexpected keyword argument %r' |
3028 | % (name, k)) | 3084 | % (name, k)) |
3029 | if config is not None: | 3085 | if config is not None: |
3030 | if not git_require((1, 7, 2)): | ||
3031 | raise ValueError('cannot set config on command line for %s()' | ||
3032 | % name) | ||
3033 | for k, v in config.items(): | 3086 | for k, v in config.items(): |
3034 | cmdv.append('-c') | 3087 | cmdv.append('-c') |
3035 | cmdv.append('%s=%s' % (k, v)) | 3088 | cmdv.append('%s=%s' % (k, v)) |
@@ -3109,7 +3162,7 @@ class _Later(object): | |||
3109 | class _SyncColoring(Coloring): | 3162 | class _SyncColoring(Coloring): |
3110 | 3163 | ||
3111 | def __init__(self, config): | 3164 | def __init__(self, config): |
3112 | Coloring.__init__(self, config, 'reposync') | 3165 | super().__init__(config, 'reposync') |
3113 | self.project = self.printer('header', attr='bold') | 3166 | self.project = self.printer('header', attr='bold') |
3114 | self.info = self.printer('info') | 3167 | self.info = self.printer('info') |
3115 | self.fail = self.printer('fail', fg='red') | 3168 | self.fail = self.printer('fail', fg='red') |