 -rw-r--r--  .pylintrc                  |   3
 -rw-r--r--  color.py                   |  73
 -rw-r--r--  docs/manifest-format.txt   |  44
 -rw-r--r--  error.py                   |   4
 -rw-r--r--  git_command.py             |  73
 -rw-r--r--  git_config.py              |  72
 -rwxr-xr-x  hooks/commit-msg           |  10
 -rwxr-xr-x  main.py                    |  13
 -rw-r--r--  manifest_xml.py            | 106
 -rw-r--r--  project.py                 | 786
 -rwxr-xr-x  repo                       |  18
 -rw-r--r--  subcmds/branches.py        |  20
 -rw-r--r--  subcmds/cherry_pick.py     |   1
 -rw-r--r--  subcmds/download.py        |   1
 -rw-r--r--  subcmds/forall.py          | 325
 -rw-r--r--  subcmds/info.py            |   3
 -rw-r--r--  subcmds/init.py            |   6
 -rw-r--r--  subcmds/start.py           |   8
 -rw-r--r--  subcmds/status.py          |  54
 -rw-r--r--  subcmds/sync.py            | 102
 -rw-r--r--  subcmds/upload.py          |  14
21 files changed, 1123 insertions, 613 deletions
diff --git a/.pylintrc b/.pylintrc
--- a/.pylintrc
+++ b/.pylintrc
@@ -61,9 +61,6 @@ disable=R0903,R0912,R0913,R0914,R0915,W0141,C0111,C0103,W0603,W0703,R0911,C0301,
 # (visual studio) and html
 output-format=text
 
-# Include message's id in output
-include-ids=yes
-
 # Put messages in a separate file for each module / package specified on the
 # command line instead of printing them on stdout. Reports (if any) will be
 # written in a file name "pylint_global.[txt|html]".
diff --git a/color.py b/color.py
--- a/color.py
+++ b/color.py
@@ -18,41 +18,43 @@ import sys
 
 import pager
 
-COLORS = {None :-1,
-          'normal' :-1,
-          'black' : 0,
-          'red' : 1,
-          'green' : 2,
-          'yellow' : 3,
-          'blue' : 4,
-          'magenta': 5,
-          'cyan' : 6,
-          'white' : 7}
+COLORS = {None: -1,
+          'normal': -1,
+          'black': 0,
+          'red': 1,
+          'green': 2,
+          'yellow': 3,
+          'blue': 4,
+          'magenta': 5,
+          'cyan': 6,
+          'white': 7}
 
-ATTRS = {None :-1,
-         'bold' : 1,
-         'dim' : 2,
-         'ul' : 4,
-         'blink' : 5,
-         'reverse': 7}
+ATTRS = {None: -1,
+         'bold': 1,
+         'dim': 2,
+         'ul': 4,
+         'blink': 5,
+         'reverse': 7}
 
-RESET = "\033[m"  # pylint: disable=W1401
-                  # backslash is not anomalous
+RESET = "\033[m"
+
 
 def is_color(s):
   return s in COLORS
 
+
 def is_attr(s):
   return s in ATTRS
 
-def _Color(fg = None, bg = None, attr = None):
+
+def _Color(fg=None, bg=None, attr=None):
   fg = COLORS[fg]
   bg = COLORS[bg]
   attr = ATTRS[attr]
 
   if attr >= 0 or fg >= 0 or bg >= 0:
     need_sep = False
-    code = "\033["  #pylint: disable=W1401
+    code = "\033["
 
     if attr >= 0:
       code += chr(ord('0') + attr)
@@ -71,7 +73,6 @@ def _Color(fg = None, bg = None, attr = None):
     if bg >= 0:
       if need_sep:
         code += ';'
-        need_sep = True
 
       if bg < 8:
         code += '4%c' % (ord('0') + bg)
@@ -82,6 +83,27 @@ def _Color(fg = None, bg = None, attr = None):
     code = ''
   return code
 
+DEFAULT = None
+
+
+def SetDefaultColoring(state):
+  """Set coloring behavior to |state|.
+
+  This is useful for overriding config options via the command line.
+  """
+  if state is None:
+    # Leave it alone -- return quick!
+    return
+
+  global DEFAULT
+  state = state.lower()
+  if state in ('auto',):
+    DEFAULT = state
+  elif state in ('always', 'yes', 'true', True):
+    DEFAULT = 'always'
+  elif state in ('never', 'no', 'false', False):
+    DEFAULT = 'never'
+
 
 class Coloring(object):
   def __init__(self, config, section_type):
@@ -89,9 +111,11 @@ class Coloring(object):
     self._config = config
     self._out = sys.stdout
 
-    on = self._config.GetString(self._section)
+    on = DEFAULT
     if on is None:
-      on = self._config.GetString('color.ui')
+      on = self._config.GetString(self._section)
+      if on is None:
+        on = self._config.GetString('color.ui')
 
     if on == 'auto':
       if pager.active or os.isatty(1):
@@ -122,6 +146,7 @@ class Coloring(object):
   def printer(self, opt=None, fg=None, bg=None, attr=None):
     s = self
     c = self.colorer(opt, fg, bg, attr)
+
     def f(fmt, *args):
       s._out.write(c(fmt, *args))
     return f
@@ -129,6 +154,7 @@ class Coloring(object):
   def nofmt_printer(self, opt=None, fg=None, bg=None, attr=None):
     s = self
     c = self.nofmt_colorer(opt, fg, bg, attr)
+
    def f(fmt):
       s._out.write(c(fmt))
     return f
@@ -136,11 +162,13 @@ class Coloring(object):
   def colorer(self, opt=None, fg=None, bg=None, attr=None):
     if self._on:
       c = self._parse(opt, fg, bg, attr)
+
       def f(fmt, *args):
         output = fmt % args
         return ''.join([c, output, RESET])
       return f
     else:
+
       def f(fmt, *args):
         return fmt % args
       return f
@@ -148,6 +176,7 @@ class Coloring(object):
   def nofmt_colorer(self, opt=None, fg=None, bg=None, attr=None):
     if self._on:
       c = self._parse(opt, fg, bg, attr)
+
       def f(fmt):
         return ''.join([c, fmt, RESET])
       return f
diff --git a/docs/manifest-format.txt b/docs/manifest-format.txt
index e48b75fe..1aa93965 100644
--- a/docs/manifest-format.txt
+++ b/docs/manifest-format.txt
@@ -26,6 +26,7 @@ following DTD:
                       manifest-server?,
                       remove-project*,
                       project*,
+                      extend-project*,
                       repo-hooks?)>
 
   <!ELEMENT notice (#PCDATA)>
@@ -35,6 +36,7 @@ following DTD:
   <!ATTLIST remote alias CDATA #IMPLIED>
   <!ATTLIST remote fetch CDATA #REQUIRED>
   <!ATTLIST remote review CDATA #IMPLIED>
+  <!ATTLIST remote revision CDATA #IMPLIED>
 
   <!ELEMENT default (EMPTY)>
   <!ATTLIST default remote IDREF #IMPLIED>
@@ -66,6 +68,11 @@ following DTD:
   <!ATTLIST annotation value CDATA #REQUIRED>
   <!ATTLIST annotation keep CDATA "true">
 
+  <!ELEMENT extend-project>
+  <!ATTLIST extend-project name CDATA #REQUIRED>
+  <!ATTLIST extend-project path CDATA #IMPLIED>
+  <!ATTLIST extend-project groups CDATA #IMPLIED>
+
   <!ELEMENT remove-project (EMPTY)>
   <!ATTLIST remove-project name CDATA #REQUIRED>
 
@@ -112,6 +119,10 @@ Attribute `review`: Hostname of the Gerrit server where reviews
 are uploaded to by `repo upload`. This attribute is optional;
 if not specified then `repo upload` will not function.
 
+Attribute `revision`: Name of a Git branch (e.g. `master` or
+`refs/heads/master`). Remotes with their own revision will override
+the default revision.
+
 Element default
 ---------------
 
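To illustrate the new per-remote default described above, a manifest could pair a remote carrying its own `revision` with a project that sets none; the remote and project names below are illustrative, not taken from this change:

    <remote name="aosp" fetch=".." revision="refs/heads/master" />
    <project name="platform/build" path="build" remote="aosp" />

The project tracks `refs/heads/master` because the remote's `revision` now overrides the `default` element's revision.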
@@ -132,14 +143,14 @@ Project elements not setting their own `dest-branch` will inherit
 this value. If this value is not set, projects will use `revision`
 by default instead.
 
-Attribute `sync_j`: Number of parallel jobs to use when synching.
+Attribute `sync-j`: Number of parallel jobs to use when synching.
 
-Attribute `sync_c`: Set to true to only sync the given Git
+Attribute `sync-c`: Set to true to only sync the given Git
 branch (specified in the `revision` attribute) rather than the
-whole ref space. Project elements lacking a sync_c element of
+whole ref space. Project elements lacking a sync-c element of
 their own will use this value.
 
-Attribute `sync_s`: Set to true to also sync sub-projects.
+Attribute `sync-s`: Set to true to also sync sub-projects.
 
 
 Element manifest-server
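For reference, a `default` element written with the corrected attribute spellings could look like this (values are illustrative):

    <default remote="aosp" revision="master" sync-j="4" sync-c="true" />

Note that the attribute names use dashes (`sync-j`, `sync-c`, `sync-s`), not the underscores previously shown in this document.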
@@ -208,7 +219,8 @@ to track for this project. Names can be relative to refs/heads
 (e.g. just "master") or absolute (e.g. "refs/heads/master").
 Tags and/or explicit SHA-1s should work in theory, but have not
 been extensively tested. If not supplied the revision given by
-the default element is used.
+the remote element is used if applicable, else the default
+element is used.
 
 Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
 When using `repo upload`, changes will be submitted for code
@@ -226,13 +238,13 @@ group "notdefault", it will not be automatically downloaded by repo.
 If the project has a parent element, the `name` and `path` here
 are the prefixed ones.
 
-Attribute `sync_c`: Set to true to only sync the given Git
+Attribute `sync-c`: Set to true to only sync the given Git
 branch (specified in the `revision` attribute) rather than the
 whole ref space.
 
-Attribute `sync_s`: Set to true to also sync sub-projects.
+Attribute `sync-s`: Set to true to also sync sub-projects.
 
-Attribute `upstream`: Name of the Git branch in which a sha1
+Attribute `upstream`: Name of the Git ref in which a sha1
 can be found. Used when syncing a revision locked manifest in
 -c mode to avoid having to sync the entire ref space.
 
@@ -246,6 +258,22 @@ rather than the `name` attribute. This attribute only applies to the
 local mirrors syncing, it will be ignored when syncing the projects in a
 client working directory.
 
+Element extend-project
+----------------------
+
+Modify the attributes of the named project.
+
+This element is mostly useful in a local manifest file, to modify the
+attributes of an existing project without completely replacing the
+existing project definition. This makes the local manifest more robust
+against changes to the original manifest.
+
+Attribute `path`: If specified, limit the change to projects checked out
+at the specified path, rather than all projects with the given name.
+
+Attribute `groups`: List of additional groups to which this project
+belongs. Same syntax as the corresponding element of `project`.
+
 Element annotation
 ------------------
 
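A minimal local manifest using the new element might look like the following sketch (the file name, project name, path, and group are illustrative):

    <?xml version="1.0" encoding="UTF-8"?>
    <!-- .repo/local_manifests/extra_groups.xml -->
    <manifest>
      <extend-project name="platform/build" path="build" groups="tools" />
    </manifest>

This adds the `tools` group to the already-defined `platform/build` project without restating its remote or revision.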
diff --git a/error.py b/error.py
--- a/error.py
+++ b/error.py
@@ -80,7 +80,7 @@ class NoSuchProjectError(Exception):
     self.name = name
 
   def __str__(self):
-    if self.Name is None:
+    if self.name is None:
       return 'in current directory'
     return self.name
 
@@ -93,7 +93,7 @@ class InvalidProjectGroupsError(Exception):
     self.name = name
 
   def __str__(self):
-    if self.Name is None:
+    if self.name is None:
       return 'in current directory'
     return self.name
 
diff --git a/git_command.py b/git_command.py
index 354fc715..0893bff7 100644
--- a/git_command.py
+++ b/git_command.py
@@ -14,7 +14,9 @@
 # limitations under the License.
 
 from __future__ import print_function
+import fcntl
 import os
+import select
 import sys
 import subprocess
 import tempfile
@@ -76,17 +78,30 @@ def terminate_ssh_clients():
 
 _git_version = None
 
+class _sfd(object):
+  """select file descriptor class"""
+  def __init__(self, fd, dest, std_name):
+    assert std_name in ('stdout', 'stderr')
+    self.fd = fd
+    self.dest = dest
+    self.std_name = std_name
+  def fileno(self):
+    return self.fd.fileno()
+
 class _GitCall(object):
   def version(self):
     p = GitCommand(None, ['--version'], capture_stdout=True)
     if p.Wait() == 0:
-      return p.stdout
+      if hasattr(p.stdout, 'decode'):
+        return p.stdout.decode('utf-8')
+      else:
+        return p.stdout
     return None
 
   def version_tuple(self):
     global _git_version
     if _git_version is None:
-      ver_str = git.version().decode('utf-8')
+      ver_str = git.version()
       _git_version = Wrapper().ParseGitVersion(ver_str)
       if _git_version is None:
         print('fatal: "%s" unsupported' % ver_str, file=sys.stderr)
@@ -139,6 +154,9 @@ class GitCommand(object):
       if key in env:
         del env[key]
 
+    # If we are not capturing std* then need to print it.
+    self.tee = {'stdout': not capture_stdout, 'stderr': not capture_stderr}
+
     if disable_editor:
       _setenv(env, 'GIT_EDITOR', ':')
     if ssh_proxy:
@@ -162,22 +180,21 @@ class GitCommand(object):
       if gitdir:
         _setenv(env, GIT_DIR, gitdir)
       cwd = None
-    command.extend(cmdv)
+    command.append(cmdv[0])
+    # Need to use the --progress flag for fetch/clone so output will be
+    # displayed as by default git only does progress output if stderr is a TTY.
+    if sys.stderr.isatty() and cmdv[0] in ('fetch', 'clone'):
+      if '--progress' not in cmdv and '--quiet' not in cmdv:
+        command.append('--progress')
+    command.extend(cmdv[1:])
 
     if provide_stdin:
       stdin = subprocess.PIPE
     else:
       stdin = None
 
-    if capture_stdout:
-      stdout = subprocess.PIPE
-    else:
-      stdout = None
-
-    if capture_stderr:
-      stderr = subprocess.PIPE
-    else:
-      stderr = None
+    stdout = subprocess.PIPE
+    stderr = subprocess.PIPE
 
     if IsTrace():
       global LAST_CWD
@@ -226,8 +243,36 @@ class GitCommand(object):
   def Wait(self):
     try:
       p = self.process
-      (self.stdout, self.stderr) = p.communicate()
-      rc = p.returncode
+      rc = self._CaptureOutput()
     finally:
       _remove_ssh_client(p)
     return rc
+
+  def _CaptureOutput(self):
+    p = self.process
+    s_in = [_sfd(p.stdout, sys.stdout, 'stdout'),
+            _sfd(p.stderr, sys.stderr, 'stderr')]
+    self.stdout = ''
+    self.stderr = ''
+
+    for s in s_in:
+      flags = fcntl.fcntl(s.fd, fcntl.F_GETFL)
+      fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
+
+    while s_in:
+      in_ready, _, _ = select.select(s_in, [], [])
+      for s in in_ready:
+        buf = s.fd.read(4096)
+        if not buf:
+          s_in.remove(s)
+          continue
+        if not hasattr(buf, 'encode'):
+          buf = buf.decode()
+        if s.std_name == 'stdout':
+          self.stdout += buf
+        else:
+          self.stderr += buf
+        if self.tee[s.std_name]:
+          s.dest.write(buf)
+          s.dest.flush()
+    return p.wait()
diff --git a/git_config.py b/git_config.py
index 32879ec7..8ded7c25 100644
--- a/git_config.py
+++ b/git_config.py
@@ -15,8 +15,8 @@
 
 from __future__ import print_function
 
+import json
 import os
-import pickle
 import re
 import subprocess
 import sys
@@ -80,7 +80,7 @@ class GitConfig(object):
     return cls(configfile = os.path.join(gitdir, 'config'),
                defaults = defaults)
 
-  def __init__(self, configfile, defaults=None, pickleFile=None):
+  def __init__(self, configfile, defaults=None, jsonFile=None):
     self.file = configfile
     self.defaults = defaults
     self._cache_dict = None
@@ -88,12 +88,11 @@ class GitConfig(object):
     self._remotes = {}
     self._branches = {}
 
-    if pickleFile is None:
-      self._pickle = os.path.join(
+    self._json = jsonFile
+    if self._json is None:
+      self._json = os.path.join(
         os.path.dirname(self.file),
-        '.repopickle_' + os.path.basename(self.file))
-    else:
-      self._pickle = pickleFile
+        '.repo_' + os.path.basename(self.file) + '.json')
 
   def Has(self, name, include_defaults = True):
     """Return true if this configuration file has the key.
@@ -217,9 +216,9 @@ class GitConfig(object):
     """Resolve any url.*.insteadof references.
     """
     for new_url in self.GetSubSections('url'):
-      old_url = self.GetString('url.%s.insteadof' % new_url)
-      if old_url is not None and url.startswith(old_url):
-        return new_url + url[len(old_url):]
+      for old_url in self.GetString('url.%s.insteadof' % new_url, True):
+        if old_url is not None and url.startswith(old_url):
+          return new_url + url[len(old_url):]
     return url
 
   @property
@@ -248,50 +247,41 @@ class GitConfig(object):
     return self._cache_dict
 
   def _Read(self):
-    d = self._ReadPickle()
+    d = self._ReadJson()
     if d is None:
       d = self._ReadGit()
-      self._SavePickle(d)
+      self._SaveJson(d)
     return d
 
-  def _ReadPickle(self):
+  def _ReadJson(self):
     try:
-      if os.path.getmtime(self._pickle) \
+      if os.path.getmtime(self._json) \
       <= os.path.getmtime(self.file):
-        os.remove(self._pickle)
+        os.remove(self._json)
         return None
     except OSError:
       return None
     try:
-      Trace(': unpickle %s', self.file)
-      fd = open(self._pickle, 'rb')
+      Trace(': parsing %s', self.file)
+      fd = open(self._json)
       try:
-        return pickle.load(fd)
+        return json.load(fd)
       finally:
         fd.close()
-    except EOFError:
-      os.remove(self._pickle)
-      return None
-    except IOError:
-      os.remove(self._pickle)
-      return None
-    except pickle.PickleError:
-      os.remove(self._pickle)
+    except (IOError, ValueError):
+      os.remove(self._json)
       return None
 
-  def _SavePickle(self, cache):
+  def _SaveJson(self, cache):
     try:
-      fd = open(self._pickle, 'wb')
+      fd = open(self._json, 'w')
       try:
-        pickle.dump(cache, fd, pickle.HIGHEST_PROTOCOL)
+        json.dump(cache, fd, indent=2)
       finally:
         fd.close()
-    except IOError:
-      if os.path.exists(self._pickle):
-        os.remove(self._pickle)
-    except pickle.PickleError:
-      if os.path.exists(self._pickle):
-        os.remove(self._pickle)
+    except (IOError, TypeError):
+      if os.path.exists(self._json):
+        os.remove(self._json)
 
   def _ReadGit(self):
     """
@@ -576,6 +566,8 @@ class Remote(object):
         return None
 
       u = self.review
+      if u.startswith('persistent-'):
+        u = u[len('persistent-'):]
       if u.split(':')[0] not in ('http', 'https', 'sso'):
         u = 'http://%s' % u
       if u.endswith('/Gerrit'):
@@ -627,9 +619,7 @@ class Remote(object):
   def ToLocal(self, rev):
     """Convert a remote revision string to something we have locally.
     """
-    if IsId(rev):
-      return rev
-    if rev.startswith(R_TAGS):
+    if self.name == '.' or IsId(rev):
       return rev
 
     if not rev.startswith('refs/'):
@@ -638,6 +628,10 @@ class Remote(object):
     for spec in self.fetch:
       if spec.SourceMatches(rev):
         return spec.MapSource(rev)
+
+    if not rev.startswith(R_HEADS):
+      return rev
+
     raise GitError('remote %s does not have %s' % (self.name, rev))
 
   def WritesTo(self, ref):
@@ -707,7 +701,7 @@ class Branch(object):
       self._Set('merge', self.merge)
 
     else:
-      fd = open(self._config.file, 'ab')
+      fd = open(self._config.file, 'a')
       try:
         fd.write('[branch "%s"]\n' % self.name)
         if self.remote:
diff --git a/hooks/commit-msg b/hooks/commit-msg
index 5ca2b112..d8f009b6 100755
--- a/hooks/commit-msg
+++ b/hooks/commit-msg
@@ -1,5 +1,4 @@
 #!/bin/sh
-# From Gerrit Code Review 2.6
 #
 # Part of Gerrit Code Review (http://code.google.com/p/gerrit/)
 #
@@ -27,7 +26,7 @@ MSG="$1"
 #
 add_ChangeId() {
   clean_message=`sed -e '
-    /^diff --git a\/.*/{
+    /^diff --git .*/{
       s///
       q
     }
@@ -39,6 +38,11 @@ add_ChangeId() {
     return
   fi
 
+  if test "false" = "`git config --bool --get gerrit.createChangeId`"
+  then
+    return
+  fi
+
   # Does Change-Id: already exist? if so, exit (no change).
   if grep -i '^Change-Id:' "$MSG" >/dev/null
   then
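Because the hook now consults `gerrit.createChangeId`, Change-Id insertion can be switched off per repository; for example:

    # Stop this clone's commit-msg hook from adding Change-Id lines.
    git config --bool gerrit.createChangeId false

Unsetting the option, or setting it back to true, restores the previous behaviour.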
@@ -77,7 +81,7 @@ add_ChangeId() {
   # Skip the line starting with the diff command and everything after it,
   # up to the end of the file, assuming it is only patch data.
   # If more than one line before the diff was empty, strip all but one.
-  /^diff --git a/ {
+  /^diff --git / {
     blankLines = 0
     while (getline) { }
     next
diff --git a/main.py b/main.py
--- a/main.py
+++ b/main.py
@@ -36,6 +36,7 @@ try:
 except ImportError:
   kerberos = None
 
+from color import SetDefaultColoring
 from trace import SetTrace
 from git_command import git, GitCommand
 from git_config import init_ssh, close_ssh
@@ -44,6 +45,7 @@ from command import MirrorSafeCommand
 from subcmds.version import Version
 from editor import Editor
 from error import DownloadError
+from error import InvalidProjectGroupsError
 from error import ManifestInvalidRevisionError
 from error import ManifestParseError
 from error import NoManifestException
@@ -69,6 +71,9 @@ global_options.add_option('-p', '--paginate',
 global_options.add_option('--no-pager',
                           dest='no_pager', action='store_true',
                           help='disable the pager')
+global_options.add_option('--color',
+                          choices=('auto', 'always', 'never'), default=None,
+                          help='control color usage: auto, always, never')
 global_options.add_option('--trace',
                           dest='trace', action='store_true',
                           help='trace git command execution')
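With the new global option, color can be forced on or off for a single invocation regardless of the `color.ui` configuration; for example (illustrative commands):

    # Keep ANSI colors when piping repo output through a pager.
    repo --color=always status | less -R

    # Disable color entirely for this run.
    repo --color=never status

Passing `--color=auto` keeps the default behaviour of coloring only when writing to a terminal.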
@@ -113,6 +118,8 @@ class _Repo(object):
       print('fatal: invalid usage of --version', file=sys.stderr)
       return 1
 
+    SetDefaultColoring(gopts.color)
+
     try:
       cmd = self.commands[name]
     except KeyError:
@@ -167,6 +174,12 @@ class _Repo(object):
       else:
         print('error: no project in current directory', file=sys.stderr)
       result = 1
+    except InvalidProjectGroupsError as e:
+      if e.name:
+        print('error: project group must be enabled for project %s' % e.name, file=sys.stderr)
+      else:
+        print('error: project group must be enabled for the project in the current directory', file=sys.stderr)
+      result = 1
     finally:
       elapsed = time.time() - start
       hours, remainder = divmod(elapsed, 3600)
diff --git a/manifest_xml.py b/manifest_xml.py
index 3c8fadd6..130e17c2 100644
--- a/manifest_xml.py
+++ b/manifest_xml.py
@@ -38,8 +38,9 @@ MANIFEST_FILE_NAME = 'manifest.xml'
 LOCAL_MANIFEST_NAME = 'local_manifest.xml'
 LOCAL_MANIFESTS_DIR_NAME = 'local_manifests'
 
-urllib.parse.uses_relative.extend(['ssh', 'git'])
-urllib.parse.uses_netloc.extend(['ssh', 'git'])
+# urljoin gets confused if the scheme is not known.
+urllib.parse.uses_relative.extend(['ssh', 'git', 'persistent-https', 'rpc'])
+urllib.parse.uses_netloc.extend(['ssh', 'git', 'persistent-https', 'rpc'])
 
 class _Default(object):
   """Project defaults within the manifest."""
@@ -63,12 +64,14 @@ class _XmlRemote(object):
                alias=None,
                fetch=None,
                manifestUrl=None,
-               review=None):
+               review=None,
+               revision=None):
     self.name = name
     self.fetchUrl = fetch
     self.manifestUrl = manifestUrl
     self.remoteAlias = alias
     self.reviewUrl = review
+    self.revision = revision
     self.resolvedFetchUrl = self._resolveFetchUrl()
 
   def __eq__(self, other):
@@ -83,17 +86,14 @@ class _XmlRemote(object):
     # urljoin will gets confused over quite a few things. The ones we care
     # about here are:
     # * no scheme in the base url, like <hostname:port>
-    # * persistent-https://
-    # We handle this by replacing these with obscure protocols
-    # and then replacing them with the original when we are done.
-    #   gopher -> <none>
-    #   wais -> persistent-https
+    # We handle no scheme by replacing it with an obscure protocol, gopher
+    # and then replacing it with the original when we are done.
+
     if manifestUrl.find(':') != manifestUrl.find('/') - 1:
-      manifestUrl = 'gopher://' + manifestUrl
-    manifestUrl = re.sub(r'^persistent-https://', 'wais://', manifestUrl)
-    url = urllib.parse.urljoin(manifestUrl, url)
-    url = re.sub(r'^gopher://', '', url)
-    url = re.sub(r'^wais://', 'persistent-https://', url)
+      url = urllib.parse.urljoin('gopher://' + manifestUrl, url)
+      url = re.sub(r'^gopher://', '', url)
+    else:
+      url = urllib.parse.urljoin(manifestUrl, url)
     return url
 
   def ToRemoteSpec(self, projectName):
@@ -159,6 +159,11 @@ class XmlManifest(object):
       e.setAttribute('alias', r.remoteAlias)
     if r.reviewUrl is not None:
       e.setAttribute('review', r.reviewUrl)
+    if r.revision is not None:
+      e.setAttribute('revision', r.revision)
+
+  def _ParseGroups(self, groups):
+    return [x for x in re.split(r'[,\s]+', groups) if x]
 
   def Save(self, fd, peg_rev=False, peg_rev_upstream=True):
     """Write the current manifest out to the given file descriptor.
@@ -167,7 +172,7 @@ class XmlManifest(object):
 
     groups = mp.config.GetString('manifest.groups')
     if groups:
-      groups = [x for x in re.split(r'[,\s]+', groups) if x]
+      groups = self._ParseGroups(groups)
 
     doc = xml.dom.minidom.Document()
     root = doc.createElement('manifest')
@@ -240,20 +245,27 @@ class XmlManifest(object):
       if d.remote:
         remoteName = d.remote.remoteAlias or d.remote.name
       if not d.remote or p.remote.name != remoteName:
-        e.setAttribute('remote', p.remote.name)
+        remoteName = p.remote.name
+        e.setAttribute('remote', remoteName)
       if peg_rev:
         if self.IsMirror:
           value = p.bare_git.rev_parse(p.revisionExpr + '^0')
         else:
           value = p.work_git.rev_parse(HEAD + '^0')
         e.setAttribute('revision', value)
-        if peg_rev_upstream and value != p.revisionExpr:
-          # Only save the origin if the origin is not a sha1, and the default
-          # isn't our value, and the if the default doesn't already have that
-          # covered.
-          e.setAttribute('upstream', p.revisionExpr)
-      elif not d.revisionExpr or p.revisionExpr != d.revisionExpr:
-        e.setAttribute('revision', p.revisionExpr)
+        if peg_rev_upstream:
+          if p.upstream:
+            e.setAttribute('upstream', p.upstream)
+          elif value != p.revisionExpr:
+            # Only save the origin if the origin is not a sha1, and the default
+            # isn't our value
+            e.setAttribute('upstream', p.revisionExpr)
+      else:
+        revision = self.remotes[remoteName].revision or d.revisionExpr
+        if not revision or revision != p.revisionExpr:
+          e.setAttribute('revision', p.revisionExpr)
+        if p.upstream and p.upstream != p.revisionExpr:
+          e.setAttribute('upstream', p.upstream)
 
       for c in p.copyfiles:
         ce = doc.createElement('copyfile')
@@ -261,6 +273,12 @@ class XmlManifest(object):
         ce.setAttribute('dest', c.dest)
         e.appendChild(ce)
 
+      for l in p.linkfiles:
+        le = doc.createElement('linkfile')
+        le.setAttribute('src', l.src)
+        le.setAttribute('dest', l.dest)
+        e.appendChild(le)
+
       default_groups = ['all', 'name:%s' % p.name, 'path:%s' % p.relpath]
       egroups = [g for g in p.groups if g not in default_groups]
       if egroups:
@@ -304,7 +322,7 @@ class XmlManifest(object):
   @property
   def projects(self):
     self._Load()
-    return self._paths.values()
+    return list(self._paths.values())
 
   @property
   def remotes(self):
@@ -492,6 +510,23 @@ class XmlManifest(object):
       if node.nodeName == 'project':
         project = self._ParseProject(node)
         recursively_add_projects(project)
+      if node.nodeName == 'extend-project':
+        name = self._reqatt(node, 'name')
+
+        if name not in self._projects:
+          raise ManifestParseError('extend-project element specifies non-existent '
+                                   'project: %s' % name)
+
+        path = node.getAttribute('path')
+        groups = node.getAttribute('groups')
+        if groups:
+          groups = self._ParseGroups(groups)
+
+        for p in self._projects[name]:
+          if path and p.relpath != path:
+            continue
+          if groups:
+            p.groups.extend(groups)
       if node.nodeName == 'repo-hooks':
         # Get the name of the project and the (space-separated) list of enabled.
         repo_hooks_project = self._reqatt(node, 'in-project')
@@ -586,8 +621,11 @@ class XmlManifest(object):
     review = node.getAttribute('review')
     if review == '':
       review = None
+    revision = node.getAttribute('revision')
+    if revision == '':
+      revision = None
     manifestUrl = self.manifestProject.config.GetString('remote.origin.url')
-    return _XmlRemote(name, alias, fetch, manifestUrl, review)
+    return _XmlRemote(name, alias, fetch, manifestUrl, review, revision)
 
   def _ParseDefault(self, node):
     """
@@ -680,7 +718,7 @@ class XmlManifest(object):
       raise ManifestParseError("no remote for project %s within %s" %
                                (name, self.manifestFile))
 
-    revisionExpr = node.getAttribute('revision')
+    revisionExpr = node.getAttribute('revision') or remote.revision
     if not revisionExpr:
       revisionExpr = self._default.revisionExpr
     if not revisionExpr:
@@ -729,7 +767,7 @@ class XmlManifest(object):
     groups = ''
     if node.hasAttribute('groups'):
       groups = node.getAttribute('groups')
-    groups = [x for x in re.split(r'[,\s]+', groups) if x]
+    groups = self._ParseGroups(groups)
 
     if parent is None:
       relpath, worktree, gitdir, objdir = self.GetProjectPaths(name, path)
@@ -765,6 +803,8 @@ class XmlManifest(object):
     for n in node.childNodes:
       if n.nodeName == 'copyfile':
         self._ParseCopyFile(project, n)
+      if n.nodeName == 'linkfile':
+        self._ParseLinkFile(project, n)
       if n.nodeName == 'annotation':
         self._ParseAnnotation(project, n)
       if n.nodeName == 'project':
@@ -814,6 +854,14 @@ class XmlManifest(object):
       # dest is relative to the top of the tree
       project.AddCopyFile(src, dest, os.path.join(self.topdir, dest))
 
+  def _ParseLinkFile(self, project, node):
+    src = self._reqatt(node, 'src')
+    dest = self._reqatt(node, 'dest')
+    if not self.IsMirror:
+      # src is project relative;
+      # dest is relative to the top of the tree
+      project.AddLinkFile(src, dest, os.path.join(self.topdir, dest))
+
   def _ParseAnnotation(self, project, node):
     name = self._reqatt(node, 'name')
     value = self._reqatt(node, 'value')
@@ -856,10 +904,8 @@ class XmlManifest(object):
     fromProjects = self.paths
     toProjects = manifest.paths
 
-    fromKeys = fromProjects.keys()
-    fromKeys.sort()
-    toKeys = toProjects.keys()
-    toKeys.sort()
+    fromKeys = sorted(fromProjects.keys())
+    toKeys = sorted(toProjects.keys())
 
     diff = {'added': [], 'removed': [], 'changed': [], 'unreachable': []}
 
diff --git a/project.py b/project.py
--- a/project.py
+++ b/project.py
@@ -13,9 +13,10 @@
 # limitations under the License.
 
 from __future__ import print_function
-import traceback
+import contextlib
 import errno
 import filecmp
+import glob
 import os
 import random
 import re
@@ -26,11 +27,12 @@ import sys
 import tarfile
 import tempfile
 import time
+import traceback
 
 from color import Coloring
 from git_command import GitCommand, git_require
 from git_config import GitConfig, IsId, GetSchemeFromUrl, ID_RE
-from error import GitError, HookError, UploadError
+from error import GitError, HookError, UploadError, DownloadError
 from error import ManifestInvalidRevisionError
 from error import NoManifestException
 from trace import IsTrace, Trace
@@ -46,7 +48,7 @@ if not is_python3():
 def _lwrite(path, content):
   lock = '%s.lock' % path
 
-  fd = open(lock, 'wb')
+  fd = open(lock, 'w')
   try:
     fd.write(content)
   finally:
@@ -84,7 +86,7 @@ def _ProjectHooks():
   global _project_hook_list
   if _project_hook_list is None:
     d = os.path.realpath(os.path.abspath(os.path.dirname(__file__)))
-    d = os.path.join(d , 'hooks')
+    d = os.path.join(d, 'hooks')
     _project_hook_list = [os.path.join(d, x) for x in os.listdir(d)]
   return _project_hook_list
 
@@ -182,28 +184,28 @@ class ReviewableBranch(object):
 class StatusColoring(Coloring):
   def __init__(self, config):
     Coloring.__init__(self, config, 'status')
-    self.project = self.printer('header', attr = 'bold')
-    self.branch = self.printer('header', attr = 'bold')
-    self.nobranch = self.printer('nobranch', fg = 'red')
-    self.important = self.printer('important', fg = 'red')
+    self.project = self.printer('header', attr='bold')
+    self.branch = self.printer('header', attr='bold')
+    self.nobranch = self.printer('nobranch', fg='red')
+    self.important = self.printer('important', fg='red')
 
-    self.added = self.printer('added', fg = 'green')
-    self.changed = self.printer('changed', fg = 'red')
-    self.untracked = self.printer('untracked', fg = 'red')
+    self.added = self.printer('added', fg='green')
+    self.changed = self.printer('changed', fg='red')
+    self.untracked = self.printer('untracked', fg='red')
 
 
 class DiffColoring(Coloring):
   def __init__(self, config):
     Coloring.__init__(self, config, 'diff')
-    self.project = self.printer('header', attr = 'bold')
+    self.project = self.printer('header', attr='bold')
 
-class _Annotation:
+class _Annotation(object):
   def __init__(self, name, value, keep):
     self.name = name
     self.value = value
     self.keep = keep
 
-class _CopyFile:
+class _CopyFile(object):
   def __init__(self, src, dest, abssrc, absdest):
     self.src = src
     self.dest = dest
@@ -231,14 +233,72 @@ class _CopyFile:
     except IOError:
       _error('Cannot copy file %s to %s', src, dest)
 
+class _LinkFile(object):
+  def __init__(self, git_worktree, src, dest, relsrc, absdest):
+    self.git_worktree = git_worktree
+    self.src = src
+    self.dest = dest
+    self.src_rel_to_dest = relsrc
+    self.abs_dest = absdest
+
+  def __linkIt(self, relSrc, absDest):
+    # link file if it does not exist or is out of date
+    if not os.path.islink(absDest) or (os.readlink(absDest) != relSrc):
+      try:
+        # remove existing file first, since it might be read-only
+        if os.path.exists(absDest):
+          os.remove(absDest)
+        else:
+          dest_dir = os.path.dirname(absDest)
+          if not os.path.isdir(dest_dir):
+            os.makedirs(dest_dir)
+        os.symlink(relSrc, absDest)
+      except IOError:
+        _error('Cannot link file %s to %s', relSrc, absDest)
+
+  def _Link(self):
+    """Link the self.rel_src_to_dest and self.abs_dest. Handles wild cards
+    on the src linking all of the files in the source in to the destination
+    directory.
+    """
+    # We use the absSrc to handle the situation where the current directory
+    # is not the root of the repo
+    absSrc = os.path.join(self.git_worktree, self.src)
+    if os.path.exists(absSrc):
+      # Entity exists so just a simple one to one link operation
+      self.__linkIt(self.src_rel_to_dest, self.abs_dest)
+    else:
+      # Entity doesn't exist assume there is a wild card
+      absDestDir = self.abs_dest
+      if os.path.exists(absDestDir) and not os.path.isdir(absDestDir):
+        _error('Link error: src with wildcard, %s must be a directory',
+               absDestDir)
+      else:
+        absSrcFiles = glob.glob(absSrc)
+        for absSrcFile in absSrcFiles:
+          # Create a releative path from source dir to destination dir
+          absSrcDir = os.path.dirname(absSrcFile)
+          relSrcDir = os.path.relpath(absSrcDir, absDestDir)
+
+          # Get the source file name
+          srcFile = os.path.basename(absSrcFile)
+
+          # Now form the final full paths to srcFile. They will be
+          # absolute for the desintaiton and relative for the srouce.
+          absDest = os.path.join(absDestDir, srcFile)
+          relSrc = os.path.join(relSrcDir, srcFile)
+          self.__linkIt(relSrc, absDest)
+
 class RemoteSpec(object):
   def __init__(self,
                name,
-               url = None,
-               review = None):
+               url=None,
+               review=None,
+               revision=None):
     self.name = name
     self.url = url
     self.review = review
+    self.revision = revision
 
 class RepoHook(object):
   """A RepoHook contains information about a script to run as a hook.
@@ -414,7 +474,8 @@ class RepoHook(object):
     # and convert to a HookError w/ just the failing traceback.
     context = {}
     try:
-      execfile(self._script_fullpath, context)
+      exec(compile(open(self._script_fullpath).read(),
+                   self._script_fullpath, 'exec'), context)
     except Exception:
       raise HookError('%s\nFailed to import %s hook; see traceback above.' % (
           traceback.format_exc(), self._hook_type))
@@ -483,6 +544,12 @@ class RepoHook(object):
 
 
 class Project(object):
+  # These objects can be shared between several working trees.
+  shareable_files = ['description', 'info']
+  shareable_dirs = ['hooks', 'objects', 'rr-cache', 'svn']
+  # These objects can only be used by a single working tree.
+  working_tree_files = ['config', 'packed-refs', 'shallow']
+  working_tree_dirs = ['logs', 'refs']
   def __init__(self,
                manifest,
                name,
@@ -493,15 +560,16 @@ class Project(object):
                relpath,
                revisionExpr,
                revisionId,
-               rebase = True,
-               groups = None,
-               sync_c = False,
-               sync_s = False,
-               clone_depth = None,
-               upstream = None,
-               parent = None,
-               is_derived = False,
-               dest_branch = None):
+               rebase=True,
+               groups=None,
+               sync_c=False,
+               sync_s=False,
+               clone_depth=None,
+               upstream=None,
+               parent=None,
+               is_derived=False,
+               dest_branch=None,
+               optimized_fetch=False):
     """Init a Project object.
 
     Args:
@@ -523,6 +591,8 @@ class Project(object):
       is_derived: False if the project was explicitly defined in the manifest;
                   True if the project is a discovered submodule.
       dest_branch: The branch to which to push changes for review by default.
+      optimized_fetch: If True, when a project is set to a sha1 revision, only
+                       fetch from the remote if the sha1 is not present locally.
     """
     self.manifest = manifest
     self.name = name
@@ -551,14 +621,16 @@ class Project(object):
     self.upstream = upstream
     self.parent = parent
     self.is_derived = is_derived
+    self.optimized_fetch = optimized_fetch
     self.subprojects = []
 
     self.snapshots = {}
     self.copyfiles = []
+    self.linkfiles = []
     self.annotations = []
     self.config = GitConfig.ForRepository(
-        gitdir = self.gitdir,
-        defaults = self.manifest.globalConfig)
+        gitdir=self.gitdir,
+        defaults=self.manifest.globalConfig)
 
     if self.worktree:
       self.work_git = self._GitGetByExec(self, bare=False, gitdir=gitdir)
@@ -579,7 +651,7 @@ class Project(object):
 
   @property
   def Exists(self):
-    return os.path.isdir(self.gitdir)
+    return os.path.isdir(self.gitdir) and os.path.isdir(self.objdir)
 
   @property
   def CurrentBranch(self):
@@ -708,27 +780,49 @@ class Project(object):
     return matched
 
   ## Status Display ##
+  def UncommitedFiles(self, get_all=True):
+    """Returns a list of strings, uncommitted files in the git tree.
 
-  def HasChanges(self):
-    """Returns true if there are uncommitted changes.
+    Args:
+      get_all: a boolean, if True - get information about all different
+               uncommitted files. If False - return as soon as any kind of
+               uncommitted files is detected.
     """
+    details = []
     self.work_git.update_index('-q',
                                '--unmerged',
                                '--ignore-missing',
                                '--refresh')
     if self.IsRebaseInProgress():
| 720 | return True | 797 | details.append("rebase in progress") |
| 798 | if not get_all: | ||
| 799 | return details | ||
| 721 | 800 | ||
| 722 | if self.work_git.DiffZ('diff-index', '--cached', HEAD): | 801 | changes = self.work_git.DiffZ('diff-index', '--cached', HEAD).keys() |
| 723 | return True | 802 | if changes: |
| 803 | details.extend(changes) | ||
| 804 | if not get_all: | ||
| 805 | return details | ||
| 724 | 806 | ||
| 725 | if self.work_git.DiffZ('diff-files'): | 807 | changes = self.work_git.DiffZ('diff-files').keys() |
| 726 | return True | 808 | if changes: |
| 809 | details.extend(changes) | ||
| 810 | if not get_all: | ||
| 811 | return details | ||
| 727 | 812 | ||
| 728 | if self.work_git.LsOthers(): | 813 | changes = self.work_git.LsOthers() |
| 729 | return True | 814 | if changes: |
| 815 | details.extend(changes) | ||
| 730 | 816 | ||
| 731 | return False | 817 | return details |
| 818 | |||
| 819 | def HasChanges(self): | ||
| 820 | """Returns true if there are uncommitted changes. | ||
| 821 | """ | ||
| 822 | if self.UncommitedFiles(get_all=False): | ||
| 823 | return True | ||
| 824 | else: | ||
| 825 | return False | ||
| 732 | 826 | ||
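Callers that only need a yes/no answer can let the scan stop at the first hit, while status-style output wants the full list; a hedged usage sketch, assuming a project object with the methods above:

# Fast dirtiness check: UncommitedFiles() stops at the first change found.
if project.HasChanges():
  print('%s has local changes' % project.relpath)

# Detailed report: collects the rebase marker plus staged, unstaged and
# untracked entries before returning.
for item in project.UncommitedFiles(get_all=True):
  print('  %s' % item)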
| 733 | def PrintWorkTreeStatus(self, output_redir=None): | 827 | def PrintWorkTreeStatus(self, output_redir=None): |
| 734 | """Prints the status of the repository to stdout. | 828 | """Prints the status of the repository to stdout. |
| @@ -758,7 +852,7 @@ class Project(object): | |||
| 758 | out = StatusColoring(self.config) | 852 | out = StatusColoring(self.config) |
| 759 | if not output_redir == None: | 853 | if not output_redir == None: |
| 760 | out.redirect(output_redir) | 854 | out.redirect(output_redir) |
| 761 | out.project('project %-40s', self.relpath + '/') | 855 | out.project('project %-40s', self.relpath + '/ ') |
| 762 | 856 | ||
| 763 | branch = self.CurrentBranch | 857 | branch = self.CurrentBranch |
| 764 | if branch is None: | 858 | if branch is None: |
| @@ -829,8 +923,8 @@ class Project(object): | |||
| 829 | cmd.append('--') | 923 | cmd.append('--') |
| 830 | p = GitCommand(self, | 924 | p = GitCommand(self, |
| 831 | cmd, | 925 | cmd, |
| 832 | capture_stdout = True, | 926 | capture_stdout=True, |
| 833 | capture_stderr = True) | 927 | capture_stderr=True) |
| 834 | has_diff = False | 928 | has_diff = False |
| 835 | for line in p.process.stdout: | 929 | for line in p.process.stdout: |
| 836 | if not has_diff: | 930 | if not has_diff: |
| @@ -915,7 +1009,7 @@ class Project(object): | |||
| 915 | return None | 1009 | return None |
| 916 | 1010 | ||
| 917 | def UploadForReview(self, branch=None, | 1011 | def UploadForReview(self, branch=None, |
| 918 | people=([],[]), | 1012 | people=([], []), |
| 919 | auto_topic=False, | 1013 | auto_topic=False, |
| 920 | draft=False, | 1014 | draft=False, |
| 921 | dest_branch=None): | 1015 | dest_branch=None): |
| @@ -976,13 +1070,13 @@ class Project(object): | |||
| 976 | ref_spec = ref_spec + '%' + ','.join(rp) | 1070 | ref_spec = ref_spec + '%' + ','.join(rp) |
| 977 | cmd.append(ref_spec) | 1071 | cmd.append(ref_spec) |
| 978 | 1072 | ||
| 979 | if GitCommand(self, cmd, bare = True).Wait() != 0: | 1073 | if GitCommand(self, cmd, bare=True).Wait() != 0: |
| 980 | raise UploadError('Upload failed') | 1074 | raise UploadError('Upload failed') |
| 981 | 1075 | ||
| 982 | msg = "posted to %s for %s" % (branch.remote.review, dest_branch) | 1076 | msg = "posted to %s for %s" % (branch.remote.review, dest_branch) |
| 983 | self.bare_git.UpdateRef(R_PUB + branch.name, | 1077 | self.bare_git.UpdateRef(R_PUB + branch.name, |
| 984 | R_HEADS + branch.name, | 1078 | R_HEADS + branch.name, |
| 985 | message = msg) | 1079 | message=msg) |
| 986 | 1080 | ||
| 987 | 1081 | ||
| 988 | ## Sync ## | 1082 | ## Sync ## |
| @@ -1007,9 +1101,11 @@ class Project(object): | |||
| 1007 | quiet=False, | 1101 | quiet=False, |
| 1008 | is_new=None, | 1102 | is_new=None, |
| 1009 | current_branch_only=False, | 1103 | current_branch_only=False, |
| 1104 | force_sync=False, | ||
| 1010 | clone_bundle=True, | 1105 | clone_bundle=True, |
| 1011 | no_tags=False, | 1106 | no_tags=False, |
| 1012 | archive=False): | 1107 | archive=False, |
| 1108 | optimized_fetch=False): | ||
| 1013 | """Perform only the network IO portion of the sync process. | 1109 | """Perform only the network IO portion of the sync process. |
| 1014 | Local working directory/branch state is not affected. | 1110 | Local working directory/branch state is not affected. |
| 1015 | """ | 1111 | """ |
| @@ -1040,13 +1136,12 @@ class Project(object): | |||
| 1040 | except OSError as e: | 1136 | except OSError as e: |
| 1041 | print("warn: Cannot remove archive %s: " | 1137 | print("warn: Cannot remove archive %s: " |
| 1042 | "%s" % (tarpath, str(e)), file=sys.stderr) | 1138 | "%s" % (tarpath, str(e)), file=sys.stderr) |
| 1043 | self._CopyFiles() | 1139 | self._CopyAndLinkFiles() |
| 1044 | return True | 1140 | return True |
| 1045 | |||
| 1046 | if is_new is None: | 1141 | if is_new is None: |
| 1047 | is_new = not self.Exists | 1142 | is_new = not self.Exists |
| 1048 | if is_new: | 1143 | if is_new: |
| 1049 | self._InitGitDir() | 1144 | self._InitGitDir(force_sync=force_sync) |
| 1050 | else: | 1145 | else: |
| 1051 | self._UpdateHooks() | 1146 | self._UpdateHooks() |
| 1052 | self._InitRemote() | 1147 | self._InitRemote() |
| @@ -1078,16 +1173,12 @@ class Project(object): | |||
| 1078 | elif self.manifest.default.sync_c: | 1173 | elif self.manifest.default.sync_c: |
| 1079 | current_branch_only = True | 1174 | current_branch_only = True |
| 1080 | 1175 | ||
| 1081 | is_sha1 = False | 1176 | need_to_fetch = not (optimized_fetch and \ |
| 1082 | if ID_RE.match(self.revisionExpr) is not None: | 1177 | (ID_RE.match(self.revisionExpr) and self._CheckForSha1())) |
| 1083 | is_sha1 = True | 1178 | if (need_to_fetch |
| 1084 | if is_sha1 and self._CheckForSha1(): | 1179 | and not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir, |
| 1085 | # Don't need to fetch since we already have this revision | 1180 | current_branch_only=current_branch_only, |
| 1086 | return True | 1181 | no_tags=no_tags)): |
| 1087 | |||
| 1088 | if not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir, | ||
| 1089 | current_branch_only=current_branch_only, | ||
| 1090 | no_tags=no_tags): | ||
| 1091 | return False | 1182 | return False |
| 1092 | 1183 | ||
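The need_to_fetch test above only skips the network round trip when the pinned sha1 is already present locally; a sketch of that kind of presence check using plain git (repo itself goes through its GitCommand wrapper instead):

import os
import subprocess

def sha1_present(gitdir, sha1):
  # The commit is available locally if rev-parse can resolve "<sha1>^0"
  # against the repository's object store.
  with open(os.devnull, 'w') as devnull:
    ret = subprocess.call(
        ['git', '--git-dir', gitdir, 'rev-parse', '--verify', '--quiet',
         sha1 + '^0'],
        stdout=devnull, stderr=devnull)
  return ret == 0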
| 1093 | if self.worktree: | 1184 | if self.worktree: |
| @@ -1103,9 +1194,11 @@ class Project(object): | |||
| 1103 | def PostRepoUpgrade(self): | 1194 | def PostRepoUpgrade(self): |
| 1104 | self._InitHooks() | 1195 | self._InitHooks() |
| 1105 | 1196 | ||
| 1106 | def _CopyFiles(self): | 1197 | def _CopyAndLinkFiles(self): |
| 1107 | for copyfile in self.copyfiles: | 1198 | for copyfile in self.copyfiles: |
| 1108 | copyfile._Copy() | 1199 | copyfile._Copy() |
| 1200 | for linkfile in self.linkfiles: | ||
| 1201 | linkfile._Link() | ||
| 1109 | 1202 | ||
| 1110 | def GetCommitRevisionId(self): | 1203 | def GetCommitRevisionId(self): |
| 1111 | """Get revisionId of a commit. | 1204 | """Get revisionId of a commit. |
| @@ -1141,18 +1234,18 @@ class Project(object): | |||
| 1141 | 'revision %s in %s not found' % (self.revisionExpr, | 1234 | 'revision %s in %s not found' % (self.revisionExpr, |
| 1142 | self.name)) | 1235 | self.name)) |
| 1143 | 1236 | ||
| 1144 | def Sync_LocalHalf(self, syncbuf): | 1237 | def Sync_LocalHalf(self, syncbuf, force_sync=False): |
| 1145 | """Perform only the local IO portion of the sync process. | 1238 | """Perform only the local IO portion of the sync process. |
| 1146 | Network access is not required. | 1239 | Network access is not required. |
| 1147 | """ | 1240 | """ |
| 1148 | self._InitWorkTree() | 1241 | self._InitWorkTree(force_sync=force_sync) |
| 1149 | all_refs = self.bare_ref.all | 1242 | all_refs = self.bare_ref.all |
| 1150 | self.CleanPublishedCache(all_refs) | 1243 | self.CleanPublishedCache(all_refs) |
| 1151 | revid = self.GetRevisionId(all_refs) | 1244 | revid = self.GetRevisionId(all_refs) |
| 1152 | 1245 | ||
| 1153 | def _doff(): | 1246 | def _doff(): |
| 1154 | self._FastForward(revid) | 1247 | self._FastForward(revid) |
| 1155 | self._CopyFiles() | 1248 | self._CopyAndLinkFiles() |
| 1156 | 1249 | ||
| 1157 | head = self.work_git.GetHead() | 1250 | head = self.work_git.GetHead() |
| 1158 | if head.startswith(R_HEADS): | 1251 | if head.startswith(R_HEADS): |
| @@ -1188,7 +1281,7 @@ class Project(object): | |||
| 1188 | except GitError as e: | 1281 | except GitError as e: |
| 1189 | syncbuf.fail(self, e) | 1282 | syncbuf.fail(self, e) |
| 1190 | return | 1283 | return |
| 1191 | self._CopyFiles() | 1284 | self._CopyAndLinkFiles() |
| 1192 | return | 1285 | return |
| 1193 | 1286 | ||
| 1194 | if head == revid: | 1287 | if head == revid: |
| @@ -1210,7 +1303,7 @@ class Project(object): | |||
| 1210 | except GitError as e: | 1303 | except GitError as e: |
| 1211 | syncbuf.fail(self, e) | 1304 | syncbuf.fail(self, e) |
| 1212 | return | 1305 | return |
| 1213 | self._CopyFiles() | 1306 | self._CopyAndLinkFiles() |
| 1214 | return | 1307 | return |
| 1215 | 1308 | ||
| 1216 | upstream_gain = self._revlist(not_rev(HEAD), revid) | 1309 | upstream_gain = self._revlist(not_rev(HEAD), revid) |
| @@ -1278,17 +1371,19 @@ class Project(object): | |||
| 1278 | if not ID_RE.match(self.revisionExpr): | 1371 | if not ID_RE.match(self.revisionExpr): |
| 1279 | # in case of manifest sync the revisionExpr might be a SHA1 | 1372 | # in case of manifest sync the revisionExpr might be a SHA1 |
| 1280 | branch.merge = self.revisionExpr | 1373 | branch.merge = self.revisionExpr |
| 1374 | if not branch.merge.startswith('refs/'): | ||
| 1375 | branch.merge = R_HEADS + branch.merge | ||
| 1281 | branch.Save() | 1376 | branch.Save() |
| 1282 | 1377 | ||
| 1283 | if cnt_mine > 0 and self.rebase: | 1378 | if cnt_mine > 0 and self.rebase: |
| 1284 | def _dorebase(): | 1379 | def _dorebase(): |
| 1285 | self._Rebase(upstream = '%s^1' % last_mine, onto = revid) | 1380 | self._Rebase(upstream='%s^1' % last_mine, onto=revid) |
| 1286 | self._CopyFiles() | 1381 | self._CopyAndLinkFiles() |
| 1287 | syncbuf.later2(self, _dorebase) | 1382 | syncbuf.later2(self, _dorebase) |
| 1288 | elif local_changes: | 1383 | elif local_changes: |
| 1289 | try: | 1384 | try: |
| 1290 | self._ResetHard(revid) | 1385 | self._ResetHard(revid) |
| 1291 | self._CopyFiles() | 1386 | self._CopyAndLinkFiles() |
| 1292 | except GitError as e: | 1387 | except GitError as e: |
| 1293 | syncbuf.fail(self, e) | 1388 | syncbuf.fail(self, e) |
| 1294 | return | 1389 | return |
| @@ -1301,6 +1396,13 @@ class Project(object): | |||
| 1301 | abssrc = os.path.join(self.worktree, src) | 1396 | abssrc = os.path.join(self.worktree, src) |
| 1302 | self.copyfiles.append(_CopyFile(src, dest, abssrc, absdest)) | 1397 | self.copyfiles.append(_CopyFile(src, dest, abssrc, absdest)) |
| 1303 | 1398 | ||
| 1399 | def AddLinkFile(self, src, dest, absdest): | ||
| 1400 | # dest should already be an absolute path, but src is project relative | ||
| 1401 | # make src relative path to dest | ||
| 1402 | absdestdir = os.path.dirname(absdest) | ||
| 1403 | relsrc = os.path.relpath(os.path.join(self.worktree, src), absdestdir) | ||
| 1404 | self.linkfiles.append(_LinkFile(self.worktree, src, dest, relsrc, absdest)) | ||
| 1405 | |||
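AddLinkFile stores a destination-relative source path so the resulting symlink keeps working if the checkout is moved as a whole; with made-up paths (a worktree of /w/proj, src scripts/env.sh and an absdest of /w/out/env.sh), the computed relsrc would be:

import os

worktree = '/w/proj'        # hypothetical project worktree
src = 'scripts/env.sh'      # project-relative source from the manifest
absdest = '/w/out/env.sh'   # absolute destination of the link

absdestdir = os.path.dirname(absdest)                             # '/w/out'
relsrc = os.path.relpath(os.path.join(worktree, src), absdestdir)
print(relsrc)               # '../proj/scripts/env.sh'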
| 1304 | def AddAnnotation(self, name, value, keep): | 1406 | def AddAnnotation(self, name, value, keep): |
| 1305 | self.annotations.append(_Annotation(name, value, keep)) | 1407 | self.annotations.append(_Annotation(name, value, keep)) |
| 1306 | 1408 | ||
| @@ -1331,15 +1433,17 @@ class Project(object): | |||
| 1331 | return True | 1433 | return True |
| 1332 | 1434 | ||
| 1333 | all_refs = self.bare_ref.all | 1435 | all_refs = self.bare_ref.all |
| 1334 | if (R_HEADS + name) in all_refs: | 1436 | if R_HEADS + name in all_refs: |
| 1335 | return GitCommand(self, | 1437 | return GitCommand(self, |
| 1336 | ['checkout', name, '--'], | 1438 | ['checkout', name, '--'], |
| 1337 | capture_stdout = True, | 1439 | capture_stdout=True, |
| 1338 | capture_stderr = True).Wait() == 0 | 1440 | capture_stderr=True).Wait() == 0 |
| 1339 | 1441 | ||
| 1340 | branch = self.GetBranch(name) | 1442 | branch = self.GetBranch(name) |
| 1341 | branch.remote = self.GetRemote(self.remote.name) | 1443 | branch.remote = self.GetRemote(self.remote.name) |
| 1342 | branch.merge = self.revisionExpr | 1444 | branch.merge = self.revisionExpr |
| 1445 | if not branch.merge.startswith('refs/') and not ID_RE.match(self.revisionExpr): | ||
| 1446 | branch.merge = R_HEADS + self.revisionExpr | ||
| 1343 | revid = self.GetRevisionId(all_refs) | 1447 | revid = self.GetRevisionId(all_refs) |
| 1344 | 1448 | ||
| 1345 | if head.startswith(R_HEADS): | 1449 | if head.startswith(R_HEADS): |
| @@ -1362,8 +1466,8 @@ class Project(object): | |||
| 1362 | 1466 | ||
| 1363 | if GitCommand(self, | 1467 | if GitCommand(self, |
| 1364 | ['checkout', '-b', branch.name, revid], | 1468 | ['checkout', '-b', branch.name, revid], |
| 1365 | capture_stdout = True, | 1469 | capture_stdout=True, |
| 1366 | capture_stderr = True).Wait() == 0: | 1470 | capture_stderr=True).Wait() == 0: |
| 1367 | branch.Save() | 1471 | branch.Save() |
| 1368 | return True | 1472 | return True |
| 1369 | return False | 1473 | return False |
| @@ -1409,8 +1513,8 @@ class Project(object): | |||
| 1409 | 1513 | ||
| 1410 | return GitCommand(self, | 1514 | return GitCommand(self, |
| 1411 | ['checkout', name, '--'], | 1515 | ['checkout', name, '--'], |
| 1412 | capture_stdout = True, | 1516 | capture_stdout=True, |
| 1413 | capture_stderr = True).Wait() == 0 | 1517 | capture_stderr=True).Wait() == 0 |
| 1414 | 1518 | ||
| 1415 | def AbandonBranch(self, name): | 1519 | def AbandonBranch(self, name): |
| 1416 | """Destroy a local topic branch. | 1520 | """Destroy a local topic branch. |
| @@ -1444,8 +1548,8 @@ class Project(object): | |||
| 1444 | 1548 | ||
| 1445 | return GitCommand(self, | 1549 | return GitCommand(self, |
| 1446 | ['branch', '-D', name], | 1550 | ['branch', '-D', name], |
| 1447 | capture_stdout = True, | 1551 | capture_stdout=True, |
| 1448 | capture_stderr = True).Wait() == 0 | 1552 | capture_stderr=True).Wait() == 0 |
| 1449 | 1553 | ||
| 1450 | def PruneHeads(self): | 1554 | def PruneHeads(self): |
| 1451 | """Prune any topic branches already merged into upstream. | 1555 | """Prune any topic branches already merged into upstream. |
| @@ -1462,7 +1566,7 @@ class Project(object): | |||
| 1462 | rev = self.GetRevisionId(left) | 1566 | rev = self.GetRevisionId(left) |
| 1463 | if cb is not None \ | 1567 | if cb is not None \ |
| 1464 | and not self._revlist(HEAD + '...' + rev) \ | 1568 | and not self._revlist(HEAD + '...' + rev) \ |
| 1465 | and not self.IsDirty(consider_untracked = False): | 1569 | and not self.IsDirty(consider_untracked=False): |
| 1466 | self.work_git.DetachHead(HEAD) | 1570 | self.work_git.DetachHead(HEAD) |
| 1467 | kill.append(cb) | 1571 | kill.append(cb) |
| 1468 | 1572 | ||
| @@ -1495,7 +1599,7 @@ class Project(object): | |||
| 1495 | 1599 | ||
| 1496 | kept = [] | 1600 | kept = [] |
| 1497 | for branch in kill: | 1601 | for branch in kill: |
| 1498 | if (R_HEADS + branch) in left: | 1602 | if R_HEADS + branch in left: |
| 1499 | branch = self.GetBranch(branch) | 1603 | branch = self.GetBranch(branch) |
| 1500 | base = branch.LocalMerge | 1604 | base = branch.LocalMerge |
| 1501 | if not base: | 1605 | if not base: |
| @@ -1545,8 +1649,8 @@ class Project(object): | |||
| 1545 | def parse_gitmodules(gitdir, rev): | 1649 | def parse_gitmodules(gitdir, rev): |
| 1546 | cmd = ['cat-file', 'blob', '%s:.gitmodules' % rev] | 1650 | cmd = ['cat-file', 'blob', '%s:.gitmodules' % rev] |
| 1547 | try: | 1651 | try: |
| 1548 | p = GitCommand(None, cmd, capture_stdout = True, capture_stderr = True, | 1652 | p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True, |
| 1549 | bare = True, gitdir = gitdir) | 1653 | bare=True, gitdir=gitdir) |
| 1550 | except GitError: | 1654 | except GitError: |
| 1551 | return [], [] | 1655 | return [], [] |
| 1552 | if p.Wait() != 0: | 1656 | if p.Wait() != 0: |
| @@ -1558,8 +1662,8 @@ class Project(object): | |||
| 1558 | os.write(fd, p.stdout) | 1662 | os.write(fd, p.stdout) |
| 1559 | os.close(fd) | 1663 | os.close(fd) |
| 1560 | cmd = ['config', '--file', temp_gitmodules_path, '--list'] | 1664 | cmd = ['config', '--file', temp_gitmodules_path, '--list'] |
| 1561 | p = GitCommand(None, cmd, capture_stdout = True, capture_stderr = True, | 1665 | p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True, |
| 1562 | bare = True, gitdir = gitdir) | 1666 | bare=True, gitdir=gitdir) |
| 1563 | if p.Wait() != 0: | 1667 | if p.Wait() != 0: |
| 1564 | return [], [] | 1668 | return [], [] |
| 1565 | gitmodules_lines = p.stdout.split('\n') | 1669 | gitmodules_lines = p.stdout.split('\n') |
| @@ -1592,8 +1696,8 @@ class Project(object): | |||
| 1592 | cmd = ['ls-tree', rev, '--'] | 1696 | cmd = ['ls-tree', rev, '--'] |
| 1593 | cmd.extend(paths) | 1697 | cmd.extend(paths) |
| 1594 | try: | 1698 | try: |
| 1595 | p = GitCommand(None, cmd, capture_stdout = True, capture_stderr = True, | 1699 | p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True, |
| 1596 | bare = True, gitdir = gitdir) | 1700 | bare=True, gitdir=gitdir) |
| 1597 | except GitError: | 1701 | except GitError: |
| 1598 | return [] | 1702 | return [] |
| 1599 | if p.Wait() != 0: | 1703 | if p.Wait() != 0: |
| @@ -1628,23 +1732,24 @@ class Project(object): | |||
| 1628 | continue | 1732 | continue |
| 1629 | 1733 | ||
| 1630 | remote = RemoteSpec(self.remote.name, | 1734 | remote = RemoteSpec(self.remote.name, |
| 1631 | url = url, | 1735 | url=url, |
| 1632 | review = self.remote.review) | 1736 | review=self.remote.review, |
| 1633 | subproject = Project(manifest = self.manifest, | 1737 | revision=self.remote.revision) |
| 1634 | name = name, | 1738 | subproject = Project(manifest=self.manifest, |
| 1635 | remote = remote, | 1739 | name=name, |
| 1636 | gitdir = gitdir, | 1740 | remote=remote, |
| 1637 | objdir = objdir, | 1741 | gitdir=gitdir, |
| 1638 | worktree = worktree, | 1742 | objdir=objdir, |
| 1639 | relpath = relpath, | 1743 | worktree=worktree, |
| 1640 | revisionExpr = self.revisionExpr, | 1744 | relpath=relpath, |
| 1641 | revisionId = rev, | 1745 | revisionExpr=self.revisionExpr, |
| 1642 | rebase = self.rebase, | 1746 | revisionId=rev, |
| 1643 | groups = self.groups, | 1747 | rebase=self.rebase, |
| 1644 | sync_c = self.sync_c, | 1748 | groups=self.groups, |
| 1645 | sync_s = self.sync_s, | 1749 | sync_c=self.sync_c, |
| 1646 | parent = self, | 1750 | sync_s=self.sync_s, |
| 1647 | is_derived = True) | 1751 | parent=self, |
| 1752 | is_derived=True) | ||
| 1648 | result.append(subproject) | 1753 | result.append(subproject) |
| 1649 | result.extend(subproject.GetDerivedSubprojects()) | 1754 | result.extend(subproject.GetDerivedSubprojects()) |
| 1650 | return result | 1755 | return result |
| @@ -1674,6 +1779,7 @@ class Project(object): | |||
| 1674 | if command.Wait() != 0: | 1779 | if command.Wait() != 0: |
| 1675 | raise GitError('git archive %s: %s' % (self.name, command.stderr)) | 1780 | raise GitError('git archive %s: %s' % (self.name, command.stderr)) |
| 1676 | 1781 | ||
| 1782 | |||
| 1677 | def _RemoteFetch(self, name=None, | 1783 | def _RemoteFetch(self, name=None, |
| 1678 | current_branch_only=False, | 1784 | current_branch_only=False, |
| 1679 | initial=False, | 1785 | initial=False, |
| @@ -1683,26 +1789,43 @@ class Project(object): | |||
| 1683 | 1789 | ||
| 1684 | is_sha1 = False | 1790 | is_sha1 = False |
| 1685 | tag_name = None | 1791 | tag_name = None |
| 1792 | depth = None | ||
| 1793 | |||
| 1794 | # The depth should not be used when fetching to a mirror because | ||
| 1795 | # it will result in a shallow repository that cannot be cloned or | ||
| 1796 | # fetched from. | ||
| 1797 | if not self.manifest.IsMirror: | ||
| 1798 | if self.clone_depth: | ||
| 1799 | depth = self.clone_depth | ||
| 1800 | else: | ||
| 1801 | depth = self.manifest.manifestProject.config.GetString('repo.depth') | ||
| 1802 | # The repo project should never be synced with partial depth | ||
| 1803 | if self.relpath == '.repo/repo': | ||
| 1804 | depth = None | ||
| 1686 | 1805 | ||
| 1687 | if self.clone_depth: | ||
| 1688 | depth = self.clone_depth | ||
| 1689 | else: | ||
| 1690 | depth = self.manifest.manifestProject.config.GetString('repo.depth') | ||
| 1691 | if depth: | 1806 | if depth: |
| 1692 | current_branch_only = True | 1807 | current_branch_only = True |
| 1693 | 1808 | ||
| 1809 | if ID_RE.match(self.revisionExpr) is not None: | ||
| 1810 | is_sha1 = True | ||
| 1811 | |||
| 1694 | if current_branch_only: | 1812 | if current_branch_only: |
| 1695 | if ID_RE.match(self.revisionExpr) is not None: | 1813 | if self.revisionExpr.startswith(R_TAGS): |
| 1696 | is_sha1 = True | ||
| 1697 | elif self.revisionExpr.startswith(R_TAGS): | ||
| 1698 | # this is a tag and its sha1 value should never change | 1814 | # this is a tag and its sha1 value should never change |
| 1699 | tag_name = self.revisionExpr[len(R_TAGS):] | 1815 | tag_name = self.revisionExpr[len(R_TAGS):] |
| 1700 | 1816 | ||
| 1701 | if is_sha1 or tag_name is not None: | 1817 | if is_sha1 or tag_name is not None: |
| 1702 | if self._CheckForSha1(): | 1818 | if self._CheckForSha1(): |
| 1703 | return True | 1819 | return True |
| 1704 | if is_sha1 and (not self.upstream or ID_RE.match(self.upstream)): | 1820 | if is_sha1 and not depth: |
| 1705 | current_branch_only = False | 1821 | # When syncing a specific commit and --depth is not set: |
| 1822 | # * if upstream is explicitly specified and is not a sha1, fetch only | ||
| 1823 | # upstream, as users expect only upstream to be fetched. | ||
| 1824 | # Note: the commit might not be in upstream, in which case the sync | ||
| 1825 | # will fail. | ||
| 1826 | # * otherwise, fetch all branches to make sure we end up with the | ||
| 1827 | # specific commit. | ||
| 1828 | current_branch_only = self.upstream and not ID_RE.match(self.upstream) | ||
| 1706 | 1829 | ||
| 1707 | if not name: | 1830 | if not name: |
| 1708 | name = self.remote.name | 1831 | name = self.remote.name |
| @@ -1752,9 +1875,7 @@ class Project(object): | |||
| 1752 | 1875 | ||
| 1753 | cmd = ['fetch'] | 1876 | cmd = ['fetch'] |
| 1754 | 1877 | ||
| 1755 | # The --depth option only affects the initial fetch; after that we'll do | 1878 | if depth: |
| 1756 | # full fetches of changes. | ||
| 1757 | if depth and initial: | ||
| 1758 | cmd.append('--depth=%s' % depth) | 1879 | cmd.append('--depth=%s' % depth) |
| 1759 | 1880 | ||
| 1760 | if quiet: | 1881 | if quiet: |
| @@ -1763,46 +1884,74 @@ class Project(object): | |||
| 1763 | cmd.append('--update-head-ok') | 1884 | cmd.append('--update-head-ok') |
| 1764 | cmd.append(name) | 1885 | cmd.append(name) |
| 1765 | 1886 | ||
| 1887 | # If using depth then we should not get all the tags since they may | ||
| 1888 | # be outside of the depth. | ||
| 1889 | if no_tags or depth: | ||
| 1890 | cmd.append('--no-tags') | ||
| 1891 | else: | ||
| 1892 | cmd.append('--tags') | ||
| 1893 | |||
| 1894 | spec = [] | ||
| 1766 | if not current_branch_only: | 1895 | if not current_branch_only: |
| 1767 | # Fetch whole repo | 1896 | # Fetch whole repo |
| 1768 | # If using depth then we should not get all the tags since they may | 1897 | spec.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*'))) |
| 1769 | # be outside of the depth. | ||
| 1770 | if no_tags or depth: | ||
| 1771 | cmd.append('--no-tags') | ||
| 1772 | else: | ||
| 1773 | cmd.append('--tags') | ||
| 1774 | |||
| 1775 | cmd.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*'))) | ||
| 1776 | elif tag_name is not None: | 1898 | elif tag_name is not None: |
| 1777 | cmd.append('tag') | 1899 | spec.append('tag') |
| 1778 | cmd.append(tag_name) | 1900 | spec.append(tag_name) |
| 1779 | else: | 1901 | |
| 1902 | if not self.manifest.IsMirror: | ||
| 1780 | branch = self.revisionExpr | 1903 | branch = self.revisionExpr |
| 1781 | if is_sha1: | 1904 | if is_sha1 and depth and git_require((1, 8, 3)): |
| 1782 | branch = self.upstream | 1905 | # Shallow checkout of a specific commit: fetch from that commit itself, not |
| 1783 | if branch.startswith(R_HEADS): | 1906 | # just the heads, as the commit might be deeper in the history. |
| 1784 | branch = branch[len(R_HEADS):] | 1907 | spec.append(branch) |
| 1785 | cmd.append(str((u'+refs/heads/%s:' % branch) + remote.ToLocal('refs/heads/%s' % branch))) | 1908 | else: |
| 1909 | if is_sha1: | ||
| 1910 | branch = self.upstream | ||
| 1911 | if branch is not None and branch.strip(): | ||
| 1912 | if not branch.startswith('refs/'): | ||
| 1913 | branch = R_HEADS + branch | ||
| 1914 | spec.append(str((u'+%s:' % branch) + remote.ToLocal(branch))) | ||
| 1915 | cmd.extend(spec) | ||
| 1916 | |||
| 1917 | shallowfetch = self.config.GetString('repo.shallowfetch') | ||
| 1918 | if shallowfetch and shallowfetch != ' '.join(spec): | ||
| 1919 | GitCommand(self, ['fetch', '--depth=2147483647', name] | ||
| 1920 | + shallowfetch.split(), | ||
| 1921 | bare=True, ssh_proxy=ssh_proxy).Wait() | ||
| 1922 | if depth: | ||
| 1923 | self.config.SetString('repo.shallowfetch', ' '.join(spec)) | ||
| 1924 | else: | ||
| 1925 | self.config.SetString('repo.shallowfetch', None) | ||
| 1786 | 1926 | ||
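The repo.shallowfetch setting above records which refspecs were fetched with a depth limit so that a later full sync can deepen exactly those refs; a simplified sketch of that record-and-replay idea (config_get/config_set stand in for GitConfig.GetString/SetString):

FULL_DEPTH = '2147483647'  # effectively "unshallow" for git fetch

def plan_unshallow(config_get, config_set, name, spec, depth):
  # Return any extra fetch command needed to deepen refs that were
  # previously fetched shallowly, then record the new shallow state.
  extra = []
  previous = config_get('repo.shallowfetch')
  if previous and previous != ' '.join(spec):
    extra.append(['fetch', '--depth=' + FULL_DEPTH, name] + previous.split())
  config_set('repo.shallowfetch', ' '.join(spec) if depth else None)
  return extra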
| 1787 | ok = False | 1927 | ok = False |
| 1788 | for _i in range(2): | 1928 | for _i in range(2): |
| 1789 | ret = GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy).Wait() | 1929 | gitcmd = GitCommand(self, cmd, bare=True, ssh_proxy=ssh_proxy) |
| 1930 | ret = gitcmd.Wait() | ||
| 1790 | if ret == 0: | 1931 | if ret == 0: |
| 1791 | ok = True | 1932 | ok = True |
| 1792 | break | 1933 | break |
| 1934 | # If needed, run 'git remote prune' the first time through the loop | ||
| 1935 | elif (not _i and | ||
| 1936 | "error:" in gitcmd.stderr and | ||
| 1937 | "git remote prune" in gitcmd.stderr): | ||
| 1938 | prunecmd = GitCommand(self, ['remote', 'prune', name], bare=True, | ||
| 1939 | ssh_proxy=ssh_proxy) | ||
| 1940 | ret = prunecmd.Wait() | ||
| 1941 | if ret: | ||
| 1942 | break | ||
| 1943 | continue | ||
| 1793 | elif current_branch_only and is_sha1 and ret == 128: | 1944 | elif current_branch_only and is_sha1 and ret == 128: |
| 1794 | # Exit code 128 means "couldn't find the ref you asked for"; if we're in sha1 | 1945 | # Exit code 128 means "couldn't find the ref you asked for"; if we're in sha1 |
| 1795 | # mode, we just tried sync'ing from the upstream field; it doesn't exist, thus | 1946 | # mode, we just tried sync'ing from the upstream field; it doesn't exist, thus |
| 1796 | # abort the optimization attempt and do a full sync. | 1947 | # abort the optimization attempt and do a full sync. |
| 1797 | break | 1948 | break |
| 1949 | elif ret < 0: | ||
| 1950 | # Git died with a signal, exit immediately | ||
| 1951 | break | ||
| 1798 | time.sleep(random.randint(30, 45)) | 1952 | time.sleep(random.randint(30, 45)) |
| 1799 | 1953 | ||
| 1800 | if initial: | 1954 | if initial: |
| 1801 | # Ensure that some refs exist. Otherwise, we probably aren't looking | ||
| 1802 | # at a real git repository and may have a bad url. | ||
| 1803 | if not self.bare_ref.all: | ||
| 1804 | ok = False | ||
| 1805 | |||
| 1806 | if alt_dir: | 1955 | if alt_dir: |
| 1807 | if old_packed != '': | 1956 | if old_packed != '': |
| 1808 | _lwrite(packed_refs, old_packed) | 1957 | _lwrite(packed_refs, old_packed) |
| @@ -1815,8 +1964,15 @@ class Project(object): | |||
| 1815 | # got what we wanted, else trigger a second run of all | 1964 | # got what we wanted, else trigger a second run of all |
| 1816 | # refs. | 1965 | # refs. |
| 1817 | if not self._CheckForSha1(): | 1966 | if not self._CheckForSha1(): |
| 1818 | return self._RemoteFetch(name=name, current_branch_only=False, | 1967 | if not depth: |
| 1819 | initial=False, quiet=quiet, alt_dir=alt_dir) | 1968 | # Avoid infinite recursion when depth is True (since depth implies |
| 1969 | # current_branch_only) | ||
| 1970 | return self._RemoteFetch(name=name, current_branch_only=False, | ||
| 1971 | initial=False, quiet=quiet, alt_dir=alt_dir) | ||
| 1972 | if self.clone_depth: | ||
| 1973 | self.clone_depth = None | ||
| 1974 | return self._RemoteFetch(name=name, current_branch_only=current_branch_only, | ||
| 1975 | initial=False, quiet=quiet, alt_dir=alt_dir) | ||
| 1820 | 1976 | ||
| 1821 | return ok | 1977 | return ok |
| 1822 | 1978 | ||
| @@ -1877,34 +2033,34 @@ class Project(object): | |||
| 1877 | os.remove(tmpPath) | 2033 | os.remove(tmpPath) |
| 1878 | if 'http_proxy' in os.environ and 'darwin' == sys.platform: | 2034 | if 'http_proxy' in os.environ and 'darwin' == sys.platform: |
| 1879 | cmd += ['--proxy', os.environ['http_proxy']] | 2035 | cmd += ['--proxy', os.environ['http_proxy']] |
| 1880 | cookiefile = self._GetBundleCookieFile(srcUrl) | 2036 | with self._GetBundleCookieFile(srcUrl, quiet) as cookiefile: |
| 1881 | if cookiefile: | 2037 | if cookiefile: |
| 1882 | cmd += ['--cookie', cookiefile] | 2038 | cmd += ['--cookie', cookiefile, '--cookie-jar', cookiefile] |
| 1883 | if srcUrl.startswith('persistent-'): | 2039 | if srcUrl.startswith('persistent-'): |
| 1884 | srcUrl = srcUrl[len('persistent-'):] | 2040 | srcUrl = srcUrl[len('persistent-'):] |
| 1885 | cmd += [srcUrl] | 2041 | cmd += [srcUrl] |
| 1886 | 2042 | ||
| 1887 | if IsTrace(): | 2043 | if IsTrace(): |
| 1888 | Trace('%s', ' '.join(cmd)) | 2044 | Trace('%s', ' '.join(cmd)) |
| 1889 | try: | 2045 | try: |
| 1890 | proc = subprocess.Popen(cmd) | 2046 | proc = subprocess.Popen(cmd) |
| 1891 | except OSError: | 2047 | except OSError: |
| 1892 | return False | 2048 | return False |
| 1893 | 2049 | ||
| 1894 | curlret = proc.wait() | 2050 | curlret = proc.wait() |
| 1895 | 2051 | ||
| 1896 | if curlret == 22: | 2052 | if curlret == 22: |
| 1897 | # From curl man page: | 2053 | # From curl man page: |
| 1898 | # 22: HTTP page not retrieved. The requested url was not found or | 2054 | # 22: HTTP page not retrieved. The requested url was not found or |
| 1899 | # returned another error with the HTTP error code being 400 or above. | 2055 | # returned another error with the HTTP error code being 400 or above. |
| 1900 | # This return code only appears if -f, --fail is used. | 2056 | # This return code only appears if -f, --fail is used. |
| 1901 | if not quiet: | 2057 | if not quiet: |
| 1902 | print("Server does not provide clone.bundle; ignoring.", | 2058 | print("Server does not provide clone.bundle; ignoring.", |
| 1903 | file=sys.stderr) | 2059 | file=sys.stderr) |
| 1904 | return False | 2060 | return False |
| 1905 | 2061 | ||
| 1906 | if os.path.exists(tmpPath): | 2062 | if os.path.exists(tmpPath): |
| 1907 | if curlret == 0 and self._IsValidBundle(tmpPath): | 2063 | if curlret == 0 and self._IsValidBundle(tmpPath, quiet): |
| 1908 | os.rename(tmpPath, dstPath) | 2064 | os.rename(tmpPath, dstPath) |
| 1909 | return True | 2065 | return True |
| 1910 | else: | 2066 | else: |
| @@ -1913,45 +2069,51 @@ class Project(object): | |||
| 1913 | else: | 2069 | else: |
| 1914 | return False | 2070 | return False |
| 1915 | 2071 | ||
| 1916 | def _IsValidBundle(self, path): | 2072 | def _IsValidBundle(self, path, quiet): |
| 1917 | try: | 2073 | try: |
| 1918 | with open(path) as f: | 2074 | with open(path) as f: |
| 1919 | if f.read(16) == '# v2 git bundle\n': | 2075 | if f.read(16) == '# v2 git bundle\n': |
| 1920 | return True | 2076 | return True |
| 1921 | else: | 2077 | else: |
| 1922 | print("Invalid clone.bundle file; ignoring.", file=sys.stderr) | 2078 | if not quiet: |
| 2079 | print("Invalid clone.bundle file; ignoring.", file=sys.stderr) | ||
| 1923 | return False | 2080 | return False |
| 1924 | except OSError: | 2081 | except OSError: |
| 1925 | return False | 2082 | return False |
| 1926 | 2083 | ||
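_IsValidBundle relies on the fixed 16-byte signature git writes at the start of a version-2 bundle; a standalone check looks roughly like this (reading in binary mode for Python 3 safety):

def is_v2_bundle(path):
  # A v2 bundle always begins with this exact header line.
  try:
    with open(path, 'rb') as f:
      return f.read(16) == b'# v2 git bundle\n'
  except (IOError, OSError):
    return False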
| 1927 | def _GetBundleCookieFile(self, url): | 2084 | @contextlib.contextmanager |
| 2085 | def _GetBundleCookieFile(self, url, quiet): | ||
| 1928 | if url.startswith('persistent-'): | 2086 | if url.startswith('persistent-'): |
| 1929 | try: | 2087 | try: |
| 1930 | p = subprocess.Popen( | 2088 | p = subprocess.Popen( |
| 1931 | ['git-remote-persistent-https', '-print_config', url], | 2089 | ['git-remote-persistent-https', '-print_config', url], |
| 1932 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, | 2090 | stdin=subprocess.PIPE, stdout=subprocess.PIPE, |
| 1933 | stderr=subprocess.PIPE) | 2091 | stderr=subprocess.PIPE) |
| 1934 | p.stdin.close() # Tell subprocess it's ok to close. | 2092 | try: |
| 1935 | prefix = 'http.cookiefile=' | 2093 | prefix = 'http.cookiefile=' |
| 1936 | cookiefile = None | 2094 | cookiefile = None |
| 1937 | for line in p.stdout: | 2095 | for line in p.stdout: |
| 1938 | line = line.strip() | 2096 | line = line.strip() |
| 1939 | if line.startswith(prefix): | 2097 | if line.startswith(prefix): |
| 1940 | cookiefile = line[len(prefix):] | 2098 | cookiefile = line[len(prefix):] |
| 1941 | break | 2099 | break |
| 1942 | if p.wait(): | 2100 | # Leave subprocess open, as cookie file may be transient. |
| 1943 | err_msg = p.stderr.read() | 2101 | if cookiefile: |
| 1944 | if ' -print_config' in err_msg: | 2102 | yield cookiefile |
| 1945 | pass # Persistent proxy doesn't support -print_config. | 2103 | return |
| 1946 | else: | 2104 | finally: |
| 1947 | print(err_msg, file=sys.stderr) | 2105 | p.stdin.close() |
| 1948 | if cookiefile: | 2106 | if p.wait(): |
| 1949 | return cookiefile | 2107 | err_msg = p.stderr.read() |
| 2108 | if ' -print_config' in err_msg: | ||
| 2109 | pass # Persistent proxy doesn't support -print_config. | ||
| 2110 | elif not quiet: | ||
| 2111 | print(err_msg, file=sys.stderr) | ||
| 1950 | except OSError as e: | 2112 | except OSError as e: |
| 1951 | if e.errno == errno.ENOENT: | 2113 | if e.errno == errno.ENOENT: |
| 1952 | pass # No persistent proxy. | 2114 | pass # No persistent proxy. |
| 1953 | raise | 2115 | raise |
| 1954 | return GitConfig.ForUser().GetString('http.cookiefile') | 2116 | yield GitConfig.ForUser().GetString('http.cookiefile') |
| 1955 | 2117 | ||
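Making _GetBundleCookieFile a context manager keeps the git-remote-persistent-https helper alive while the cookie file is in use, since the file may be transient; a schematic caller (the curl flags and helper method are taken from the code above, the wrapper function itself is hypothetical):

import subprocess

def fetch_bundle(project, url, tmp_path, quiet=True):
  # The cookie file may disappear once the helper exits, so the download
  # has to happen inside the 'with' block.
  cmd = ['curl', '--fail', '--silent', '--location', '--output', tmp_path]
  with project._GetBundleCookieFile(url, quiet) as cookiefile:
    if cookiefile:
      cmd += ['--cookie', cookiefile, '--cookie-jar', cookiefile]
    cmd.append(url)
    return subprocess.call(cmd) == 0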
| 1956 | def _Checkout(self, rev, quiet=False): | 2118 | def _Checkout(self, rev, quiet=False): |
| 1957 | cmd = ['checkout'] | 2119 | cmd = ['checkout'] |
| @@ -1963,7 +2125,7 @@ class Project(object): | |||
| 1963 | if self._allrefs: | 2125 | if self._allrefs: |
| 1964 | raise GitError('%s checkout %s ' % (self.name, rev)) | 2126 | raise GitError('%s checkout %s ' % (self.name, rev)) |
| 1965 | 2127 | ||
| 1966 | def _CherryPick(self, rev, quiet=False): | 2128 | def _CherryPick(self, rev): |
| 1967 | cmd = ['cherry-pick'] | 2129 | cmd = ['cherry-pick'] |
| 1968 | cmd.append(rev) | 2130 | cmd.append(rev) |
| 1969 | cmd.append('--') | 2131 | cmd.append('--') |
| @@ -1971,7 +2133,7 @@ class Project(object): | |||
| 1971 | if self._allrefs: | 2133 | if self._allrefs: |
| 1972 | raise GitError('%s cherry-pick %s ' % (self.name, rev)) | 2134 | raise GitError('%s cherry-pick %s ' % (self.name, rev)) |
| 1973 | 2135 | ||
| 1974 | def _Revert(self, rev, quiet=False): | 2136 | def _Revert(self, rev): |
| 1975 | cmd = ['revert'] | 2137 | cmd = ['revert'] |
| 1976 | cmd.append('--no-edit') | 2138 | cmd.append('--no-edit') |
| 1977 | cmd.append(rev) | 2139 | cmd.append(rev) |
| @@ -1988,7 +2150,7 @@ class Project(object): | |||
| 1988 | if GitCommand(self, cmd).Wait() != 0: | 2150 | if GitCommand(self, cmd).Wait() != 0: |
| 1989 | raise GitError('%s reset --hard %s ' % (self.name, rev)) | 2151 | raise GitError('%s reset --hard %s ' % (self.name, rev)) |
| 1990 | 2152 | ||
| 1991 | def _Rebase(self, upstream, onto = None): | 2153 | def _Rebase(self, upstream, onto=None): |
| 1992 | cmd = ['rebase'] | 2154 | cmd = ['rebase'] |
| 1993 | if onto is not None: | 2155 | if onto is not None: |
| 1994 | cmd.extend(['--onto', onto]) | 2156 | cmd.extend(['--onto', onto]) |
| @@ -2003,64 +2165,80 @@ class Project(object): | |||
| 2003 | if GitCommand(self, cmd).Wait() != 0: | 2165 | if GitCommand(self, cmd).Wait() != 0: |
| 2004 | raise GitError('%s merge %s ' % (self.name, head)) | 2166 | raise GitError('%s merge %s ' % (self.name, head)) |
| 2005 | 2167 | ||
| 2006 | def _InitGitDir(self, mirror_git=None): | 2168 | def _InitGitDir(self, mirror_git=None, force_sync=False): |
| 2007 | if not os.path.exists(self.gitdir): | 2169 | init_git_dir = not os.path.exists(self.gitdir) |
| 2008 | 2170 | init_obj_dir = not os.path.exists(self.objdir) | |
| 2171 | try: | ||
| 2009 | # Initialize the bare repository, which contains all of the objects. | 2172 | # Initialize the bare repository, which contains all of the objects. |
| 2010 | if not os.path.exists(self.objdir): | 2173 | if init_obj_dir: |
| 2011 | os.makedirs(self.objdir) | 2174 | os.makedirs(self.objdir) |
| 2012 | self.bare_objdir.init() | 2175 | self.bare_objdir.init() |
| 2013 | 2176 | ||
| 2014 | # If we have a separate directory to hold refs, initialize it as well. | 2177 | # If we have a separate directory to hold refs, initialize it as well. |
| 2015 | if self.objdir != self.gitdir: | 2178 | if self.objdir != self.gitdir: |
| 2016 | os.makedirs(self.gitdir) | 2179 | if init_git_dir: |
| 2017 | self._ReferenceGitDir(self.objdir, self.gitdir, share_refs=False, | 2180 | os.makedirs(self.gitdir) |
| 2018 | copy_all=True) | ||
| 2019 | 2181 | ||
| 2020 | mp = self.manifest.manifestProject | 2182 | if init_obj_dir or init_git_dir: |
| 2021 | ref_dir = mp.config.GetString('repo.reference') or '' | 2183 | self._ReferenceGitDir(self.objdir, self.gitdir, share_refs=False, |
| 2184 | copy_all=True) | ||
| 2185 | try: | ||
| 2186 | self._CheckDirReference(self.objdir, self.gitdir, share_refs=False) | ||
| 2187 | except GitError as e: | ||
| 2188 | print("Retrying clone after deleting %s" % force_sync, file=sys.stderr) | ||
| 2189 | if force_sync: | ||
| 2190 | try: | ||
| 2191 | shutil.rmtree(os.path.realpath(self.gitdir)) | ||
| 2192 | if self.worktree and os.path.exists( | ||
| 2193 | os.path.realpath(self.worktree)): | ||
| 2194 | shutil.rmtree(os.path.realpath(self.worktree)) | ||
| 2195 | return self._InitGitDir(mirror_git=mirror_git, force_sync=False) | ||
| 2196 | except: | ||
| 2197 | raise e | ||
| 2198 | raise e | ||
| 2199 | |||
| 2200 | if init_git_dir: | ||
| 2201 | mp = self.manifest.manifestProject | ||
| 2202 | ref_dir = mp.config.GetString('repo.reference') or '' | ||
| 2203 | |||
| 2204 | if ref_dir or mirror_git: | ||
| 2205 | if not mirror_git: | ||
| 2206 | mirror_git = os.path.join(ref_dir, self.name + '.git') | ||
| 2207 | repo_git = os.path.join(ref_dir, '.repo', 'projects', | ||
| 2208 | self.relpath + '.git') | ||
| 2209 | |||
| 2210 | if os.path.exists(mirror_git): | ||
| 2211 | ref_dir = mirror_git | ||
| 2212 | |||
| 2213 | elif os.path.exists(repo_git): | ||
| 2214 | ref_dir = repo_git | ||
| 2022 | 2215 | ||
| 2023 | if ref_dir or mirror_git: | 2216 | else: |
| 2024 | if not mirror_git: | 2217 | ref_dir = None |
| 2025 | mirror_git = os.path.join(ref_dir, self.name + '.git') | ||
| 2026 | repo_git = os.path.join(ref_dir, '.repo', 'projects', | ||
| 2027 | self.relpath + '.git') | ||
| 2028 | 2218 | ||
| 2029 | if os.path.exists(mirror_git): | 2219 | if ref_dir: |
| 2030 | ref_dir = mirror_git | 2220 | _lwrite(os.path.join(self.gitdir, 'objects/info/alternates'), |
| 2221 | os.path.join(ref_dir, 'objects') + '\n') | ||
| 2031 | 2222 | ||
| 2032 | elif os.path.exists(repo_git): | 2223 | self._UpdateHooks() |
| 2033 | ref_dir = repo_git | ||
| 2034 | 2224 | ||
| 2225 | m = self.manifest.manifestProject.config | ||
| 2226 | for key in ['user.name', 'user.email']: | ||
| 2227 | if m.Has(key, include_defaults=False): | ||
| 2228 | self.config.SetString(key, m.GetString(key)) | ||
| 2229 | if self.manifest.IsMirror: | ||
| 2230 | self.config.SetString('core.bare', 'true') | ||
| 2035 | else: | 2231 | else: |
| 2036 | ref_dir = None | 2232 | self.config.SetString('core.bare', None) |
| 2037 | 2233 | except Exception: | |
| 2038 | if ref_dir: | 2234 | if init_obj_dir and os.path.exists(self.objdir): |
| 2039 | _lwrite(os.path.join(self.gitdir, 'objects/info/alternates'), | 2235 | shutil.rmtree(self.objdir) |
| 2040 | os.path.join(ref_dir, 'objects') + '\n') | 2236 | if init_git_dir and os.path.exists(self.gitdir): |
| 2041 | 2237 | shutil.rmtree(self.gitdir) | |
| 2042 | self._UpdateHooks() | 2238 | raise |
| 2043 | |||
| 2044 | m = self.manifest.manifestProject.config | ||
| 2045 | for key in ['user.name', 'user.email']: | ||
| 2046 | if m.Has(key, include_defaults = False): | ||
| 2047 | self.config.SetString(key, m.GetString(key)) | ||
| 2048 | if self.manifest.IsMirror: | ||
| 2049 | self.config.SetString('core.bare', 'true') | ||
| 2050 | else: | ||
| 2051 | self.config.SetString('core.bare', None) | ||
| 2052 | 2239 | ||
| 2053 | def _UpdateHooks(self): | 2240 | def _UpdateHooks(self): |
| 2054 | if os.path.exists(self.gitdir): | 2241 | if os.path.exists(self.gitdir): |
| 2055 | # Always recreate hooks since they can have been changed | ||
| 2056 | # since the latest update. | ||
| 2057 | hooks = self._gitdir_path('hooks') | ||
| 2058 | try: | ||
| 2059 | to_rm = os.listdir(hooks) | ||
| 2060 | except OSError: | ||
| 2061 | to_rm = [] | ||
| 2062 | for old_hook in to_rm: | ||
| 2063 | os.remove(os.path.join(hooks, old_hook)) | ||
| 2064 | self._InitHooks() | 2242 | self._InitHooks() |
| 2065 | 2243 | ||
| 2066 | def _InitHooks(self): | 2244 | def _InitHooks(self): |
| @@ -2123,7 +2301,7 @@ class Project(object): | |||
| 2123 | if cur != '' or self.bare_ref.get(ref) != self.revisionId: | 2301 | if cur != '' or self.bare_ref.get(ref) != self.revisionId: |
| 2124 | msg = 'manifest set to %s' % self.revisionId | 2302 | msg = 'manifest set to %s' % self.revisionId |
| 2125 | dst = self.revisionId + '^0' | 2303 | dst = self.revisionId + '^0' |
| 2126 | self.bare_git.UpdateRef(ref, dst, message = msg, detach = True) | 2304 | self.bare_git.UpdateRef(ref, dst, message=msg, detach=True) |
| 2127 | else: | 2305 | else: |
| 2128 | remote = self.GetRemote(self.remote.name) | 2306 | remote = self.GetRemote(self.remote.name) |
| 2129 | dst = remote.ToLocal(self.revisionExpr) | 2307 | dst = remote.ToLocal(self.revisionExpr) |
| @@ -2131,6 +2309,22 @@ class Project(object): | |||
| 2131 | msg = 'manifest set to %s' % self.revisionExpr | 2309 | msg = 'manifest set to %s' % self.revisionExpr |
| 2132 | self.bare_git.symbolic_ref('-m', msg, ref, dst) | 2310 | self.bare_git.symbolic_ref('-m', msg, ref, dst) |
| 2133 | 2311 | ||
| 2312 | def _CheckDirReference(self, srcdir, destdir, share_refs): | ||
| 2313 | symlink_files = self.shareable_files | ||
| 2314 | symlink_dirs = self.shareable_dirs | ||
| 2315 | if share_refs: | ||
| 2316 | symlink_files += self.working_tree_files | ||
| 2317 | symlink_dirs += self.working_tree_dirs | ||
| 2318 | to_symlink = symlink_files + symlink_dirs | ||
| 2319 | for name in set(to_symlink): | ||
| 2320 | dst = os.path.realpath(os.path.join(destdir, name)) | ||
| 2321 | if os.path.lexists(dst): | ||
| 2322 | src = os.path.realpath(os.path.join(srcdir, name)) | ||
| 2323 | # Fail if the links are pointing to the wrong place | ||
| 2324 | if src != dst: | ||
| 2325 | raise GitError('--force-sync not enabled; cannot overwrite a local ' | ||
| 2326 | 'work tree') | ||
| 2327 | |||
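The realpath comparison above accepts an existing .git entry only when it already resolves into the shared directory; a minimal sketch of the same check with illustrative names:

import os

def links_match(srcdir, destdir, names):
  # True only if every existing entry under destdir resolves to the
  # corresponding entry under srcdir, i.e. the symlinks are ours.
  for name in names:
    dst = os.path.realpath(os.path.join(destdir, name))
    if os.path.lexists(dst):
      src = os.path.realpath(os.path.join(srcdir, name))
      if src != dst:
        return False
  return True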
| 2134 | def _ReferenceGitDir(self, gitdir, dotgit, share_refs, copy_all): | 2328 | def _ReferenceGitDir(self, gitdir, dotgit, share_refs, copy_all): |
| 2135 | """Update |dotgit| to reference |gitdir|, using symlinks where possible. | 2329 | """Update |dotgit| to reference |gitdir|, using symlinks where possible. |
| 2136 | 2330 | ||
| @@ -2142,13 +2336,11 @@ class Project(object): | |||
| 2142 | copy_all: If true, copy all remaining files from |gitdir| -> |dotgit|. | 2336 | copy_all: If true, copy all remaining files from |gitdir| -> |dotgit|. |
| 2143 | This saves you the effort of initializing |dotgit| yourself. | 2337 | This saves you the effort of initializing |dotgit| yourself. |
| 2144 | """ | 2338 | """ |
| 2145 | # These objects can be shared between several working trees. | 2339 | symlink_files = self.shareable_files |
| 2146 | symlink_files = ['description', 'info'] | 2340 | symlink_dirs = self.shareable_dirs |
| 2147 | symlink_dirs = ['hooks', 'objects', 'rr-cache', 'svn'] | ||
| 2148 | if share_refs: | 2341 | if share_refs: |
| 2149 | # These objects can only be used by a single working tree. | 2342 | symlink_files += self.working_tree_files |
| 2150 | symlink_files += ['config', 'packed-refs'] | 2343 | symlink_dirs += self.working_tree_dirs |
| 2151 | symlink_dirs += ['logs', 'refs'] | ||
| 2152 | to_symlink = symlink_files + symlink_dirs | 2344 | to_symlink = symlink_files + symlink_dirs |
| 2153 | 2345 | ||
| 2154 | to_copy = [] | 2346 | to_copy = [] |
| @@ -2160,13 +2352,21 @@ class Project(object): | |||
| 2160 | src = os.path.realpath(os.path.join(gitdir, name)) | 2352 | src = os.path.realpath(os.path.join(gitdir, name)) |
| 2161 | dst = os.path.realpath(os.path.join(dotgit, name)) | 2353 | dst = os.path.realpath(os.path.join(dotgit, name)) |
| 2162 | 2354 | ||
| 2163 | if os.path.lexists(dst) and not os.path.islink(dst): | 2355 | if os.path.lexists(dst): |
| 2164 | raise GitError('cannot overwrite a local work tree') | 2356 | continue |
| 2165 | 2357 | ||
| 2166 | # If the source dir doesn't exist, create an empty dir. | 2358 | # If the source dir doesn't exist, create an empty dir. |
| 2167 | if name in symlink_dirs and not os.path.lexists(src): | 2359 | if name in symlink_dirs and not os.path.lexists(src): |
| 2168 | os.makedirs(src) | 2360 | os.makedirs(src) |
| 2169 | 2361 | ||
| 2362 | # If the source file doesn't exist, ensure the destination | ||
| 2363 | # file doesn't either. | ||
| 2364 | if name in symlink_files and not os.path.lexists(src): | ||
| 2365 | try: | ||
| 2366 | os.remove(dst) | ||
| 2367 | except OSError: | ||
| 2368 | pass | ||
| 2369 | |||
| 2170 | if name in to_symlink: | 2370 | if name in to_symlink: |
| 2171 | os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst) | 2371 | os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst) |
| 2172 | elif copy_all and not os.path.islink(dst): | 2372 | elif copy_all and not os.path.islink(dst): |
| @@ -2176,26 +2376,44 @@ class Project(object): | |||
| 2176 | shutil.copy(src, dst) | 2376 | shutil.copy(src, dst) |
| 2177 | except OSError as e: | 2377 | except OSError as e: |
| 2178 | if e.errno == errno.EPERM: | 2378 | if e.errno == errno.EPERM: |
| 2179 | raise GitError('filesystem must support symlinks') | 2379 | raise DownloadError('filesystem must support symlinks') |
| 2180 | else: | 2380 | else: |
| 2181 | raise | 2381 | raise |
| 2182 | 2382 | ||
| 2183 | def _InitWorkTree(self): | 2383 | def _InitWorkTree(self, force_sync=False): |
| 2184 | dotgit = os.path.join(self.worktree, '.git') | 2384 | dotgit = os.path.join(self.worktree, '.git') |
| 2185 | if not os.path.exists(dotgit): | 2385 | init_dotgit = not os.path.exists(dotgit) |
| 2186 | os.makedirs(dotgit) | 2386 | try: |
| 2187 | self._ReferenceGitDir(self.gitdir, dotgit, share_refs=True, | 2387 | if init_dotgit: |
| 2188 | copy_all=False) | 2388 | os.makedirs(dotgit) |
| 2189 | 2389 | self._ReferenceGitDir(self.gitdir, dotgit, share_refs=True, | |
| 2190 | _lwrite(os.path.join(dotgit, HEAD), '%s\n' % self.GetRevisionId()) | 2390 | copy_all=False) |
| 2191 | |||
| 2192 | cmd = ['read-tree', '--reset', '-u'] | ||
| 2193 | cmd.append('-v') | ||
| 2194 | cmd.append(HEAD) | ||
| 2195 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2196 | raise GitError("cannot initialize work tree") | ||
| 2197 | 2391 | ||
| 2198 | self._CopyFiles() | 2392 | try: |
| 2393 | self._CheckDirReference(self.gitdir, dotgit, share_refs=True) | ||
| 2394 | except GitError as e: | ||
| 2395 | if force_sync: | ||
| 2396 | try: | ||
| 2397 | shutil.rmtree(dotgit) | ||
| 2398 | return self._InitWorkTree(force_sync=False) | ||
| 2399 | except: | ||
| 2400 | raise e | ||
| 2401 | raise e | ||
| 2402 | |||
| 2403 | if init_dotgit: | ||
| 2404 | _lwrite(os.path.join(dotgit, HEAD), '%s\n' % self.GetRevisionId()) | ||
| 2405 | |||
| 2406 | cmd = ['read-tree', '--reset', '-u'] | ||
| 2407 | cmd.append('-v') | ||
| 2408 | cmd.append(HEAD) | ||
| 2409 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2410 | raise GitError("cannot initialize work tree") | ||
| 2411 | |||
| 2412 | self._CopyAndLinkFiles() | ||
| 2413 | except Exception: | ||
| 2414 | if init_dotgit: | ||
| 2415 | shutil.rmtree(dotgit) | ||
| 2416 | raise | ||
| 2199 | 2417 | ||
| 2200 | def _gitdir_path(self, path): | 2418 | def _gitdir_path(self, path): |
| 2201 | return os.path.realpath(os.path.join(self.gitdir, path)) | 2419 | return os.path.realpath(os.path.join(self.gitdir, path)) |
| @@ -2259,10 +2477,10 @@ class Project(object): | |||
| 2259 | '-z', | 2477 | '-z', |
| 2260 | '--others', | 2478 | '--others', |
| 2261 | '--exclude-standard'], | 2479 | '--exclude-standard'], |
| 2262 | bare = False, | 2480 | bare=False, |
| 2263 | gitdir=self._gitdir, | 2481 | gitdir=self._gitdir, |
| 2264 | capture_stdout = True, | 2482 | capture_stdout=True, |
| 2265 | capture_stderr = True) | 2483 | capture_stderr=True) |
| 2266 | if p.Wait() == 0: | 2484 | if p.Wait() == 0: |
| 2267 | out = p.stdout | 2485 | out = p.stdout |
| 2268 | if out: | 2486 | if out: |
| @@ -2277,9 +2495,9 @@ class Project(object): | |||
| 2277 | p = GitCommand(self._project, | 2495 | p = GitCommand(self._project, |
| 2278 | cmd, | 2496 | cmd, |
| 2279 | gitdir=self._gitdir, | 2497 | gitdir=self._gitdir, |
| 2280 | bare = False, | 2498 | bare=False, |
| 2281 | capture_stdout = True, | 2499 | capture_stdout=True, |
| 2282 | capture_stderr = True) | 2500 | capture_stderr=True) |
| 2283 | try: | 2501 | try: |
| 2284 | out = p.process.stdout.read() | 2502 | out = p.process.stdout.read() |
| 2285 | r = {} | 2503 | r = {} |
| @@ -2287,8 +2505,8 @@ class Project(object): | |||
| 2287 | out = iter(out[:-1].split('\0')) # pylint: disable=W1401 | 2505 | out = iter(out[:-1].split('\0')) # pylint: disable=W1401 |
| 2288 | while out: | 2506 | while out: |
| 2289 | try: | 2507 | try: |
| 2290 | info = out.next() | 2508 | info = next(out) |
| 2291 | path = out.next() | 2509 | path = next(out) |
| 2292 | except StopIteration: | 2510 | except StopIteration: |
| 2293 | break | 2511 | break |
| 2294 | 2512 | ||
| @@ -2314,7 +2532,7 @@ class Project(object): | |||
| 2314 | info = _Info(path, *info) | 2532 | info = _Info(path, *info) |
| 2315 | if info.status in ('R', 'C'): | 2533 | if info.status in ('R', 'C'): |
| 2316 | info.src_path = info.path | 2534 | info.src_path = info.path |
| 2317 | info.path = out.next() | 2535 | info.path = next(out) |
| 2318 | r[info.path] = info | 2536 | r[info.path] = info |
| 2319 | return r | 2537 | return r |
| 2320 | finally: | 2538 | finally: |
| @@ -2385,10 +2603,10 @@ class Project(object): | |||
| 2385 | cmdv.extend(args) | 2603 | cmdv.extend(args) |
| 2386 | p = GitCommand(self._project, | 2604 | p = GitCommand(self._project, |
| 2387 | cmdv, | 2605 | cmdv, |
| 2388 | bare = self._bare, | 2606 | bare=self._bare, |
| 2389 | gitdir=self._gitdir, | 2607 | gitdir=self._gitdir, |
| 2390 | capture_stdout = True, | 2608 | capture_stdout=True, |
| 2391 | capture_stderr = True) | 2609 | capture_stderr=True) |
| 2392 | r = [] | 2610 | r = [] |
| 2393 | for line in p.process.stdout: | 2611 | for line in p.process.stdout: |
| 2394 | if line[-1] == '\n': | 2612 | if line[-1] == '\n': |
| @@ -2438,10 +2656,10 @@ class Project(object): | |||
| 2438 | cmdv.extend(args) | 2656 | cmdv.extend(args) |
| 2439 | p = GitCommand(self._project, | 2657 | p = GitCommand(self._project, |
| 2440 | cmdv, | 2658 | cmdv, |
| 2441 | bare = self._bare, | 2659 | bare=self._bare, |
| 2442 | gitdir=self._gitdir, | 2660 | gitdir=self._gitdir, |
| 2443 | capture_stdout = True, | 2661 | capture_stdout=True, |
| 2444 | capture_stderr = True) | 2662 | capture_stderr=True) |
| 2445 | if p.Wait() != 0: | 2663 | if p.Wait() != 0: |
| 2446 | raise GitError('%s %s: %s' % ( | 2664 | raise GitError('%s %s: %s' % ( |
| 2447 | self._project.name, | 2665 | self._project.name, |
| @@ -2506,9 +2724,9 @@ class _Later(object): | |||
| 2506 | class _SyncColoring(Coloring): | 2724 | class _SyncColoring(Coloring): |
| 2507 | def __init__(self, config): | 2725 | def __init__(self, config): |
| 2508 | Coloring.__init__(self, config, 'reposync') | 2726 | Coloring.__init__(self, config, 'reposync') |
| 2509 | self.project = self.printer('header', attr = 'bold') | 2727 | self.project = self.printer('header', attr='bold') |
| 2510 | self.info = self.printer('info') | 2728 | self.info = self.printer('info') |
| 2511 | self.fail = self.printer('fail', fg='red') | 2729 | self.fail = self.printer('fail', fg='red') |
| 2512 | 2730 | ||
| 2513 | class SyncBuffer(object): | 2731 | class SyncBuffer(object): |
| 2514 | def __init__(self, config, detach_head=False): | 2732 | def __init__(self, config, detach_head=False): |
| @@ -2570,16 +2788,16 @@ class MetaProject(Project): | |||
| 2570 | """ | 2788 | """ |
| 2571 | def __init__(self, manifest, name, gitdir, worktree): | 2789 | def __init__(self, manifest, name, gitdir, worktree): |
| 2572 | Project.__init__(self, | 2790 | Project.__init__(self, |
| 2573 | manifest = manifest, | 2791 | manifest=manifest, |
| 2574 | name = name, | 2792 | name=name, |
| 2575 | gitdir = gitdir, | 2793 | gitdir=gitdir, |
| 2576 | objdir = gitdir, | 2794 | objdir=gitdir, |
| 2577 | worktree = worktree, | 2795 | worktree=worktree, |
| 2578 | remote = RemoteSpec('origin'), | 2796 | remote=RemoteSpec('origin'), |
| 2579 | relpath = '.repo/%s' % name, | 2797 | relpath='.repo/%s' % name, |
| 2580 | revisionExpr = 'refs/heads/master', | 2798 | revisionExpr='refs/heads/master', |
| 2581 | revisionId = None, | 2799 | revisionId=None, |
| 2582 | groups = None) | 2800 | groups=None) |
| 2583 | 2801 | ||
| 2584 | def PreSync(self): | 2802 | def PreSync(self): |
| 2585 | if self.Exists: | 2803 | if self.Exists: |
| @@ -2590,20 +2808,20 @@ class MetaProject(Project): | |||
| 2590 | self.revisionExpr = base | 2808 | self.revisionExpr = base |
| 2591 | self.revisionId = None | 2809 | self.revisionId = None |
| 2592 | 2810 | ||
| 2593 | def MetaBranchSwitch(self, target): | 2811 | def MetaBranchSwitch(self): |
| 2594 | """ Prepare MetaProject for manifest branch switch | 2812 | """ Prepare MetaProject for manifest branch switch |
| 2595 | """ | 2813 | """ |
| 2596 | 2814 | ||
| 2597 | # detach and delete manifest branch, allowing a new | 2815 | # detach and delete manifest branch, allowing a new |
| 2598 | # branch to take over | 2816 | # branch to take over |
| 2599 | syncbuf = SyncBuffer(self.config, detach_head = True) | 2817 | syncbuf = SyncBuffer(self.config, detach_head=True) |
| 2600 | self.Sync_LocalHalf(syncbuf) | 2818 | self.Sync_LocalHalf(syncbuf) |
| 2601 | syncbuf.Finish() | 2819 | syncbuf.Finish() |
| 2602 | 2820 | ||
| 2603 | return GitCommand(self, | 2821 | return GitCommand(self, |
| 2604 | ['update-ref', '-d', 'refs/heads/default'], | 2822 | ['update-ref', '-d', 'refs/heads/default'], |
| 2605 | capture_stdout = True, | 2823 | capture_stdout=True, |
| 2606 | capture_stderr = True).Wait() == 0 | 2824 | capture_stderr=True).Wait() == 0 |
| 2607 | 2825 | ||
| 2608 | 2826 | ||
| 2609 | @property | 2827 | @property |
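The out.next() to next(out) changes in the project.py hunks above are what make the NUL-delimited `git diff-index -z` parsing work on both Python 2.6+ and Python 3. A minimal sketch of that parsing pattern, with an illustrative sample string in place of real GitCommand output; rename/copy records, which carry a second path, are noted but not handled here:

    from __future__ import print_function

    def parse_diff_z(raw):
        """Yield (info, path) pairs from 'git diff-index -z' style output."""
        if not raw:
            return
        # Records are NUL-separated; drop the trailing NUL before splitting.
        fields = iter(raw[:-1].split('\0'))
        while True:
            try:
                info = next(fields)   # next() works on Python 2.6+ and Python 3
                path = next(fields)
            except StopIteration:
                break
            # The real code reads one more field here for R/C (rename/copy)
            # records; this sketch keeps only the simple case.
            yield info, path

    if __name__ == '__main__':
        sample = ':100644 100644 abc123 def456 M\0README.md\0'
        for info, path in parse_diff_z(sample):
            print(info, '->', path)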
| @@ -114,6 +114,7 @@ import errno | |||
| 114 | import optparse | 114 | import optparse |
| 115 | import os | 115 | import os |
| 116 | import re | 116 | import re |
| 117 | import shutil | ||
| 117 | import stat | 118 | import stat |
| 118 | import subprocess | 119 | import subprocess |
| 119 | import sys | 120 | import sys |
| @@ -138,10 +139,6 @@ def _print(*objects, **kwargs): | |||
| 138 | 139 | ||
| 139 | # Python version check | 140 | # Python version check |
| 140 | ver = sys.version_info | 141 | ver = sys.version_info |
| 141 | if ver[0] == 3: | ||
| 142 | _print('warning: Python 3 support is currently experimental. YMMV.\n' | ||
| 143 | 'Please use Python 2.6 - 2.7 instead.', | ||
| 144 | file=sys.stderr) | ||
| 145 | if (ver[0], ver[1]) < MIN_PYTHON_VERSION: | 142 | if (ver[0], ver[1]) < MIN_PYTHON_VERSION: |
| 146 | _print('error: Python version %s unsupported.\n' | 143 | _print('error: Python version %s unsupported.\n' |
| 147 | 'Please use Python 2.6 - 2.7 instead.' | 144 | 'Please use Python 2.6 - 2.7 instead.' |
| @@ -465,7 +462,7 @@ def _DownloadBundle(url, local, quiet): | |||
| 465 | try: | 462 | try: |
| 466 | r = urllib.request.urlopen(url) | 463 | r = urllib.request.urlopen(url) |
| 467 | except urllib.error.HTTPError as e: | 464 | except urllib.error.HTTPError as e: |
| 468 | if e.code in [403, 404]: | 465 | if e.code in [401, 403, 404]: |
| 469 | return False | 466 | return False |
| 470 | _print('fatal: Cannot get %s' % url, file=sys.stderr) | 467 | _print('fatal: Cannot get %s' % url, file=sys.stderr) |
| 471 | _print('fatal: HTTP error %s' % e.code, file=sys.stderr) | 468 | _print('fatal: HTTP error %s' % e.code, file=sys.stderr) |
| @@ -741,12 +738,7 @@ def main(orig_args): | |||
| 741 | try: | 738 | try: |
| 742 | _Init(args) | 739 | _Init(args) |
| 743 | except CloneFailure: | 740 | except CloneFailure: |
| 744 | for root, dirs, files in os.walk(repodir, topdown=False): | 741 | shutil.rmtree(os.path.join(repodir, S_repo), ignore_errors=True) |
| 745 | for name in files: | ||
| 746 | os.remove(os.path.join(root, name)) | ||
| 747 | for name in dirs: | ||
| 748 | os.rmdir(os.path.join(root, name)) | ||
| 749 | os.rmdir(repodir) | ||
| 750 | sys.exit(1) | 742 | sys.exit(1) |
| 751 | repo_main, rel_repo_dir = _FindRepo() | 743 | repo_main, rel_repo_dir = _FindRepo() |
| 752 | else: | 744 | else: |
| @@ -772,4 +764,8 @@ def main(orig_args): | |||
| 772 | 764 | ||
| 773 | 765 | ||
| 774 | if __name__ == '__main__': | 766 | if __name__ == '__main__': |
| 767 | if ver[0] == 3: | ||
| 768 | _print('warning: Python 3 support is currently experimental. YMMV.\n' | ||
| 769 | 'Please use Python 2.6 - 2.7 instead.', | ||
| 770 | file=sys.stderr) | ||
| 775 | main(sys.argv[1:]) | 771 | main(sys.argv[1:]) |
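Two of the bootstrap changes above are easy to show in isolation: HTTP 401 now joins 403/404 as a quiet "no clone.bundle here" answer, and the manual os.walk() cleanup is replaced by shutil.rmtree(). A hedged Python 3 sketch; the function names and the 'repo' subdirectory default are illustrative, not the script's actual API:

    import os
    import shutil
    import sys
    import urllib.error
    import urllib.request

    def probe_bundle(url):
        """Return True if a clone.bundle exists at url, False if it clearly does not."""
        try:
            resp = urllib.request.urlopen(url)
        except urllib.error.HTTPError as e:
            if e.code in (401, 403, 404):
                return False           # quietly fall back to a plain git clone
            print('fatal: HTTP error %s for %s' % (e.code, url), file=sys.stderr)
            raise
        resp.close()
        return True

    def wipe_partial_clone(repodir, subdir='repo'):
        # Best-effort cleanup after a failed clone; ignore_errors=True replaces
        # the old explicit os.walk()/os.remove()/os.rmdir() loop.
        shutil.rmtree(os.path.join(repodir, subdir), ignore_errors=True)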
diff --git a/subcmds/branches.py b/subcmds/branches.py index f714c1e8..2902684a 100644 --- a/subcmds/branches.py +++ b/subcmds/branches.py | |||
| @@ -47,6 +47,10 @@ class BranchInfo(object): | |||
| 47 | return self.current > 0 | 47 | return self.current > 0 |
| 48 | 48 | ||
| 49 | @property | 49 | @property |
| 50 | def IsSplitCurrent(self): | ||
| 51 | return self.current != 0 and self.current != len(self.projects) | ||
| 52 | |||
| 53 | @property | ||
| 50 | def IsPublished(self): | 54 | def IsPublished(self): |
| 51 | return self.published > 0 | 55 | return self.published > 0 |
| 52 | 56 | ||
| @@ -139,10 +143,14 @@ is shown, then the branch appears in all projects. | |||
| 139 | if in_cnt < project_cnt: | 143 | if in_cnt < project_cnt: |
| 140 | fmt = out.write | 144 | fmt = out.write |
| 141 | paths = [] | 145 | paths = [] |
| 142 | if in_cnt < project_cnt - in_cnt: | 146 | non_cur_paths = [] |
| 147 | if i.IsSplitCurrent or (in_cnt < project_cnt - in_cnt): | ||
| 143 | in_type = 'in' | 148 | in_type = 'in' |
| 144 | for b in i.projects: | 149 | for b in i.projects: |
| 145 | paths.append(b.project.relpath) | 150 | if not i.IsSplitCurrent or b.current: |
| 151 | paths.append(b.project.relpath) | ||
| 152 | else: | ||
| 153 | non_cur_paths.append(b.project.relpath) | ||
| 146 | else: | 154 | else: |
| 147 | fmt = out.notinproject | 155 | fmt = out.notinproject |
| 148 | in_type = 'not in' | 156 | in_type = 'not in' |
| @@ -154,13 +162,19 @@ is shown, then the branch appears in all projects. | |||
| 154 | paths.append(p.relpath) | 162 | paths.append(p.relpath) |
| 155 | 163 | ||
| 156 | s = ' %s %s' % (in_type, ', '.join(paths)) | 164 | s = ' %s %s' % (in_type, ', '.join(paths)) |
| 157 | if width + 7 + len(s) < 80: | 165 | if not i.IsSplitCurrent and (width + 7 + len(s) < 80): |
| 166 | fmt = out.current if i.IsCurrent else fmt | ||
| 158 | fmt(s) | 167 | fmt(s) |
| 159 | else: | 168 | else: |
| 160 | fmt(' %s:' % in_type) | 169 | fmt(' %s:' % in_type) |
| 170 | fmt = out.current if i.IsCurrent else out.write | ||
| 161 | for p in paths: | 171 | for p in paths: |
| 162 | out.nl() | 172 | out.nl() |
| 163 | fmt(width*' ' + ' %s' % p) | 173 | fmt(width*' ' + ' %s' % p) |
| 174 | fmt = out.write | ||
| 175 | for p in non_cur_paths: | ||
| 176 | out.nl() | ||
| 177 | fmt(width*' ' + ' %s' % p) | ||
| 164 | else: | 178 | else: |
| 165 | out.write(' in all projects') | 179 | out.write(' in all projects') |
| 166 | out.nl() | 180 | out.nl() |
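The new IsSplitCurrent property above distinguishes a branch that is checked out in some, but not all, of the projects that carry it, and the listing then prints the checked-out paths first. An illustrative stand-alone sketch of that grouping; the class and tuple layout are made up for the example:

    class BranchInfoSketch(object):
        def __init__(self, projects):
            # projects: list of (relpath, is_current) tuples standing in for
            # the real per-project branch records.
            self.projects = projects
            self.current = sum(1 for _, cur in projects if cur)

        @property
        def IsSplitCurrent(self):
            return self.current != 0 and self.current != len(self.projects)

        def SplitPaths(self):
            """Return (current_paths, non_current_paths) for display."""
            if not self.IsSplitCurrent:
                return [p for p, _ in self.projects], []
            cur = [p for p, c in self.projects if c]
            non_cur = [p for p, c in self.projects if not c]
            return cur, non_cur

    if __name__ == '__main__':
        info = BranchInfoSketch([('build/', True), ('frameworks/base/', False)])
        print(info.IsSplitCurrent)   # True
        print(info.SplitPaths())     # (['build/'], ['frameworks/base/'])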
diff --git a/subcmds/cherry_pick.py b/subcmds/cherry_pick.py index 520e4c32..1f7dffdc 100644 --- a/subcmds/cherry_pick.py +++ b/subcmds/cherry_pick.py | |||
| @@ -76,6 +76,7 @@ change id will be added. | |||
| 76 | capture_stdout = True, | 76 | capture_stdout = True, |
| 77 | capture_stderr = True) | 77 | capture_stderr = True) |
| 78 | p.stdin.write(new_msg) | 78 | p.stdin.write(new_msg) |
| 79 | p.stdin.close() | ||
| 79 | if p.Wait() != 0: | 80 | if p.Wait() != 0: |
| 80 | print("error: Failed to update commit message", file=sys.stderr) | 81 | print("error: Failed to update commit message", file=sys.stderr) |
| 81 | sys.exit(1) | 82 | sys.exit(1) |
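The one-line p.stdin.close() added above matters because the child only sees end-of-file once the write side of the pipe is closed; without it, Wait() can block forever on a filter that reads until EOF. A stand-in sketch using plain subprocess and `cat`, a POSIX command chosen only for illustration:

    from __future__ import print_function
    import subprocess

    p = subprocess.Popen(['cat'], stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    p.stdin.write(b'new commit message\n')
    p.stdin.close()                 # signal EOF so the child can finish
    out = p.stdout.read()
    rc = p.wait()
    print(rc, out)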
diff --git a/subcmds/download.py b/subcmds/download.py index 098d8b43..a029462e 100644 --- a/subcmds/download.py +++ b/subcmds/download.py | |||
| @@ -93,6 +93,7 @@ makes it available in your project's local working directory. | |||
| 93 | except GitError: | 93 | except GitError: |
| 94 | print('[%s] Could not complete the cherry-pick of %s' \ | 94 | print('[%s] Could not complete the cherry-pick of %s' \ |
| 95 | % (project.name, dl.commit), file=sys.stderr) | 95 | % (project.name, dl.commit), file=sys.stderr) |
| 96 | sys.exit(1) | ||
| 96 | 97 | ||
| 97 | elif opt.revert: | 98 | elif opt.revert: |
| 98 | project._Revert(dl.commit) | 99 | project._Revert(dl.commit) |
diff --git a/subcmds/forall.py b/subcmds/forall.py index e2a420a9..b93cd6d0 100644 --- a/subcmds/forall.py +++ b/subcmds/forall.py | |||
| @@ -14,10 +14,13 @@ | |||
| 14 | # limitations under the License. | 14 | # limitations under the License. |
| 15 | 15 | ||
| 16 | from __future__ import print_function | 16 | from __future__ import print_function |
| 17 | import errno | ||
| 17 | import fcntl | 18 | import fcntl |
| 19 | import multiprocessing | ||
| 18 | import re | 20 | import re |
| 19 | import os | 21 | import os |
| 20 | import select | 22 | import select |
| 23 | import signal | ||
| 21 | import sys | 24 | import sys |
| 22 | import subprocess | 25 | import subprocess |
| 23 | 26 | ||
| @@ -31,6 +34,7 @@ _CAN_COLOR = [ | |||
| 31 | 'log', | 34 | 'log', |
| 32 | ] | 35 | ] |
| 33 | 36 | ||
| 37 | |||
| 34 | class ForallColoring(Coloring): | 38 | class ForallColoring(Coloring): |
| 35 | def __init__(self, config): | 39 | def __init__(self, config): |
| 36 | Coloring.__init__(self, config, 'forall') | 40 | Coloring.__init__(self, config, 'forall') |
| @@ -87,6 +91,12 @@ revision to a locally executed git command, use REPO_LREV. | |||
| 87 | REPO_RREV is the name of the revision from the manifest, exactly | 91 | REPO_RREV is the name of the revision from the manifest, exactly |
| 88 | as written in the manifest. | 92 | as written in the manifest. |
| 89 | 93 | ||
| 94 | REPO_COUNT is the total number of projects being iterated. | ||
| 95 | |||
| 96 | REPO_I is the current (1-based) iteration count. Can be used in | ||
| 97 | conjunction with REPO_COUNT to add a simple progress indicator to your | ||
| 98 | command. | ||
| 99 | |||
| 90 | REPO__* are any extra environment variables, specified by the | 100 | REPO__* are any extra environment variables, specified by the |
| 91 | "annotation" element under any project element. This can be useful | 101 | "annotation" element under any project element. This can be useful |
| 92 | for differentiating trees based on user-specific criteria, or simply | 102 | for differentiating trees based on user-specific criteria, or simply |
| @@ -126,9 +136,35 @@ without iterating through the remaining projects. | |||
| 126 | g.add_option('-v', '--verbose', | 136 | g.add_option('-v', '--verbose', |
| 127 | dest='verbose', action='store_true', | 137 | dest='verbose', action='store_true', |
| 128 | help='Show command error messages') | 138 | help='Show command error messages') |
| 139 | g.add_option('-j', '--jobs', | ||
| 140 | dest='jobs', action='store', type='int', default=1, | ||
| 141 | help='number of commands to execute simultaneously') | ||
| 129 | 142 | ||
| 130 | def WantPager(self, opt): | 143 | def WantPager(self, opt): |
| 131 | return opt.project_header | 144 | return opt.project_header and opt.jobs == 1 |
| 145 | |||
| 146 | def _SerializeProject(self, project): | ||
| 147 | """ Serialize a project._GitGetByExec instance. | ||
| 148 | |||
| 149 | project._GitGetByExec is not pickle-able. Instead of trying to pass it | ||
| 150 | around between processes, make a dict ourselves containing only the | ||
| 151 | attributes that we need. | ||
| 152 | |||
| 153 | """ | ||
| 154 | if not self.manifest.IsMirror: | ||
| 155 | lrev = project.GetRevisionId() | ||
| 156 | else: | ||
| 157 | lrev = None | ||
| 158 | return { | ||
| 159 | 'name': project.name, | ||
| 160 | 'relpath': project.relpath, | ||
| 161 | 'remote_name': project.remote.name, | ||
| 162 | 'lrev': lrev, | ||
| 163 | 'rrev': project.revisionExpr, | ||
| 164 | 'annotations': dict((a.name, a.value) for a in project.annotations), | ||
| 165 | 'gitdir': project.gitdir, | ||
| 166 | 'worktree': project.worktree, | ||
| 167 | } | ||
| 132 | 168 | ||
| 133 | def Execute(self, opt, args): | 169 | def Execute(self, opt, args): |
| 134 | if not opt.command: | 170 | if not opt.command: |
| @@ -167,123 +203,188 @@ without iterating through the remaining projects. | |||
| 167 | # pylint: enable=W0631 | 203 | # pylint: enable=W0631 |
| 168 | 204 | ||
| 169 | mirror = self.manifest.IsMirror | 205 | mirror = self.manifest.IsMirror |
| 170 | out = ForallColoring(self.manifest.manifestProject.config) | ||
| 171 | out.redirect(sys.stdout) | ||
| 172 | |||
| 173 | rc = 0 | 206 | rc = 0 |
| 174 | first = True | 207 | |
| 208 | smart_sync_manifest_name = "smart_sync_override.xml" | ||
| 209 | smart_sync_manifest_path = os.path.join( | ||
| 210 | self.manifest.manifestProject.worktree, smart_sync_manifest_name) | ||
| 211 | |||
| 212 | if os.path.isfile(smart_sync_manifest_path): | ||
| 213 | self.manifest.Override(smart_sync_manifest_path) | ||
| 175 | 214 | ||
| 176 | if not opt.regex: | 215 | if not opt.regex: |
| 177 | projects = self.GetProjects(args) | 216 | projects = self.GetProjects(args) |
| 178 | else: | 217 | else: |
| 179 | projects = self.FindProjects(args) | 218 | projects = self.FindProjects(args) |
| 180 | 219 | ||
| 181 | for project in projects: | 220 | os.environ['REPO_COUNT'] = str(len(projects)) |
| 182 | env = os.environ.copy() | 221 | |
| 183 | def setenv(name, val): | 222 | pool = multiprocessing.Pool(opt.jobs, InitWorker) |
| 184 | if val is None: | 223 | try: |
| 185 | val = '' | 224 | config = self.manifest.manifestProject.config |
| 186 | env[name] = val.encode() | 225 | results_it = pool.imap( |
| 187 | 226 | DoWorkWrapper, | |
| 188 | setenv('REPO_PROJECT', project.name) | 227 | self.ProjectArgs(projects, mirror, opt, cmd, shell, config)) |
| 189 | setenv('REPO_PATH', project.relpath) | 228 | pool.close() |
| 190 | setenv('REPO_REMOTE', project.remote.name) | 229 | for r in results_it: |
| 191 | setenv('REPO_LREV', project.GetRevisionId()) | 230 | rc = rc or r |
| 192 | setenv('REPO_RREV', project.revisionExpr) | 231 | if r != 0 and opt.abort_on_errors: |
| 193 | for a in project.annotations: | 232 | raise Exception('Aborting due to previous error') |
| 194 | setenv("REPO__%s" % (a.name), a.value) | 233 | except (KeyboardInterrupt, WorkerKeyboardInterrupt): |
| 195 | 234 | # Catch KeyboardInterrupt raised inside and outside of workers | |
| 196 | if mirror: | 235 | print('Interrupted - terminating the pool') |
| 197 | setenv('GIT_DIR', project.gitdir) | 236 | pool.terminate() |
| 198 | cwd = project.gitdir | 237 | rc = rc or errno.EINTR |
| 199 | else: | 238 | except Exception as e: |
| 200 | cwd = project.worktree | 239 | # Catch any other exceptions raised |
| 201 | 240 | print('Got an error, terminating the pool: %r' % e, | |
| 202 | if not os.path.exists(cwd): | 241 | file=sys.stderr) |
| 203 | if (opt.project_header and opt.verbose) \ | 242 | pool.terminate() |
| 204 | or not opt.project_header: | 243 | rc = rc or getattr(e, 'errno', 1) |
| 205 | print('skipping %s/' % project.relpath, file=sys.stderr) | 244 | finally: |
| 206 | continue | 245 | pool.join() |
| 207 | |||
| 208 | if opt.project_header: | ||
| 209 | stdin = subprocess.PIPE | ||
| 210 | stdout = subprocess.PIPE | ||
| 211 | stderr = subprocess.PIPE | ||
| 212 | else: | ||
| 213 | stdin = None | ||
| 214 | stdout = None | ||
| 215 | stderr = None | ||
| 216 | |||
| 217 | p = subprocess.Popen(cmd, | ||
| 218 | cwd = cwd, | ||
| 219 | shell = shell, | ||
| 220 | env = env, | ||
| 221 | stdin = stdin, | ||
| 222 | stdout = stdout, | ||
| 223 | stderr = stderr) | ||
| 224 | |||
| 225 | if opt.project_header: | ||
| 226 | class sfd(object): | ||
| 227 | def __init__(self, fd, dest): | ||
| 228 | self.fd = fd | ||
| 229 | self.dest = dest | ||
| 230 | def fileno(self): | ||
| 231 | return self.fd.fileno() | ||
| 232 | |||
| 233 | empty = True | ||
| 234 | errbuf = '' | ||
| 235 | |||
| 236 | p.stdin.close() | ||
| 237 | s_in = [sfd(p.stdout, sys.stdout), | ||
| 238 | sfd(p.stderr, sys.stderr)] | ||
| 239 | |||
| 240 | for s in s_in: | ||
| 241 | flags = fcntl.fcntl(s.fd, fcntl.F_GETFL) | ||
| 242 | fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) | ||
| 243 | |||
| 244 | while s_in: | ||
| 245 | in_ready, _out_ready, _err_ready = select.select(s_in, [], []) | ||
| 246 | for s in in_ready: | ||
| 247 | buf = s.fd.read(4096) | ||
| 248 | if not buf: | ||
| 249 | s.fd.close() | ||
| 250 | s_in.remove(s) | ||
| 251 | continue | ||
| 252 | |||
| 253 | if not opt.verbose: | ||
| 254 | if s.fd != p.stdout: | ||
| 255 | errbuf += buf | ||
| 256 | continue | ||
| 257 | |||
| 258 | if empty: | ||
| 259 | if first: | ||
| 260 | first = False | ||
| 261 | else: | ||
| 262 | out.nl() | ||
| 263 | |||
| 264 | if mirror: | ||
| 265 | project_header_path = project.name | ||
| 266 | else: | ||
| 267 | project_header_path = project.relpath | ||
| 268 | out.project('project %s/', project_header_path) | ||
| 269 | out.nl() | ||
| 270 | out.flush() | ||
| 271 | if errbuf: | ||
| 272 | sys.stderr.write(errbuf) | ||
| 273 | sys.stderr.flush() | ||
| 274 | errbuf = '' | ||
| 275 | empty = False | ||
| 276 | |||
| 277 | s.dest.write(buf) | ||
| 278 | s.dest.flush() | ||
| 279 | |||
| 280 | r = p.wait() | ||
| 281 | if r != 0: | ||
| 282 | if r != rc: | ||
| 283 | rc = r | ||
| 284 | if opt.abort_on_errors: | ||
| 285 | print("error: %s: Aborting due to previous error" % project.relpath, | ||
| 286 | file=sys.stderr) | ||
| 287 | sys.exit(r) | ||
| 288 | if rc != 0: | 246 | if rc != 0: |
| 289 | sys.exit(rc) | 247 | sys.exit(rc) |
| 248 | |||
| 249 | def ProjectArgs(self, projects, mirror, opt, cmd, shell, config): | ||
| 250 | for cnt, p in enumerate(projects): | ||
| 251 | try: | ||
| 252 | project = self._SerializeProject(p) | ||
| 253 | except Exception as e: | ||
| 254 | print('Project list error: %r' % e, | ||
| 255 | file=sys.stderr) | ||
| 256 | return | ||
| 257 | except KeyboardInterrupt: | ||
| 258 | print('Project list interrupted', | ||
| 259 | file=sys.stderr) | ||
| 260 | return | ||
| 261 | yield [mirror, opt, cmd, shell, cnt, config, project] | ||
| 262 | |||
| 263 | class WorkerKeyboardInterrupt(Exception): | ||
| 264 | """ Keyboard interrupt exception for worker processes. """ | ||
| 265 | pass | ||
| 266 | |||
| 267 | |||
| 268 | def InitWorker(): | ||
| 269 | signal.signal(signal.SIGINT, signal.SIG_IGN) | ||
| 270 | |||
| 271 | def DoWorkWrapper(args): | ||
| 272 | """ A wrapper around the DoWork() method. | ||
| 273 | |||
| 274 | Catch KeyboardInterrupt here and re-raise it as a different, | ||
| 275 | ``Exception``-based exception so it does not flood the console with stack | ||
| 276 | traces or make the parent hang indefinitely. | ||
| 277 | |||
| 278 | """ | ||
| 279 | project = args.pop() | ||
| 280 | try: | ||
| 281 | return DoWork(project, *args) | ||
| 282 | except KeyboardInterrupt: | ||
| 283 | print('%s: Worker interrupted' % project['name']) | ||
| 284 | raise WorkerKeyboardInterrupt() | ||
| 285 | |||
| 286 | |||
| 287 | def DoWork(project, mirror, opt, cmd, shell, cnt, config): | ||
| 288 | env = os.environ.copy() | ||
| 289 | def setenv(name, val): | ||
| 290 | if val is None: | ||
| 291 | val = '' | ||
| 292 | if hasattr(val, 'encode'): | ||
| 293 | val = val.encode() | ||
| 294 | env[name] = val | ||
| 295 | |||
| 296 | setenv('REPO_PROJECT', project['name']) | ||
| 297 | setenv('REPO_PATH', project['relpath']) | ||
| 298 | setenv('REPO_REMOTE', project['remote_name']) | ||
| 299 | setenv('REPO_LREV', project['lrev']) | ||
| 300 | setenv('REPO_RREV', project['rrev']) | ||
| 301 | setenv('REPO_I', str(cnt + 1)) | ||
| 302 | for name in project['annotations']: | ||
| 303 | setenv("REPO__%s" % (name), project['annotations'][name]) | ||
| 304 | |||
| 305 | if mirror: | ||
| 306 | setenv('GIT_DIR', project['gitdir']) | ||
| 307 | cwd = project['gitdir'] | ||
| 308 | else: | ||
| 309 | cwd = project['worktree'] | ||
| 310 | |||
| 311 | if not os.path.exists(cwd): | ||
| 312 | if (opt.project_header and opt.verbose) \ | ||
| 313 | or not opt.project_header: | ||
| 314 | print('skipping %s/' % project['relpath'], file=sys.stderr) | ||
| 315 | return | ||
| 316 | |||
| 317 | if opt.project_header: | ||
| 318 | stdin = subprocess.PIPE | ||
| 319 | stdout = subprocess.PIPE | ||
| 320 | stderr = subprocess.PIPE | ||
| 321 | else: | ||
| 322 | stdin = None | ||
| 323 | stdout = None | ||
| 324 | stderr = None | ||
| 325 | |||
| 326 | p = subprocess.Popen(cmd, | ||
| 327 | cwd=cwd, | ||
| 328 | shell=shell, | ||
| 329 | env=env, | ||
| 330 | stdin=stdin, | ||
| 331 | stdout=stdout, | ||
| 332 | stderr=stderr) | ||
| 333 | |||
| 334 | if opt.project_header: | ||
| 335 | out = ForallColoring(config) | ||
| 336 | out.redirect(sys.stdout) | ||
| 337 | class sfd(object): | ||
| 338 | def __init__(self, fd, dest): | ||
| 339 | self.fd = fd | ||
| 340 | self.dest = dest | ||
| 341 | def fileno(self): | ||
| 342 | return self.fd.fileno() | ||
| 343 | |||
| 344 | empty = True | ||
| 345 | errbuf = '' | ||
| 346 | |||
| 347 | p.stdin.close() | ||
| 348 | s_in = [sfd(p.stdout, sys.stdout), | ||
| 349 | sfd(p.stderr, sys.stderr)] | ||
| 350 | |||
| 351 | for s in s_in: | ||
| 352 | flags = fcntl.fcntl(s.fd, fcntl.F_GETFL) | ||
| 353 | fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) | ||
| 354 | |||
| 355 | while s_in: | ||
| 356 | in_ready, _out_ready, _err_ready = select.select(s_in, [], []) | ||
| 357 | for s in in_ready: | ||
| 358 | buf = s.fd.read(4096) | ||
| 359 | if not buf: | ||
| 360 | s.fd.close() | ||
| 361 | s_in.remove(s) | ||
| 362 | continue | ||
| 363 | |||
| 364 | if not opt.verbose: | ||
| 365 | if s.fd != p.stdout: | ||
| 366 | errbuf += buf | ||
| 367 | continue | ||
| 368 | |||
| 369 | if empty and out: | ||
| 370 | if not cnt == 0: | ||
| 371 | out.nl() | ||
| 372 | |||
| 373 | if mirror: | ||
| 374 | project_header_path = project['name'] | ||
| 375 | else: | ||
| 376 | project_header_path = project['relpath'] | ||
| 377 | out.project('project %s/', project_header_path) | ||
| 378 | out.nl() | ||
| 379 | out.flush() | ||
| 380 | if errbuf: | ||
| 381 | sys.stderr.write(errbuf) | ||
| 382 | sys.stderr.flush() | ||
| 383 | errbuf = '' | ||
| 384 | empty = False | ||
| 385 | |||
| 386 | s.dest.write(buf) | ||
| 387 | s.dest.flush() | ||
| 388 | |||
| 389 | r = p.wait() | ||
| 390 | return r | ||
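The rewrite above moves forall from a serial loop to a multiprocessing.Pool whose workers ignore SIGINT, so only the parent reacts to Ctrl-C, and re-raise any in-worker KeyboardInterrupt as a plain Exception subclass so the parent's imap() loop can shut the pool down cleanly. A reduced sketch of that pattern under those assumptions; run_one() and the sample commands are illustrative only:

    from __future__ import print_function
    import multiprocessing
    import signal
    import subprocess

    class WorkerInterrupt(Exception):
        """Raised instead of KeyboardInterrupt inside workers."""

    def init_worker():
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    def run_one(args):
        cwd, cmd = args
        try:
            return subprocess.Popen(cmd, cwd=cwd, shell=True).wait()
        except KeyboardInterrupt:
            raise WorkerInterrupt()

    def run_all(work, jobs=2):
        pool = multiprocessing.Pool(jobs, init_worker)
        rc = 0
        try:
            for r in pool.imap(run_one, work):
                rc = rc or r
            pool.close()
        except (KeyboardInterrupt, WorkerInterrupt):
            print('Interrupted - terminating the pool')
            pool.terminate()
            rc = rc or 1
        except Exception:
            pool.terminate()
            raise
        finally:
            pool.join()
        return rc

    if __name__ == '__main__':
        print(run_all([('.', 'echo one'), ('.', 'echo two')]))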
diff --git a/subcmds/info.py b/subcmds/info.py index d42860ae..ed196e90 100644 --- a/subcmds/info.py +++ b/subcmds/info.py | |||
| @@ -59,7 +59,8 @@ class Info(PagedCommand): | |||
| 59 | or 'all,-notdefault') | 59 | or 'all,-notdefault') |
| 60 | 60 | ||
| 61 | self.heading("Manifest branch: ") | 61 | self.heading("Manifest branch: ") |
| 62 | self.headtext(self.manifest.default.revisionExpr) | 62 | if self.manifest.default.revisionExpr: |
| 63 | self.headtext(self.manifest.default.revisionExpr) | ||
| 63 | self.out.nl() | 64 | self.out.nl() |
| 64 | self.heading("Manifest merge branch: ") | 65 | self.heading("Manifest merge branch: ") |
| 65 | self.headtext(mergeBranch) | 66 | self.headtext(mergeBranch) |
diff --git a/subcmds/init.py b/subcmds/init.py index b1fcb69c..dbb6ddda 100644 --- a/subcmds/init.py +++ b/subcmds/init.py | |||
| @@ -27,7 +27,7 @@ else: | |||
| 27 | import imp | 27 | import imp |
| 28 | import urlparse | 28 | import urlparse |
| 29 | urllib = imp.new_module('urllib') | 29 | urllib = imp.new_module('urllib') |
| 30 | urllib.parse = urlparse.urlparse | 30 | urllib.parse = urlparse |
| 31 | 31 | ||
| 32 | from color import Coloring | 32 | from color import Coloring |
| 33 | from command import InteractiveCommand, MirrorSafeCommand | 33 | from command import InteractiveCommand, MirrorSafeCommand |
| @@ -153,7 +153,7 @@ to update the working directory files. | |||
| 153 | # server where this git is located, so let's save that here. | 153 | # server where this git is located, so let's save that here. |
| 154 | mirrored_manifest_git = None | 154 | mirrored_manifest_git = None |
| 155 | if opt.reference: | 155 | if opt.reference: |
| 156 | manifest_git_path = urllib.parse(opt.manifest_url).path[1:] | 156 | manifest_git_path = urllib.parse.urlparse(opt.manifest_url).path[1:] |
| 157 | mirrored_manifest_git = os.path.join(opt.reference, manifest_git_path) | 157 | mirrored_manifest_git = os.path.join(opt.reference, manifest_git_path) |
| 158 | if not mirrored_manifest_git.endswith(".git"): | 158 | if not mirrored_manifest_git.endswith(".git"): |
| 159 | mirrored_manifest_git += ".git" | 159 | mirrored_manifest_git += ".git" |
| @@ -233,7 +233,7 @@ to update the working directory files. | |||
| 233 | sys.exit(1) | 233 | sys.exit(1) |
| 234 | 234 | ||
| 235 | if opt.manifest_branch: | 235 | if opt.manifest_branch: |
| 236 | m.MetaBranchSwitch(opt.manifest_branch) | 236 | m.MetaBranchSwitch() |
| 237 | 237 | ||
| 238 | syncbuf = SyncBuffer(m.config) | 238 | syncbuf = SyncBuffer(m.config) |
| 239 | m.Sync_LocalHalf(syncbuf) | 239 | m.Sync_LocalHalf(syncbuf) |
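The init.py fix above is subtle: the Python 2 shim previously bound urllib.parse to the urlparse function, so the later call urllib.parse.urlparse(...) failed; binding the whole urlparse module makes the call sites identical on both interpreters. A small compatibility sketch along the same lines; the Gerrit URL is a placeholder:

    try:
        import urllib.parse            # Python 3
    except ImportError:                # Python 2
        import imp
        import urlparse
        urllib = imp.new_module('urllib')
        urllib.parse = urlparse        # expose the module, not the function

    url = 'https://gerrit.example.com/platform/manifest'
    print(urllib.parse.urlparse(url).path[1:])   # platform/manifest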
diff --git a/subcmds/start.py b/subcmds/start.py index 2d723fc2..60ad41e0 100644 --- a/subcmds/start.py +++ b/subcmds/start.py | |||
| @@ -59,9 +59,13 @@ revision specified in the manifest. | |||
| 59 | for project in all_projects: | 59 | for project in all_projects: |
| 60 | pm.update() | 60 | pm.update() |
| 61 | # If the current revision is a specific SHA1 then we can't push back | 61 | # If the current revision is a specific SHA1 then we can't push back |
| 62 | # to it so substitute the manifest default revision instead. | 62 | # to it, so substitute the dest_branch if defined, or the manifest |
| 63 | # default revision instead. | ||
| 63 | if IsId(project.revisionExpr): | 64 | if IsId(project.revisionExpr): |
| 64 | project.revisionExpr = self.manifest.default.revisionExpr | 65 | if project.dest_branch: |
| 66 | project.revisionExpr = project.dest_branch | ||
| 67 | else: | ||
| 68 | project.revisionExpr = self.manifest.default.revisionExpr | ||
| 65 | if not project.StartBranch(nb): | 69 | if not project.StartBranch(nb): |
| 66 | err.append(project) | 70 | err.append(project) |
| 67 | pm.end() | 71 | pm.end() |
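The substitution rule added above, in isolation: a project pinned to a SHA-1 cannot be pushed back to, so `repo start` now prefers the project's dest_branch when one is set and only then falls back to the manifest default. A stand-alone sketch of that decision; the 40-hex-digit check here is a simplified stand-in for the IsId() helper the real code imports:

    import re

    _ID_RE = re.compile(r'^[0-9a-f]{40}$')

    def pick_start_revision(revision_expr, dest_branch, manifest_default):
        if _ID_RE.match(revision_expr or ''):
            return dest_branch or manifest_default
        return revision_expr

    print(pick_start_revision('a' * 40, 'refs/heads/release', 'refs/heads/master'))
    print(pick_start_revision('a' * 40, None, 'refs/heads/master'))
    print(pick_start_revision('refs/heads/dev', None, 'refs/heads/master'))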
diff --git a/subcmds/status.py b/subcmds/status.py index 41c4429a..38c229b1 100644 --- a/subcmds/status.py +++ b/subcmds/status.py | |||
| @@ -22,15 +22,8 @@ except ImportError: | |||
| 22 | 22 | ||
| 23 | import glob | 23 | import glob |
| 24 | 24 | ||
| 25 | from pyversion import is_python3 | ||
| 26 | if is_python3(): | ||
| 27 | import io | ||
| 28 | else: | ||
| 29 | import StringIO as io | ||
| 30 | |||
| 31 | import itertools | 25 | import itertools |
| 32 | import os | 26 | import os |
| 33 | import sys | ||
| 34 | 27 | ||
| 35 | from color import Coloring | 28 | from color import Coloring |
| 36 | 29 | ||
| @@ -97,7 +90,7 @@ the following meanings: | |||
| 97 | dest='orphans', action='store_true', | 90 | dest='orphans', action='store_true', |
| 98 | help="include objects in working directory outside of repo projects") | 91 | help="include objects in working directory outside of repo projects") |
| 99 | 92 | ||
| 100 | def _StatusHelper(self, project, clean_counter, sem, output): | 93 | def _StatusHelper(self, project, clean_counter, sem): |
| 101 | """Obtains the status for a specific project. | 94 | """Obtains the status for a specific project. |
| 102 | 95 | ||
| 103 | Obtains the status for a project, redirecting the output to | 96 | Obtains the status for a project, redirecting the output to |
| @@ -111,9 +104,9 @@ the following meanings: | |||
| 111 | output: Where to output the status. | 104 | output: Where to output the status. |
| 112 | """ | 105 | """ |
| 113 | try: | 106 | try: |
| 114 | state = project.PrintWorkTreeStatus(output) | 107 | state = project.PrintWorkTreeStatus() |
| 115 | if state == 'CLEAN': | 108 | if state == 'CLEAN': |
| 116 | clean_counter.next() | 109 | next(clean_counter) |
| 117 | finally: | 110 | finally: |
| 118 | sem.release() | 111 | sem.release() |
| 119 | 112 | ||
| @@ -122,16 +115,16 @@ the following meanings: | |||
| 122 | status_header = ' --\t' | 115 | status_header = ' --\t' |
| 123 | for item in dirs: | 116 | for item in dirs: |
| 124 | if not os.path.isdir(item): | 117 | if not os.path.isdir(item): |
| 125 | outstring.write(''.join([status_header, item])) | 118 | outstring.append(''.join([status_header, item])) |
| 126 | continue | 119 | continue |
| 127 | if item in proj_dirs: | 120 | if item in proj_dirs: |
| 128 | continue | 121 | continue |
| 129 | if item in proj_dirs_parents: | 122 | if item in proj_dirs_parents: |
| 130 | self._FindOrphans(glob.glob('%s/.*' % item) + \ | 123 | self._FindOrphans(glob.glob('%s/.*' % item) + |
| 131 | glob.glob('%s/*' % item), \ | 124 | glob.glob('%s/*' % item), |
| 132 | proj_dirs, proj_dirs_parents, outstring) | 125 | proj_dirs, proj_dirs_parents, outstring) |
| 133 | continue | 126 | continue |
| 134 | outstring.write(''.join([status_header, item, '/'])) | 127 | outstring.append(''.join([status_header, item, '/'])) |
| 135 | 128 | ||
| 136 | def Execute(self, opt, args): | 129 | def Execute(self, opt, args): |
| 137 | all_projects = self.GetProjects(args) | 130 | all_projects = self.GetProjects(args) |
| @@ -141,30 +134,21 @@ the following meanings: | |||
| 141 | for project in all_projects: | 134 | for project in all_projects: |
| 142 | state = project.PrintWorkTreeStatus() | 135 | state = project.PrintWorkTreeStatus() |
| 143 | if state == 'CLEAN': | 136 | if state == 'CLEAN': |
| 144 | counter.next() | 137 | next(counter) |
| 145 | else: | 138 | else: |
| 146 | sem = _threading.Semaphore(opt.jobs) | 139 | sem = _threading.Semaphore(opt.jobs) |
| 147 | threads_and_output = [] | 140 | threads = [] |
| 148 | for project in all_projects: | 141 | for project in all_projects: |
| 149 | sem.acquire() | 142 | sem.acquire() |
| 150 | 143 | ||
| 151 | class BufList(io.StringIO): | ||
| 152 | def dump(self, ostream): | ||
| 153 | for entry in self.buflist: | ||
| 154 | ostream.write(entry) | ||
| 155 | |||
| 156 | output = BufList() | ||
| 157 | |||
| 158 | t = _threading.Thread(target=self._StatusHelper, | 144 | t = _threading.Thread(target=self._StatusHelper, |
| 159 | args=(project, counter, sem, output)) | 145 | args=(project, counter, sem)) |
| 160 | threads_and_output.append((t, output)) | 146 | threads.append(t) |
| 161 | t.daemon = True | 147 | t.daemon = True |
| 162 | t.start() | 148 | t.start() |
| 163 | for (t, output) in threads_and_output: | 149 | for t in threads: |
| 164 | t.join() | 150 | t.join() |
| 165 | output.dump(sys.stdout) | 151 | if len(all_projects) == next(counter): |
| 166 | output.close() | ||
| 167 | if len(all_projects) == counter.next(): | ||
| 168 | print('nothing to commit (working directory clean)') | 152 | print('nothing to commit (working directory clean)') |
| 169 | 153 | ||
| 170 | if opt.orphans: | 154 | if opt.orphans: |
| @@ -188,23 +172,21 @@ the following meanings: | |||
| 188 | try: | 172 | try: |
| 189 | os.chdir(self.manifest.topdir) | 173 | os.chdir(self.manifest.topdir) |
| 190 | 174 | ||
| 191 | outstring = io.StringIO() | 175 | outstring = [] |
| 192 | self._FindOrphans(glob.glob('.*') + \ | 176 | self._FindOrphans(glob.glob('.*') + |
| 193 | glob.glob('*'), \ | 177 | glob.glob('*'), |
| 194 | proj_dirs, proj_dirs_parents, outstring) | 178 | proj_dirs, proj_dirs_parents, outstring) |
| 195 | 179 | ||
| 196 | if outstring.buflist: | 180 | if outstring: |
| 197 | output = StatusColoring(self.manifest.globalConfig) | 181 | output = StatusColoring(self.manifest.globalConfig) |
| 198 | output.project('Objects not within a project (orphans)') | 182 | output.project('Objects not within a project (orphans)') |
| 199 | output.nl() | 183 | output.nl() |
| 200 | for entry in outstring.buflist: | 184 | for entry in outstring: |
| 201 | output.untracked(entry) | 185 | output.untracked(entry) |
| 202 | output.nl() | 186 | output.nl() |
| 203 | else: | 187 | else: |
| 204 | print('No orphan files or directories') | 188 | print('No orphan files or directories') |
| 205 | 189 | ||
| 206 | outstring.close() | ||
| 207 | |||
| 208 | finally: | 190 | finally: |
| 209 | # Restore CWD. | 191 | # Restore CWD. |
| 210 | os.chdir(orig_path) | 192 | os.chdir(orig_path) |
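The status changes above drop the per-project StringIO buffers but keep the concurrency pattern: a Semaphore bounds the live threads and a shared itertools.count() is advanced with next(), which is valid on both Python 2.6+ and Python 3, unlike the removed counter.next(). A compact sketch of that pattern; the check() body is a placeholder for the real worktree-status call:

    import itertools
    import threading

    def run_checks(items, jobs=4):
        sem = threading.Semaphore(jobs)
        clean = itertools.count()
        threads = []

        def check(item):
            try:
                if item % 2 == 0:        # stand-in for "worktree is CLEAN"
                    next(clean)          # effectively atomic under the GIL
            finally:
                sem.release()

        for item in items:
            sem.acquire()
            t = threading.Thread(target=check, args=(item,))
            t.daemon = True
            t.start()
            threads.append(t)
        for t in threads:
            t.join()
        return next(clean)               # equals the number of clean items

    if __name__ == '__main__':
        print(run_checks(range(10)))     # 5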
diff --git a/subcmds/sync.py b/subcmds/sync.py index b50df099..43d450be 100644 --- a/subcmds/sync.py +++ b/subcmds/sync.py | |||
| @@ -14,10 +14,10 @@ | |||
| 14 | # limitations under the License. | 14 | # limitations under the License. |
| 15 | 15 | ||
| 16 | from __future__ import print_function | 16 | from __future__ import print_function |
| 17 | import json | ||
| 17 | import netrc | 18 | import netrc |
| 18 | from optparse import SUPPRESS_HELP | 19 | from optparse import SUPPRESS_HELP |
| 19 | import os | 20 | import os |
| 20 | import pickle | ||
| 21 | import re | 21 | import re |
| 22 | import shutil | 22 | import shutil |
| 23 | import socket | 23 | import socket |
| @@ -119,6 +119,11 @@ credentials. | |||
| 119 | The -f/--force-broken option can be used to proceed with syncing | 119 | The -f/--force-broken option can be used to proceed with syncing |
| 120 | other projects if a project sync fails. | 120 | other projects if a project sync fails. |
| 121 | 121 | ||
| 122 | The --force-sync option can be used to overwrite existing git | ||
| 123 | directories if they have previously been linked to a different | ||
| 124 | object directory. WARNING: This may cause data to be lost since | ||
| 125 | refs may be removed when overwriting. | ||
| 126 | |||
| 122 | The --no-clone-bundle option disables any attempt to use | 127 | The --no-clone-bundle option disables any attempt to use |
| 123 | $URL/clone.bundle to bootstrap a new Git repository from a | 128 | $URL/clone.bundle to bootstrap a new Git repository from a |
| 123 | resumable bundle file on a content delivery network. This | 128 | resumable bundle file on a content delivery network. This |
| @@ -128,6 +133,13 @@ HTTP client or proxy configuration, but the Git binary works. | |||
| 128 | The --fetch-submodules option enables fetching Git submodules | 133 | The --fetch-submodules option enables fetching Git submodules |
| 129 | of a project from server. | 134 | of a project from server. |
| 130 | 135 | ||
| 136 | The -c/--current-branch option can be used to only fetch objects that | ||
| 137 | are on the branch specified by a project's revision. | ||
| 138 | |||
| 139 | The --optimized-fetch option can be used to only fetch projects that | ||
| 140 | are fixed to a sha1 revision if the sha1 revision does not already | ||
| 141 | exist locally. | ||
| 142 | |||
| 131 | SSH Connections | 143 | SSH Connections |
| 132 | --------------- | 144 | --------------- |
| 133 | 145 | ||
| @@ -167,6 +179,11 @@ later is required to fix a server side protocol bug. | |||
| 167 | p.add_option('-f', '--force-broken', | 179 | p.add_option('-f', '--force-broken', |
| 168 | dest='force_broken', action='store_true', | 180 | dest='force_broken', action='store_true', |
| 169 | help="continue sync even if a project fails to sync") | 181 | help="continue sync even if a project fails to sync") |
| 182 | p.add_option('--force-sync', | ||
| 183 | dest='force_sync', action='store_true', | ||
| 184 | help="overwrite an existing git directory if it needs to " | ||
| 185 | "point to a different object directory. WARNING: this " | ||
| 186 | "may cause loss of data") | ||
| 170 | p.add_option('-l', '--local-only', | 187 | p.add_option('-l', '--local-only', |
| 171 | dest='local_only', action='store_true', | 188 | dest='local_only', action='store_true', |
| 172 | help="only update working tree, don't fetch") | 189 | help="only update working tree, don't fetch") |
| @@ -203,6 +220,9 @@ later is required to fix a server side protocol bug. | |||
| 203 | p.add_option('--no-tags', | 220 | p.add_option('--no-tags', |
| 204 | dest='no_tags', action='store_true', | 221 | dest='no_tags', action='store_true', |
| 205 | help="don't fetch tags") | 222 | help="don't fetch tags") |
| 223 | p.add_option('--optimized-fetch', | ||
| 224 | dest='optimized_fetch', action='store_true', | ||
| 225 | help='only fetch projects fixed to sha1 if revision does not exist locally') | ||
| 206 | if show_smart: | 226 | if show_smart: |
| 207 | p.add_option('-s', '--smart-sync', | 227 | p.add_option('-s', '--smart-sync', |
| 208 | dest='smart_sync', action='store_true', | 228 | dest='smart_sync', action='store_true', |
| @@ -271,8 +291,10 @@ later is required to fix a server side protocol bug. | |||
| 271 | success = project.Sync_NetworkHalf( | 291 | success = project.Sync_NetworkHalf( |
| 272 | quiet=opt.quiet, | 292 | quiet=opt.quiet, |
| 273 | current_branch_only=opt.current_branch_only, | 293 | current_branch_only=opt.current_branch_only, |
| 294 | force_sync=opt.force_sync, | ||
| 274 | clone_bundle=not opt.no_clone_bundle, | 295 | clone_bundle=not opt.no_clone_bundle, |
| 275 | no_tags=opt.no_tags, archive=self.manifest.IsArchive) | 296 | no_tags=opt.no_tags, archive=self.manifest.IsArchive, |
| 297 | optimized_fetch=opt.optimized_fetch) | ||
| 276 | self._fetch_times.Set(project, time.time() - start) | 298 | self._fetch_times.Set(project, time.time() - start) |
| 277 | 299 | ||
| 278 | # Lock around all the rest of the code, since printing, updating a set | 300 | # Lock around all the rest of the code, since printing, updating a set |
| @@ -508,6 +530,9 @@ later is required to fix a server side protocol bug. | |||
| 508 | self.manifest.Override(opt.manifest_name) | 530 | self.manifest.Override(opt.manifest_name) |
| 509 | 531 | ||
| 510 | manifest_name = opt.manifest_name | 532 | manifest_name = opt.manifest_name |
| 533 | smart_sync_manifest_name = "smart_sync_override.xml" | ||
| 534 | smart_sync_manifest_path = os.path.join( | ||
| 535 | self.manifest.manifestProject.worktree, smart_sync_manifest_name) | ||
| 511 | 536 | ||
| 512 | if opt.smart_sync or opt.smart_tag: | 537 | if opt.smart_sync or opt.smart_tag: |
| 513 | if not self.manifest.manifest_server: | 538 | if not self.manifest.manifest_server: |
| @@ -560,7 +585,10 @@ later is required to fix a server side protocol bug. | |||
| 560 | branch = branch[len(R_HEADS):] | 585 | branch = branch[len(R_HEADS):] |
| 561 | 586 | ||
| 562 | env = os.environ.copy() | 587 | env = os.environ.copy() |
| 563 | if 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env: | 588 | if 'SYNC_TARGET' in env: |
| 589 | target = env['SYNC_TARGET'] | ||
| 590 | [success, manifest_str] = server.GetApprovedManifest(branch, target) | ||
| 591 | elif 'TARGET_PRODUCT' in env and 'TARGET_BUILD_VARIANT' in env: | ||
| 564 | target = '%s-%s' % (env['TARGET_PRODUCT'], | 592 | target = '%s-%s' % (env['TARGET_PRODUCT'], |
| 565 | env['TARGET_BUILD_VARIANT']) | 593 | env['TARGET_BUILD_VARIANT']) |
| 566 | [success, manifest_str] = server.GetApprovedManifest(branch, target) | 594 | [success, manifest_str] = server.GetApprovedManifest(branch, target) |
| @@ -571,17 +599,16 @@ later is required to fix a server side protocol bug. | |||
| 571 | [success, manifest_str] = server.GetManifest(opt.smart_tag) | 599 | [success, manifest_str] = server.GetManifest(opt.smart_tag) |
| 572 | 600 | ||
| 573 | if success: | 601 | if success: |
| 574 | manifest_name = "smart_sync_override.xml" | 602 | manifest_name = smart_sync_manifest_name |
| 575 | manifest_path = os.path.join(self.manifest.manifestProject.worktree, | ||
| 576 | manifest_name) | ||
| 577 | try: | 603 | try: |
| 578 | f = open(manifest_path, 'w') | 604 | f = open(smart_sync_manifest_path, 'w') |
| 579 | try: | 605 | try: |
| 580 | f.write(manifest_str) | 606 | f.write(manifest_str) |
| 581 | finally: | 607 | finally: |
| 582 | f.close() | 608 | f.close() |
| 583 | except IOError: | 609 | except IOError as e: |
| 584 | print('error: cannot write manifest to %s' % manifest_path, | 610 | print('error: cannot write manifest to %s:\n%s' |
| 611 | % (smart_sync_manifest_path, e), | ||
| 585 | file=sys.stderr) | 612 | file=sys.stderr) |
| 586 | sys.exit(1) | 613 | sys.exit(1) |
| 587 | self._ReloadManifest(manifest_name) | 614 | self._ReloadManifest(manifest_name) |
| @@ -598,6 +625,13 @@ later is required to fix a server side protocol bug. | |||
| 598 | % (self.manifest.manifest_server, e.errcode, e.errmsg), | 625 | % (self.manifest.manifest_server, e.errcode, e.errmsg), |
| 599 | file=sys.stderr) | 626 | file=sys.stderr) |
| 600 | sys.exit(1) | 627 | sys.exit(1) |
| 628 | else: # Not smart sync or smart tag mode | ||
| 629 | if os.path.isfile(smart_sync_manifest_path): | ||
| 630 | try: | ||
| 631 | os.remove(smart_sync_manifest_path) | ||
| 632 | except OSError as e: | ||
| 633 | print('error: failed to remove existing smart sync override manifest: %s' % | ||
| 634 | e, file=sys.stderr) | ||
| 601 | 635 | ||
| 602 | rp = self.manifest.repoProject | 636 | rp = self.manifest.repoProject |
| 603 | rp.PreSync() | 637 | rp.PreSync() |
| @@ -611,7 +645,8 @@ later is required to fix a server side protocol bug. | |||
| 611 | if not opt.local_only: | 645 | if not opt.local_only: |
| 612 | mp.Sync_NetworkHalf(quiet=opt.quiet, | 646 | mp.Sync_NetworkHalf(quiet=opt.quiet, |
| 613 | current_branch_only=opt.current_branch_only, | 647 | current_branch_only=opt.current_branch_only, |
| 614 | no_tags=opt.no_tags) | 648 | no_tags=opt.no_tags, |
| 649 | optimized_fetch=opt.optimized_fetch) | ||
| 615 | 650 | ||
| 616 | if mp.HasChanges: | 651 | if mp.HasChanges: |
| 617 | syncbuf = SyncBuffer(mp.config) | 652 | syncbuf = SyncBuffer(mp.config) |
| @@ -674,7 +709,7 @@ later is required to fix a server side protocol bug. | |||
| 674 | for project in all_projects: | 709 | for project in all_projects: |
| 675 | pm.update() | 710 | pm.update() |
| 676 | if project.worktree: | 711 | if project.worktree: |
| 677 | project.Sync_LocalHalf(syncbuf) | 712 | project.Sync_LocalHalf(syncbuf, force_sync=opt.force_sync) |
| 678 | pm.end() | 713 | pm.end() |
| 679 | print(file=sys.stderr) | 714 | print(file=sys.stderr) |
| 680 | if not syncbuf.Finish(): | 715 | if not syncbuf.Finish(): |
| @@ -762,7 +797,7 @@ class _FetchTimes(object): | |||
| 762 | _ALPHA = 0.5 | 797 | _ALPHA = 0.5 |
| 763 | 798 | ||
| 764 | def __init__(self, manifest): | 799 | def __init__(self, manifest): |
| 765 | self._path = os.path.join(manifest.repodir, '.repopickle_fetchtimes') | 800 | self._path = os.path.join(manifest.repodir, '.repo_fetchtimes.json') |
| 766 | self._times = None | 801 | self._times = None |
| 767 | self._seen = set() | 802 | self._seen = set() |
| 768 | 803 | ||
| @@ -781,22 +816,17 @@ class _FetchTimes(object): | |||
| 781 | def _Load(self): | 816 | def _Load(self): |
| 782 | if self._times is None: | 817 | if self._times is None: |
| 783 | try: | 818 | try: |
| 784 | f = open(self._path, 'rb') | 819 | f = open(self._path) |
| 785 | except IOError: | ||
| 786 | self._times = {} | ||
| 787 | return self._times | ||
| 788 | try: | ||
| 789 | try: | 820 | try: |
| 790 | self._times = pickle.load(f) | 821 | self._times = json.load(f) |
| 791 | except IOError: | 822 | finally: |
| 792 | try: | 823 | f.close() |
| 793 | os.remove(self._path) | 824 | except (IOError, ValueError): |
| 794 | except OSError: | 825 | try: |
| 795 | pass | 826 | os.remove(self._path) |
| 796 | self._times = {} | 827 | except OSError: |
| 797 | finally: | 828 | pass |
| 798 | f.close() | 829 | self._times = {} |
| 799 | return self._times | ||
| 800 | 830 | ||
| 801 | def Save(self): | 831 | def Save(self): |
| 802 | if self._times is None: | 832 | if self._times is None: |
| @@ -810,13 +840,13 @@ class _FetchTimes(object): | |||
| 810 | del self._times[name] | 840 | del self._times[name] |
| 811 | 841 | ||
| 812 | try: | 842 | try: |
| 813 | f = open(self._path, 'wb') | 843 | f = open(self._path, 'w') |
| 814 | try: | 844 | try: |
| 815 | pickle.dump(self._times, f) | 845 | json.dump(self._times, f, indent=2) |
| 816 | except (IOError, OSError, pickle.PickleError): | 846 | finally: |
| 817 | try: | 847 | f.close() |
| 818 | os.remove(self._path) | 848 | except (IOError, TypeError): |
| 819 | except OSError: | 849 | try: |
| 820 | pass | 850 | os.remove(self._path) |
| 821 | finally: | 851 | except OSError: |
| 822 | f.close() | 852 | pass |
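Replacing pickle with JSON for the fetch-time cache removes the arbitrary-code-execution risk of unpickling and makes the file human-readable; a corrupt or unreadable file is simply deleted and the cache restarts empty. A hedged sketch of that load/save behaviour; the class name and file location are illustrative, not the real _FetchTimes API:

    import json
    import os
    import tempfile

    class FetchTimesSketch(object):
        def __init__(self, path):
            self._path = path
            self._times = None

        def _load(self):
            if self._times is None:
                try:
                    with open(self._path) as f:
                        self._times = json.load(f)
                except (IOError, ValueError):
                    # Missing or corrupt cache: drop it and start over.
                    try:
                        os.remove(self._path)
                    except OSError:
                        pass
                    self._times = {}
            return self._times

        def Get(self, name, default=0.0):
            return self._load().get(name, default)

        def Set(self, name, seconds):
            self._load()[name] = seconds

        def Save(self):
            if self._times is None:
                return
            try:
                with open(self._path, 'w') as f:
                    json.dump(self._times, f, indent=2)
            except (IOError, TypeError):
                try:
                    os.remove(self._path)
                except OSError:
                    pass

    if __name__ == '__main__':
        cache = FetchTimesSketch(
            os.path.join(tempfile.gettempdir(), 'repo_fetchtimes_demo.json'))
        cache.Set('platform/build', 12.5)
        cache.Save()
        print(cache.Get('platform/build'))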
diff --git a/subcmds/upload.py b/subcmds/upload.py index e2fa261e..674fc17d 100644 --- a/subcmds/upload.py +++ b/subcmds/upload.py | |||
| @@ -25,10 +25,12 @@ from git_command import GitCommand | |||
| 25 | from project import RepoHook | 25 | from project import RepoHook |
| 26 | 26 | ||
| 27 | from pyversion import is_python3 | 27 | from pyversion import is_python3 |
| 28 | # pylint:disable=W0622 | ||
| 28 | if not is_python3(): | 29 | if not is_python3(): |
| 29 | # pylint:disable=W0622 | ||
| 30 | input = raw_input | 30 | input = raw_input |
| 31 | # pylint:enable=W0622 | 31 | else: |
| 32 | unicode = str | ||
| 33 | # pylint:enable=W0622 | ||
| 32 | 34 | ||
| 33 | UNUSUAL_COMMIT_THRESHOLD = 5 | 35 | UNUSUAL_COMMIT_THRESHOLD = 5 |
| 34 | 36 | ||
| @@ -337,13 +339,17 @@ Gerrit Code Review: http://code.google.com/p/gerrit/ | |||
| 337 | self._AppendAutoList(branch, people) | 339 | self._AppendAutoList(branch, people) |
| 338 | 340 | ||
| 339 | # Check if there are local changes that may have been forgotten | 341 | # Check if there are local changes that may have been forgotten |
| 340 | if branch.project.HasChanges(): | 342 | changes = branch.project.UncommitedFiles() |
| 343 | if changes: | ||
| 341 | key = 'review.%s.autoupload' % branch.project.remote.review | 344 | key = 'review.%s.autoupload' % branch.project.remote.review |
| 342 | answer = branch.project.config.GetBoolean(key) | 345 | answer = branch.project.config.GetBoolean(key) |
| 343 | 346 | ||
| 344 | # if they want to auto upload, let's not ask because it could be automated | 347 | # if they want to auto upload, let's not ask because it could be automated |
| 345 | if answer is None: | 348 | if answer is None: |
| 346 | sys.stdout.write('Uncommitted changes in ' + branch.project.name + ' (did you forget to amend?). Continue uploading? (y/N) ') | 349 | sys.stdout.write('Uncommitted changes in ' + branch.project.name) |
| 350 | sys.stdout.write(' (did you forget to amend?):\n') | ||
| 351 | sys.stdout.write('\n'.join(changes) + '\n') | ||
| 352 | sys.stdout.write('Continue uploading? (y/N) ') | ||
| 347 | a = sys.stdin.readline().strip().lower() | 353 | a = sys.stdin.readline().strip().lower() |
| 348 | if a not in ('y', 'yes', 't', 'true', 'on'): | 354 | if a not in ('y', 'yes', 't', 'true', 'on'): |
| 349 | print("skipping upload", file=sys.stderr) | 355 | print("skipping upload", file=sys.stderr) |
