| -rw-r--r-- | .mailmap | 1 |
| -rw-r--r-- | README.md | 2 |
| -rw-r--r-- | command.py | 2 |
| -rw-r--r-- | docs/manifest-format.txt | 23 |
| -rw-r--r-- | docs/repo-hooks.md | 105 |
| -rw-r--r-- | editor.py | 10 |
| -rw-r--r-- | event_log.py | 177 |
| -rw-r--r-- | git_command.py | 30 |
| -rw-r--r-- | git_config.py | 35 |
| -rw-r--r-- | git_refs.py | 13 |
| -rwxr-xr-x | hooks/pre-auto-gc | 16 |
| -rwxr-xr-x | main.py | 22 |
| -rw-r--r-- | manifest_xml.py | 29 |
| -rwxr-xr-x | pager.py | 38 |
| -rw-r--r-- | platform_utils.py | 315 |
| -rw-r--r-- | platform_utils_win32.py | 217 |
| -rw-r--r-- | progress.py | 12 |
| -rw-r--r-- | project.py | 273 |
| -rwxr-xr-x | repo | 26 |
| -rw-r--r-- | subcmds/abandon.py | 71 |
| -rw-r--r-- | subcmds/download.py | 5 |
| -rw-r--r-- | subcmds/forall.py | 36 |
| -rw-r--r-- | subcmds/gitc_delete.py | 6 |
| -rw-r--r-- | subcmds/init.py | 26 |
| -rw-r--r-- | subcmds/stage.py | 4 |
| -rw-r--r-- | subcmds/start.py | 10 |
| -rw-r--r-- | subcmds/status.py | 12 |
| -rw-r--r-- | subcmds/sync.py | 84 |
| -rw-r--r-- | subcmds/upload.py | 30 |
29 files changed, 1369 insertions, 261 deletions
diff --git a/.mailmap b/.mailmap
| @@ -1,4 +1,5 @@ | |||
| 1 | Anthony Newnam <anthony.newnam@garmin.com> Anthony <anthony@bnovc.com> | 1 | Anthony Newnam <anthony.newnam@garmin.com> Anthony <anthony@bnovc.com> |
| 2 | He Ping <tdihp@hotmail.com> heping <tdihp@hotmail.com> | ||
| 2 | Hu Xiuyun <xiuyun.hu@hisilicon.com> Hu xiuyun <xiuyun.hu@hisilicon.com> | 3 | Hu Xiuyun <xiuyun.hu@hisilicon.com> Hu xiuyun <xiuyun.hu@hisilicon.com> |
| 3 | Hu Xiuyun <xiuyun.hu@hisilicon.com> Hu Xiuyun <clouds08@qq.com> | 4 | Hu Xiuyun <xiuyun.hu@hisilicon.com> Hu Xiuyun <clouds08@qq.com> |
| 4 | Jelly Chen <chenguodong@huawei.com> chenguodong <chenguodong@huawei.com> | 5 | Jelly Chen <chenguodong@huawei.com> chenguodong <chenguodong@huawei.com> |
diff --git a/README.md b/README.md
| @@ -11,4 +11,6 @@ that you can put anywhere in your path. | |||
| 11 | * Source: https://code.google.com/p/git-repo/ | 11 | * Source: https://code.google.com/p/git-repo/ |
| 12 | * Overview: https://source.android.com/source/developing.html | 12 | * Overview: https://source.android.com/source/developing.html |
| 13 | * Docs: https://source.android.com/source/using-repo.html | 13 | * Docs: https://source.android.com/source/using-repo.html |
| 14 | * [repo Manifest Format](./docs/manifest-format.txt) | ||
| 15 | * [repo Hooks](./docs/repo-hooks.md) | ||
| 14 | * [Submitting patches](./SUBMITTING_PATCHES.md) | 16 | * [Submitting patches](./SUBMITTING_PATCHES.md) |
diff --git a/command.py b/command.py
| @@ -19,6 +19,7 @@ import platform | |||
| 19 | import re | 19 | import re |
| 20 | import sys | 20 | import sys |
| 21 | 21 | ||
| 22 | from event_log import EventLog | ||
| 22 | from error import NoSuchProjectError | 23 | from error import NoSuchProjectError |
| 23 | from error import InvalidProjectGroupsError | 24 | from error import InvalidProjectGroupsError |
| 24 | 25 | ||
| @@ -28,6 +29,7 @@ class Command(object): | |||
| 28 | """ | 29 | """ |
| 29 | 30 | ||
| 30 | common = False | 31 | common = False |
| 32 | event_log = EventLog() | ||
| 31 | manifest = None | 33 | manifest = None |
| 32 | _optparse = None | 34 | _optparse = None |
| 33 | 35 | ||
diff --git a/docs/manifest-format.txt b/docs/manifest-format.txt
index 2a07f199..77784099 100644
--- a/docs/manifest-format.txt
+++ b/docs/manifest-format.txt
| @@ -27,11 +27,12 @@ following DTD: | |||
| 27 | remove-project*, | 27 | remove-project*, |
| 28 | project*, | 28 | project*, |
| 29 | extend-project*, | 29 | extend-project*, |
| 30 | repo-hooks?)> | 30 | repo-hooks?, |
| 31 | include*)> | ||
| 31 | 32 | ||
| 32 | <!ELEMENT notice (#PCDATA)> | 33 | <!ELEMENT notice (#PCDATA)> |
| 33 | 34 | ||
| 34 | <!ELEMENT remote (EMPTY)> | 35 | <!ELEMENT remote EMPTY> |
| 35 | <!ATTLIST remote name ID #REQUIRED> | 36 | <!ATTLIST remote name ID #REQUIRED> |
| 36 | <!ATTLIST remote alias CDATA #IMPLIED> | 37 | <!ATTLIST remote alias CDATA #IMPLIED> |
| 37 | <!ATTLIST remote fetch CDATA #REQUIRED> | 38 | <!ATTLIST remote fetch CDATA #REQUIRED> |
| @@ -39,7 +40,7 @@ following DTD: | |||
| 39 | <!ATTLIST remote review CDATA #IMPLIED> | 40 | <!ATTLIST remote review CDATA #IMPLIED> |
| 40 | <!ATTLIST remote revision CDATA #IMPLIED> | 41 | <!ATTLIST remote revision CDATA #IMPLIED> |
| 41 | 42 | ||
| 42 | <!ELEMENT default (EMPTY)> | 43 | <!ELEMENT default EMPTY> |
| 43 | <!ATTLIST default remote IDREF #IMPLIED> | 44 | <!ATTLIST default remote IDREF #IMPLIED> |
| 44 | <!ATTLIST default revision CDATA #IMPLIED> | 45 | <!ATTLIST default revision CDATA #IMPLIED> |
| 45 | <!ATTLIST default dest-branch CDATA #IMPLIED> | 46 | <!ATTLIST default dest-branch CDATA #IMPLIED> |
| @@ -47,7 +48,7 @@ following DTD: | |||
| 47 | <!ATTLIST default sync-c CDATA #IMPLIED> | 48 | <!ATTLIST default sync-c CDATA #IMPLIED> |
| 48 | <!ATTLIST default sync-s CDATA #IMPLIED> | 49 | <!ATTLIST default sync-s CDATA #IMPLIED> |
| 49 | 50 | ||
| 50 | <!ELEMENT manifest-server (EMPTY)> | 51 | <!ELEMENT manifest-server EMPTY> |
| 51 | <!ATTLIST manifest-server url CDATA #REQUIRED> | 52 | <!ATTLIST manifest-server url CDATA #REQUIRED> |
| 52 | 53 | ||
| 53 | <!ELEMENT project (annotation*, | 54 | <!ELEMENT project (annotation*, |
| @@ -66,32 +67,32 @@ following DTD: | |||
| 66 | <!ATTLIST project clone-depth CDATA #IMPLIED> | 67 | <!ATTLIST project clone-depth CDATA #IMPLIED> |
| 67 | <!ATTLIST project force-path CDATA #IMPLIED> | 68 | <!ATTLIST project force-path CDATA #IMPLIED> |
| 68 | 69 | ||
| 69 | <!ELEMENT annotation (EMPTY)> | 70 | <!ELEMENT annotation EMPTY> |
| 70 | <!ATTLIST annotation name CDATA #REQUIRED> | 71 | <!ATTLIST annotation name CDATA #REQUIRED> |
| 71 | <!ATTLIST annotation value CDATA #REQUIRED> | 72 | <!ATTLIST annotation value CDATA #REQUIRED> |
| 72 | <!ATTLIST annotation keep CDATA "true"> | 73 | <!ATTLIST annotation keep CDATA "true"> |
| 73 | 74 | ||
| 74 | <!ELEMENT copyfile (EMPTY)> | 75 | <!ELEMENT copyfile EMPTY> |
| 75 | <!ATTLIST copyfile src CDATA #REQUIRED> | 76 | <!ATTLIST copyfile src CDATA #REQUIRED> |
| 76 | <!ATTLIST copyfile dest CDATA #REQUIRED> | 77 | <!ATTLIST copyfile dest CDATA #REQUIRED> |
| 77 | 78 | ||
| 78 | <!ELEMENT linkfile (EMPTY)> | 79 | <!ELEMENT linkfile EMPTY> |
| 79 | <!ATTLIST linkfile src CDATA #REQUIRED> | 80 | <!ATTLIST linkfile src CDATA #REQUIRED> |
| 80 | <!ATTLIST linkfile dest CDATA #REQUIRED> | 81 | <!ATTLIST linkfile dest CDATA #REQUIRED> |
| 81 | 82 | ||
| 82 | <!ELEMENT extend-project (EMPTY)> | 83 | <!ELEMENT extend-project EMPTY> |
| 83 | <!ATTLIST extend-project name CDATA #REQUIRED> | 84 | <!ATTLIST extend-project name CDATA #REQUIRED> |
| 84 | <!ATTLIST extend-project path CDATA #IMPLIED> | 85 | <!ATTLIST extend-project path CDATA #IMPLIED> |
| 85 | <!ATTLIST extend-project groups CDATA #IMPLIED> | 86 | <!ATTLIST extend-project groups CDATA #IMPLIED> |
| 86 | 87 | ||
| 87 | <!ELEMENT remove-project (EMPTY)> | 88 | <!ELEMENT remove-project EMPTY> |
| 88 | <!ATTLIST remove-project name CDATA #REQUIRED> | 89 | <!ATTLIST remove-project name CDATA #REQUIRED> |
| 89 | 90 | ||
| 90 | <!ELEMENT repo-hooks (EMPTY)> | 91 | <!ELEMENT repo-hooks EMPTY> |
| 91 | <!ATTLIST repo-hooks in-project CDATA #REQUIRED> | 92 | <!ATTLIST repo-hooks in-project CDATA #REQUIRED> |
| 92 | <!ATTLIST repo-hooks enabled-list CDATA #REQUIRED> | 93 | <!ATTLIST repo-hooks enabled-list CDATA #REQUIRED> |
| 93 | 94 | ||
| 94 | <!ELEMENT include (EMPTY)> | 95 | <!ELEMENT include EMPTY> |
| 95 | <!ATTLIST include name CDATA #REQUIRED> | 96 | <!ATTLIST include name CDATA #REQUIRED> |
| 96 | ]> | 97 | ]> |
| 97 | 98 | ||
diff --git a/docs/repo-hooks.md b/docs/repo-hooks.md
new file mode 100644
index 00000000..c8eb945f
--- /dev/null
+++ b/docs/repo-hooks.md
| @@ -0,0 +1,105 @@ | |||
| 1 | # repo hooks | ||
| 2 | |||
| 3 | [TOC] | ||
| 4 | |||
| 5 | Repo provides a mechanism to hook specific stages of the runtime with custom | ||
| 6 | python modules. All the hooks live in one git project which is checked out by | ||
| 7 | the manifest (specified during `repo init`), and the manifest itself defines | ||
| 8 | which hooks are registered. | ||
| 9 | |||
| 10 | These are useful to run linters, check formatting, and run quick unittests | ||
| 11 | before allowing a step to proceed (e.g. before uploading a commit to Gerrit). | ||
| 12 | |||
| 13 | A complete example can be found in the Android project. It can be easily | ||
| 14 | re-used by any repo-based project and is not specific to Android.<br> | ||
| 15 | https://android.googlesource.com/platform/tools/repohooks | ||
| 16 | |||
| 17 | ## Approvals | ||
| 18 | |||
| 19 | When a hook is processed the first time, the user is prompted for approval. | ||
| 20 | We don't want to execute arbitrary code without explicit consent. For manifests | ||
| 21 | fetched via secure protocols (e.g. https://), the user is prompted once. For | ||
| 22 | insecure protocols (e.g. http://), the user is prompted whenever the registered | ||
| 23 | repohooks project is updated and a hook is triggered. | ||
| 24 | |||
| 25 | ## Manifest Settings | ||
| 26 | |||
| 27 | For the full syntax, see the [repo manifest format](./manifest-format.txt). | ||
| 28 | |||
| 29 | Here's a short example from | ||
| 30 | [Android](https://android.googlesource.com/platform/manifest/+/master/default.xml). | ||
| 31 | The `<project>` line checks out the repohooks git repo to the local | ||
| 32 | `tools/repohooks/` path. The `<repo-hooks>` line says to look in the project | ||
| 33 | with the name `platform/tools/repohooks` for hooks to run during the | ||
| 34 | `pre-upload` phase. | ||
| 35 | |||
| 36 | ```xml | ||
| 37 | <project path="tools/repohooks" name="platform/tools/repohooks" /> | ||
| 38 | <repo-hooks in-project="platform/tools/repohooks" enabled-list="pre-upload" /> | ||
| 39 | ``` | ||
| 40 | |||
| 41 | ## Source Layout | ||
| 42 | |||
| 43 | The repohooks git repo should have a python file with the same name as the hook. | ||
| 44 | So if you want to support the `pre-upload` hook, you'll need to create a file | ||
| 45 | named `pre-upload.py`. Repo will dynamically load that module when processing | ||
| 46 | the hook and then call the `main` function in it. | ||
| 47 | |||
| 48 | Hooks should have their `main` accept `**kwargs` for future compatibility. | ||
| 49 | |||
| 50 | ## Runtime | ||
| 51 | |||
| 52 | Hook return values are ignored. | ||
| 53 | |||
| 54 | Any uncaught exceptions from the hook will cause the step to fail. This is | ||
| 55 | intended as a fallback safety check, though, rather than the normal flow. If | ||
| 56 | you want your hook to trigger a failure, it should call `sys.exit()` (after | ||
| 57 | displaying relevant diagnostics). | ||
| 58 | |||
| 59 | Output (stdout & stderr) is not filtered in any way. Hooks should generally | ||
| 60 | not be too verbose. A short summary is nice, and some status information when | ||
| 61 | long running operations occur, but long/verbose output should be used only if | ||
| 62 | the hook ultimately fails. | ||
| 63 | |||
| 64 | The hook runs from the top level of the git repo where the operation is started. | ||
| 65 | e.g. If you're in the git repo `src/foo/`, that is where the hook runs, even if | ||
| 66 | the `repo` command was started from a subdir like `src/foo/bar/`. | ||
| 67 | |||
| 68 | Python's `sys.path` is modified so that the top of the repohooks directory comes | ||
| 69 | first. This should help simplify the hook logic to easily allow importing of | ||
| 70 | local modules. | ||
| 71 | |||
| 72 | Repo does not modify the state of the git checkout. This means that the hooks | ||
| 73 | might be running in a dirty git repo with many commits and checked out to the | ||
| 74 | latest one. If the hook wants to operate on specific git commits, it needs to | ||
| 75 | manually discover the list of pending commits, extract the diff/commit, and | ||
| 76 | then check it directly. Hooks should not normally modify the active git repo | ||
| 77 | (such as checking out a specific commit to run checks) without first prompting | ||
| 78 | the user. Although user interaction is discouraged in the common case, it can | ||
| 79 | be useful when deploying automatic fixes. | ||
| 80 | |||
| 81 | ## Hooks | ||
| 82 | |||
| 83 | Here are all the points available for hooking. | ||
| 84 | |||
| 85 | ### pre-upload | ||
| 86 | |||
| 87 | This hook runs when people run `repo upload`. | ||
| 88 | |||
| 89 | The `pre-upload.py` file should be defined like: | ||
| 90 | |||
| 91 | ```py | ||
| 92 | def main(project_list, worktree_list=None, **kwargs): | ||
| 93 | """Main function invoked directly by repo. | ||
| 94 | |||
| 95 | We must use the name "main" as that is what repo requires. | ||
| 96 | |||
| 97 | Args: | ||
| 98 | project_list: List of projects to run on. | ||
| 99 | worktree_list: A list of directories. It should be the same length as | ||
| 100 | project_list, so that each entry in project_list matches with a | ||
| 101 | directory in worktree_list. If None, we will attempt to calculate | ||
| 102 | the directories automatically. | ||
| 103 | kwargs: Leave this here for forward-compatibility. | ||
| 104 | """ | ||
| 105 | ``` | ||
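The `pre-upload.py` skeleton above only defines the required signature. As a rough sketch of the runtime rules this doc describes (return values are ignored, `sys.exit()` signals failure, `**kwargs` is kept for forward compatibility), a hook body might look like the following. The naming policy it enforces is purely hypothetical, and it assumes `project_list` holds project name strings:

```py
from __future__ import print_function

import sys


def main(project_list, worktree_list=None, **kwargs):
  """Example pre-upload hook enforcing a made-up project naming policy."""
  # Assumption: project_list contains project names as strings.
  blocked = [name for name in project_list if 'experimental' in name]
  if not blocked:
    # Returning normally lets the upload proceed; the return value is ignored.
    return
  print('pre-upload: uploads are blocked for these projects:', file=sys.stderr)
  for name in blocked:
    print('  %s' % name, file=sys.stderr)
  # A non-zero exit (or an uncaught exception) makes the upload step fail.
  sys.exit(1)
```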
diff --git a/editor.py b/editor.py
| @@ -21,6 +21,7 @@ import subprocess | |||
| 21 | import tempfile | 21 | import tempfile |
| 22 | 22 | ||
| 23 | from error import EditorError | 23 | from error import EditorError |
| 24 | import platform_utils | ||
| 24 | 25 | ||
| 25 | class Editor(object): | 26 | class Editor(object): |
| 26 | """Manages the user's preferred text editor.""" | 27 | """Manages the user's preferred text editor.""" |
| @@ -82,7 +83,12 @@ least one of these before using this command.""", file=sys.stderr) | |||
| 82 | os.close(fd) | 83 | os.close(fd) |
| 83 | fd = None | 84 | fd = None |
| 84 | 85 | ||
| 85 | if re.compile("^.*[$ \t'].*$").match(editor): | 86 | if platform_utils.isWindows(): |
| 87 | # Split on spaces, respecting quoted strings | ||
| 88 | import shlex | ||
| 89 | args = shlex.split(editor) | ||
| 90 | shell = False | ||
| 91 | elif re.compile("^.*[$ \t'].*$").match(editor): | ||
| 86 | args = [editor + ' "$@"', 'sh'] | 92 | args = [editor + ' "$@"', 'sh'] |
| 87 | shell = True | 93 | shell = True |
| 88 | else: | 94 | else: |
| @@ -107,4 +113,4 @@ least one of these before using this command.""", file=sys.stderr) | |||
| 107 | finally: | 113 | finally: |
| 108 | if fd: | 114 | if fd: |
| 109 | os.close(fd) | 115 | os.close(fd) |
| 110 | os.remove(path) | 116 | platform_utils.remove(path) |
diff --git a/event_log.py b/event_log.py
new file mode 100644
index 00000000..d73511da
--- /dev/null
+++ b/event_log.py
| @@ -0,0 +1,177 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2017 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | from __future__ import print_function | ||
| 17 | |||
| 18 | import json | ||
| 19 | import multiprocessing | ||
| 20 | |||
| 21 | TASK_COMMAND = 'command' | ||
| 22 | TASK_SYNC_NETWORK = 'sync-network' | ||
| 23 | TASK_SYNC_LOCAL = 'sync-local' | ||
| 24 | |||
| 25 | class EventLog(object): | ||
| 26 | """Event log that records events that occurred during a repo invocation. | ||
| 27 | |||
| 28 | Events are written to the log as consecutive JSON entries, one per line. | ||
| 29 | Each entry contains the following keys: | ||
| 30 | - id: A ('RepoOp', ID) tuple, suitable for storing in a datastore. | ||
| 31 | The ID is only unique for the invocation of the repo command. | ||
| 32 | - name: Name of the object being operated upon. | ||
| 33 | - task_name: The task that was performed. | ||
| 34 | - start: Timestamp of when the operation started. | ||
| 35 | - finish: Timestamp of when the operation finished. | ||
| 36 | - success: Boolean indicating if the operation was successful. | ||
| 37 | - try_count: A counter indicating the try count of this task. | ||
| 38 | |||
| 39 | Optionally: | ||
| 40 | - parent: A ('RepoOp', ID) tuple indicating the parent event for nested | ||
| 41 | events. | ||
| 42 | |||
| 43 | Valid task_names include: | ||
| 44 | - command: The invocation of a subcommand. | ||
| 45 | - sync-network: The network component of a sync command. | ||
| 46 | - sync-local: The local component of a sync command. | ||
| 47 | |||
| 48 | Specific tasks may include additional informational properties. | ||
| 49 | """ | ||
| 50 | |||
| 51 | def __init__(self): | ||
| 52 | """Initializes the event log.""" | ||
| 53 | self._log = [] | ||
| 54 | self._next_id = _EventIdGenerator() | ||
| 55 | self._parent = None | ||
| 56 | |||
| 57 | def Add(self, name, task_name, start, finish=None, success=None, | ||
| 58 | try_count=1, kind='RepoOp'): | ||
| 59 | """Add an event to the log. | ||
| 60 | |||
| 61 | Args: | ||
| 62 | name: Name of the object being operated upon. | ||
| 63 | task_name: A sub-task that was performed for name. | ||
| 64 | start: Timestamp of when the operation started. | ||
| 65 | finish: Timestamp of when the operation finished. | ||
| 66 | success: Boolean indicating if the operation was successful. | ||
| 67 | try_count: A counter indicating the try count of this task. | ||
| 68 | kind: The kind of the object for the unique identifier. | ||
| 69 | |||
| 70 | Returns: | ||
| 71 | A dictionary of the event added to the log. | ||
| 72 | """ | ||
| 73 | event = { | ||
| 74 | 'id': (kind, self._next_id.next()), | ||
| 75 | 'name': name, | ||
| 76 | 'task_name': task_name, | ||
| 77 | 'start_time': start, | ||
| 78 | 'try': try_count, | ||
| 79 | } | ||
| 80 | |||
| 81 | if self._parent: | ||
| 82 | event['parent'] = self._parent['id'] | ||
| 83 | |||
| 84 | if success is not None or finish is not None: | ||
| 85 | self.FinishEvent(event, finish, success) | ||
| 86 | |||
| 87 | self._log.append(event) | ||
| 88 | return event | ||
| 89 | |||
| 90 | def AddSync(self, project, task_name, start, finish, success): | ||
| 91 | """Add a event to the log for a sync command. | ||
| 92 | |||
| 93 | Args: | ||
| 94 | project: Project being synced. | ||
| 95 | task_name: A sub-task that was performed for name. | ||
| 96 | One of (TASK_SYNC_NETWORK, TASK_SYNC_LOCAL) | ||
| 97 | start: Timestamp of when the operation started. | ||
| 98 | finish: Timestamp of when the operation finished. | ||
| 99 | success: Boolean indicating if the operation was successful. | ||
| 100 | |||
| 101 | Returns: | ||
| 102 | A dictionary of the event added to the log. | ||
| 103 | """ | ||
| 104 | event = self.Add(project.relpath, task_name, start, finish, success) | ||
| 105 | if event is not None: | ||
| 106 | event['project'] = project.name | ||
| 107 | if project.revisionExpr: | ||
| 108 | event['revision'] = project.revisionExpr | ||
| 109 | if project.remote.url: | ||
| 110 | event['project_url'] = project.remote.url | ||
| 111 | if project.remote.fetchUrl: | ||
| 112 | event['remote_url'] = project.remote.fetchUrl | ||
| 113 | try: | ||
| 114 | event['git_hash'] = project.GetCommitRevisionId() | ||
| 115 | except Exception: | ||
| 116 | pass | ||
| 117 | return event | ||
| 118 | |||
| 119 | def GetStatusString(self, success): | ||
| 120 | """Converst a boolean success to a status string. | ||
| 121 | |||
| 122 | Args: | ||
| 123 | success: Boolean indicating if the operation was successful. | ||
| 124 | |||
| 125 | Returns: | ||
| 126 | status string. | ||
| 127 | """ | ||
| 128 | return 'pass' if success else 'fail' | ||
| 129 | |||
| 130 | def FinishEvent(self, event, finish, success): | ||
| 131 | """Finishes an incomplete event. | ||
| 132 | |||
| 133 | Args: | ||
| 134 | event: An event that has been added to the log. | ||
| 135 | finish: Timestamp of when the operation finished. | ||
| 136 | success: Boolean indicating if the operation was successful. | ||
| 137 | |||
| 138 | Returns: | ||
| 139 | A dictionary of the event added to the log. | ||
| 140 | """ | ||
| 141 | event['status'] = self.GetStatusString(success) | ||
| 142 | event['finish_time'] = finish | ||
| 143 | return event | ||
| 144 | |||
| 145 | def SetParent(self, event): | ||
| 146 | """Set a parent event for all new entities. | ||
| 147 | |||
| 148 | Args: | ||
| 149 | event: The event to use as a parent. | ||
| 150 | """ | ||
| 151 | self._parent = event | ||
| 152 | |||
| 153 | def Write(self, filename): | ||
| 154 | """Writes the log out to a file. | ||
| 155 | |||
| 156 | Args: | ||
| 157 | filename: The file to write the log to. | ||
| 158 | """ | ||
| 159 | with open(filename, 'w+') as f: | ||
| 160 | for e in self._log: | ||
| 161 | json.dump(e, f, sort_keys=True) | ||
| 162 | f.write('\n') | ||
| 163 | |||
| 164 | |||
| 165 | def _EventIdGenerator(): | ||
| 166 | """Returns multi-process safe iterator that generates locally unique id. | ||
| 167 | |||
| 168 | Yields: | ||
| 169 | A unique, to this invocation of the program, integer id. | ||
| 170 | """ | ||
| 171 | eid = multiprocessing.Value('i', 1) | ||
| 172 | |||
| 173 | while True: | ||
| 174 | with eid.get_lock(): | ||
| 175 | val = eid.value | ||
| 176 | eid.value += 1 | ||
| 177 | yield val | ||
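command.py above gives every `Command` an `EventLog` instance, and main.py (further down) finishes the command event and writes the log. As a minimal sketch of the API defined in this new file, using an illustrative project name and output path:

```py
import time

from event_log import EventLog, TASK_SYNC_NETWORK

log = EventLog()
start = time.time()
# ... the network half of a sync for one project would happen here ...
event = log.Add('external/example', TASK_SYNC_NETWORK, start)  # name is illustrative
log.FinishEvent(event, time.time(), True)  # records status 'pass' and finish_time
log.Write('/tmp/repo-events.json')         # one JSON object per line
```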
diff --git a/git_command.py b/git_command.py
index 9f7d2930..b1e9e172 100644
--- a/git_command.py
+++ b/git_command.py
| @@ -14,14 +14,14 @@ | |||
| 14 | # limitations under the License. | 14 | # limitations under the License. |
| 15 | 15 | ||
| 16 | from __future__ import print_function | 16 | from __future__ import print_function |
| 17 | import fcntl | ||
| 18 | import os | 17 | import os |
| 19 | import select | ||
| 20 | import sys | 18 | import sys |
| 21 | import subprocess | 19 | import subprocess |
| 22 | import tempfile | 20 | import tempfile |
| 23 | from signal import SIGTERM | 21 | from signal import SIGTERM |
| 22 | |||
| 24 | from error import GitError | 23 | from error import GitError |
| 24 | import platform_utils | ||
| 25 | from trace import REPO_TRACE, IsTrace, Trace | 25 | from trace import REPO_TRACE, IsTrace, Trace |
| 26 | from wrapper import Wrapper | 26 | from wrapper import Wrapper |
| 27 | 27 | ||
| @@ -78,16 +78,6 @@ def terminate_ssh_clients(): | |||
| 78 | 78 | ||
| 79 | _git_version = None | 79 | _git_version = None |
| 80 | 80 | ||
| 81 | class _sfd(object): | ||
| 82 | """select file descriptor class""" | ||
| 83 | def __init__(self, fd, dest, std_name): | ||
| 84 | assert std_name in ('stdout', 'stderr') | ||
| 85 | self.fd = fd | ||
| 86 | self.dest = dest | ||
| 87 | self.std_name = std_name | ||
| 88 | def fileno(self): | ||
| 89 | return self.fd.fileno() | ||
| 90 | |||
| 91 | class _GitCall(object): | 81 | class _GitCall(object): |
| 92 | def version(self): | 82 | def version(self): |
| 93 | p = GitCommand(None, ['--version'], capture_stdout=True) | 83 | p = GitCommand(None, ['--version'], capture_stdout=True) |
| @@ -162,6 +152,7 @@ class GitCommand(object): | |||
| 162 | if ssh_proxy: | 152 | if ssh_proxy: |
| 163 | _setenv(env, 'REPO_SSH_SOCK', ssh_sock()) | 153 | _setenv(env, 'REPO_SSH_SOCK', ssh_sock()) |
| 164 | _setenv(env, 'GIT_SSH', _ssh_proxy()) | 154 | _setenv(env, 'GIT_SSH', _ssh_proxy()) |
| 155 | _setenv(env, 'GIT_SSH_VARIANT', 'ssh') | ||
| 165 | if 'http_proxy' in env and 'darwin' == sys.platform: | 156 | if 'http_proxy' in env and 'darwin' == sys.platform: |
| 166 | s = "'http.proxy=%s'" % (env['http_proxy'],) | 157 | s = "'http.proxy=%s'" % (env['http_proxy'],) |
| 167 | p = env.get('GIT_CONFIG_PARAMETERS') | 158 | p = env.get('GIT_CONFIG_PARAMETERS') |
| @@ -253,19 +244,16 @@ class GitCommand(object): | |||
| 253 | 244 | ||
| 254 | def _CaptureOutput(self): | 245 | def _CaptureOutput(self): |
| 255 | p = self.process | 246 | p = self.process |
| 256 | s_in = [_sfd(p.stdout, sys.stdout, 'stdout'), | 247 | s_in = platform_utils.FileDescriptorStreams.create() |
| 257 | _sfd(p.stderr, sys.stderr, 'stderr')] | 248 | s_in.add(p.stdout, sys.stdout, 'stdout') |
| 249 | s_in.add(p.stderr, sys.stderr, 'stderr') | ||
| 258 | self.stdout = '' | 250 | self.stdout = '' |
| 259 | self.stderr = '' | 251 | self.stderr = '' |
| 260 | 252 | ||
| 261 | for s in s_in: | 253 | while not s_in.is_done: |
| 262 | flags = fcntl.fcntl(s.fd, fcntl.F_GETFL) | 254 | in_ready = s_in.select() |
| 263 | fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) | ||
| 264 | |||
| 265 | while s_in: | ||
| 266 | in_ready, _, _ = select.select(s_in, [], []) | ||
| 267 | for s in in_ready: | 255 | for s in in_ready: |
| 268 | buf = s.fd.read(4096) | 256 | buf = s.read() |
| 269 | if not buf: | 257 | if not buf: |
| 270 | s_in.remove(s) | 258 | s_in.remove(s) |
| 271 | continue | 259 | continue |
diff --git a/git_config.py b/git_config.py
index e2236785..3ba9dbd1 100644
--- a/git_config.py
+++ b/git_config.py
| @@ -20,6 +20,7 @@ import errno | |||
| 20 | import json | 20 | import json |
| 21 | import os | 21 | import os |
| 22 | import re | 22 | import re |
| 23 | import ssl | ||
| 23 | import subprocess | 24 | import subprocess |
| 24 | import sys | 25 | import sys |
| 25 | try: | 26 | try: |
| @@ -41,6 +42,7 @@ else: | |||
| 41 | 42 | ||
| 42 | from signal import SIGTERM | 43 | from signal import SIGTERM |
| 43 | from error import GitError, UploadError | 44 | from error import GitError, UploadError |
| 45 | import platform_utils | ||
| 44 | from trace import Trace | 46 | from trace import Trace |
| 45 | if is_python3(): | 47 | if is_python3(): |
| 46 | from http.client import HTTPException | 48 | from http.client import HTTPException |
| @@ -50,16 +52,24 @@ else: | |||
| 50 | from git_command import GitCommand | 52 | from git_command import GitCommand |
| 51 | from git_command import ssh_sock | 53 | from git_command import ssh_sock |
| 52 | from git_command import terminate_ssh_clients | 54 | from git_command import terminate_ssh_clients |
| 55 | from git_refs import R_CHANGES, R_HEADS, R_TAGS | ||
| 53 | 56 | ||
| 54 | R_HEADS = 'refs/heads/' | ||
| 55 | R_TAGS = 'refs/tags/' | ||
| 56 | ID_RE = re.compile(r'^[0-9a-f]{40}$') | 57 | ID_RE = re.compile(r'^[0-9a-f]{40}$') |
| 57 | 58 | ||
| 58 | REVIEW_CACHE = dict() | 59 | REVIEW_CACHE = dict() |
| 59 | 60 | ||
| 61 | def IsChange(rev): | ||
| 62 | return rev.startswith(R_CHANGES) | ||
| 63 | |||
| 60 | def IsId(rev): | 64 | def IsId(rev): |
| 61 | return ID_RE.match(rev) | 65 | return ID_RE.match(rev) |
| 62 | 66 | ||
| 67 | def IsTag(rev): | ||
| 68 | return rev.startswith(R_TAGS) | ||
| 69 | |||
| 70 | def IsImmutable(rev): | ||
| 71 | return IsChange(rev) or IsId(rev) or IsTag(rev) | ||
| 72 | |||
| 63 | def _key(name): | 73 | def _key(name): |
| 64 | parts = name.split('.') | 74 | parts = name.split('.') |
| 65 | if len(parts) < 2: | 75 | if len(parts) < 2: |
| @@ -259,7 +269,7 @@ class GitConfig(object): | |||
| 259 | try: | 269 | try: |
| 260 | if os.path.getmtime(self._json) \ | 270 | if os.path.getmtime(self._json) \ |
| 261 | <= os.path.getmtime(self.file): | 271 | <= os.path.getmtime(self.file): |
| 262 | os.remove(self._json) | 272 | platform_utils.remove(self._json) |
| 263 | return None | 273 | return None |
| 264 | except OSError: | 274 | except OSError: |
| 265 | return None | 275 | return None |
| @@ -271,7 +281,7 @@ class GitConfig(object): | |||
| 271 | finally: | 281 | finally: |
| 272 | fd.close() | 282 | fd.close() |
| 273 | except (IOError, ValueError): | 283 | except (IOError, ValueError): |
| 274 | os.remove(self._json) | 284 | platform_utils.remove(self._json) |
| 275 | return None | 285 | return None |
| 276 | 286 | ||
| 277 | def _SaveJson(self, cache): | 287 | def _SaveJson(self, cache): |
| @@ -283,7 +293,7 @@ class GitConfig(object): | |||
| 283 | fd.close() | 293 | fd.close() |
| 284 | except (IOError, TypeError): | 294 | except (IOError, TypeError): |
| 285 | if os.path.exists(self._json): | 295 | if os.path.exists(self._json): |
| 286 | os.remove(self._json) | 296 | platform_utils.remove(self._json) |
| 287 | 297 | ||
| 288 | def _ReadGit(self): | 298 | def _ReadGit(self): |
| 289 | """ | 299 | """ |
| @@ -604,7 +614,7 @@ class Remote(object): | |||
| 604 | connectionUrl = self._InsteadOf() | 614 | connectionUrl = self._InsteadOf() |
| 605 | return _preconnect(connectionUrl) | 615 | return _preconnect(connectionUrl) |
| 606 | 616 | ||
| 607 | def ReviewUrl(self, userEmail): | 617 | def ReviewUrl(self, userEmail, validate_certs): |
| 608 | if self._review_url is None: | 618 | if self._review_url is None: |
| 609 | if self.review is None: | 619 | if self.review is None: |
| 610 | return None | 620 | return None |
| @@ -612,7 +622,7 @@ class Remote(object): | |||
| 612 | u = self.review | 622 | u = self.review |
| 613 | if u.startswith('persistent-'): | 623 | if u.startswith('persistent-'): |
| 614 | u = u[len('persistent-'):] | 624 | u = u[len('persistent-'):] |
| 615 | if u.split(':')[0] not in ('http', 'https', 'sso'): | 625 | if u.split(':')[0] not in ('http', 'https', 'sso', 'ssh'): |
| 616 | u = 'http://%s' % u | 626 | u = 'http://%s' % u |
| 617 | if u.endswith('/Gerrit'): | 627 | if u.endswith('/Gerrit'): |
| 618 | u = u[:len(u) - len('/Gerrit')] | 628 | u = u[:len(u) - len('/Gerrit')] |
| @@ -628,13 +638,20 @@ class Remote(object): | |||
| 628 | host, port = os.environ['REPO_HOST_PORT_INFO'].split() | 638 | host, port = os.environ['REPO_HOST_PORT_INFO'].split() |
| 629 | self._review_url = self._SshReviewUrl(userEmail, host, port) | 639 | self._review_url = self._SshReviewUrl(userEmail, host, port) |
| 630 | REVIEW_CACHE[u] = self._review_url | 640 | REVIEW_CACHE[u] = self._review_url |
| 631 | elif u.startswith('sso:'): | 641 | elif u.startswith('sso:') or u.startswith('ssh:'): |
| 632 | self._review_url = u # Assume it's right | 642 | self._review_url = u # Assume it's right |
| 633 | REVIEW_CACHE[u] = self._review_url | 643 | REVIEW_CACHE[u] = self._review_url |
| 644 | elif 'REPO_IGNORE_SSH_INFO' in os.environ: | ||
| 645 | self._review_url = http_url | ||
| 646 | REVIEW_CACHE[u] = self._review_url | ||
| 634 | else: | 647 | else: |
| 635 | try: | 648 | try: |
| 636 | info_url = u + 'ssh_info' | 649 | info_url = u + 'ssh_info' |
| 637 | info = urllib.request.urlopen(info_url).read() | 650 | if not validate_certs: |
| 651 | context = ssl._create_unverified_context() | ||
| 652 | info = urllib.request.urlopen(info_url, context=context).read() | ||
| 653 | else: | ||
| 654 | info = urllib.request.urlopen(info_url).read() | ||
| 638 | if info == 'NOT_AVAILABLE' or '<' in info: | 655 | if info == 'NOT_AVAILABLE' or '<' in info: |
| 639 | # If `info` contains '<', we assume the server gave us some sort | 656 | # If `info` contains '<', we assume the server gave us some sort |
| 640 | # of HTML response back, like maybe a login page. | 657 | # of HTML response back, like maybe a login page. |
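The new `IsChange()`, `IsTag()`, and `IsImmutable()` helpers are simple predicates over ref names; `IsImmutable()` is truthy for anything that cannot be rewritten in place (a Gerrit change ref, a tag ref, or a raw commit id). A quick illustration with made-up refs; the call sites in the subcommand files are not shown in this section:

```py
from git_config import IsImmutable

print(bool(IsImmutable('refs/changes/45/12345/2')))                   # True  (change ref)
print(bool(IsImmutable('refs/tags/v1.0')))                            # True  (tag)
print(bool(IsImmutable('0123456789abcdef0123456789abcdef01234567')))  # True  (commit id)
print(bool(IsImmutable('refs/heads/master')))                         # False (branch)
```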
diff --git a/git_refs.py b/git_refs.py
index 3c266061..7feaffb1 100644
--- a/git_refs.py
+++ b/git_refs.py
| @@ -16,11 +16,12 @@ | |||
| 16 | import os | 16 | import os |
| 17 | from trace import Trace | 17 | from trace import Trace |
| 18 | 18 | ||
| 19 | HEAD = 'HEAD' | 19 | HEAD = 'HEAD' |
| 20 | R_HEADS = 'refs/heads/' | 20 | R_CHANGES = 'refs/changes/' |
| 21 | R_TAGS = 'refs/tags/' | 21 | R_HEADS = 'refs/heads/' |
| 22 | R_PUB = 'refs/published/' | 22 | R_TAGS = 'refs/tags/' |
| 23 | R_M = 'refs/remotes/m/' | 23 | R_PUB = 'refs/published/' |
| 24 | R_M = 'refs/remotes/m/' | ||
| 24 | 25 | ||
| 25 | 26 | ||
| 26 | class GitRefs(object): | 27 | class GitRefs(object): |
| @@ -138,7 +139,7 @@ class GitRefs(object): | |||
| 138 | 139 | ||
| 139 | def _ReadLoose1(self, path, name): | 140 | def _ReadLoose1(self, path, name): |
| 140 | try: | 141 | try: |
| 141 | fd = open(path, 'rb') | 142 | fd = open(path) |
| 142 | except IOError: | 143 | except IOError: |
| 143 | return | 144 | return |
| 144 | 145 | ||
diff --git a/hooks/pre-auto-gc b/hooks/pre-auto-gc
index 43403022..c4107f51 100755
--- a/hooks/pre-auto-gc
+++ b/hooks/pre-auto-gc
| @@ -1,9 +1,9 @@ | |||
| 1 | #!/bin/sh | 1 | #!/bin/sh |
| 2 | # | 2 | # |
| 3 | # An example hook script to verify if you are on battery, in case you | 3 | # An example hook script to verify if you are on battery, in case you |
| 4 | # are running Linux or OS X. Called by git-gc --auto with no arguments. | 4 | # are running Windows, Linux or OS X. Called by git-gc --auto with no |
| 5 | # The hook should exit with non-zero status after issuing an appropriate | 5 | # arguments. The hook should exit with non-zero status after issuing an |
| 6 | # message if it wants to stop the auto repacking. | 6 | # appropriate message if it wants to stop the auto repacking. |
| 7 | 7 | ||
| 8 | # This program is free software; you can redistribute it and/or modify | 8 | # This program is free software; you can redistribute it and/or modify |
| 9 | # it under the terms of the GNU General Public License as published by | 9 | # it under the terms of the GNU General Public License as published by |
| @@ -19,6 +19,16 @@ | |||
| 19 | # along with this program; if not, write to the Free Software | 19 | # along with this program; if not, write to the Free Software |
| 20 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA | 20 | # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA |
| 21 | 21 | ||
| 22 | if uname -s | grep -q "_NT-" | ||
| 23 | then | ||
| 24 | if test -x $SYSTEMROOT/System32/Wbem/wmic | ||
| 25 | then | ||
| 26 | STATUS=$(wmic path win32_battery get batterystatus /format:list | tr -d '\r\n') | ||
| 27 | [ "$STATUS" = "BatteryStatus=2" ] && exit 0 || exit 1 | ||
| 28 | fi | ||
| 29 | exit 0 | ||
| 30 | fi | ||
| 31 | |||
| 22 | if test -x /sbin/on_ac_power && /sbin/on_ac_power | 32 | if test -x /sbin/on_ac_power && /sbin/on_ac_power |
| 23 | then | 33 | then |
| 24 | exit 0 | 34 | exit 0 |
diff --git a/main.py b/main.py
| @@ -37,6 +37,7 @@ except ImportError: | |||
| 37 | kerberos = None | 37 | kerberos = None |
| 38 | 38 | ||
| 39 | from color import SetDefaultColoring | 39 | from color import SetDefaultColoring |
| 40 | import event_log | ||
| 40 | from trace import SetTrace | 41 | from trace import SetTrace |
| 41 | from git_command import git, GitCommand | 42 | from git_command import git, GitCommand |
| 42 | from git_config import init_ssh, close_ssh | 43 | from git_config import init_ssh, close_ssh |
| @@ -54,7 +55,7 @@ from error import NoSuchProjectError | |||
| 54 | from error import RepoChangedException | 55 | from error import RepoChangedException |
| 55 | import gitc_utils | 56 | import gitc_utils |
| 56 | from manifest_xml import GitcManifest, XmlManifest | 57 | from manifest_xml import GitcManifest, XmlManifest |
| 57 | from pager import RunPager | 58 | from pager import RunPager, TerminatePager |
| 58 | from wrapper import WrapperPath, Wrapper | 59 | from wrapper import WrapperPath, Wrapper |
| 59 | 60 | ||
| 60 | from subcmds import all_commands | 61 | from subcmds import all_commands |
| @@ -85,6 +86,9 @@ global_options.add_option('--time', | |||
| 85 | global_options.add_option('--version', | 86 | global_options.add_option('--version', |
| 86 | dest='show_version', action='store_true', | 87 | dest='show_version', action='store_true', |
| 87 | help='display this version of repo') | 88 | help='display this version of repo') |
| 89 | global_options.add_option('--event-log', | ||
| 90 | dest='event_log', action='store', | ||
| 91 | help='filename of event log to append timeline to') | ||
| 88 | 92 | ||
| 89 | class _Repo(object): | 93 | class _Repo(object): |
| 90 | def __init__(self, repodir): | 94 | def __init__(self, repodir): |
| @@ -176,6 +180,8 @@ class _Repo(object): | |||
| 176 | RunPager(config) | 180 | RunPager(config) |
| 177 | 181 | ||
| 178 | start = time.time() | 182 | start = time.time() |
| 183 | cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start) | ||
| 184 | cmd.event_log.SetParent(cmd_event) | ||
| 179 | try: | 185 | try: |
| 180 | result = cmd.Execute(copts, cargs) | 186 | result = cmd.Execute(copts, cargs) |
| 181 | except (DownloadError, ManifestInvalidRevisionError, | 187 | except (DownloadError, ManifestInvalidRevisionError, |
| @@ -198,8 +204,13 @@ class _Repo(object): | |||
| 198 | else: | 204 | else: |
| 199 | print('error: project group must be enabled for the project in the current directory', file=sys.stderr) | 205 | print('error: project group must be enabled for the project in the current directory', file=sys.stderr) |
| 200 | result = 1 | 206 | result = 1 |
| 207 | except SystemExit as e: | ||
| 208 | if e.code: | ||
| 209 | result = e.code | ||
| 210 | raise | ||
| 201 | finally: | 211 | finally: |
| 202 | elapsed = time.time() - start | 212 | finish = time.time() |
| 213 | elapsed = finish - start | ||
| 203 | hours, remainder = divmod(elapsed, 3600) | 214 | hours, remainder = divmod(elapsed, 3600) |
| 204 | minutes, seconds = divmod(remainder, 60) | 215 | minutes, seconds = divmod(remainder, 60) |
| 205 | if gopts.time: | 216 | if gopts.time: |
| @@ -209,6 +220,12 @@ class _Repo(object): | |||
| 209 | print('real\t%dh%dm%.3fs' % (hours, minutes, seconds), | 220 | print('real\t%dh%dm%.3fs' % (hours, minutes, seconds), |
| 210 | file=sys.stderr) | 221 | file=sys.stderr) |
| 211 | 222 | ||
| 223 | cmd.event_log.FinishEvent(cmd_event, finish, | ||
| 224 | result is None or result == 0) | ||
| 225 | if gopts.event_log: | ||
| 226 | cmd.event_log.Write(os.path.abspath( | ||
| 227 | os.path.expanduser(gopts.event_log))) | ||
| 228 | |||
| 212 | return result | 229 | return result |
| 213 | 230 | ||
| 214 | 231 | ||
| @@ -525,6 +542,7 @@ def _Main(argv): | |||
| 525 | print('fatal: %s' % e, file=sys.stderr) | 542 | print('fatal: %s' % e, file=sys.stderr) |
| 526 | result = 128 | 543 | result = 128 |
| 527 | 544 | ||
| 545 | TerminatePager() | ||
| 528 | sys.exit(result) | 546 | sys.exit(result) |
| 529 | 547 | ||
| 530 | if __name__ == '__main__': | 548 | if __name__ == '__main__': |
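With the new `--event-log` global option (given before the subcommand, e.g. `repo --event-log=/tmp/repo-events.json sync`), the recorded timeline is written via `EventLog.Write()` as JSON lines. A small, illustrative consumer of that file:

```py
import json

# Illustrative path; use whatever was passed to `repo --event-log=...`.
with open('/tmp/repo-events.json') as f:
  for line in f:
    event = json.loads(line)
    print(event['task_name'], event.get('status', 'unfinished'), event['name'])
```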
diff --git a/manifest_xml.py b/manifest_xml.py
index 9c882af6..9b5d7847 100644
--- a/manifest_xml.py
+++ b/manifest_xml.py
| @@ -32,6 +32,7 @@ else: | |||
| 32 | import gitc_utils | 32 | import gitc_utils |
| 33 | from git_config import GitConfig | 33 | from git_config import GitConfig |
| 34 | from git_refs import R_HEADS, HEAD | 34 | from git_refs import R_HEADS, HEAD |
| 35 | import platform_utils | ||
| 35 | from project import RemoteSpec, Project, MetaProject | 36 | from project import RemoteSpec, Project, MetaProject |
| 36 | from error import ManifestParseError, ManifestInvalidRevisionError | 37 | from error import ManifestParseError, ManifestInvalidRevisionError |
| 37 | 38 | ||
| @@ -40,8 +41,18 @@ LOCAL_MANIFEST_NAME = 'local_manifest.xml' | |||
| 40 | LOCAL_MANIFESTS_DIR_NAME = 'local_manifests' | 41 | LOCAL_MANIFESTS_DIR_NAME = 'local_manifests' |
| 41 | 42 | ||
| 42 | # urljoin gets confused if the scheme is not known. | 43 | # urljoin gets confused if the scheme is not known. |
| 43 | urllib.parse.uses_relative.extend(['ssh', 'git', 'persistent-https', 'rpc']) | 44 | urllib.parse.uses_relative.extend([ |
| 44 | urllib.parse.uses_netloc.extend(['ssh', 'git', 'persistent-https', 'rpc']) | 45 | 'ssh', |
| 46 | 'git', | ||
| 47 | 'persistent-https', | ||
| 48 | 'sso', | ||
| 49 | 'rpc']) | ||
| 50 | urllib.parse.uses_netloc.extend([ | ||
| 51 | 'ssh', | ||
| 52 | 'git', | ||
| 53 | 'persistent-https', | ||
| 54 | 'sso', | ||
| 55 | 'rpc']) | ||
| 45 | 56 | ||
| 46 | class _Default(object): | 57 | class _Default(object): |
| 47 | """Project defaults within the manifest.""" | 58 | """Project defaults within the manifest.""" |
| @@ -100,7 +111,8 @@ class _XmlRemote(object): | |||
| 100 | return url | 111 | return url |
| 101 | 112 | ||
| 102 | def ToRemoteSpec(self, projectName): | 113 | def ToRemoteSpec(self, projectName): |
| 103 | url = self.resolvedFetchUrl.rstrip('/') + '/' + projectName | 114 | fetchUrl = self.resolvedFetchUrl.rstrip('/') |
| 115 | url = fetchUrl + '/' + projectName | ||
| 104 | remoteName = self.name | 116 | remoteName = self.name |
| 105 | if self.remoteAlias: | 117 | if self.remoteAlias: |
| 106 | remoteName = self.remoteAlias | 118 | remoteName = self.remoteAlias |
| @@ -108,7 +120,8 @@ class _XmlRemote(object): | |||
| 108 | url=url, | 120 | url=url, |
| 109 | pushUrl=self.pushUrl, | 121 | pushUrl=self.pushUrl, |
| 110 | review=self.reviewUrl, | 122 | review=self.reviewUrl, |
| 111 | orig_name=self.name) | 123 | orig_name=self.name, |
| 124 | fetchUrl=self.fetchUrl) | ||
| 112 | 125 | ||
| 113 | class XmlManifest(object): | 126 | class XmlManifest(object): |
| 114 | """manages the repo configuration file""" | 127 | """manages the repo configuration file""" |
| @@ -153,8 +166,8 @@ class XmlManifest(object): | |||
| 153 | 166 | ||
| 154 | try: | 167 | try: |
| 155 | if os.path.lexists(self.manifestFile): | 168 | if os.path.lexists(self.manifestFile): |
| 156 | os.remove(self.manifestFile) | 169 | platform_utils.remove(self.manifestFile) |
| 157 | os.symlink('manifests/%s' % name, self.manifestFile) | 170 | platform_utils.symlink(os.path.join('manifests', name), self.manifestFile) |
| 158 | except OSError as e: | 171 | except OSError as e: |
| 159 | raise ManifestParseError('cannot link manifest %s: %s' % (name, str(e))) | 172 | raise ManifestParseError('cannot link manifest %s: %s' % (name, str(e))) |
| 160 | 173 | ||
| @@ -383,6 +396,10 @@ class XmlManifest(object): | |||
| 383 | def IsArchive(self): | 396 | def IsArchive(self): |
| 384 | return self.manifestProject.config.GetBoolean('repo.archive') | 397 | return self.manifestProject.config.GetBoolean('repo.archive') |
| 385 | 398 | ||
| 399 | @property | ||
| 400 | def HasSubmodules(self): | ||
| 401 | return self.manifestProject.config.GetBoolean('repo.submodules') | ||
| 402 | |||
| 386 | def _Unload(self): | 403 | def _Unload(self): |
| 387 | self._loaded = False | 404 | self._loaded = False |
| 388 | self._projects = {} | 405 | self._projects = {} |
diff --git a/pager.py b/pager.py
| @@ -16,19 +16,53 @@ | |||
| 16 | from __future__ import print_function | 16 | from __future__ import print_function |
| 17 | import os | 17 | import os |
| 18 | import select | 18 | import select |
| 19 | import subprocess | ||
| 19 | import sys | 20 | import sys |
| 20 | 21 | ||
| 22 | import platform_utils | ||
| 23 | |||
| 21 | active = False | 24 | active = False |
| 25 | pager_process = None | ||
| 26 | old_stdout = None | ||
| 27 | old_stderr = None | ||
| 22 | 28 | ||
| 23 | def RunPager(globalConfig): | 29 | def RunPager(globalConfig): |
| 24 | global active | ||
| 25 | |||
| 26 | if not os.isatty(0) or not os.isatty(1): | 30 | if not os.isatty(0) or not os.isatty(1): |
| 27 | return | 31 | return |
| 28 | pager = _SelectPager(globalConfig) | 32 | pager = _SelectPager(globalConfig) |
| 29 | if pager == '' or pager == 'cat': | 33 | if pager == '' or pager == 'cat': |
| 30 | return | 34 | return |
| 31 | 35 | ||
| 36 | if platform_utils.isWindows(): | ||
| 37 | _PipePager(pager) | ||
| 38 | else: | ||
| 39 | _ForkPager(pager) | ||
| 40 | |||
| 41 | def TerminatePager(): | ||
| 42 | global pager_process, old_stdout, old_stderr | ||
| 43 | if pager_process: | ||
| 44 | sys.stdout.flush() | ||
| 45 | sys.stderr.flush() | ||
| 46 | pager_process.stdin.close() | ||
| 47 | pager_process.wait() | ||
| 48 | pager_process = None | ||
| 49 | # Restore initial stdout/err in case there is more output in this process | ||
| 50 | # after shutting down the pager process | ||
| 51 | sys.stdout = old_stdout | ||
| 52 | sys.stderr = old_stderr | ||
| 53 | |||
| 54 | def _PipePager(pager): | ||
| 55 | global pager_process, old_stdout, old_stderr | ||
| 56 | assert pager_process is None, "Only one active pager process at a time" | ||
| 57 | # Create pager process, piping stdout/err into its stdin | ||
| 58 | pager_process = subprocess.Popen([pager], stdin=subprocess.PIPE, stdout=sys.stdout, stderr=sys.stderr) | ||
| 59 | old_stdout = sys.stdout | ||
| 60 | old_stderr = sys.stderr | ||
| 61 | sys.stdout = pager_process.stdin | ||
| 62 | sys.stderr = pager_process.stdin | ||
| 63 | |||
| 64 | def _ForkPager(pager): | ||
| 65 | global active | ||
| 32 | # This process turns into the pager; a child it forks will | 66 | # This process turns into the pager; a child it forks will |
| 33 | # do the real processing and output back to the pager. This | 67 | # do the real processing and output back to the pager. This |
| 34 | # is necessary to keep the pager in control of the tty. | 68 | # is necessary to keep the pager in control of the tty. |
diff --git a/platform_utils.py b/platform_utils.py
new file mode 100644
index 00000000..33cb2ec3
--- /dev/null
+++ b/platform_utils.py
| @@ -0,0 +1,315 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2016 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import errno | ||
| 17 | import os | ||
| 18 | import platform | ||
| 19 | import select | ||
| 20 | import shutil | ||
| 21 | import stat | ||
| 22 | |||
| 23 | from Queue import Queue | ||
| 24 | from threading import Thread | ||
| 25 | |||
| 26 | |||
| 27 | def isWindows(): | ||
| 28 | """ Returns True when running with the native port of Python for Windows, | ||
| 29 | False when running on any other platform (including the Cygwin port of | ||
| 30 | Python). | ||
| 31 | """ | ||
| 32 | # Note: The cygwin port of Python returns "CYGWIN_NT_xxx" | ||
| 33 | return platform.system() == "Windows" | ||
| 34 | |||
| 35 | |||
| 36 | class FileDescriptorStreams(object): | ||
| 37 | """ Platform agnostic abstraction enabling non-blocking I/O over a | ||
| 38 | collection of file descriptors. This abstraction is required because | ||
| 39 | fcntl(os.O_NONBLOCK) is not supported on Windows. | ||
| 40 | """ | ||
| 41 | @classmethod | ||
| 42 | def create(cls): | ||
| 43 | """ Factory method: instantiates the concrete class according to the | ||
| 44 | current platform. | ||
| 45 | """ | ||
| 46 | if isWindows(): | ||
| 47 | return _FileDescriptorStreamsThreads() | ||
| 48 | else: | ||
| 49 | return _FileDescriptorStreamsNonBlocking() | ||
| 50 | |||
| 51 | def __init__(self): | ||
| 52 | self.streams = [] | ||
| 53 | |||
| 54 | def add(self, fd, dest, std_name): | ||
| 55 | """ Wraps an existing file descriptor as a stream. | ||
| 56 | """ | ||
| 57 | self.streams.append(self._create_stream(fd, dest, std_name)) | ||
| 58 | |||
| 59 | def remove(self, stream): | ||
| 60 | """ Removes a stream, when done with it. | ||
| 61 | """ | ||
| 62 | self.streams.remove(stream) | ||
| 63 | |||
| 64 | @property | ||
| 65 | def is_done(self): | ||
| 66 | """ Returns True when all streams have been processed. | ||
| 67 | """ | ||
| 68 | return len(self.streams) == 0 | ||
| 69 | |||
| 70 | def select(self): | ||
| 71 | """ Returns the set of streams that have data available to read. | ||
| 72 | The returned streams each expose a read() and a close() method. | ||
| 73 | When done with a stream, call the remove(stream) method. | ||
| 74 | """ | ||
| 75 | raise NotImplementedError | ||
| 76 | |||
| 77 | def _create_stream(self, fd, dest, std_name): | ||
| 78 | """ Creates a new stream wrapping an existing file descriptor. | ||
| 79 | """ | ||
| 80 | raise NotImplementedError | ||
| 81 | |||
| 82 | |||
| 83 | class _FileDescriptorStreamsNonBlocking(FileDescriptorStreams): | ||
| 84 | """ Implementation of FileDescriptorStreams for platforms that support | ||
| 85 | non blocking I/O. | ||
| 86 | """ | ||
| 87 | class Stream(object): | ||
| 88 | """ Encapsulates a file descriptor """ | ||
| 89 | def __init__(self, fd, dest, std_name): | ||
| 90 | self.fd = fd | ||
| 91 | self.dest = dest | ||
| 92 | self.std_name = std_name | ||
| 93 | self.set_non_blocking() | ||
| 94 | |||
| 95 | def set_non_blocking(self): | ||
| 96 | import fcntl | ||
| 97 | flags = fcntl.fcntl(self.fd, fcntl.F_GETFL) | ||
| 98 | fcntl.fcntl(self.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) | ||
| 99 | |||
| 100 | def fileno(self): | ||
| 101 | return self.fd.fileno() | ||
| 102 | |||
| 103 | def read(self): | ||
| 104 | return self.fd.read(4096) | ||
| 105 | |||
| 106 | def close(self): | ||
| 107 | self.fd.close() | ||
| 108 | |||
| 109 | def _create_stream(self, fd, dest, std_name): | ||
| 110 | return self.Stream(fd, dest, std_name) | ||
| 111 | |||
| 112 | def select(self): | ||
| 113 | ready_streams, _, _ = select.select(self.streams, [], []) | ||
| 114 | return ready_streams | ||
| 115 | |||
| 116 | |||
| 117 | class _FileDescriptorStreamsThreads(FileDescriptorStreams): | ||
| 118 | """ Implementation of FileDescriptorStreams for platforms that don't support | ||
| 119 | non blocking I/O. This implementation requires creating threads issuing | ||
| 120 | blocking read operations on file descriptors. | ||
| 121 | """ | ||
| 122 | def __init__(self): | ||
| 123 | super(_FileDescriptorStreamsThreads, self).__init__() | ||
| 124 | # The queue is shared across all threads so we can simulate the | ||
| 125 | # behavior of the select() function | ||
| 126 | self.queue = Queue(10) # Limit incoming data from streams | ||
| 127 | |||
| 128 | def _create_stream(self, fd, dest, std_name): | ||
| 129 | return self.Stream(fd, dest, std_name, self.queue) | ||
| 130 | |||
| 131 | def select(self): | ||
| 132 | # Return only one stream at a time, as it is the most straightforward | ||
| 133 | # thing to do and it is compatible with the select() function. | ||
| 134 | item = self.queue.get() | ||
| 135 | stream = item.stream | ||
| 136 | stream.data = item.data | ||
| 137 | return [stream] | ||
| 138 | |||
| 139 | class QueueItem(object): | ||
| 140 | """ Item put in the shared queue """ | ||
| 141 | def __init__(self, stream, data): | ||
| 142 | self.stream = stream | ||
| 143 | self.data = data | ||
| 144 | |||
| 145 | class Stream(object): | ||
| 146 | """ Encapsulates a file descriptor """ | ||
| 147 | def __init__(self, fd, dest, std_name, queue): | ||
| 148 | self.fd = fd | ||
| 149 | self.dest = dest | ||
| 150 | self.std_name = std_name | ||
| 151 | self.queue = queue | ||
| 152 | self.data = None | ||
| 153 | self.thread = Thread(target=self.read_to_queue) | ||
| 154 | self.thread.daemon = True | ||
| 155 | self.thread.start() | ||
| 156 | |||
| 157 | def close(self): | ||
| 158 | self.fd.close() | ||
| 159 | |||
| 160 | def read(self): | ||
| 161 | data = self.data | ||
| 162 | self.data = None | ||
| 163 | return data | ||
| 164 | |||
| 165 | def read_to_queue(self): | ||
| 166 | """ The thread function: reads everything from the file descriptor into | ||
| 167 | the shared queue and terminates when reaching EOF. | ||
| 168 | """ | ||
| 169 | for line in iter(self.fd.readline, b''): | ||
| 170 | self.queue.put(_FileDescriptorStreamsThreads.QueueItem(self, line)) | ||
| 171 | self.fd.close() | ||
| 172 | self.queue.put(_FileDescriptorStreamsThreads.QueueItem(self, None)) | ||
| 173 | |||
| 174 | |||
| 175 | def symlink(source, link_name): | ||
| 176 | """Creates a symbolic link pointing to source named link_name. | ||
| 177 | Note: On Windows, source must exist on disk, as the implementation needs | ||
| 178 | to know whether to create a "File" or a "Directory" symbolic link. | ||
| 179 | """ | ||
| 180 | if isWindows(): | ||
| 181 | import platform_utils_win32 | ||
| 182 | source = _validate_winpath(source) | ||
| 183 | link_name = _validate_winpath(link_name) | ||
| 184 | target = os.path.join(os.path.dirname(link_name), source) | ||
| 185 | if os.path.isdir(target): | ||
| 186 | platform_utils_win32.create_dirsymlink(source, link_name) | ||
| 187 | else: | ||
| 188 | platform_utils_win32.create_filesymlink(source, link_name) | ||
| 189 | else: | ||
| 190 | return os.symlink(source, link_name) | ||
| 191 | |||
| 192 | |||
| 193 | def _validate_winpath(path): | ||
| 194 | path = os.path.normpath(path) | ||
| 195 | if _winpath_is_valid(path): | ||
| 196 | return path | ||
| 197 | raise ValueError("Path \"%s\" must be a relative path or an absolute " | ||
| 198 | "path starting with a drive letter".format(path)) | ||
| 199 | |||
| 200 | |||
| 201 | def _winpath_is_valid(path): | ||
| 202 | """Windows only: returns True if path is relative (e.g. ".\\foo") or is | ||
| 203 | absolute including a drive letter (e.g. "c:\\foo"). Returns False if path | ||
| 204 | is ambiguous (e.g. "x:foo" or "\\foo"). | ||
| 205 | """ | ||
| 206 | assert isWindows() | ||
| 207 | path = os.path.normpath(path) | ||
| 208 | drive, tail = os.path.splitdrive(path) | ||
| 209 | if tail: | ||
| 210 | if not drive: | ||
| 211 | return tail[0] != os.sep # "\\foo" is invalid | ||
| 212 | else: | ||
| 213 | return tail[0] == os.sep # "x:foo" is invalid | ||
| 214 | else: | ||
| 215 | return not drive # "x:" is invalid | ||
| 216 | |||
| 217 | |||
| 218 | def rmtree(path): | ||
| 219 | if isWindows(): | ||
| 220 | shutil.rmtree(path, onerror=handle_rmtree_error) | ||
| 221 | else: | ||
| 222 | shutil.rmtree(path) | ||
| 223 | |||
| 224 | |||
| 225 | def handle_rmtree_error(function, path, excinfo): | ||
| 226 | # Allow deleting read-only files | ||
| 227 | os.chmod(path, stat.S_IWRITE) | ||
| 228 | function(path) | ||
| 229 | |||
| 230 | |||
| 231 | def rename(src, dst): | ||
| 232 | if isWindows(): | ||
| 233 | # On Windows, rename fails if destination exists, see | ||
| 234 | # https://docs.python.org/2/library/os.html#os.rename | ||
| 235 | try: | ||
| 236 | os.rename(src, dst) | ||
| 237 | except OSError as e: | ||
| 238 | if e.errno == errno.EEXIST: | ||
| 239 | os.remove(dst) | ||
| 240 | os.rename(src, dst) | ||
| 241 | else: | ||
| 242 | raise | ||
| 243 | else: | ||
| 244 | os.rename(src, dst) | ||
| 245 | |||
| 246 | |||
| 247 | def remove(path): | ||
| 248 | """Remove (delete) the file path. This is a replacement for os.remove, but | ||
| 249 | allows deleting read-only files on Windows. | ||
| 250 | """ | ||
| 251 | if isWindows(): | ||
| 252 | try: | ||
| 253 | os.remove(path) | ||
| 254 | except OSError as e: | ||
| 255 | if e.errno == errno.EACCES: | ||
| 256 | os.chmod(path, stat.S_IWRITE) | ||
| 257 | os.remove(path) | ||
| 258 | else: | ||
| 259 | raise | ||
| 260 | else: | ||
| 261 | os.remove(path) | ||
| 262 | |||
| 263 | |||
| 264 | def islink(path): | ||
| 265 | """Test whether a path is a symbolic link. | ||
| 266 | |||
| 267 | Availability: Windows, Unix. | ||
| 268 | """ | ||
| 269 | if isWindows(): | ||
| 270 | import platform_utils_win32 | ||
| 271 | return platform_utils_win32.islink(path) | ||
| 272 | else: | ||
| 273 | return os.path.islink(path) | ||
| 274 | |||
| 275 | |||
| 276 | def readlink(path): | ||
| 277 | """Return a string representing the path to which the symbolic link | ||
| 278 | points. The result may be either an absolute or relative pathname; | ||
| 279 | if it is relative, it may be converted to an absolute pathname using | ||
| 280 | os.path.join(os.path.dirname(path), result). | ||
| 281 | |||
| 282 | Availability: Windows, Unix. | ||
| 283 | """ | ||
| 284 | if isWindows(): | ||
| 285 | import platform_utils_win32 | ||
| 286 | return platform_utils_win32.readlink(path) | ||
| 287 | else: | ||
| 288 | return os.readlink(path) | ||
| 289 | |||
| 290 | |||
| 291 | def realpath(path): | ||
| 292 | """Return the canonical path of the specified filename, eliminating | ||
| 293 | any symbolic links encountered in the path. | ||
| 294 | |||
| 295 | Availability: Windows, Unix. | ||
| 296 | """ | ||
| 297 | if isWindows(): | ||
| 298 | current_path = os.path.abspath(path) | ||
| 299 | path_tail = [] | ||
| 300 | for c in range(0, 100): # Avoid cycles | ||
| 301 | if islink(current_path): | ||
| 302 | target = readlink(current_path) | ||
| 303 | current_path = os.path.join(os.path.dirname(current_path), target) | ||
| 304 | else: | ||
| 305 | basename = os.path.basename(current_path) | ||
| 306 | if basename == '': | ||
| 307 | path_tail.append(current_path) | ||
| 308 | break | ||
| 309 | path_tail.append(basename) | ||
| 310 | current_path = os.path.dirname(current_path) | ||
| 311 | path_tail.reverse() | ||
| 312 | result = os.path.normpath(os.path.join(*path_tail)) | ||
| 313 | return result | ||
| 314 | else: | ||
| 315 | return os.path.realpath(path) | ||
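For reference, a minimal usage sketch of the wrappers above (the paths are invented; on Linux and macOS each call simply defers to its os/os.path equivalent):

    import platform_utils

    link = 'out/current'  # hypothetical symlink
    if platform_utils.islink(link):
        target = platform_utils.readlink(link)
        print('%s -> %s' % (link, target))
        print(platform_utils.realpath(link))

    # rename() overwrites an existing destination on Windows, and remove()
    # clears the read-only attribute before deleting when it has to.
    platform_utils.rename('fetch.tmp', 'fetch.log')
    platform_utils.remove('fetch.log')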
diff --git a/platform_utils_win32.py b/platform_utils_win32.py new file mode 100644 index 00000000..fe76b3d6 --- /dev/null +++ b/platform_utils_win32.py | |||
| @@ -0,0 +1,217 @@ | |||
| 1 | # | ||
| 2 | # Copyright (C) 2016 The Android Open Source Project | ||
| 3 | # | ||
| 4 | # Licensed under the Apache License, Version 2.0 (the "License"); | ||
| 5 | # you may not use this file except in compliance with the License. | ||
| 6 | # You may obtain a copy of the License at | ||
| 7 | # | ||
| 8 | # http://www.apache.org/licenses/LICENSE-2.0 | ||
| 9 | # | ||
| 10 | # Unless required by applicable law or agreed to in writing, software | ||
| 11 | # distributed under the License is distributed on an "AS IS" BASIS, | ||
| 12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
| 13 | # See the License for the specific language governing permissions and | ||
| 14 | # limitations under the License. | ||
| 15 | |||
| 16 | import errno | ||
| 17 | |||
| 18 | from ctypes import WinDLL, get_last_error, FormatError, WinError, addressof | ||
| 19 | from ctypes import c_buffer | ||
| 20 | from ctypes.wintypes import BOOL, LPCWSTR, DWORD, HANDLE, POINTER, c_ubyte | ||
| 21 | from ctypes.wintypes import WCHAR, USHORT, LPVOID, Structure, Union, ULONG | ||
| 22 | from ctypes.wintypes import byref | ||
| 23 | |||
| 24 | kernel32 = WinDLL('kernel32', use_last_error=True) | ||
| 25 | |||
| 26 | LPDWORD = POINTER(DWORD) | ||
| 27 | UCHAR = c_ubyte | ||
| 28 | |||
| 29 | # Win32 error codes | ||
| 30 | ERROR_SUCCESS = 0 | ||
| 31 | ERROR_NOT_SUPPORTED = 50 | ||
| 32 | ERROR_PRIVILEGE_NOT_HELD = 1314 | ||
| 33 | |||
| 34 | # Win32 API entry points | ||
| 35 | CreateSymbolicLinkW = kernel32.CreateSymbolicLinkW | ||
| 36 | CreateSymbolicLinkW.restype = BOOL | ||
| 37 | CreateSymbolicLinkW.argtypes = (LPCWSTR, # lpSymlinkFileName In | ||
| 38 | LPCWSTR, # lpTargetFileName In | ||
| 39 | DWORD) # dwFlags In | ||
| 40 | |||
| 41 | # Symbolic link creation flags | ||
| 42 | SYMBOLIC_LINK_FLAG_FILE = 0x00 | ||
| 43 | SYMBOLIC_LINK_FLAG_DIRECTORY = 0x01 | ||
| 44 | |||
| 45 | GetFileAttributesW = kernel32.GetFileAttributesW | ||
| 46 | GetFileAttributesW.restype = DWORD | ||
| 47 | GetFileAttributesW.argtypes = (LPCWSTR,) # lpFileName In | ||
| 48 | |||
| 49 | INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF | ||
| 50 | FILE_ATTRIBUTE_REPARSE_POINT = 0x00400 | ||
| 51 | |||
| 52 | CreateFileW = kernel32.CreateFileW | ||
| 53 | CreateFileW.restype = HANDLE | ||
| 54 | CreateFileW.argtypes = (LPCWSTR, # lpFileName In | ||
| 55 | DWORD, # dwDesiredAccess In | ||
| 56 | DWORD, # dwShareMode In | ||
| 57 | LPVOID, # lpSecurityAttributes In_opt | ||
| 58 | DWORD, # dwCreationDisposition In | ||
| 59 | DWORD, # dwFlagsAndAttributes In | ||
| 60 | HANDLE) # hTemplateFile In_opt | ||
| 61 | |||
| 62 | CloseHandle = kernel32.CloseHandle | ||
| 63 | CloseHandle.restype = BOOL | ||
| 64 | CloseHandle.argtypes = (HANDLE,) # hObject In | ||
| 65 | |||
| 66 | INVALID_HANDLE_VALUE = HANDLE(-1).value | ||
| 67 | OPEN_EXISTING = 3 | ||
| 68 | FILE_FLAG_BACKUP_SEMANTICS = 0x02000000 | ||
| 69 | FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000 | ||
| 70 | |||
| 71 | DeviceIoControl = kernel32.DeviceIoControl | ||
| 72 | DeviceIoControl.restype = BOOL | ||
| 73 | DeviceIoControl.argtypes = (HANDLE, # hDevice In | ||
| 74 | DWORD, # dwIoControlCode In | ||
| 75 | LPVOID, # lpInBuffer In_opt | ||
| 76 | DWORD, # nInBufferSize In | ||
| 77 | LPVOID, # lpOutBuffer Out_opt | ||
| 78 | DWORD, # nOutBufferSize In | ||
| 79 | LPDWORD, # lpBytesReturned Out_opt | ||
| 80 | LPVOID) # lpOverlapped Inout_opt | ||
| 81 | |||
| 82 | # Device I/O control flags and options | ||
| 83 | FSCTL_GET_REPARSE_POINT = 0x000900A8 | ||
| 84 | IO_REPARSE_TAG_MOUNT_POINT = 0xA0000003 | ||
| 85 | IO_REPARSE_TAG_SYMLINK = 0xA000000C | ||
| 86 | MAXIMUM_REPARSE_DATA_BUFFER_SIZE = 0x4000 | ||
| 87 | |||
| 88 | |||
| 89 | class GENERIC_REPARSE_BUFFER(Structure): | ||
| 90 | _fields_ = (('DataBuffer', UCHAR * 1),) | ||
| 91 | |||
| 92 | |||
| 93 | class SYMBOLIC_LINK_REPARSE_BUFFER(Structure): | ||
| 94 | _fields_ = (('SubstituteNameOffset', USHORT), | ||
| 95 | ('SubstituteNameLength', USHORT), | ||
| 96 | ('PrintNameOffset', USHORT), | ||
| 97 | ('PrintNameLength', USHORT), | ||
| 98 | ('Flags', ULONG), | ||
| 99 | ('PathBuffer', WCHAR * 1)) | ||
| 100 | |||
| 101 | @property | ||
| 102 | def PrintName(self): | ||
| 103 | arrayt = WCHAR * (self.PrintNameLength // 2) | ||
| 104 | offset = type(self).PathBuffer.offset + self.PrintNameOffset | ||
| 105 | return arrayt.from_address(addressof(self) + offset).value | ||
| 106 | |||
| 107 | |||
| 108 | class MOUNT_POINT_REPARSE_BUFFER(Structure): | ||
| 109 | _fields_ = (('SubstituteNameOffset', USHORT), | ||
| 110 | ('SubstituteNameLength', USHORT), | ||
| 111 | ('PrintNameOffset', USHORT), | ||
| 112 | ('PrintNameLength', USHORT), | ||
| 113 | ('PathBuffer', WCHAR * 1)) | ||
| 114 | |||
| 115 | @property | ||
| 116 | def PrintName(self): | ||
| 117 | arrayt = WCHAR * (self.PrintNameLength // 2) | ||
| 118 | offset = type(self).PathBuffer.offset + self.PrintNameOffset | ||
| 119 | return arrayt.from_address(addressof(self) + offset).value | ||
| 120 | |||
| 121 | |||
| 122 | class REPARSE_DATA_BUFFER(Structure): | ||
| 123 | class REPARSE_BUFFER(Union): | ||
| 124 | _fields_ = (('SymbolicLinkReparseBuffer', SYMBOLIC_LINK_REPARSE_BUFFER), | ||
| 125 | ('MountPointReparseBuffer', MOUNT_POINT_REPARSE_BUFFER), | ||
| 126 | ('GenericReparseBuffer', GENERIC_REPARSE_BUFFER)) | ||
| 127 | _fields_ = (('ReparseTag', ULONG), | ||
| 128 | ('ReparseDataLength', USHORT), | ||
| 129 | ('Reserved', USHORT), | ||
| 130 | ('ReparseBuffer', REPARSE_BUFFER)) | ||
| 131 | _anonymous_ = ('ReparseBuffer',) | ||
| 132 | |||
| 133 | |||
| 134 | def create_filesymlink(source, link_name): | ||
| 135 | """Creates a Windows file symbolic link source pointing to link_name.""" | ||
| 136 | _create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_FILE) | ||
| 137 | |||
| 138 | |||
| 139 | def create_dirsymlink(source, link_name): | ||
| 140 | """Creates a Windows directory symbolic link source pointing to link_name. | ||
| 141 | """ | ||
| 142 | _create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_DIRECTORY) | ||
| 143 | |||
| 144 | |||
| 145 | def _create_symlink(source, link_name, dwFlags): | ||
| 146 | # Note: Win32 documentation for CreateSymbolicLink is incorrect. | ||
| 147 | # On success, the function returns "1". | ||
| 148 | # On error, the function returns some random value (e.g. 1280). | ||
| 149 | # The best bet seems to be to call "GetLastError" and check for error/success. | ||
| 150 | CreateSymbolicLinkW(link_name, source, dwFlags) | ||
| 151 | code = get_last_error() | ||
| 152 | if code != ERROR_SUCCESS: | ||
| 153 | error_desc = FormatError(code).strip() | ||
| 154 | if code == ERROR_PRIVILEGE_NOT_HELD: | ||
| 155 | raise OSError(errno.EPERM, error_desc, link_name) | ||
| 156 | _raise_winerror( | ||
| 157 | code, | ||
| 158 | 'Error creating symbolic link \"%s\"' % link_name) | ||
| 159 | |||
| 160 | |||
| 161 | def islink(path): | ||
| 162 | result = GetFileAttributesW(path) | ||
| 163 | if result == INVALID_FILE_ATTRIBUTES: | ||
| 164 | return False | ||
| 165 | return bool(result & FILE_ATTRIBUTE_REPARSE_POINT) | ||
| 166 | |||
| 167 | |||
| 168 | def readlink(path): | ||
| 169 | reparse_point_handle = CreateFileW(path, | ||
| 170 | 0, | ||
| 171 | 0, | ||
| 172 | None, | ||
| 173 | OPEN_EXISTING, | ||
| 174 | FILE_FLAG_OPEN_REPARSE_POINT | | ||
| 175 | FILE_FLAG_BACKUP_SEMANTICS, | ||
| 176 | None) | ||
| 177 | if reparse_point_handle == INVALID_HANDLE_VALUE: | ||
| 178 | _raise_winerror( | ||
| 179 | get_last_error(), | ||
| 180 | 'Error opening symbolic link \"%s\"' % path) | ||
| 181 | target_buffer = c_buffer(MAXIMUM_REPARSE_DATA_BUFFER_SIZE) | ||
| 182 | n_bytes_returned = DWORD() | ||
| 183 | io_result = DeviceIoControl(reparse_point_handle, | ||
| 184 | FSCTL_GET_REPARSE_POINT, | ||
| 185 | None, | ||
| 186 | 0, | ||
| 187 | target_buffer, | ||
| 188 | len(target_buffer), | ||
| 189 | byref(n_bytes_returned), | ||
| 190 | None) | ||
| 191 | CloseHandle(reparse_point_handle) | ||
| 192 | if not io_result: | ||
| 193 | _raise_winerror( | ||
| 194 | get_last_error(), | ||
| 195 | 'Error reading symbolic link \"%s\"' % path) | ||
| 196 | rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer) | ||
| 197 | if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK: | ||
| 198 | return _preserve_encoding(path, rdb.SymbolicLinkReparseBuffer.PrintName) | ||
| 199 | elif rdb.ReparseTag == IO_REPARSE_TAG_MOUNT_POINT: | ||
| 200 | return _preserve_encoding(path, rdb.MountPointReparseBuffer.PrintName) | ||
| 201 | # Unsupported reparse point type | ||
| 202 | _raise_winerror( | ||
| 203 | ERROR_NOT_SUPPORTED, | ||
| 204 | 'Error reading symbolic link \"%s\"' % path) | ||
| 205 | |||
| 206 | |||
| 207 | def _preserve_encoding(source, target): | ||
| 208 | """Ensures target is the same string type (i.e. unicode or str) as source.""" | ||
| 209 | if isinstance(source, unicode): | ||
| 210 | return unicode(target) | ||
| 211 | return str(target) | ||
| 212 | |||
| 213 | |||
| 214 | def _raise_winerror(code, error_desc): | ||
| 215 | win_error_desc = FormatError(code).strip() | ||
| 216 | error_desc = "%s: %s" % (error_desc, win_error_desc) | ||
| 217 | raise WinError(code, error_desc) | ||
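A hedged example of exercising the new Win32 helpers directly (Windows only; the paths are made up, and creating a symlink normally requires Administrator rights, otherwise the EPERM case above surfaces as OSError):

    import platform_utils_win32 as win32

    win32.create_dirsymlink('C:\\src\\real_dir', 'C:\\src\\link_dir')
    if win32.islink('C:\\src\\link_dir'):
        # readlink() opens the reparse point and returns its PrintName.
        print(win32.readlink('C:\\src\\link_dir'))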
diff --git a/progress.py b/progress.py index d948654f..0dd5d1a8 100644 --- a/progress.py +++ b/progress.py | |||
| @@ -21,7 +21,8 @@ from trace import IsTrace | |||
| 21 | _NOT_TTY = not os.isatty(2) | 21 | _NOT_TTY = not os.isatty(2) |
| 22 | 22 | ||
| 23 | class Progress(object): | 23 | class Progress(object): |
| 24 | def __init__(self, title, total=0, units=''): | 24 | def __init__(self, title, total=0, units='', print_newline=False, |
| 25 | always_print_percentage=False): | ||
| 25 | self._title = title | 26 | self._title = title |
| 26 | self._total = total | 27 | self._total = total |
| 27 | self._done = 0 | 28 | self._done = 0 |
| @@ -29,6 +30,8 @@ class Progress(object): | |||
| 29 | self._start = time() | 30 | self._start = time() |
| 30 | self._show = False | 31 | self._show = False |
| 31 | self._units = units | 32 | self._units = units |
| 33 | self._print_newline = print_newline | ||
| 34 | self._always_print_percentage = always_print_percentage | ||
| 32 | 35 | ||
| 33 | def update(self, inc=1): | 36 | def update(self, inc=1): |
| 34 | self._done += inc | 37 | self._done += inc |
| @@ -50,13 +53,14 @@ class Progress(object): | |||
| 50 | else: | 53 | else: |
| 51 | p = (100 * self._done) / self._total | 54 | p = (100 * self._done) / self._total |
| 52 | 55 | ||
| 53 | if self._lastp != p: | 56 | if self._lastp != p or self._always_print_percentage: |
| 54 | self._lastp = p | 57 | self._lastp = p |
| 55 | sys.stderr.write('\r%s: %3d%% (%d%s/%d%s) ' % ( | 58 | sys.stderr.write('\r%s: %3d%% (%d%s/%d%s)%s' % ( |
| 56 | self._title, | 59 | self._title, |
| 57 | p, | 60 | p, |
| 58 | self._done, self._units, | 61 | self._done, self._units, |
| 59 | self._total, self._units)) | 62 | self._total, self._units, |
| 63 | "\n" if self._print_newline else "")) | ||
| 60 | sys.stderr.flush() | 64 | sys.stderr.flush() |
| 61 | 65 | ||
| 62 | def end(self): | 66 | def end(self): |
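A short sketch of the two new Progress options (title, total and units are arbitrary):

    from progress import Progress

    # print_newline emits a newline after each repaint instead of rewriting
    # the same terminal line; always_print_percentage repaints even when the
    # integer percentage has not changed since the previous update.
    p = Progress('Checking out', total=3, units=' projects',
                 print_newline=True, always_print_percentage=True)
    for _ in range(3):
        p.update()
    p.end()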
| @@ -35,6 +35,7 @@ from git_config import GitConfig, IsId, GetSchemeFromUrl, GetUrlCookieFile, \ | |||
| 35 | from error import GitError, HookError, UploadError, DownloadError | 35 | from error import GitError, HookError, UploadError, DownloadError |
| 36 | from error import ManifestInvalidRevisionError | 36 | from error import ManifestInvalidRevisionError |
| 37 | from error import NoManifestException | 37 | from error import NoManifestException |
| 38 | import platform_utils | ||
| 38 | from trace import IsTrace, Trace | 39 | from trace import IsTrace, Trace |
| 39 | 40 | ||
| 40 | from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M | 41 | from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M |
| @@ -62,9 +63,9 @@ def _lwrite(path, content): | |||
| 62 | fd.close() | 63 | fd.close() |
| 63 | 64 | ||
| 64 | try: | 65 | try: |
| 65 | os.rename(lock, path) | 66 | platform_utils.rename(lock, path) |
| 66 | except OSError: | 67 | except OSError: |
| 67 | os.remove(lock) | 68 | platform_utils.remove(lock) |
| 68 | raise | 69 | raise |
| 69 | 70 | ||
| 70 | 71 | ||
| @@ -102,7 +103,7 @@ def _ProjectHooks(): | |||
| 102 | """ | 103 | """ |
| 103 | global _project_hook_list | 104 | global _project_hook_list |
| 104 | if _project_hook_list is None: | 105 | if _project_hook_list is None: |
| 105 | d = os.path.realpath(os.path.abspath(os.path.dirname(__file__))) | 106 | d = platform_utils.realpath(os.path.abspath(os.path.dirname(__file__))) |
| 106 | d = os.path.join(d, 'hooks') | 107 | d = os.path.join(d, 'hooks') |
| 107 | _project_hook_list = [os.path.join(d, x) for x in os.listdir(d)] | 108 | _project_hook_list = [os.path.join(d, x) for x in os.listdir(d)] |
| 108 | return _project_hook_list | 109 | return _project_hook_list |
| @@ -176,12 +177,20 @@ class ReviewableBranch(object): | |||
| 176 | def UploadForReview(self, people, | 177 | def UploadForReview(self, people, |
| 177 | auto_topic=False, | 178 | auto_topic=False, |
| 178 | draft=False, | 179 | draft=False, |
| 179 | dest_branch=None): | 180 | private=False, |
| 181 | wip=False, | ||
| 182 | dest_branch=None, | ||
| 183 | validate_certs=True, | ||
| 184 | push_options=None): | ||
| 180 | self.project.UploadForReview(self.name, | 185 | self.project.UploadForReview(self.name, |
| 181 | people, | 186 | people, |
| 182 | auto_topic=auto_topic, | 187 | auto_topic=auto_topic, |
| 183 | draft=draft, | 188 | draft=draft, |
| 184 | dest_branch=dest_branch) | 189 | private=private, |
| 190 | wip=wip, | ||
| 191 | dest_branch=dest_branch, | ||
| 192 | validate_certs=validate_certs, | ||
| 193 | push_options=push_options) | ||
| 185 | 194 | ||
| 186 | def GetPublishedRefs(self): | 195 | def GetPublishedRefs(self): |
| 187 | refs = {} | 196 | refs = {} |
| @@ -243,7 +252,7 @@ class _CopyFile(object): | |||
| 243 | try: | 252 | try: |
| 244 | # remove existing file first, since it might be read-only | 253 | # remove existing file first, since it might be read-only |
| 245 | if os.path.exists(dest): | 254 | if os.path.exists(dest): |
| 246 | os.remove(dest) | 255 | platform_utils.remove(dest) |
| 247 | else: | 256 | else: |
| 248 | dest_dir = os.path.dirname(dest) | 257 | dest_dir = os.path.dirname(dest) |
| 249 | if not os.path.isdir(dest_dir): | 258 | if not os.path.isdir(dest_dir): |
| @@ -268,16 +277,16 @@ class _LinkFile(object): | |||
| 268 | 277 | ||
| 269 | def __linkIt(self, relSrc, absDest): | 278 | def __linkIt(self, relSrc, absDest): |
| 270 | # link file if it does not exist or is out of date | 279 | # link file if it does not exist or is out of date |
| 271 | if not os.path.islink(absDest) or (os.readlink(absDest) != relSrc): | 280 | if not platform_utils.islink(absDest) or (platform_utils.readlink(absDest) != relSrc): |
| 272 | try: | 281 | try: |
| 273 | # remove existing file first, since it might be read-only | 282 | # remove existing file first, since it might be read-only |
| 274 | if os.path.lexists(absDest): | 283 | if os.path.lexists(absDest): |
| 275 | os.remove(absDest) | 284 | platform_utils.remove(absDest) |
| 276 | else: | 285 | else: |
| 277 | dest_dir = os.path.dirname(absDest) | 286 | dest_dir = os.path.dirname(absDest) |
| 278 | if not os.path.isdir(dest_dir): | 287 | if not os.path.isdir(dest_dir): |
| 279 | os.makedirs(dest_dir) | 288 | os.makedirs(dest_dir) |
| 280 | os.symlink(relSrc, absDest) | 289 | platform_utils.symlink(relSrc, absDest) |
| 281 | except IOError: | 290 | except IOError: |
| 282 | _error('Cannot link file %s to %s', relSrc, absDest) | 291 | _error('Cannot link file %s to %s', relSrc, absDest) |
| 283 | 292 | ||
| @@ -323,13 +332,15 @@ class RemoteSpec(object): | |||
| 323 | pushUrl=None, | 332 | pushUrl=None, |
| 324 | review=None, | 333 | review=None, |
| 325 | revision=None, | 334 | revision=None, |
| 326 | orig_name=None): | 335 | orig_name=None, |
| 336 | fetchUrl=None): | ||
| 327 | self.name = name | 337 | self.name = name |
| 328 | self.url = url | 338 | self.url = url |
| 329 | self.pushUrl = pushUrl | 339 | self.pushUrl = pushUrl |
| 330 | self.review = review | 340 | self.review = review |
| 331 | self.revision = revision | 341 | self.revision = revision |
| 332 | self.orig_name = orig_name | 342 | self.orig_name = orig_name |
| 343 | self.fetchUrl = fetchUrl | ||
| 333 | 344 | ||
| 334 | 345 | ||
| 335 | class RepoHook(object): | 346 | class RepoHook(object): |
| @@ -687,7 +698,7 @@ class Project(object): | |||
| 687 | self.gitdir = gitdir.replace('\\', '/') | 698 | self.gitdir = gitdir.replace('\\', '/') |
| 688 | self.objdir = objdir.replace('\\', '/') | 699 | self.objdir = objdir.replace('\\', '/') |
| 689 | if worktree: | 700 | if worktree: |
| 690 | self.worktree = os.path.normpath(worktree.replace('\\', '/')) | 701 | self.worktree = os.path.normpath(worktree).replace('\\', '/') |
| 691 | else: | 702 | else: |
| 692 | self.worktree = None | 703 | self.worktree = None |
| 693 | self.relpath = relpath | 704 | self.relpath = relpath |
| @@ -911,11 +922,13 @@ class Project(object): | |||
| 911 | else: | 922 | else: |
| 912 | return False | 923 | return False |
| 913 | 924 | ||
| 914 | def PrintWorkTreeStatus(self, output_redir=None): | 925 | def PrintWorkTreeStatus(self, output_redir=None, quiet=False): |
| 915 | """Prints the status of the repository to stdout. | 926 | """Prints the status of the repository to stdout. |
| 916 | 927 | ||
| 917 | Args: | 928 | Args: |
| 918 | output: If specified, redirect the output to this object. | 929 | output: If specified, redirect the output to this object. |
| 930 | quiet: If True then only print the project name. Do not print | ||
| 931 | the modified files, branch name, etc. | ||
| 919 | """ | 932 | """ |
| 920 | if not os.path.isdir(self.worktree): | 933 | if not os.path.isdir(self.worktree): |
| 921 | if output_redir is None: | 934 | if output_redir is None: |
| @@ -941,6 +954,10 @@ class Project(object): | |||
| 941 | out.redirect(output_redir) | 954 | out.redirect(output_redir) |
| 942 | out.project('project %-40s', self.relpath + '/ ') | 955 | out.project('project %-40s', self.relpath + '/ ') |
| 943 | 956 | ||
| 957 | if quiet: | ||
| 958 | out.nl() | ||
| 959 | return 'DIRTY' | ||
| 960 | |||
| 944 | branch = self.CurrentBranch | 961 | branch = self.CurrentBranch |
| 945 | if branch is None: | 962 | if branch is None: |
| 946 | out.nobranch('(*** NO BRANCH ***)') | 963 | out.nobranch('(*** NO BRANCH ***)') |
| @@ -1099,7 +1116,11 @@ class Project(object): | |||
| 1099 | people=([], []), | 1116 | people=([], []), |
| 1100 | auto_topic=False, | 1117 | auto_topic=False, |
| 1101 | draft=False, | 1118 | draft=False, |
| 1102 | dest_branch=None): | 1119 | private=False, |
| 1120 | wip=False, | ||
| 1121 | dest_branch=None, | ||
| 1122 | validate_certs=True, | ||
| 1123 | push_options=None): | ||
| 1103 | """Uploads the named branch for code review. | 1124 | """Uploads the named branch for code review. |
| 1104 | """ | 1125 | """ |
| 1105 | if branch is None: | 1126 | if branch is None: |
| @@ -1124,7 +1145,7 @@ class Project(object): | |||
| 1124 | branch.remote.projectname = self.name | 1145 | branch.remote.projectname = self.name |
| 1125 | branch.remote.Save() | 1146 | branch.remote.Save() |
| 1126 | 1147 | ||
| 1127 | url = branch.remote.ReviewUrl(self.UserEmail) | 1148 | url = branch.remote.ReviewUrl(self.UserEmail, validate_certs) |
| 1128 | if url is None: | 1149 | if url is None: |
| 1129 | raise UploadError('review not configured') | 1150 | raise UploadError('review not configured') |
| 1130 | cmd = ['push'] | 1151 | cmd = ['push'] |
| @@ -1137,6 +1158,10 @@ class Project(object): | |||
| 1137 | rp.append('--cc=%s' % sq(e)) | 1158 | rp.append('--cc=%s' % sq(e)) |
| 1138 | cmd.append('--receive-pack=%s' % " ".join(rp)) | 1159 | cmd.append('--receive-pack=%s' % " ".join(rp)) |
| 1139 | 1160 | ||
| 1161 | for push_option in (push_options or []): | ||
| 1162 | cmd.append('-o') | ||
| 1163 | cmd.append(push_option) | ||
| 1164 | |||
| 1140 | cmd.append(url) | 1165 | cmd.append(url) |
| 1141 | 1166 | ||
| 1142 | if dest_branch.startswith(R_HEADS): | 1167 | if dest_branch.startswith(R_HEADS): |
| @@ -1150,9 +1175,14 @@ class Project(object): | |||
| 1150 | dest_branch) | 1175 | dest_branch) |
| 1151 | if auto_topic: | 1176 | if auto_topic: |
| 1152 | ref_spec = ref_spec + '/' + branch.name | 1177 | ref_spec = ref_spec + '/' + branch.name |
| 1178 | |||
| 1153 | if not url.startswith('ssh://'): | 1179 | if not url.startswith('ssh://'): |
| 1154 | rp = ['r=%s' % p for p in people[0]] + \ | 1180 | rp = ['r=%s' % p for p in people[0]] + \ |
| 1155 | ['cc=%s' % p for p in people[1]] | 1181 | ['cc=%s' % p for p in people[1]] |
| 1182 | if private: | ||
| 1183 | rp = rp + ['private'] | ||
| 1184 | if wip: | ||
| 1185 | rp = rp + ['wip'] | ||
| 1156 | if rp: | 1186 | if rp: |
| 1157 | ref_spec = ref_spec + '%' + ','.join(rp) | 1187 | ref_spec = ref_spec + '%' + ','.join(rp) |
| 1158 | cmd.append(ref_spec) | 1188 | cmd.append(ref_spec) |
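For illustration, with one reviewer, one CC and both new flags set, the refspec assembled above for a non-ssh review URL ends up looking roughly like this (branch and addresses invented):

    refs/heads/my-topic:refs/for/master%r=alice@example.com,cc=bob@example.com,private,wip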
| @@ -1192,7 +1222,8 @@ class Project(object): | |||
| 1192 | no_tags=False, | 1222 | no_tags=False, |
| 1193 | archive=False, | 1223 | archive=False, |
| 1194 | optimized_fetch=False, | 1224 | optimized_fetch=False, |
| 1195 | prune=False): | 1225 | prune=False, |
| 1226 | submodules=False): | ||
| 1196 | """Perform only the network IO portion of the sync process. | 1227 | """Perform only the network IO portion of the sync process. |
| 1197 | Local working directory/branch state is not affected. | 1228 | Local working directory/branch state is not affected. |
| 1198 | """ | 1229 | """ |
| @@ -1218,7 +1249,7 @@ class Project(object): | |||
| 1218 | if not self._ExtractArchive(tarpath, path=topdir): | 1249 | if not self._ExtractArchive(tarpath, path=topdir): |
| 1219 | return False | 1250 | return False |
| 1220 | try: | 1251 | try: |
| 1221 | os.remove(tarpath) | 1252 | platform_utils.remove(tarpath) |
| 1222 | except OSError as e: | 1253 | except OSError as e: |
| 1223 | _warn("Cannot remove archive %s: %s", tarpath, str(e)) | 1254 | _warn("Cannot remove archive %s: %s", tarpath, str(e)) |
| 1224 | self._CopyAndLinkFiles() | 1255 | self._CopyAndLinkFiles() |
| @@ -1234,7 +1265,7 @@ class Project(object): | |||
| 1234 | if is_new: | 1265 | if is_new: |
| 1235 | alt = os.path.join(self.gitdir, 'objects/info/alternates') | 1266 | alt = os.path.join(self.gitdir, 'objects/info/alternates') |
| 1236 | try: | 1267 | try: |
| 1237 | fd = open(alt, 'rb') | 1268 | fd = open(alt) |
| 1238 | try: | 1269 | try: |
| 1239 | alt_dir = fd.readline().rstrip() | 1270 | alt_dir = fd.readline().rstrip() |
| 1240 | finally: | 1271 | finally: |
| @@ -1258,13 +1289,19 @@ class Project(object): | |||
| 1258 | elif self.manifest.default.sync_c: | 1289 | elif self.manifest.default.sync_c: |
| 1259 | current_branch_only = True | 1290 | current_branch_only = True |
| 1260 | 1291 | ||
| 1292 | if self.clone_depth: | ||
| 1293 | depth = self.clone_depth | ||
| 1294 | else: | ||
| 1295 | depth = self.manifest.manifestProject.config.GetString('repo.depth') | ||
| 1296 | |||
| 1261 | need_to_fetch = not (optimized_fetch and | 1297 | need_to_fetch = not (optimized_fetch and |
| 1262 | (ID_RE.match(self.revisionExpr) and | 1298 | (ID_RE.match(self.revisionExpr) and |
| 1263 | self._CheckForSha1())) | 1299 | self._CheckForImmutableRevision())) |
| 1264 | if (need_to_fetch and | 1300 | if (need_to_fetch and |
| 1265 | not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir, | 1301 | not self._RemoteFetch(initial=is_new, quiet=quiet, alt_dir=alt_dir, |
| 1266 | current_branch_only=current_branch_only, | 1302 | current_branch_only=current_branch_only, |
| 1267 | no_tags=no_tags, prune=prune)): | 1303 | no_tags=no_tags, prune=prune, depth=depth, |
| 1304 | submodules=submodules)): | ||
| 1268 | return False | 1305 | return False |
| 1269 | 1306 | ||
| 1270 | if self.worktree: | 1307 | if self.worktree: |
| @@ -1272,7 +1309,7 @@ class Project(object): | |||
| 1272 | else: | 1309 | else: |
| 1273 | self._InitMirrorHead() | 1310 | self._InitMirrorHead() |
| 1274 | try: | 1311 | try: |
| 1275 | os.remove(os.path.join(self.gitdir, 'FETCH_HEAD')) | 1312 | platform_utils.remove(os.path.join(self.gitdir, 'FETCH_HEAD')) |
| 1276 | except OSError: | 1313 | except OSError: |
| 1277 | pass | 1314 | pass |
| 1278 | return True | 1315 | return True |
| @@ -1320,11 +1357,11 @@ class Project(object): | |||
| 1320 | raise ManifestInvalidRevisionError('revision %s in %s not found' % | 1357 | raise ManifestInvalidRevisionError('revision %s in %s not found' % |
| 1321 | (self.revisionExpr, self.name)) | 1358 | (self.revisionExpr, self.name)) |
| 1322 | 1359 | ||
| 1323 | def Sync_LocalHalf(self, syncbuf, force_sync=False): | 1360 | def Sync_LocalHalf(self, syncbuf, force_sync=False, submodules=False): |
| 1324 | """Perform only the local IO portion of the sync process. | 1361 | """Perform only the local IO portion of the sync process. |
| 1325 | Network access is not required. | 1362 | Network access is not required. |
| 1326 | """ | 1363 | """ |
| 1327 | self._InitWorkTree(force_sync=force_sync) | 1364 | self._InitWorkTree(force_sync=force_sync, submodules=submodules) |
| 1328 | all_refs = self.bare_ref.all | 1365 | all_refs = self.bare_ref.all |
| 1329 | self.CleanPublishedCache(all_refs) | 1366 | self.CleanPublishedCache(all_refs) |
| 1330 | revid = self.GetRevisionId(all_refs) | 1367 | revid = self.GetRevisionId(all_refs) |
| @@ -1333,6 +1370,9 @@ class Project(object): | |||
| 1333 | self._FastForward(revid) | 1370 | self._FastForward(revid) |
| 1334 | self._CopyAndLinkFiles() | 1371 | self._CopyAndLinkFiles() |
| 1335 | 1372 | ||
| 1373 | def _dosubmodules(): | ||
| 1374 | self._SyncSubmodules(quiet=True) | ||
| 1375 | |||
| 1336 | head = self.work_git.GetHead() | 1376 | head = self.work_git.GetHead() |
| 1337 | if head.startswith(R_HEADS): | 1377 | if head.startswith(R_HEADS): |
| 1338 | branch = head[len(R_HEADS):] | 1378 | branch = head[len(R_HEADS):] |
| @@ -1366,6 +1406,8 @@ class Project(object): | |||
| 1366 | 1406 | ||
| 1367 | try: | 1407 | try: |
| 1368 | self._Checkout(revid, quiet=True) | 1408 | self._Checkout(revid, quiet=True) |
| 1409 | if submodules: | ||
| 1410 | self._SyncSubmodules(quiet=True) | ||
| 1369 | except GitError as e: | 1411 | except GitError as e: |
| 1370 | syncbuf.fail(self, e) | 1412 | syncbuf.fail(self, e) |
| 1371 | return | 1413 | return |
| @@ -1390,6 +1432,8 @@ class Project(object): | |||
| 1390 | branch.name) | 1432 | branch.name) |
| 1391 | try: | 1433 | try: |
| 1392 | self._Checkout(revid, quiet=True) | 1434 | self._Checkout(revid, quiet=True) |
| 1435 | if submodules: | ||
| 1436 | self._SyncSubmodules(quiet=True) | ||
| 1393 | except GitError as e: | 1437 | except GitError as e: |
| 1394 | syncbuf.fail(self, e) | 1438 | syncbuf.fail(self, e) |
| 1395 | return | 1439 | return |
| @@ -1415,6 +1459,8 @@ class Project(object): | |||
| 1415 | # strict subset. We can fast-forward safely. | 1459 | # strict subset. We can fast-forward safely. |
| 1416 | # | 1460 | # |
| 1417 | syncbuf.later1(self, _doff) | 1461 | syncbuf.later1(self, _doff) |
| 1462 | if submodules: | ||
| 1463 | syncbuf.later1(self, _dosubmodules) | ||
| 1418 | return | 1464 | return |
| 1419 | 1465 | ||
| 1420 | # Examine the local commits not in the remote. Find the | 1466 | # Examine the local commits not in the remote. Find the |
| @@ -1466,19 +1512,28 @@ class Project(object): | |||
| 1466 | branch.Save() | 1512 | branch.Save() |
| 1467 | 1513 | ||
| 1468 | if cnt_mine > 0 and self.rebase: | 1514 | if cnt_mine > 0 and self.rebase: |
| 1515 | def _docopyandlink(): | ||
| 1516 | self._CopyAndLinkFiles() | ||
| 1517 | |||
| 1469 | def _dorebase(): | 1518 | def _dorebase(): |
| 1470 | self._Rebase(upstream='%s^1' % last_mine, onto=revid) | 1519 | self._Rebase(upstream='%s^1' % last_mine, onto=revid) |
| 1471 | self._CopyAndLinkFiles() | ||
| 1472 | syncbuf.later2(self, _dorebase) | 1520 | syncbuf.later2(self, _dorebase) |
| 1521 | if submodules: | ||
| 1522 | syncbuf.later2(self, _dosubmodules) | ||
| 1523 | syncbuf.later2(self, _docopyandlink) | ||
| 1473 | elif local_changes: | 1524 | elif local_changes: |
| 1474 | try: | 1525 | try: |
| 1475 | self._ResetHard(revid) | 1526 | self._ResetHard(revid) |
| 1527 | if submodules: | ||
| 1528 | self._SyncSubmodules(quiet=True) | ||
| 1476 | self._CopyAndLinkFiles() | 1529 | self._CopyAndLinkFiles() |
| 1477 | except GitError as e: | 1530 | except GitError as e: |
| 1478 | syncbuf.fail(self, e) | 1531 | syncbuf.fail(self, e) |
| 1479 | return | 1532 | return |
| 1480 | else: | 1533 | else: |
| 1481 | syncbuf.later1(self, _doff) | 1534 | syncbuf.later1(self, _doff) |
| 1535 | if submodules: | ||
| 1536 | syncbuf.later1(self, _dosubmodules) | ||
| 1482 | 1537 | ||
| 1483 | def AddCopyFile(self, src, dest, absdest): | 1538 | def AddCopyFile(self, src, dest, absdest): |
| 1484 | # dest should already be an absolute path, but src is project relative | 1539 | # dest should already be an absolute path, but src is project relative |
| @@ -1764,7 +1819,7 @@ class Project(object): | |||
| 1764 | except GitError: | 1819 | except GitError: |
| 1765 | return [], [] | 1820 | return [], [] |
| 1766 | finally: | 1821 | finally: |
| 1767 | os.remove(temp_gitmodules_path) | 1822 | platform_utils.remove(temp_gitmodules_path) |
| 1768 | 1823 | ||
| 1769 | names = set() | 1824 | names = set() |
| 1770 | paths = {} | 1825 | paths = {} |
| @@ -1851,7 +1906,7 @@ class Project(object): | |||
| 1851 | 1906 | ||
| 1852 | 1907 | ||
| 1853 | # Direct Git Commands ## | 1908 | # Direct Git Commands ## |
| 1854 | def _CheckForSha1(self): | 1909 | def _CheckForImmutableRevision(self): |
| 1855 | try: | 1910 | try: |
| 1856 | # if revision (sha or tag) is not present then following function | 1911 | # if revision (sha or tag) is not present then following function |
| 1857 | # throws an error. | 1912 | # throws an error. |
| @@ -1880,23 +1935,18 @@ class Project(object): | |||
| 1880 | quiet=False, | 1935 | quiet=False, |
| 1881 | alt_dir=None, | 1936 | alt_dir=None, |
| 1882 | no_tags=False, | 1937 | no_tags=False, |
| 1883 | prune=False): | 1938 | prune=False, |
| 1939 | depth=None, | ||
| 1940 | submodules=False): | ||
| 1884 | 1941 | ||
| 1885 | is_sha1 = False | 1942 | is_sha1 = False |
| 1886 | tag_name = None | 1943 | tag_name = None |
| 1887 | depth = None | ||
| 1888 | |||
| 1889 | # The depth should not be used when fetching to a mirror because | 1944 | # The depth should not be used when fetching to a mirror because |
| 1890 | # it will result in a shallow repository that cannot be cloned or | 1945 | # it will result in a shallow repository that cannot be cloned or |
| 1891 | # fetched from. | 1946 | # fetched from. |
| 1892 | if not self.manifest.IsMirror: | 1947 | # The repo project should also never be synced with partial depth. |
| 1893 | if self.clone_depth: | 1948 | if self.manifest.IsMirror or self.relpath == '.repo/repo': |
| 1894 | depth = self.clone_depth | 1949 | depth = None |
| 1895 | else: | ||
| 1896 | depth = self.manifest.manifestProject.config.GetString('repo.depth') | ||
| 1897 | # The repo project should never be synced with partial depth | ||
| 1898 | if self.relpath == '.repo/repo': | ||
| 1899 | depth = None | ||
| 1900 | 1950 | ||
| 1901 | if depth: | 1951 | if depth: |
| 1902 | current_branch_only = True | 1952 | current_branch_only = True |
| @@ -1910,7 +1960,9 @@ class Project(object): | |||
| 1910 | tag_name = self.revisionExpr[len(R_TAGS):] | 1960 | tag_name = self.revisionExpr[len(R_TAGS):] |
| 1911 | 1961 | ||
| 1912 | if is_sha1 or tag_name is not None: | 1962 | if is_sha1 or tag_name is not None: |
| 1913 | if self._CheckForSha1(): | 1963 | if self._CheckForImmutableRevision(): |
| 1964 | print('Skipped fetching project %s (already have persistent ref)' | ||
| 1965 | % self.name) | ||
| 1914 | return True | 1966 | return True |
| 1915 | if is_sha1 and not depth: | 1967 | if is_sha1 and not depth: |
| 1916 | # When syncing a specific commit and --depth is not set: | 1968 | # When syncing a specific commit and --depth is not set: |
| @@ -1958,15 +2010,17 @@ class Project(object): | |||
| 1958 | ids.add(ref_id) | 2010 | ids.add(ref_id) |
| 1959 | tmp.add(r) | 2011 | tmp.add(r) |
| 1960 | 2012 | ||
| 1961 | tmp_packed = '' | 2013 | tmp_packed_lines = [] |
| 1962 | old_packed = '' | 2014 | old_packed_lines = [] |
| 1963 | 2015 | ||
| 1964 | for r in sorted(all_refs): | 2016 | for r in sorted(all_refs): |
| 1965 | line = '%s %s\n' % (all_refs[r], r) | 2017 | line = '%s %s\n' % (all_refs[r], r) |
| 1966 | tmp_packed += line | 2018 | tmp_packed_lines.append(line) |
| 1967 | if r not in tmp: | 2019 | if r not in tmp: |
| 1968 | old_packed += line | 2020 | old_packed_lines.append(line) |
| 1969 | 2021 | ||
| 2022 | tmp_packed = ''.join(tmp_packed_lines) | ||
| 2023 | old_packed = ''.join(old_packed_lines) | ||
| 1970 | _lwrite(packed_refs, tmp_packed) | 2024 | _lwrite(packed_refs, tmp_packed) |
| 1971 | else: | 2025 | else: |
| 1972 | alt_dir = None | 2026 | alt_dir = None |
| @@ -1999,6 +2053,9 @@ class Project(object): | |||
| 1999 | if prune: | 2053 | if prune: |
| 2000 | cmd.append('--prune') | 2054 | cmd.append('--prune') |
| 2001 | 2055 | ||
| 2056 | if submodules: | ||
| 2057 | cmd.append('--recurse-submodules=on-demand') | ||
| 2058 | |||
| 2002 | spec = [] | 2059 | spec = [] |
| 2003 | if not current_branch_only: | 2060 | if not current_branch_only: |
| 2004 | # Fetch whole repo | 2061 | # Fetch whole repo |
| @@ -2054,24 +2111,25 @@ class Project(object): | |||
| 2054 | if old_packed != '': | 2111 | if old_packed != '': |
| 2055 | _lwrite(packed_refs, old_packed) | 2112 | _lwrite(packed_refs, old_packed) |
| 2056 | else: | 2113 | else: |
| 2057 | os.remove(packed_refs) | 2114 | platform_utils.remove(packed_refs) |
| 2058 | self.bare_git.pack_refs('--all', '--prune') | 2115 | self.bare_git.pack_refs('--all', '--prune') |
| 2059 | 2116 | ||
| 2060 | if is_sha1 and current_branch_only and self.upstream: | 2117 | if is_sha1 and current_branch_only: |
| 2061 | # We just synced the upstream given branch; verify we | 2118 | # We just synced the upstream given branch; verify we |
| 2062 | # got what we wanted, else trigger a second run of all | 2119 | # got what we wanted, else trigger a second run of all |
| 2063 | # refs. | 2120 | # refs. |
| 2064 | if not self._CheckForSha1(): | 2121 | if not self._CheckForImmutableRevision(): |
| 2065 | if not depth: | 2122 | if current_branch_only and depth: |
| 2066 | # Avoid infinite recursion when depth is True (since depth implies | 2123 | # Sync the current branch only with depth set to None |
| 2067 | # current_branch_only) | ||
| 2068 | return self._RemoteFetch(name=name, current_branch_only=False, | ||
| 2069 | initial=False, quiet=quiet, alt_dir=alt_dir) | ||
| 2070 | if self.clone_depth: | ||
| 2071 | self.clone_depth = None | ||
| 2072 | return self._RemoteFetch(name=name, | 2124 | return self._RemoteFetch(name=name, |
| 2073 | current_branch_only=current_branch_only, | 2125 | current_branch_only=current_branch_only, |
| 2074 | initial=False, quiet=quiet, alt_dir=alt_dir) | 2126 | initial=False, quiet=quiet, alt_dir=alt_dir, |
| 2127 | depth=None) | ||
| 2128 | else: | ||
| 2129 | # Avoid infinite recursion: sync all branches with depth set to None | ||
| 2130 | return self._RemoteFetch(name=name, current_branch_only=False, | ||
| 2131 | initial=False, quiet=quiet, alt_dir=alt_dir, | ||
| 2132 | depth=None) | ||
| 2075 | 2133 | ||
| 2076 | return ok | 2134 | return ok |
| 2077 | 2135 | ||
| @@ -2115,14 +2173,14 @@ class Project(object): | |||
| 2115 | 2173 | ||
| 2116 | ok = GitCommand(self, cmd, bare=True).Wait() == 0 | 2174 | ok = GitCommand(self, cmd, bare=True).Wait() == 0 |
| 2117 | if os.path.exists(bundle_dst): | 2175 | if os.path.exists(bundle_dst): |
| 2118 | os.remove(bundle_dst) | 2176 | platform_utils.remove(bundle_dst) |
| 2119 | if os.path.exists(bundle_tmp): | 2177 | if os.path.exists(bundle_tmp): |
| 2120 | os.remove(bundle_tmp) | 2178 | platform_utils.remove(bundle_tmp) |
| 2121 | return ok | 2179 | return ok |
| 2122 | 2180 | ||
| 2123 | def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet): | 2181 | def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet): |
| 2124 | if os.path.exists(dstPath): | 2182 | if os.path.exists(dstPath): |
| 2125 | os.remove(dstPath) | 2183 | platform_utils.remove(dstPath) |
| 2126 | 2184 | ||
| 2127 | cmd = ['curl', '--fail', '--output', tmpPath, '--netrc', '--location'] | 2185 | cmd = ['curl', '--fail', '--output', tmpPath, '--netrc', '--location'] |
| 2128 | if quiet: | 2186 | if quiet: |
| @@ -2132,7 +2190,7 @@ class Project(object): | |||
| 2132 | if size >= 1024: | 2190 | if size >= 1024: |
| 2133 | cmd += ['--continue-at', '%d' % (size,)] | 2191 | cmd += ['--continue-at', '%d' % (size,)] |
| 2134 | else: | 2192 | else: |
| 2135 | os.remove(tmpPath) | 2193 | platform_utils.remove(tmpPath) |
| 2136 | if 'http_proxy' in os.environ and 'darwin' == sys.platform: | 2194 | if 'http_proxy' in os.environ and 'darwin' == sys.platform: |
| 2137 | cmd += ['--proxy', os.environ['http_proxy']] | 2195 | cmd += ['--proxy', os.environ['http_proxy']] |
| 2138 | with GetUrlCookieFile(srcUrl, quiet) as (cookiefile, _proxy): | 2196 | with GetUrlCookieFile(srcUrl, quiet) as (cookiefile, _proxy): |
| @@ -2163,10 +2221,10 @@ class Project(object): | |||
| 2163 | 2221 | ||
| 2164 | if os.path.exists(tmpPath): | 2222 | if os.path.exists(tmpPath): |
| 2165 | if curlret == 0 and self._IsValidBundle(tmpPath, quiet): | 2223 | if curlret == 0 and self._IsValidBundle(tmpPath, quiet): |
| 2166 | os.rename(tmpPath, dstPath) | 2224 | platform_utils.rename(tmpPath, dstPath) |
| 2167 | return True | 2225 | return True |
| 2168 | else: | 2226 | else: |
| 2169 | os.remove(tmpPath) | 2227 | platform_utils.remove(tmpPath) |
| 2170 | return False | 2228 | return False |
| 2171 | else: | 2229 | else: |
| 2172 | return False | 2230 | return False |
| @@ -2218,6 +2276,13 @@ class Project(object): | |||
| 2218 | if GitCommand(self, cmd).Wait() != 0: | 2276 | if GitCommand(self, cmd).Wait() != 0: |
| 2219 | raise GitError('%s reset --hard %s ' % (self.name, rev)) | 2277 | raise GitError('%s reset --hard %s ' % (self.name, rev)) |
| 2220 | 2278 | ||
| 2279 | def _SyncSubmodules(self, quiet=True): | ||
| 2280 | cmd = ['submodule', 'update', '--init', '--recursive'] | ||
| 2281 | if quiet: | ||
| 2282 | cmd.append('-q') | ||
| 2283 | if GitCommand(self, cmd).Wait() != 0: | ||
| 2284 | raise GitError('%s submodule update --init --recursive ' % self.name) | ||
| 2285 | |||
| 2221 | def _Rebase(self, upstream, onto=None): | 2286 | def _Rebase(self, upstream, onto=None): |
| 2222 | cmd = ['rebase'] | 2287 | cmd = ['rebase'] |
| 2223 | if onto is not None: | 2288 | if onto is not None: |
| @@ -2257,10 +2322,10 @@ class Project(object): | |||
| 2257 | print("Retrying clone after deleting %s" % | 2322 | print("Retrying clone after deleting %s" % |
| 2258 | self.gitdir, file=sys.stderr) | 2323 | self.gitdir, file=sys.stderr) |
| 2259 | try: | 2324 | try: |
| 2260 | shutil.rmtree(os.path.realpath(self.gitdir)) | 2325 | platform_utils.rmtree(platform_utils.realpath(self.gitdir)) |
| 2261 | if self.worktree and os.path.exists(os.path.realpath | 2326 | if self.worktree and os.path.exists(platform_utils.realpath |
| 2262 | (self.worktree)): | 2327 | (self.worktree)): |
| 2263 | shutil.rmtree(os.path.realpath(self.worktree)) | 2328 | platform_utils.rmtree(platform_utils.realpath(self.worktree)) |
| 2264 | return self._InitGitDir(mirror_git=mirror_git, force_sync=False) | 2329 | return self._InitGitDir(mirror_git=mirror_git, force_sync=False) |
| 2265 | except: | 2330 | except: |
| 2266 | raise e | 2331 | raise e |
| @@ -2302,9 +2367,9 @@ class Project(object): | |||
| 2302 | self.config.SetString('core.bare', None) | 2367 | self.config.SetString('core.bare', None) |
| 2303 | except Exception: | 2368 | except Exception: |
| 2304 | if init_obj_dir and os.path.exists(self.objdir): | 2369 | if init_obj_dir and os.path.exists(self.objdir): |
| 2305 | shutil.rmtree(self.objdir) | 2370 | platform_utils.rmtree(self.objdir) |
| 2306 | if init_git_dir and os.path.exists(self.gitdir): | 2371 | if init_git_dir and os.path.exists(self.gitdir): |
| 2307 | shutil.rmtree(self.gitdir) | 2372 | platform_utils.rmtree(self.gitdir) |
| 2308 | raise | 2373 | raise |
| 2309 | 2374 | ||
| 2310 | def _UpdateHooks(self): | 2375 | def _UpdateHooks(self): |
| @@ -2312,7 +2377,7 @@ class Project(object): | |||
| 2312 | self._InitHooks() | 2377 | self._InitHooks() |
| 2313 | 2378 | ||
| 2314 | def _InitHooks(self): | 2379 | def _InitHooks(self): |
| 2315 | hooks = os.path.realpath(self._gitdir_path('hooks')) | 2380 | hooks = platform_utils.realpath(self._gitdir_path('hooks')) |
| 2316 | if not os.path.exists(hooks): | 2381 | if not os.path.exists(hooks): |
| 2317 | os.makedirs(hooks) | 2382 | os.makedirs(hooks) |
| 2318 | for stock_hook in _ProjectHooks(): | 2383 | for stock_hook in _ProjectHooks(): |
| @@ -2328,20 +2393,21 @@ class Project(object): | |||
| 2328 | continue | 2393 | continue |
| 2329 | 2394 | ||
| 2330 | dst = os.path.join(hooks, name) | 2395 | dst = os.path.join(hooks, name) |
| 2331 | if os.path.islink(dst): | 2396 | if platform_utils.islink(dst): |
| 2332 | continue | 2397 | continue |
| 2333 | if os.path.exists(dst): | 2398 | if os.path.exists(dst): |
| 2334 | if filecmp.cmp(stock_hook, dst, shallow=False): | 2399 | if filecmp.cmp(stock_hook, dst, shallow=False): |
| 2335 | os.remove(dst) | 2400 | platform_utils.remove(dst) |
| 2336 | else: | 2401 | else: |
| 2337 | _warn("%s: Not replacing locally modified %s hook", | 2402 | _warn("%s: Not replacing locally modified %s hook", |
| 2338 | self.relpath, name) | 2403 | self.relpath, name) |
| 2339 | continue | 2404 | continue |
| 2340 | try: | 2405 | try: |
| 2341 | os.symlink(os.path.relpath(stock_hook, os.path.dirname(dst)), dst) | 2406 | platform_utils.symlink( |
| 2407 | os.path.relpath(stock_hook, os.path.dirname(dst)), dst) | ||
| 2342 | except OSError as e: | 2408 | except OSError as e: |
| 2343 | if e.errno == errno.EPERM: | 2409 | if e.errno == errno.EPERM: |
| 2344 | raise GitError('filesystem must support symlinks') | 2410 | raise GitError(self._get_symlink_error_message()) |
| 2345 | else: | 2411 | else: |
| 2346 | raise | 2412 | raise |
| 2347 | 2413 | ||
| @@ -2389,11 +2455,12 @@ class Project(object): | |||
| 2389 | symlink_dirs += self.working_tree_dirs | 2455 | symlink_dirs += self.working_tree_dirs |
| 2390 | to_symlink = symlink_files + symlink_dirs | 2456 | to_symlink = symlink_files + symlink_dirs |
| 2391 | for name in set(to_symlink): | 2457 | for name in set(to_symlink): |
| 2392 | dst = os.path.realpath(os.path.join(destdir, name)) | 2458 | dst = platform_utils.realpath(os.path.join(destdir, name)) |
| 2393 | if os.path.lexists(dst): | 2459 | if os.path.lexists(dst): |
| 2394 | src = os.path.realpath(os.path.join(srcdir, name)) | 2460 | src = platform_utils.realpath(os.path.join(srcdir, name)) |
| 2395 | # Fail if the links are pointing to the wrong place | 2461 | # Fail if the links are pointing to the wrong place |
| 2396 | if src != dst: | 2462 | if src != dst: |
| 2463 | _error('%s is different in %s vs %s', name, destdir, srcdir) | ||
| 2397 | raise GitError('--force-sync not enabled; cannot overwrite a local ' | 2464 | raise GitError('--force-sync not enabled; cannot overwrite a local ' |
| 2398 | 'work tree. If you\'re comfortable with the ' | 2465 | 'work tree. If you\'re comfortable with the ' |
| 2399 | 'possibility of losing the work tree\'s git metadata,' | 2466 | 'possibility of losing the work tree\'s git metadata,' |
| @@ -2422,10 +2489,10 @@ class Project(object): | |||
| 2422 | if copy_all: | 2489 | if copy_all: |
| 2423 | to_copy = os.listdir(gitdir) | 2490 | to_copy = os.listdir(gitdir) |
| 2424 | 2491 | ||
| 2425 | dotgit = os.path.realpath(dotgit) | 2492 | dotgit = platform_utils.realpath(dotgit) |
| 2426 | for name in set(to_copy).union(to_symlink): | 2493 | for name in set(to_copy).union(to_symlink): |
| 2427 | try: | 2494 | try: |
| 2428 | src = os.path.realpath(os.path.join(gitdir, name)) | 2495 | src = platform_utils.realpath(os.path.join(gitdir, name)) |
| 2429 | dst = os.path.join(dotgit, name) | 2496 | dst = os.path.join(dotgit, name) |
| 2430 | 2497 | ||
| 2431 | if os.path.lexists(dst): | 2498 | if os.path.lexists(dst): |
| @@ -2435,28 +2502,30 @@ class Project(object): | |||
| 2435 | if name in symlink_dirs and not os.path.lexists(src): | 2502 | if name in symlink_dirs and not os.path.lexists(src): |
| 2436 | os.makedirs(src) | 2503 | os.makedirs(src) |
| 2437 | 2504 | ||
| 2505 | if name in to_symlink: | ||
| 2506 | platform_utils.symlink( | ||
| 2507 | os.path.relpath(src, os.path.dirname(dst)), dst) | ||
| 2508 | elif copy_all and not platform_utils.islink(dst): | ||
| 2509 | if os.path.isdir(src): | ||
| 2510 | shutil.copytree(src, dst) | ||
| 2511 | elif os.path.isfile(src): | ||
| 2512 | shutil.copy(src, dst) | ||
| 2513 | |||
| 2438 | # If the source file doesn't exist, ensure the destination | 2514 | # If the source file doesn't exist, ensure the destination |
| 2439 | # file doesn't either. | 2515 | # file doesn't either. |
| 2440 | if name in symlink_files and not os.path.lexists(src): | 2516 | if name in symlink_files and not os.path.lexists(src): |
| 2441 | try: | 2517 | try: |
| 2442 | os.remove(dst) | 2518 | platform_utils.remove(dst) |
| 2443 | except OSError: | 2519 | except OSError: |
| 2444 | pass | 2520 | pass |
| 2445 | 2521 | ||
| 2446 | if name in to_symlink: | ||
| 2447 | os.symlink(os.path.relpath(src, os.path.dirname(dst)), dst) | ||
| 2448 | elif copy_all and not os.path.islink(dst): | ||
| 2449 | if os.path.isdir(src): | ||
| 2450 | shutil.copytree(src, dst) | ||
| 2451 | elif os.path.isfile(src): | ||
| 2452 | shutil.copy(src, dst) | ||
| 2453 | except OSError as e: | 2522 | except OSError as e: |
| 2454 | if e.errno == errno.EPERM: | 2523 | if e.errno == errno.EPERM: |
| 2455 | raise DownloadError('filesystem must support symlinks') | 2524 | raise DownloadError(self._get_symlink_error_message()) |
| 2456 | else: | 2525 | else: |
| 2457 | raise | 2526 | raise |
| 2458 | 2527 | ||
| 2459 | def _InitWorkTree(self, force_sync=False): | 2528 | def _InitWorkTree(self, force_sync=False, submodules=False): |
| 2460 | dotgit = os.path.join(self.worktree, '.git') | 2529 | dotgit = os.path.join(self.worktree, '.git') |
| 2461 | init_dotgit = not os.path.exists(dotgit) | 2530 | init_dotgit = not os.path.exists(dotgit) |
| 2462 | try: | 2531 | try: |
| @@ -2470,8 +2539,8 @@ class Project(object): | |||
| 2470 | except GitError as e: | 2539 | except GitError as e: |
| 2471 | if force_sync: | 2540 | if force_sync: |
| 2472 | try: | 2541 | try: |
| 2473 | shutil.rmtree(dotgit) | 2542 | platform_utils.rmtree(dotgit) |
| 2474 | return self._InitWorkTree(force_sync=False) | 2543 | return self._InitWorkTree(force_sync=False, submodules=submodules) |
| 2475 | except: | 2544 | except: |
| 2476 | raise e | 2545 | raise e |
| 2477 | raise e | 2546 | raise e |
| @@ -2485,14 +2554,24 @@ class Project(object): | |||
| 2485 | if GitCommand(self, cmd).Wait() != 0: | 2554 | if GitCommand(self, cmd).Wait() != 0: |
| 2486 | raise GitError("cannot initialize work tree") | 2555 | raise GitError("cannot initialize work tree") |
| 2487 | 2556 | ||
| 2557 | if submodules: | ||
| 2558 | self._SyncSubmodules(quiet=True) | ||
| 2488 | self._CopyAndLinkFiles() | 2559 | self._CopyAndLinkFiles() |
| 2489 | except Exception: | 2560 | except Exception: |
| 2490 | if init_dotgit: | 2561 | if init_dotgit: |
| 2491 | shutil.rmtree(dotgit) | 2562 | platform_utils.rmtree(dotgit) |
| 2492 | raise | 2563 | raise |
| 2493 | 2564 | ||
| 2565 | def _get_symlink_error_message(self): | ||
| 2566 | if platform_utils.isWindows(): | ||
| 2567 | return ('Unable to create symbolic link. Please re-run the command as ' | ||
| 2568 | 'Administrator, or see ' | ||
| 2569 | 'https://github.com/git-for-windows/git/wiki/Symbolic-Links ' | ||
| 2570 | 'for other options.') | ||
| 2571 | return 'filesystem must support symlinks' | ||
| 2572 | |||
| 2494 | def _gitdir_path(self, path): | 2573 | def _gitdir_path(self, path): |
| 2495 | return os.path.realpath(os.path.join(self.gitdir, path)) | 2574 | return platform_utils.realpath(os.path.join(self.gitdir, path)) |
| 2496 | 2575 | ||
| 2497 | def _revlist(self, *args, **kw): | 2576 | def _revlist(self, *args, **kw): |
| 2498 | a = [] | 2577 | a = [] |
| @@ -2627,11 +2706,11 @@ class Project(object): | |||
| 2627 | else: | 2706 | else: |
| 2628 | path = os.path.join(self._project.worktree, '.git', HEAD) | 2707 | path = os.path.join(self._project.worktree, '.git', HEAD) |
| 2629 | try: | 2708 | try: |
| 2630 | fd = open(path, 'rb') | 2709 | fd = open(path) |
| 2631 | except IOError as e: | 2710 | except IOError as e: |
| 2632 | raise NoManifestException(path, str(e)) | 2711 | raise NoManifestException(path, str(e)) |
| 2633 | try: | 2712 | try: |
| 2634 | line = fd.read() | 2713 | line = fd.readline() |
| 2635 | finally: | 2714 | finally: |
| 2636 | fd.close() | 2715 | fd.close() |
| 2637 | try: | 2716 | try: |
| @@ -2833,13 +2912,14 @@ class SyncBuffer(object): | |||
| 2833 | 2912 | ||
| 2834 | self.detach_head = detach_head | 2913 | self.detach_head = detach_head |
| 2835 | self.clean = True | 2914 | self.clean = True |
| 2915 | self.recent_clean = True | ||
| 2836 | 2916 | ||
| 2837 | def info(self, project, fmt, *args): | 2917 | def info(self, project, fmt, *args): |
| 2838 | self._messages.append(_InfoMessage(project, fmt % args)) | 2918 | self._messages.append(_InfoMessage(project, fmt % args)) |
| 2839 | 2919 | ||
| 2840 | def fail(self, project, err=None): | 2920 | def fail(self, project, err=None): |
| 2841 | self._failures.append(_Failure(project, err)) | 2921 | self._failures.append(_Failure(project, err)) |
| 2842 | self.clean = False | 2922 | self._MarkUnclean() |
| 2843 | 2923 | ||
| 2844 | def later1(self, project, what): | 2924 | def later1(self, project, what): |
| 2845 | self._later_queue1.append(_Later(project, what)) | 2925 | self._later_queue1.append(_Later(project, what)) |
| @@ -2853,6 +2933,15 @@ class SyncBuffer(object): | |||
| 2853 | self._PrintMessages() | 2933 | self._PrintMessages() |
| 2854 | return self.clean | 2934 | return self.clean |
| 2855 | 2935 | ||
| 2936 | def Recently(self): | ||
| 2937 | recent_clean = self.recent_clean | ||
| 2938 | self.recent_clean = True | ||
| 2939 | return recent_clean | ||
| 2940 | |||
| 2941 | def _MarkUnclean(self): | ||
| 2942 | self.clean = False | ||
| 2943 | self.recent_clean = False | ||
| 2944 | |||
| 2856 | def _RunLater(self): | 2945 | def _RunLater(self): |
| 2857 | for q in ['_later_queue1', '_later_queue2']: | 2946 | for q in ['_later_queue1', '_later_queue2']: |
| 2858 | if not self._RunQueue(q): | 2947 | if not self._RunQueue(q): |
| @@ -2861,7 +2950,7 @@ class SyncBuffer(object): | |||
| 2861 | def _RunQueue(self, queue): | 2950 | def _RunQueue(self, queue): |
| 2862 | for m in getattr(self, queue): | 2951 | for m in getattr(self, queue): |
| 2863 | if not m.Run(self): | 2952 | if not m.Run(self): |
| 2864 | self.clean = False | 2953 | self._MarkUnclean() |
| 2865 | return False | 2954 | return False |
| 2866 | setattr(self, queue, []) | 2955 | setattr(self, queue, []) |
| 2867 | return True | 2956 | return True |
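A hedged sketch of how a caller such as the sync command might consume the new Recently() accessor to report a failure right after the project that caused it (the loop and message are illustrative, not lifted from sync.py):

    buf = SyncBuffer(manifest.manifestProject.config)
    for project in projects:
        project.Sync_LocalHalf(buf)
        if not buf.Recently():  # False only if this project just reported a failure
            print('error: Cannot checkout %s' % project.name)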
| @@ -2903,14 +2992,14 @@ class MetaProject(Project): | |||
| 2903 | self.revisionExpr = base | 2992 | self.revisionExpr = base |
| 2904 | self.revisionId = None | 2993 | self.revisionId = None |
| 2905 | 2994 | ||
| 2906 | def MetaBranchSwitch(self): | 2995 | def MetaBranchSwitch(self, submodules=False): |
| 2907 | """ Prepare MetaProject for manifest branch switch | 2996 | """ Prepare MetaProject for manifest branch switch |
| 2908 | """ | 2997 | """ |
| 2909 | 2998 | ||
| 2910 | # detach and delete manifest branch, allowing a new | 2999 | # detach and delete manifest branch, allowing a new |
| 2911 | # branch to take over | 3000 | # branch to take over |
| 2912 | syncbuf = SyncBuffer(self.config, detach_head=True) | 3001 | syncbuf = SyncBuffer(self.config, detach_head=True) |
| 2913 | self.Sync_LocalHalf(syncbuf) | 3002 | self.Sync_LocalHalf(syncbuf, submodules=submodules) |
| 2914 | syncbuf.Finish() | 3003 | syncbuf.Finish() |
| 2915 | 3004 | ||
| 2916 | return GitCommand(self, | 3005 | return GitCommand(self, |
| @@ -23,7 +23,7 @@ REPO_REV = 'stable' | |||
| 23 | # limitations under the License. | 23 | # limitations under the License. |
| 24 | 24 | ||
| 25 | # increment this whenever we make important changes to this script | 25 | # increment this whenever we make important changes to this script |
| 26 | VERSION = (1, 23) | 26 | VERSION = (1, 24) |
| 27 | 27 | ||
| 28 | # increment this if the MAINTAINER_KEYS block is modified | 28 | # increment this if the MAINTAINER_KEYS block is modified |
| 29 | KEYRING_VERSION = (1, 2) | 29 | KEYRING_VERSION = (1, 2) |
| @@ -120,6 +120,7 @@ GITC_FS_ROOT_DIR = '/gitc/manifest-rw/' | |||
| 120 | 120 | ||
| 121 | import errno | 121 | import errno |
| 122 | import optparse | 122 | import optparse |
| 123 | import platform | ||
| 123 | import re | 124 | import re |
| 124 | import shutil | 125 | import shutil |
| 125 | import stat | 126 | import stat |
| @@ -175,6 +176,9 @@ group.add_option('-b', '--manifest-branch', | |||
| 175 | group.add_option('-m', '--manifest-name', | 176 | group.add_option('-m', '--manifest-name', |
| 176 | dest='manifest_name', | 177 | dest='manifest_name', |
| 177 | help='initial manifest file', metavar='NAME.xml') | 178 | help='initial manifest file', metavar='NAME.xml') |
| 179 | group.add_option('-c', '--current-branch', | ||
| 180 | dest='current_branch_only', action='store_true', | ||
| 181 | help='fetch only current manifest branch from server') | ||
| 178 | group.add_option('--mirror', | 182 | group.add_option('--mirror', |
| 179 | dest='mirror', action='store_true', | 183 | dest='mirror', action='store_true', |
| 180 | help='create a replica of the remote repositories ' | 184 | help='create a replica of the remote repositories ' |
| @@ -189,6 +193,9 @@ group.add_option('--archive', | |||
| 189 | dest='archive', action='store_true', | 193 | dest='archive', action='store_true', |
| 190 | help='checkout an archive instead of a git repository for ' | 194 | help='checkout an archive instead of a git repository for ' |
| 191 | 'each project. See git archive.') | 195 | 'each project. See git archive.') |
| 196 | group.add_option('--submodules', | ||
| 197 | dest='submodules', action='store_true', | ||
| 198 | help='sync any submodules associated with the manifest repo') | ||
| 192 | group.add_option('-g', '--groups', | 199 | group.add_option('-g', '--groups', |
| 193 | dest='groups', default='default', | 200 | dest='groups', default='default', |
| 194 | help='restrict manifest projects to ones with specified ' | 201 | help='restrict manifest projects to ones with specified ' |
| @@ -202,6 +209,9 @@ group.add_option('-p', '--platform', | |||
| 202 | group.add_option('--no-clone-bundle', | 209 | group.add_option('--no-clone-bundle', |
| 203 | dest='no_clone_bundle', action='store_true', | 210 | dest='no_clone_bundle', action='store_true', |
| 204 | help='disable use of /clone.bundle on HTTP/HTTPS') | 211 | help='disable use of /clone.bundle on HTTP/HTTPS') |
| 212 | group.add_option('--no-tags', | ||
| 213 | dest='no_tags', action='store_true', | ||
| 214 | help="don't fetch tags in the manifest") | ||
| 205 | 215 | ||
| 206 | 216 | ||
| 207 | # Tool | 217 | # Tool |
| @@ -347,6 +357,10 @@ def _Init(args, gitc_init=False): | |||
| 347 | dst = os.path.abspath(os.path.join(repodir, S_repo)) | 357 | dst = os.path.abspath(os.path.join(repodir, S_repo)) |
| 348 | _Clone(url, dst, opt.quiet, not opt.no_clone_bundle) | 358 | _Clone(url, dst, opt.quiet, not opt.no_clone_bundle) |
| 349 | 359 | ||
| 360 | if not os.path.isfile('%s/repo' % dst): | ||
| 361 | _print("warning: '%s' does not look like a git-repo repository, is " | ||
| 362 | "REPO_URL set correctly?" % url, file=sys.stderr) | ||
| 363 | |||
| 350 | if can_verify and not opt.no_repo_verify: | 364 | if can_verify and not opt.no_repo_verify: |
| 351 | rev = _Verify(dst, branch, opt.quiet) | 365 | rev = _Verify(dst, branch, opt.quiet) |
| 352 | else: | 366 | else: |
| @@ -853,7 +867,10 @@ def main(orig_args): | |||
| 853 | try: | 867 | try: |
| 854 | _Init(args, gitc_init=(cmd == 'gitc-init')) | 868 | _Init(args, gitc_init=(cmd == 'gitc-init')) |
| 855 | except CloneFailure: | 869 | except CloneFailure: |
| 856 | shutil.rmtree(os.path.join(repodir, S_repo), ignore_errors=True) | 870 | path = os.path.join(repodir, S_repo) |
| 871 | _print("fatal: cloning the git-repo repository failed, will remove " | ||
| 872 | "'%s' " % path, file=sys.stderr) | ||
| 873 | shutil.rmtree(path, ignore_errors=True) | ||
| 857 | sys.exit(1) | 874 | sys.exit(1) |
| 858 | repo_main, rel_repo_dir = _FindRepo() | 875 | repo_main, rel_repo_dir = _FindRepo() |
| 859 | else: | 876 | else: |
| @@ -871,7 +888,10 @@ def main(orig_args): | |||
| 871 | me.extend(orig_args) | 888 | me.extend(orig_args) |
| 872 | me.extend(extra_args) | 889 | me.extend(extra_args) |
| 873 | try: | 890 | try: |
| 874 | os.execv(sys.executable, me) | 891 | if platform.system() == "Windows": |
| 892 | sys.exit(subprocess.call(me)) | ||
| 893 | else: | ||
| 894 | os.execv(sys.executable, me) | ||
| 875 | except OSError as e: | 895 | except OSError as e: |
| 876 | _print("fatal: unable to start %s" % repo_main, file=sys.stderr) | 896 | _print("fatal: unable to start %s" % repo_main, file=sys.stderr) |
| 877 | _print("fatal: %s" % e, file=sys.stderr) | 897 | _print("fatal: %s" % e, file=sys.stderr) |
diff --git a/subcmds/abandon.py b/subcmds/abandon.py index b94ccdd3..be32dc5c 100644 --- a/subcmds/abandon.py +++ b/subcmds/abandon.py | |||
| @@ -16,6 +16,7 @@ | |||
| 16 | from __future__ import print_function | 16 | from __future__ import print_function |
| 17 | import sys | 17 | import sys |
| 18 | from command import Command | 18 | from command import Command |
| 19 | from collections import defaultdict | ||
| 19 | from git_command import git | 20 | from git_command import git |
| 20 | from progress import Progress | 21 | from progress import Progress |
| 21 | 22 | ||
| @@ -23,49 +24,75 @@ class Abandon(Command): | |||
| 23 | common = True | 24 | common = True |
| 24 | helpSummary = "Permanently abandon a development branch" | 25 | helpSummary = "Permanently abandon a development branch" |
| 25 | helpUsage = """ | 26 | helpUsage = """ |
| 26 | %prog <branchname> [<project>...] | 27 | %prog [--all | <branchname>] [<project>...] |
| 27 | 28 | ||
| 28 | This subcommand permanently abandons a development branch by | 29 | This subcommand permanently abandons a development branch by |
| 29 | deleting it (and all its history) from your local repository. | 30 | deleting it (and all its history) from your local repository. |
| 30 | 31 | ||
| 31 | It is equivalent to "git branch -D <branchname>". | 32 | It is equivalent to "git branch -D <branchname>". |
| 32 | """ | 33 | """ |
| 34 | def _Options(self, p): | ||
| 35 | p.add_option('--all', | ||
| 36 | dest='all', action='store_true', | ||
| 37 | help='delete all branches in all projects') | ||
| 33 | 38 | ||
| 34 | def Execute(self, opt, args): | 39 | def Execute(self, opt, args): |
| 35 | if not args: | 40 | if not opt.all and not args: |
| 36 | self.Usage() | 41 | self.Usage() |
| 37 | 42 | ||
| 38 | nb = args[0] | 43 | if not opt.all: |
| 39 | if not git.check_ref_format('heads/%s' % nb): | 44 | nb = args[0] |
| 40 | print("error: '%s' is not a valid name" % nb, file=sys.stderr) | 45 | if not git.check_ref_format('heads/%s' % nb): |
| 41 | sys.exit(1) | 46 | print("error: '%s' is not a valid name" % nb, file=sys.stderr) |
| 47 | sys.exit(1) | ||
| 48 | else: | ||
| 49 | args.insert(0, None) | ||
| 50 | nb = "'All local branches'" | ||
| 42 | 51 | ||
| 43 | nb = args[0] | 52 | err = defaultdict(list) |
| 44 | err = [] | 53 | success = defaultdict(list) |
| 45 | success = [] | ||
| 46 | all_projects = self.GetProjects(args[1:]) | 54 | all_projects = self.GetProjects(args[1:]) |
| 47 | 55 | ||
| 48 | pm = Progress('Abandon %s' % nb, len(all_projects)) | 56 | pm = Progress('Abandon %s' % nb, len(all_projects)) |
| 49 | for project in all_projects: | 57 | for project in all_projects: |
| 50 | pm.update() | 58 | pm.update() |
| 51 | 59 | ||
| 52 | status = project.AbandonBranch(nb) | 60 | if opt.all: |
| 53 | if status is not None: | 61 | branches = project.GetBranches().keys() |
| 54 | if status: | 62 | else: |
| 55 | success.append(project) | 63 | branches = [nb] |
| 56 | else: | 64 | |
| 57 | err.append(project) | 65 | for name in branches: |
| 66 | status = project.AbandonBranch(name) | ||
| 67 | if status is not None: | ||
| 68 | if status: | ||
| 69 | success[name].append(project) | ||
| 70 | else: | ||
| 71 | err[name].append(project) | ||
| 58 | pm.end() | 72 | pm.end() |
| 59 | 73 | ||
| 74 | width = 25 | ||
| 75 | for name in branches: | ||
| 76 | if width < len(name): | ||
| 77 | width = len(name) | ||
| 78 | |||
| 60 | if err: | 79 | if err: |
| 61 | for p in err: | 80 | for br in err.keys(): |
| 62 | print("error: %s/: cannot abandon %s" % (p.relpath, nb), | 81 | err_msg = "error: cannot abandon %s" %br |
| 63 | file=sys.stderr) | 82 | print(err_msg, file=sys.stderr) |
| 83 | for proj in err[br]: | ||
| 84 | print(' '*len(err_msg) + " | %s" % proj.relpath, file=sys.stderr) | ||
| 64 | sys.exit(1) | 85 | sys.exit(1) |
| 65 | elif not success: | 86 | elif not success: |
| 66 | print('error: no project has branch %s' % nb, file=sys.stderr) | 87 | print('error: no project has local branch(es): %s' % nb, |
| 88 | file=sys.stderr) | ||
| 67 | sys.exit(1) | 89 | sys.exit(1) |
| 68 | else: | 90 | else: |
| 69 | print('Abandoned in %d project(s):\n %s' | 91 | print('Abandoned branches:', file=sys.stderr) |
| 70 | % (len(success), '\n '.join(p.relpath for p in success)), | 92 | for br in success.keys(): |
| 71 | file=sys.stderr) | 93 | if len(all_projects) > 1 and len(all_projects) == len(success[br]): |
| 94 | result = "all project" | ||
| 95 | else: | ||
| 96 | result = "%s" % ( | ||
| 97 | ('\n'+' '*width + '| ').join(p.relpath for p in success[br])) | ||
| 98 | print("%s%s| %s\n" % (br,' '*(width-len(br)), result),file=sys.stderr) | ||
diff --git a/subcmds/download.py b/subcmds/download.py index a029462e..e1010aa2 100644 --- a/subcmds/download.py +++ b/subcmds/download.py | |||
| @@ -26,11 +26,12 @@ class Download(Command): | |||
| 26 | common = True | 26 | common = True |
| 27 | helpSummary = "Download and checkout a change" | 27 | helpSummary = "Download and checkout a change" |
| 28 | helpUsage = """ | 28 | helpUsage = """ |
| 29 | %prog {project change[/patchset]}... | 29 | %prog {[project] change[/patchset]}... |
| 30 | """ | 30 | """ |
| 31 | helpDescription = """ | 31 | helpDescription = """ |
| 32 | The '%prog' command downloads a change from the review system and | 32 | The '%prog' command downloads a change from the review system and |
| 33 | makes it available in your project's local working directory. | 33 | makes it available in your project's local working directory. |
| 34 | If no project is specified, the current directory is used as the project. | ||
| 34 | """ | 35 | """ |
| 35 | 36 | ||
| 36 | def _Options(self, p): | 37 | def _Options(self, p): |
| @@ -55,7 +56,7 @@ makes it available in your project's local working directory. | |||
| 55 | m = CHANGE_RE.match(a) | 56 | m = CHANGE_RE.match(a) |
| 56 | if m: | 57 | if m: |
| 57 | if not project: | 58 | if not project: |
| 58 | self.Usage() | 59 | project = self.GetProjects(".")[0] |
| 59 | chg_id = int(m.group(1)) | 60 | chg_id = int(m.group(1)) |
| 60 | if m.group(2): | 61 | if m.group(2): |
| 61 | ps_id = int(m.group(2)) | 62 | ps_id = int(m.group(2)) |
diff --git a/subcmds/forall.py b/subcmds/forall.py index 07ee8d58..52eb5e28 100644 --- a/subcmds/forall.py +++ b/subcmds/forall.py | |||
| @@ -15,17 +15,16 @@ | |||
| 15 | 15 | ||
| 16 | from __future__ import print_function | 16 | from __future__ import print_function |
| 17 | import errno | 17 | import errno |
| 18 | import fcntl | ||
| 19 | import multiprocessing | 18 | import multiprocessing |
| 20 | import re | 19 | import re |
| 21 | import os | 20 | import os |
| 22 | import select | ||
| 23 | import signal | 21 | import signal |
| 24 | import sys | 22 | import sys |
| 25 | import subprocess | 23 | import subprocess |
| 26 | 24 | ||
| 27 | from color import Coloring | 25 | from color import Coloring |
| 28 | from command import Command, MirrorSafeCommand | 26 | from command import Command, MirrorSafeCommand |
| 27 | import platform_utils | ||
| 29 | 28 | ||
| 30 | _CAN_COLOR = [ | 29 | _CAN_COLOR = [ |
| 31 | 'branch', | 30 | 'branch', |
| @@ -105,6 +104,13 @@ annotating tree details. | |||
| 105 | shell positional arguments ($1, $2, .., $#) are set to any arguments | 104 | shell positional arguments ($1, $2, .., $#) are set to any arguments |
| 106 | following <command>. | 105 | following <command>. |
| 107 | 106 | ||
| 107 | Example: to list projects: | ||
| 108 | |||
| 109 | %prog -c 'echo $REPO_PROJECT' | ||
| 110 | |||
| 111 | Notice that $REPO_PROJECT is quoted to ensure it is expanded in | ||
| 112 | the context of running <command> instead of in the calling shell. | ||
| 113 | |||
| 108 | Unless -p is used, stdin, stdout, stderr are inherited from the | 114 | Unless -p is used, stdin, stdout, stderr are inherited from the |
| 109 | terminal and are not redirected. | 115 | terminal and are not redirected. |
| 110 | 116 | ||
| @@ -344,35 +350,25 @@ def DoWork(project, mirror, opt, cmd, shell, cnt, config): | |||
| 344 | if opt.project_header: | 350 | if opt.project_header: |
| 345 | out = ForallColoring(config) | 351 | out = ForallColoring(config) |
| 346 | out.redirect(sys.stdout) | 352 | out.redirect(sys.stdout) |
| 347 | class sfd(object): | ||
| 348 | def __init__(self, fd, dest): | ||
| 349 | self.fd = fd | ||
| 350 | self.dest = dest | ||
| 351 | def fileno(self): | ||
| 352 | return self.fd.fileno() | ||
| 353 | |||
| 354 | empty = True | 353 | empty = True |
| 355 | errbuf = '' | 354 | errbuf = '' |
| 356 | 355 | ||
| 357 | p.stdin.close() | 356 | p.stdin.close() |
| 358 | s_in = [sfd(p.stdout, sys.stdout), | 357 | s_in = platform_utils.FileDescriptorStreams.create() |
| 359 | sfd(p.stderr, sys.stderr)] | 358 | s_in.add(p.stdout, sys.stdout, 'stdout') |
| 360 | 359 | s_in.add(p.stderr, sys.stderr, 'stderr') | |
| 361 | for s in s_in: | ||
| 362 | flags = fcntl.fcntl(s.fd, fcntl.F_GETFL) | ||
| 363 | fcntl.fcntl(s.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK) | ||
| 364 | 360 | ||
| 365 | while s_in: | 361 | while not s_in.is_done: |
| 366 | in_ready, _out_ready, _err_ready = select.select(s_in, [], []) | 362 | in_ready = s_in.select() |
| 367 | for s in in_ready: | 363 | for s in in_ready: |
| 368 | buf = s.fd.read(4096) | 364 | buf = s.read() |
| 369 | if not buf: | 365 | if not buf: |
| 370 | s.fd.close() | 366 | s.close() |
| 371 | s_in.remove(s) | 367 | s_in.remove(s) |
| 372 | continue | 368 | continue |
| 373 | 369 | ||
| 374 | if not opt.verbose: | 370 | if not opt.verbose: |
| 375 | if s.fd != p.stdout: | 371 | if s.std_name == 'stderr': |
| 376 | errbuf += buf | 372 | errbuf += buf |
| 377 | continue | 373 | continue |
| 378 | 374 | ||
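
The forall loop above no longer touches fcntl or select directly; it goes through platform_utils.FileDescriptorStreams, which exists so the same loop can run on Windows, where select() only works on sockets. The POSIX side of such an abstraction could look roughly like the sketch below. This is only an illustration of the interface the loop relies on (create/add/select/is_done, plus per-stream read/close and std_name), not the actual platform_utils code, which is outside this excerpt.

  # POSIX-only sketch of a FileDescriptorStreams-style helper: it tracks a set
  # of pipes, reports which are readable, and tags each with a name so callers
  # can tell stdout from stderr.  Illustrative only; the real implementation
  # (and its Windows counterpart) lives in platform_utils.py.
  import os
  import select

  class _Stream(object):
    def __init__(self, fd, dest, std_name):
      self.fd = fd            # file object from subprocess.Popen
      self.dest = dest        # where the caller wants output forwarded
      self.std_name = std_name

    def fileno(self):
      return self.fd.fileno()

    def read(self):
      # os.read returns whatever is available (or b'' at EOF) without
      # needing O_NONBLOCK on the descriptor.
      return os.read(self.fd.fileno(), 4096)

    def close(self):
      self.fd.close()

  class PosixStreams(object):
    def __init__(self):
      self.streams = []

    def add(self, fd, dest, std_name):
      self.streams.append(_Stream(fd, dest, std_name))

    def remove(self, stream):
      self.streams.remove(stream)

    @property
    def is_done(self):
      return not self.streams

    def select(self):
      ready, _, _ = select.select(self.streams, [], [])
      return ready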
diff --git a/subcmds/gitc_delete.py b/subcmds/gitc_delete.py index 7380c352..54f62f46 100644 --- a/subcmds/gitc_delete.py +++ b/subcmds/gitc_delete.py | |||
| @@ -14,12 +14,10 @@ | |||
| 14 | # limitations under the License. | 14 | # limitations under the License. |
| 15 | 15 | ||
| 16 | from __future__ import print_function | 16 | from __future__ import print_function |
| 17 | import os | ||
| 18 | import shutil | ||
| 19 | import sys | 17 | import sys |
| 20 | 18 | ||
| 21 | from command import Command, GitcClientCommand | 19 | from command import Command, GitcClientCommand |
| 22 | import gitc_utils | 20 | import platform_utils |
| 23 | 21 | ||
| 24 | from pyversion import is_python3 | 22 | from pyversion import is_python3 |
| 25 | if not is_python3(): | 23 | if not is_python3(): |
| @@ -52,4 +50,4 @@ and all locally downloaded sources. | |||
| 52 | if not response == 'yes': | 50 | if not response == 'yes': |
| 53 | print('Response was not "yes"\n Exiting...') | 51 | print('Response was not "yes"\n Exiting...') |
| 54 | sys.exit(1) | 52 | sys.exit(1) |
| 55 | shutil.rmtree(self.gitc_manifest.gitc_client_dir) | 53 | platform_utils.rmtree(self.gitc_manifest.gitc_client_dir) |
diff --git a/subcmds/init.py b/subcmds/init.py index 45d69b79..eeddca06 100644 --- a/subcmds/init.py +++ b/subcmds/init.py | |||
| @@ -17,7 +17,6 @@ from __future__ import print_function | |||
| 17 | import os | 17 | import os |
| 18 | import platform | 18 | import platform |
| 19 | import re | 19 | import re |
| 20 | import shutil | ||
| 21 | import sys | 20 | import sys |
| 22 | 21 | ||
| 23 | from pyversion import is_python3 | 22 | from pyversion import is_python3 |
| @@ -35,6 +34,7 @@ from error import ManifestParseError | |||
| 35 | from project import SyncBuffer | 34 | from project import SyncBuffer |
| 36 | from git_config import GitConfig | 35 | from git_config import GitConfig |
| 37 | from git_command import git_require, MIN_GIT_VERSION | 36 | from git_command import git_require, MIN_GIT_VERSION |
| 37 | import platform_utils | ||
| 38 | 38 | ||
| 39 | class Init(InteractiveCommand, MirrorSafeCommand): | 39 | class Init(InteractiveCommand, MirrorSafeCommand): |
| 40 | common = True | 40 | common = True |
| @@ -91,6 +91,9 @@ to update the working directory files. | |||
| 91 | g.add_option('-b', '--manifest-branch', | 91 | g.add_option('-b', '--manifest-branch', |
| 92 | dest='manifest_branch', | 92 | dest='manifest_branch', |
| 93 | help='manifest branch or revision', metavar='REVISION') | 93 | help='manifest branch or revision', metavar='REVISION') |
| 94 | g.add_option('-c', '--current-branch', | ||
| 95 | dest='current_branch_only', action='store_true', | ||
| 96 | help='fetch only current manifest branch from server') | ||
| 94 | g.add_option('-m', '--manifest-name', | 97 | g.add_option('-m', '--manifest-name', |
| 95 | dest='manifest_name', default='default.xml', | 98 | dest='manifest_name', default='default.xml', |
| 96 | help='initial manifest file', metavar='NAME.xml') | 99 | help='initial manifest file', metavar='NAME.xml') |
| @@ -108,6 +111,9 @@ to update the working directory files. | |||
| 108 | dest='archive', action='store_true', | 111 | dest='archive', action='store_true', |
| 109 | help='checkout an archive instead of a git repository for ' | 112 | help='checkout an archive instead of a git repository for ' |
| 110 | 'each project. See git archive.') | 113 | 'each project. See git archive.') |
| 114 | g.add_option('--submodules', | ||
| 115 | dest='submodules', action='store_true', | ||
| 116 | help='sync any submodules associated with the manifest repo') | ||
| 111 | g.add_option('-g', '--groups', | 117 | g.add_option('-g', '--groups', |
| 112 | dest='groups', default='default', | 118 | dest='groups', default='default', |
| 113 | help='restrict manifest projects to ones with specified ' | 119 | help='restrict manifest projects to ones with specified ' |
| @@ -121,6 +127,9 @@ to update the working directory files. | |||
| 121 | g.add_option('--no-clone-bundle', | 127 | g.add_option('--no-clone-bundle', |
| 122 | dest='no_clone_bundle', action='store_true', | 128 | dest='no_clone_bundle', action='store_true', |
| 123 | help='disable use of /clone.bundle on HTTP/HTTPS') | 129 | help='disable use of /clone.bundle on HTTP/HTTPS') |
| 130 | g.add_option('--no-tags', | ||
| 131 | dest='no_tags', action='store_true', | ||
| 132 | help="don't fetch tags in the manifest") | ||
| 124 | 133 | ||
| 125 | # Tool | 134 | # Tool |
| 126 | g = p.add_option_group('repo Version options') | 135 | g = p.add_option_group('repo Version options') |
| @@ -230,22 +239,27 @@ to update the working directory files. | |||
| 230 | 'in another location.', file=sys.stderr) | 239 | 'in another location.', file=sys.stderr) |
| 231 | sys.exit(1) | 240 | sys.exit(1) |
| 232 | 241 | ||
| 242 | if opt.submodules: | ||
| 243 | m.config.SetString('repo.submodules', 'true') | ||
| 244 | |||
| 233 | if not m.Sync_NetworkHalf(is_new=is_new, quiet=opt.quiet, | 245 | if not m.Sync_NetworkHalf(is_new=is_new, quiet=opt.quiet, |
| 234 | clone_bundle=not opt.no_clone_bundle): | 246 | clone_bundle=not opt.no_clone_bundle, |
| 247 | current_branch_only=opt.current_branch_only, | ||
| 248 | no_tags=opt.no_tags, submodules=opt.submodules): | ||
| 235 | r = m.GetRemote(m.remote.name) | 249 | r = m.GetRemote(m.remote.name) |
| 236 | print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr) | 250 | print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr) |
| 237 | 251 | ||
| 238 | # Better delete the manifest git dir if we created it; otherwise next | 252 | # Better delete the manifest git dir if we created it; otherwise next |
| 239 | # time (when user fixes problems) we won't go through the "is_new" logic. | 253 | # time (when user fixes problems) we won't go through the "is_new" logic. |
| 240 | if is_new: | 254 | if is_new: |
| 241 | shutil.rmtree(m.gitdir) | 255 | platform_utils.rmtree(m.gitdir) |
| 242 | sys.exit(1) | 256 | sys.exit(1) |
| 243 | 257 | ||
| 244 | if opt.manifest_branch: | 258 | if opt.manifest_branch: |
| 245 | m.MetaBranchSwitch() | 259 | m.MetaBranchSwitch(submodules=opt.submodules) |
| 246 | 260 | ||
| 247 | syncbuf = SyncBuffer(m.config) | 261 | syncbuf = SyncBuffer(m.config) |
| 248 | m.Sync_LocalHalf(syncbuf) | 262 | m.Sync_LocalHalf(syncbuf, submodules=opt.submodules) |
| 249 | syncbuf.Finish() | 263 | syncbuf.Finish() |
| 250 | 264 | ||
| 251 | if is_new or m.CurrentBranch is None: | 265 | if is_new or m.CurrentBranch is None: |
| @@ -387,7 +401,7 @@ to update the working directory files. | |||
| 387 | git_require(MIN_GIT_VERSION, fail=True) | 401 | git_require(MIN_GIT_VERSION, fail=True) |
| 388 | 402 | ||
| 389 | if opt.reference: | 403 | if opt.reference: |
| 390 | opt.reference = os.path.expanduser(opt.reference) | 404 | opt.reference = os.path.abspath(os.path.expanduser(opt.reference)) |
| 391 | 405 | ||
| 392 | # Check this here, else manifest will be tagged "not new" and init won't be | 406 | # Check this here, else manifest will be tagged "not new" and init won't be |
| 393 | # possible anymore without removing the .repo/manifests directory. | 407 | # possible anymore without removing the .repo/manifests directory. |
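
One small but easy-to-miss change above: --reference is now passed through os.path.abspath() in addition to expanduser(), so a relative or ~-prefixed mirror path gets pinned down at init time instead of being re-interpreted later from whatever directory repo happens to run in. For example (the path is hypothetical):

  # Normalizing a user-supplied --reference path: '~' is expanded first, then
  # the result is anchored to the current working directory at init time.
  import os

  reference = '~/mirrors/aosp'
  reference = os.path.abspath(os.path.expanduser(reference))
  print(reference)  # e.g. /home/builder/mirrors/aosp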
diff --git a/subcmds/stage.py b/subcmds/stage.py index 28849764..9d354268 100644 --- a/subcmds/stage.py +++ b/subcmds/stage.py | |||
| @@ -60,8 +60,8 @@ The '%prog' command stages files to prepare the next commit. | |||
| 60 | out.nl() | 60 | out.nl() |
| 61 | 61 | ||
| 62 | for i in range(len(all_projects)): | 62 | for i in range(len(all_projects)): |
| 63 | p = all_projects[i] | 63 | project = all_projects[i] |
| 64 | out.write('%3d: %s', i + 1, p.relpath + '/') | 64 | out.write('%3d: %s', i + 1, project.relpath + '/') |
| 65 | out.nl() | 65 | out.nl() |
| 66 | out.nl() | 66 | out.nl() |
| 67 | 67 | ||
diff --git a/subcmds/start.py b/subcmds/start.py index 290b6897..c3ec303e 100644 --- a/subcmds/start.py +++ b/subcmds/start.py | |||
| @@ -18,7 +18,7 @@ import os | |||
| 18 | import sys | 18 | import sys |
| 19 | 19 | ||
| 20 | from command import Command | 20 | from command import Command |
| 21 | from git_config import IsId | 21 | from git_config import IsImmutable |
| 22 | from git_command import git | 22 | from git_command import git |
| 23 | import gitc_utils | 23 | import gitc_utils |
| 24 | from progress import Progress | 24 | from progress import Progress |
| @@ -96,11 +96,11 @@ revision specified in the manifest. | |||
| 96 | project.Sync_LocalHalf(sync_buf) | 96 | project.Sync_LocalHalf(sync_buf) |
| 97 | project.revisionId = gitc_project.old_revision | 97 | project.revisionId = gitc_project.old_revision |
| 98 | 98 | ||
| 99 | # If the current revision is a specific SHA1 then we can't push back | 99 | # If the current revision is immutable, such as a SHA1, a tag or |
| 100 | # to it; so substitute with dest_branch if defined, or with manifest | 100 | # a change, then we can't push back to it. Substitute with |
| 101 | # default revision instead. | 101 | # dest_branch if defined, or with the manifest default revision instead. |
| 102 | branch_merge = '' | 102 | branch_merge = '' |
| 103 | if IsId(project.revisionExpr): | 103 | if IsImmutable(project.revisionExpr): |
| 104 | if project.dest_branch: | 104 | if project.dest_branch: |
| 105 | branch_merge = project.dest_branch | 105 | branch_merge = project.dest_branch |
| 106 | else: | 106 | else: |
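
start now consults IsImmutable() rather than IsId(), so tags and change refs, not just raw SHA-1s, trigger the dest_branch/manifest-default fallback. git_config.py is not part of this excerpt; a rough approximation of the kind of check involved, for illustration only:

  # Rough stand-in for an "immutable revision" test: a full SHA-1, a tag ref,
  # or a Gerrit change ref cannot serve as an upload destination.  This is an
  # illustration, not the real IsImmutable() from git_config.py.
  import re

  _SHA1_RE = re.compile(r'^[0-9a-f]{40}$')

  def looks_immutable(rev):
    if _SHA1_RE.match(rev):
      return True
    return rev.startswith(('refs/tags/', 'refs/changes/'))

  print(looks_immutable('refs/tags/android-8.0.0_r1'))  # True
  print(looks_immutable('master'))                      # False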
diff --git a/subcmds/status.py b/subcmds/status.py index 38c229b1..60e26ff4 100644 --- a/subcmds/status.py +++ b/subcmds/status.py | |||
| @@ -89,8 +89,10 @@ the following meanings: | |||
| 89 | p.add_option('-o', '--orphans', | 89 | p.add_option('-o', '--orphans', |
| 90 | dest='orphans', action='store_true', | 90 | dest='orphans', action='store_true', |
| 91 | help="include objects in working directory outside of repo projects") | 91 | help="include objects in working directory outside of repo projects") |
| 92 | p.add_option('-q', '--quiet', action='store_true', | ||
| 93 | help="only print the name of modified projects") | ||
| 92 | 94 | ||
| 93 | def _StatusHelper(self, project, clean_counter, sem): | 95 | def _StatusHelper(self, project, clean_counter, sem, quiet): |
| 94 | """Obtains the status for a specific project. | 96 | """Obtains the status for a specific project. |
| 95 | 97 | ||
| 96 | Obtains the status for a project, redirecting the output to | 98 | Obtains the status for a project, redirecting the output to |
| @@ -104,7 +106,7 @@ the following meanings: | |||
| 104 | output: Where to output the status. | 106 | output: Where to output the status. |
| 105 | """ | 107 | """ |
| 106 | try: | 108 | try: |
| 107 | state = project.PrintWorkTreeStatus() | 109 | state = project.PrintWorkTreeStatus(quiet=quiet) |
| 108 | if state == 'CLEAN': | 110 | if state == 'CLEAN': |
| 109 | next(clean_counter) | 111 | next(clean_counter) |
| 110 | finally: | 112 | finally: |
| @@ -132,7 +134,7 @@ the following meanings: | |||
| 132 | 134 | ||
| 133 | if opt.jobs == 1: | 135 | if opt.jobs == 1: |
| 134 | for project in all_projects: | 136 | for project in all_projects: |
| 135 | state = project.PrintWorkTreeStatus() | 137 | state = project.PrintWorkTreeStatus(quiet=opt.quiet) |
| 136 | if state == 'CLEAN': | 138 | if state == 'CLEAN': |
| 137 | next(counter) | 139 | next(counter) |
| 138 | else: | 140 | else: |
| @@ -142,13 +144,13 @@ the following meanings: | |||
| 142 | sem.acquire() | 144 | sem.acquire() |
| 143 | 145 | ||
| 144 | t = _threading.Thread(target=self._StatusHelper, | 146 | t = _threading.Thread(target=self._StatusHelper, |
| 145 | args=(project, counter, sem)) | 147 | args=(project, counter, sem, opt.quiet)) |
| 146 | threads.append(t) | 148 | threads.append(t) |
| 147 | t.daemon = True | 149 | t.daemon = True |
| 148 | t.start() | 150 | t.start() |
| 149 | for t in threads: | 151 | for t in threads: |
| 150 | t.join() | 152 | t.join() |
| 151 | if len(all_projects) == next(counter): | 153 | if not opt.quiet and len(all_projects) == next(counter): |
| 152 | print('nothing to commit (working directory clean)') | 154 | print('nothing to commit (working directory clean)') |
| 153 | 155 | ||
| 154 | if opt.orphans: | 156 | if opt.orphans: |
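
The status changes thread one worker per project but cap concurrency with a semaphore sized to --jobs, the same shape as the loop above; the new quiet flag is simply threaded through to each worker. The concurrency pattern on its own, with a placeholder worker:

  # Semaphore-bounded fan-out as used by `repo status -j N`: acquire before
  # starting a worker, release when it finishes, join everything at the end.
  # The worker body is a placeholder for project.PrintWorkTreeStatus().
  import threading

  def check_one(path, sem, results):
    try:
      results[path] = 'CLEAN'  # stand-in for the real per-project status call
    finally:
      sem.release()

  def run_all(paths, jobs=4):
    sem = threading.Semaphore(jobs)
    results = {}
    threads = []
    for path in paths:
      sem.acquire()            # blocks once `jobs` workers are in flight
      t = threading.Thread(target=check_one, args=(path, sem, results))
      t.daemon = True
      threads.append(t)
      t.start()
    for t in threads:
      t.join()
    return results

  print(run_all(['build', 'art', 'bionic'], jobs=2))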
diff --git a/subcmds/sync.py b/subcmds/sync.py index 7ba9ebfc..cda47fdd 100644 --- a/subcmds/sync.py +++ b/subcmds/sync.py | |||
| @@ -19,7 +19,6 @@ import netrc | |||
| 19 | from optparse import SUPPRESS_HELP | 19 | from optparse import SUPPRESS_HELP |
| 20 | import os | 20 | import os |
| 21 | import re | 21 | import re |
| 22 | import shutil | ||
| 23 | import socket | 22 | import socket |
| 24 | import subprocess | 23 | import subprocess |
| 25 | import sys | 24 | import sys |
| @@ -64,6 +63,7 @@ try: | |||
| 64 | except ImportError: | 63 | except ImportError: |
| 65 | multiprocessing = None | 64 | multiprocessing = None |
| 66 | 65 | ||
| 66 | import event_log | ||
| 67 | from git_command import GIT, git_require | 67 | from git_command import GIT, git_require |
| 68 | from git_config import GetUrlCookieFile | 68 | from git_config import GetUrlCookieFile |
| 69 | from git_refs import R_HEADS, HEAD | 69 | from git_refs import R_HEADS, HEAD |
| @@ -72,6 +72,7 @@ from project import Project | |||
| 72 | from project import RemoteSpec | 72 | from project import RemoteSpec |
| 73 | from command import Command, MirrorSafeCommand | 73 | from command import Command, MirrorSafeCommand |
| 74 | from error import RepoChangedException, GitError, ManifestParseError | 74 | from error import RepoChangedException, GitError, ManifestParseError |
| 75 | import platform_utils | ||
| 75 | from project import SyncBuffer | 76 | from project import SyncBuffer |
| 76 | from progress import Progress | 77 | from progress import Progress |
| 77 | from wrapper import Wrapper | 78 | from wrapper import Wrapper |
| @@ -255,7 +256,7 @@ later is required to fix a server side protocol bug. | |||
| 255 | dest='repo_upgraded', action='store_true', | 256 | dest='repo_upgraded', action='store_true', |
| 256 | help=SUPPRESS_HELP) | 257 | help=SUPPRESS_HELP) |
| 257 | 258 | ||
| 258 | def _FetchProjectList(self, opt, projects, *args, **kwargs): | 259 | def _FetchProjectList(self, opt, projects, sem, *args, **kwargs): |
| 259 | """Main function of the fetch threads when jobs are > 1. | 260 | """Main function of the fetch threads when jobs are > 1. |
| 260 | 261 | ||
| 261 | Delegates most of the work to _FetchHelper. | 262 | Delegates most of the work to _FetchHelper. |
| @@ -263,15 +264,20 @@ later is required to fix a server side protocol bug. | |||
| 263 | Args: | 264 | Args: |
| 264 | opt: Program options returned from optparse. See _Options(). | 265 | opt: Program options returned from optparse. See _Options(). |
| 265 | projects: Projects to fetch. | 266 | projects: Projects to fetch. |
| 267 | sem: We'll release() this semaphore when we exit so that another thread | ||
| 268 | can be started up. | ||
| 266 | *args, **kwargs: Remaining arguments to pass to _FetchHelper. See the | 269 | *args, **kwargs: Remaining arguments to pass to _FetchHelper. See the |
| 267 | _FetchHelper docstring for details. | 270 | _FetchHelper docstring for details. |
| 268 | """ | 271 | """ |
| 269 | for project in projects: | 272 | try: |
| 270 | success = self._FetchHelper(opt, project, *args, **kwargs) | 273 | for project in projects: |
| 271 | if not success and not opt.force_broken: | 274 | success = self._FetchHelper(opt, project, *args, **kwargs) |
| 272 | break | 275 | if not success and not opt.force_broken: |
| 276 | break | ||
| 277 | finally: | ||
| 278 | sem.release() | ||
| 273 | 279 | ||
| 274 | def _FetchHelper(self, opt, project, lock, fetched, pm, sem, err_event): | 280 | def _FetchHelper(self, opt, project, lock, fetched, pm, err_event): |
| 275 | """Fetch git objects for a single project. | 281 | """Fetch git objects for a single project. |
| 276 | 282 | ||
| 277 | Args: | 283 | Args: |
| @@ -283,8 +289,6 @@ later is required to fix a server side protocol bug. | |||
| 283 | (with our lock held). | 289 | (with our lock held). |
| 284 | pm: Instance of a Project object. We will call pm.update() (with our | 290 | pm: Instance of a Project object. We will call pm.update() (with our |
| 285 | lock held). | 291 | lock held). |
| 286 | sem: We'll release() this semaphore when we exit so that another thread | ||
| 287 | can be started up. | ||
| 288 | err_event: We'll set this event in the case of an error (after printing | 292 | err_event: We'll set this event in the case of an error (after printing |
| 289 | out info about the error). | 293 | out info about the error). |
| 290 | 294 | ||
| @@ -301,9 +305,10 @@ later is required to fix a server side protocol bug. | |||
| 301 | # - We always set err_event in the case of an exception. | 305 | # - We always set err_event in the case of an exception. |
| 302 | # - We always make sure we call sem.release(). | 306 | # - We always make sure we call sem.release(). |
| 303 | # - We always make sure we unlock the lock if we locked it. | 307 | # - We always make sure we unlock the lock if we locked it. |
| 308 | start = time.time() | ||
| 309 | success = False | ||
| 304 | try: | 310 | try: |
| 305 | try: | 311 | try: |
| 306 | start = time.time() | ||
| 307 | success = project.Sync_NetworkHalf( | 312 | success = project.Sync_NetworkHalf( |
| 308 | quiet=opt.quiet, | 313 | quiet=opt.quiet, |
| 309 | current_branch_only=opt.current_branch_only, | 314 | current_branch_only=opt.current_branch_only, |
| @@ -321,7 +326,9 @@ later is required to fix a server side protocol bug. | |||
| 321 | 326 | ||
| 322 | if not success: | 327 | if not success: |
| 323 | err_event.set() | 328 | err_event.set() |
| 324 | print('error: Cannot fetch %s' % project.name, file=sys.stderr) | 329 | print('error: Cannot fetch %s from %s' |
| 330 | % (project.name, project.remote.url), | ||
| 331 | file=sys.stderr) | ||
| 325 | if opt.force_broken: | 332 | if opt.force_broken: |
| 326 | print('warn: --force-broken, continuing to sync', | 333 | print('warn: --force-broken, continuing to sync', |
| 327 | file=sys.stderr) | 334 | file=sys.stderr) |
| @@ -340,14 +347,18 @@ later is required to fix a server side protocol bug. | |||
| 340 | finally: | 347 | finally: |
| 341 | if did_lock: | 348 | if did_lock: |
| 342 | lock.release() | 349 | lock.release() |
| 343 | sem.release() | 350 | finish = time.time() |
| 351 | self.event_log.AddSync(project, event_log.TASK_SYNC_NETWORK, | ||
| 352 | start, finish, success) | ||
| 344 | 353 | ||
| 345 | return success | 354 | return success |
| 346 | 355 | ||
| 347 | def _Fetch(self, projects, opt): | 356 | def _Fetch(self, projects, opt): |
| 348 | fetched = set() | 357 | fetched = set() |
| 349 | lock = _threading.Lock() | 358 | lock = _threading.Lock() |
| 350 | pm = Progress('Fetching projects', len(projects)) | 359 | pm = Progress('Fetching projects', len(projects), |
| 360 | print_newline=not(opt.quiet), | ||
| 361 | always_print_percentage=opt.quiet) | ||
| 351 | 362 | ||
| 352 | objdir_project_map = dict() | 363 | objdir_project_map = dict() |
| 353 | for project in projects: | 364 | for project in projects: |
| @@ -365,10 +376,10 @@ later is required to fix a server side protocol bug. | |||
| 365 | sem.acquire() | 376 | sem.acquire() |
| 366 | kwargs = dict(opt=opt, | 377 | kwargs = dict(opt=opt, |
| 367 | projects=project_list, | 378 | projects=project_list, |
| 379 | sem=sem, | ||
| 368 | lock=lock, | 380 | lock=lock, |
| 369 | fetched=fetched, | 381 | fetched=fetched, |
| 370 | pm=pm, | 382 | pm=pm, |
| 371 | sem=sem, | ||
| 372 | err_event=err_event) | 383 | err_event=err_event) |
| 373 | if self.jobs > 1: | 384 | if self.jobs > 1: |
| 374 | t = _threading.Thread(target = self._FetchProjectList, | 385 | t = _threading.Thread(target = self._FetchProjectList, |
| @@ -384,7 +395,7 @@ later is required to fix a server side protocol bug. | |||
| 384 | t.join() | 395 | t.join() |
| 385 | 396 | ||
| 386 | # If we saw an error, exit with code 1 so that other scripts can check. | 397 | # If we saw an error, exit with code 1 so that other scripts can check. |
| 387 | if err_event.isSet(): | 398 | if err_event.isSet() and not opt.force_broken: |
| 388 | print('\nerror: Exited sync due to fetch errors', file=sys.stderr) | 399 | print('\nerror: Exited sync due to fetch errors', file=sys.stderr) |
| 389 | sys.exit(1) | 400 | sys.exit(1) |
| 390 | 401 | ||
| @@ -464,7 +475,7 @@ later is required to fix a server side protocol bug. | |||
| 464 | # working git repository around. There shouldn't be any git projects here, | 475 | # working git repository around. There shouldn't be any git projects here, |
| 465 | # so rmtree works. | 476 | # so rmtree works. |
| 466 | try: | 477 | try: |
| 467 | shutil.rmtree(os.path.join(path, '.git')) | 478 | platform_utils.rmtree(os.path.join(path, '.git')) |
| 468 | except OSError: | 479 | except OSError: |
| 469 | print('Failed to remove %s' % os.path.join(path, '.git'), file=sys.stderr) | 480 | print('Failed to remove %s' % os.path.join(path, '.git'), file=sys.stderr) |
| 470 | print('error: Failed to delete obsolete path %s' % path, file=sys.stderr) | 481 | print('error: Failed to delete obsolete path %s' % path, file=sys.stderr) |
| @@ -478,7 +489,7 @@ later is required to fix a server side protocol bug. | |||
| 478 | for root, dirs, files in os.walk(path): | 489 | for root, dirs, files in os.walk(path): |
| 479 | for f in files: | 490 | for f in files: |
| 480 | try: | 491 | try: |
| 481 | os.remove(os.path.join(root, f)) | 492 | platform_utils.remove(os.path.join(root, f)) |
| 482 | except OSError: | 493 | except OSError: |
| 483 | print('Failed to remove %s' % os.path.join(root, f), file=sys.stderr) | 494 | print('Failed to remove %s' % os.path.join(root, f), file=sys.stderr) |
| 484 | failed = True | 495 | failed = True |
| @@ -487,9 +498,9 @@ later is required to fix a server side protocol bug. | |||
| 487 | dirs_to_remove += [os.path.join(root, d) for d in dirs | 498 | dirs_to_remove += [os.path.join(root, d) for d in dirs |
| 488 | if os.path.join(root, d) not in dirs_to_remove] | 499 | if os.path.join(root, d) not in dirs_to_remove] |
| 489 | for d in reversed(dirs_to_remove): | 500 | for d in reversed(dirs_to_remove): |
| 490 | if os.path.islink(d): | 501 | if platform_utils.islink(d): |
| 491 | try: | 502 | try: |
| 492 | os.remove(d) | 503 | platform_utils.remove(d) |
| 493 | except OSError: | 504 | except OSError: |
| 494 | print('Failed to remove %s' % os.path.join(root, d), file=sys.stderr) | 505 | print('Failed to remove %s' % os.path.join(root, d), file=sys.stderr) |
| 495 | failed = True | 506 | failed = True |
| @@ -701,7 +712,7 @@ later is required to fix a server side protocol bug. | |||
| 701 | else: # Not smart sync or smart tag mode | 712 | else: # Not smart sync or smart tag mode |
| 702 | if os.path.isfile(smart_sync_manifest_path): | 713 | if os.path.isfile(smart_sync_manifest_path): |
| 703 | try: | 714 | try: |
| 704 | os.remove(smart_sync_manifest_path) | 715 | platform_utils.remove(smart_sync_manifest_path) |
| 705 | except OSError as e: | 716 | except OSError as e: |
| 706 | print('error: failed to remove existing smart sync override manifest: %s' % | 717 | print('error: failed to remove existing smart sync override manifest: %s' % |
| 707 | e, file=sys.stderr) | 718 | e, file=sys.stderr) |
| @@ -716,15 +727,24 @@ later is required to fix a server side protocol bug. | |||
| 716 | _PostRepoUpgrade(self.manifest, quiet=opt.quiet) | 727 | _PostRepoUpgrade(self.manifest, quiet=opt.quiet) |
| 717 | 728 | ||
| 718 | if not opt.local_only: | 729 | if not opt.local_only: |
| 719 | mp.Sync_NetworkHalf(quiet=opt.quiet, | 730 | start = time.time() |
| 720 | current_branch_only=opt.current_branch_only, | 731 | success = mp.Sync_NetworkHalf(quiet=opt.quiet, |
| 721 | no_tags=opt.no_tags, | 732 | current_branch_only=opt.current_branch_only, |
| 722 | optimized_fetch=opt.optimized_fetch) | 733 | no_tags=opt.no_tags, |
| 734 | optimized_fetch=opt.optimized_fetch, | ||
| 735 | submodules=self.manifest.HasSubmodules) | ||
| 736 | finish = time.time() | ||
| 737 | self.event_log.AddSync(mp, event_log.TASK_SYNC_NETWORK, | ||
| 738 | start, finish, success) | ||
| 723 | 739 | ||
| 724 | if mp.HasChanges: | 740 | if mp.HasChanges: |
| 725 | syncbuf = SyncBuffer(mp.config) | 741 | syncbuf = SyncBuffer(mp.config) |
| 726 | mp.Sync_LocalHalf(syncbuf) | 742 | start = time.time() |
| 727 | if not syncbuf.Finish(): | 743 | mp.Sync_LocalHalf(syncbuf, submodules=self.manifest.HasSubmodules) |
| 744 | clean = syncbuf.Finish() | ||
| 745 | self.event_log.AddSync(mp, event_log.TASK_SYNC_LOCAL, | ||
| 746 | start, time.time(), clean) | ||
| 747 | if not clean: | ||
| 728 | sys.exit(1) | 748 | sys.exit(1) |
| 729 | self._ReloadManifest(manifest_name) | 749 | self._ReloadManifest(manifest_name) |
| 730 | if opt.jobs is None: | 750 | if opt.jobs is None: |
| @@ -761,8 +781,8 @@ later is required to fix a server side protocol bug. | |||
| 761 | # generate a new args list to represent the opened projects. | 781 | # generate a new args list to represent the opened projects. |
| 762 | # TODO: make this more reliable -- if there's a project name/path overlap, | 782 | # TODO: make this more reliable -- if there's a project name/path overlap, |
| 763 | # this may choose the wrong project. | 783 | # this may choose the wrong project. |
| 764 | args = [os.path.relpath(self.manifest.paths[p].worktree, os.getcwd()) | 784 | args = [os.path.relpath(self.manifest.paths[path].worktree, os.getcwd()) |
| 765 | for p in opened_projects] | 785 | for path in opened_projects] |
| 766 | if not args: | 786 | if not args: |
| 767 | return | 787 | return |
| 768 | all_projects = self.GetProjects(args, | 788 | all_projects = self.GetProjects(args, |
| @@ -818,7 +838,10 @@ later is required to fix a server side protocol bug. | |||
| 818 | for project in all_projects: | 838 | for project in all_projects: |
| 819 | pm.update() | 839 | pm.update() |
| 820 | if project.worktree: | 840 | if project.worktree: |
| 841 | start = time.time() | ||
| 821 | project.Sync_LocalHalf(syncbuf, force_sync=opt.force_sync) | 842 | project.Sync_LocalHalf(syncbuf, force_sync=opt.force_sync) |
| 843 | self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL, | ||
| 844 | start, time.time(), syncbuf.Recently()) | ||
| 822 | pm.end() | 845 | pm.end() |
| 823 | print(file=sys.stderr) | 846 | print(file=sys.stderr) |
| 824 | if not syncbuf.Finish(): | 847 | if not syncbuf.Finish(): |
| @@ -902,6 +925,7 @@ def _VerifyTag(project): | |||
| 902 | return False | 925 | return False |
| 903 | return True | 926 | return True |
| 904 | 927 | ||
| 928 | |||
| 905 | class _FetchTimes(object): | 929 | class _FetchTimes(object): |
| 906 | _ALPHA = 0.5 | 930 | _ALPHA = 0.5 |
| 907 | 931 | ||
| @@ -932,7 +956,7 @@ class _FetchTimes(object): | |||
| 932 | f.close() | 956 | f.close() |
| 933 | except (IOError, ValueError): | 957 | except (IOError, ValueError): |
| 934 | try: | 958 | try: |
| 935 | os.remove(self._path) | 959 | platform_utils.remove(self._path) |
| 936 | except OSError: | 960 | except OSError: |
| 937 | pass | 961 | pass |
| 938 | self._times = {} | 962 | self._times = {} |
| @@ -956,7 +980,7 @@ class _FetchTimes(object): | |||
| 956 | f.close() | 980 | f.close() |
| 957 | except (IOError, TypeError): | 981 | except (IOError, TypeError): |
| 958 | try: | 982 | try: |
| 959 | os.remove(self._path) | 983 | platform_utils.remove(self._path) |
| 960 | except OSError: | 984 | except OSError: |
| 961 | pass | 985 | pass |
| 962 | 986 | ||
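
Several hunks above bracket a unit of work with time.time() and hand (start, finish, success) to self.event_log.AddSync(). Only that call signature is visible here; a stand-in recorder is enough to show the bookkeeping pattern:

  # Timing a task and recording start/finish/success, as the sync changes do.
  # EventRecorder is a stand-in; the real event_log module is not shown in
  # this excerpt.
  import time

  class EventRecorder(object):
    def __init__(self):
      self.entries = []

    def add(self, name, task, start, finish, success):
      self.entries.append(dict(name=name, task=task, start=start,
                               finish=finish, success=success))

  def timed(recorder, name, task, fn, *args, **kwargs):
    start = time.time()
    success = False
    try:
      success = fn(*args, **kwargs)
      return success
    finally:
      recorder.add(name, task, start, time.time(), success)

  log = EventRecorder()
  timed(log, 'manifests', 'sync-network', lambda: True)
  print(log.entries)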
diff --git a/subcmds/upload.py b/subcmds/upload.py index 1172dadc..77eaf81a 100644 --- a/subcmds/upload.py +++ b/subcmds/upload.py | |||
| @@ -154,6 +154,16 @@ Gerrit Code Review: http://code.google.com/p/gerrit/ | |||
| 154 | p.add_option('-d', '--draft', | 154 | p.add_option('-d', '--draft', |
| 155 | action='store_true', dest='draft', default=False, | 155 | action='store_true', dest='draft', default=False, |
| 156 | help='If specified, upload as a draft.') | 156 | help='If specified, upload as a draft.') |
| 157 | p.add_option('-p', '--private', | ||
| 158 | action='store_true', dest='private', default=False, | ||
| 159 | help='If specified, upload as a private change.') | ||
| 160 | p.add_option('-w', '--wip', | ||
| 161 | action='store_true', dest='wip', default=False, | ||
| 162 | help='If specified, upload as a work-in-progress change.') | ||
| 163 | p.add_option('-o', '--push-option', | ||
| 164 | type='string', action='append', dest='push_options', | ||
| 165 | default=[], | ||
| 166 | help='Additional push options to transmit') | ||
| 157 | p.add_option('-D', '--destination', '--dest', | 167 | p.add_option('-D', '--destination', '--dest', |
| 158 | type='string', action='store', dest='dest_branch', | 168 | type='string', action='store', dest='dest_branch', |
| 159 | metavar='BRANCH', | 169 | metavar='BRANCH', |
| @@ -175,6 +185,9 @@ Gerrit Code Review: http://code.google.com/p/gerrit/ | |||
| 175 | # Never run upload hooks, but upload anyway (AKA bypass hooks). | 185 | # Never run upload hooks, but upload anyway (AKA bypass hooks). |
| 176 | # - no-verify=True, verify=True: | 186 | # - no-verify=True, verify=True: |
| 177 | # Invalid | 187 | # Invalid |
| 188 | p.add_option('--no-cert-checks', | ||
| 189 | dest='validate_certs', action='store_false', default=True, | ||
| 190 | help='Disable verifying ssl certs (unsafe).') | ||
| 178 | p.add_option('--no-verify', | 191 | p.add_option('--no-verify', |
| 179 | dest='bypass_hooks', action='store_true', | 192 | dest='bypass_hooks', action='store_true', |
| 180 | help='Do not run the upload hook.') | 193 | help='Do not run the upload hook.') |
| @@ -198,7 +211,8 @@ Gerrit Code Review: http://code.google.com/p/gerrit/ | |||
| 198 | commit_list = branch.commits | 211 | commit_list = branch.commits |
| 199 | 212 | ||
| 200 | destination = opt.dest_branch or project.dest_branch or project.revisionExpr | 213 | destination = opt.dest_branch or project.dest_branch or project.revisionExpr |
| 201 | print('Upload project %s/ to remote branch %s:' % (project.relpath, destination)) | 214 | print('Upload project %s/ to remote branch %s%s:' % |
| 215 | (project.relpath, destination, ' (draft)' if opt.draft else '')) | ||
| 202 | print(' branch %s (%2d commit%s, %s):' % ( | 216 | print(' branch %s (%2d commit%s, %s):' % ( |
| 203 | name, | 217 | name, |
| 204 | len(commit_list), | 218 | len(commit_list), |
| @@ -377,7 +391,15 @@ Gerrit Code Review: http://code.google.com/p/gerrit/ | |||
| 377 | branch.uploaded = False | 391 | branch.uploaded = False |
| 378 | continue | 392 | continue |
| 379 | 393 | ||
| 380 | branch.UploadForReview(people, auto_topic=opt.auto_topic, draft=opt.draft, dest_branch=destination) | 394 | branch.UploadForReview(people, |
| 395 | auto_topic=opt.auto_topic, | ||
| 396 | draft=opt.draft, | ||
| 397 | private=opt.private, | ||
| 398 | wip=opt.wip, | ||
| 399 | dest_branch=destination, | ||
| 400 | validate_certs=opt.validate_certs, | ||
| 401 | push_options=opt.push_options) | ||
| 402 | |||
| 381 | branch.uploaded = True | 403 | branch.uploaded = True |
| 382 | except UploadError as e: | 404 | except UploadError as e: |
| 383 | branch.error = e | 405 | branch.error = e |
| @@ -463,8 +485,8 @@ Gerrit Code Review: http://code.google.com/p/gerrit/ | |||
| 463 | self.manifest.topdir, | 485 | self.manifest.topdir, |
| 464 | self.manifest.manifestProject.GetRemote('origin').url, | 486 | self.manifest.manifestProject.GetRemote('origin').url, |
| 465 | abort_if_user_denies=True) | 487 | abort_if_user_denies=True) |
| 466 | pending_proj_names = [project.name for (project, avail) in pending] | 488 | pending_proj_names = [project.name for (project, available) in pending] |
| 467 | pending_worktrees = [project.worktree for (project, avail) in pending] | 489 | pending_worktrees = [project.worktree for (project, available) in pending] |
| 468 | try: | 490 | try: |
| 469 | hook.Run(opt.allow_all_hooks, project_list=pending_proj_names, | 491 | hook.Run(opt.allow_all_hooks, project_list=pending_proj_names, |
| 470 | worktree_list=pending_worktrees) | 492 | worktree_list=pending_worktrees) |
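
The new --push-option flag accumulates values that UploadForReview forwards to the underlying git push; git itself transmits them with -o/--push-option (git 2.10+), and Gerrit interprets them server-side. One plausible way such a list turns into push arguments, with made-up option values (the exact command repo builds lives in project.py, outside this excerpt):

  # Turning accumulated --push-option values into `git push` arguments via
  # git's -o/--push-option flag.  Remote, branch and options are illustrative.
  def push_command(remote, dest_branch, push_options=None):
    cmd = ['git', 'push']
    for opt in (push_options or []):
      cmd += ['-o', opt]
    cmd += [remote, 'HEAD:refs/for/%s' % dest_branch]
    return cmd

  print(push_command('origin', 'master',
                     ['reviewer=dev@example.com', 'topic=launcher']))
  # ['git', 'push', '-o', 'reviewer=dev@example.com', '-o', 'topic=launcher',
  #  'origin', 'HEAD:refs/for/master']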
