summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.flake83
-rw-r--r--color.py314
-rw-r--r--command.py831
-rw-r--r--editor.py184
-rw-r--r--error.py130
-rw-r--r--event_log.py302
-rw-r--r--fetch.py46
-rw-r--r--git_command.py561
-rw-r--r--git_config.py1464
-rw-r--r--git_refs.py283
-rw-r--r--git_superproject.py880
-rw-r--r--git_trace2_event_log.py595
-rw-r--r--gitc_utils.py231
-rw-r--r--hooks.py988
-rwxr-xr-xmain.py1268
-rw-r--r--manifest_xml.py4107
-rw-r--r--pager.py159
-rw-r--r--platform_utils.py388
-rw-r--r--platform_utils_win32.py245
-rw-r--r--progress.py223
-rw-r--r--project.py7874
-rwxr-xr-xrelease/sign-launcher.py178
-rwxr-xr-xrelease/sign-tag.py171
-rw-r--r--release/update_manpages.py186
-rw-r--r--release/util.py78
-rwxr-xr-xrepo18
-rw-r--r--repo_trace.py184
-rwxr-xr-xrun_tests24
-rw-r--r--run_tests.vpython345
-rwxr-xr-xsetup.py48
-rw-r--r--ssh.py474
-rw-r--r--subcmds/__init__.py50
-rw-r--r--subcmds/abandon.py167
-rw-r--r--subcmds/branches.py287
-rw-r--r--subcmds/checkout.py83
-rw-r--r--subcmds/cherry_pick.py177
-rw-r--r--subcmds/diff.py85
-rw-r--r--subcmds/diffmanifests.py374
-rw-r--r--subcmds/download.py304
-rw-r--r--subcmds/forall.py516
-rw-r--r--subcmds/gitc_delete.py42
-rw-r--r--subcmds/gitc_init.py69
-rw-r--r--subcmds/grep.py529
-rw-r--r--subcmds/help.py330
-rw-r--r--subcmds/info.py413
-rw-r--r--subcmds/init.py544
-rw-r--r--subcmds/list.py158
-rw-r--r--subcmds/manifest.py230
-rw-r--r--subcmds/overview.py124
-rw-r--r--subcmds/prune.py107
-rw-r--r--subcmds/rebase.py313
-rw-r--r--subcmds/selfupdate.py63
-rw-r--r--subcmds/smartsync.py18
-rw-r--r--subcmds/stage.py181
-rw-r--r--subcmds/start.py250
-rw-r--r--subcmds/status.py239
-rw-r--r--subcmds/sync.py3130
-rw-r--r--subcmds/upload.py1152
-rw-r--r--subcmds/version.py75
-rw-r--r--tests/conftest.py4
-rw-r--r--tests/test_editor.py44
-rw-r--r--tests/test_error.py60
-rw-r--r--tests/test_git_command.py222
-rw-r--r--tests/test_git_config.py318
-rw-r--r--tests/test_git_superproject.py782
-rw-r--r--tests/test_git_trace2_event_log.py725
-rw-r--r--tests/test_hooks.py63
-rw-r--r--tests/test_manifest_xml.py1414
-rw-r--r--tests/test_platform_utils.py54
-rw-r--r--tests/test_project.py855
-rw-r--r--tests/test_repo_trace.py68
-rw-r--r--tests/test_ssh.py90
-rw-r--r--tests/test_subcmds.py86
-rw-r--r--tests/test_subcmds_init.py51
-rw-r--r--tests/test_subcmds_sync.py215
-rw-r--r--tests/test_update_manpages.py10
-rw-r--r--tests/test_wrapper.py1029
-rw-r--r--tox.ini1
-rw-r--r--wrapper.py14
79 files changed, 20808 insertions, 17789 deletions
diff --git a/.flake8 b/.flake8
index 82453b56..dd7f4d36 100644
--- a/.flake8
+++ b/.flake8
@@ -1,5 +1,8 @@
1[flake8] 1[flake8]
2max-line-length = 80 2max-line-length = 80
3per-file-ignores =
4 # E501: line too long
5 tests/test_git_superproject.py: E501
3extend-ignore = 6extend-ignore =
4 # E203: Whitespace before ':' 7 # E203: Whitespace before ':'
5 # See https://github.com/PyCQA/pycodestyle/issues/373 8 # See https://github.com/PyCQA/pycodestyle/issues/373
diff --git a/color.py b/color.py
index fdd72534..8f29b59f 100644
--- a/color.py
+++ b/color.py
@@ -17,196 +17,200 @@ import sys
17 17
18import pager 18import pager
19 19
20COLORS = {None: -1, 20COLORS = {
21 'normal': -1, 21 None: -1,
22 'black': 0, 22 "normal": -1,
23 'red': 1, 23 "black": 0,
24 'green': 2, 24 "red": 1,
25 'yellow': 3, 25 "green": 2,
26 'blue': 4, 26 "yellow": 3,
27 'magenta': 5, 27 "blue": 4,
28 'cyan': 6, 28 "magenta": 5,
29 'white': 7} 29 "cyan": 6,
30 30 "white": 7,
31ATTRS = {None: -1, 31}
32 'bold': 1, 32
33 'dim': 2, 33ATTRS = {None: -1, "bold": 1, "dim": 2, "ul": 4, "blink": 5, "reverse": 7}
34 'ul': 4,
35 'blink': 5,
36 'reverse': 7}
37 34
38RESET = "\033[m" 35RESET = "\033[m"
39 36
40 37
41def is_color(s): 38def is_color(s):
42 return s in COLORS 39 return s in COLORS
43 40
44 41
45def is_attr(s): 42def is_attr(s):
46 return s in ATTRS 43 return s in ATTRS
47 44
48 45
49def _Color(fg=None, bg=None, attr=None): 46def _Color(fg=None, bg=None, attr=None):
50 fg = COLORS[fg] 47 fg = COLORS[fg]
51 bg = COLORS[bg] 48 bg = COLORS[bg]
52 attr = ATTRS[attr] 49 attr = ATTRS[attr]
53 50
54 if attr >= 0 or fg >= 0 or bg >= 0: 51 if attr >= 0 or fg >= 0 or bg >= 0:
55 need_sep = False 52 need_sep = False
56 code = "\033[" 53 code = "\033["
57 54
58 if attr >= 0: 55 if attr >= 0:
59 code += chr(ord('0') + attr) 56 code += chr(ord("0") + attr)
60 need_sep = True 57 need_sep = True
61 58
62 if fg >= 0: 59 if fg >= 0:
63 if need_sep: 60 if need_sep:
64 code += ';' 61 code += ";"
65 need_sep = True 62 need_sep = True
66 63
67 if fg < 8: 64 if fg < 8:
68 code += '3%c' % (ord('0') + fg) 65 code += "3%c" % (ord("0") + fg)
69 else: 66 else:
70 code += '38;5;%d' % fg 67 code += "38;5;%d" % fg
71 68
72 if bg >= 0: 69 if bg >= 0:
73 if need_sep: 70 if need_sep:
74 code += ';' 71 code += ";"
75 72
76 if bg < 8: 73 if bg < 8:
77 code += '4%c' % (ord('0') + bg) 74 code += "4%c" % (ord("0") + bg)
78 else: 75 else:
79 code += '48;5;%d' % bg 76 code += "48;5;%d" % bg
80 code += 'm' 77 code += "m"
81 else: 78 else:
82 code = '' 79 code = ""
83 return code 80 return code
84 81
85 82
86DEFAULT = None 83DEFAULT = None
87 84
88 85
89def SetDefaultColoring(state): 86def SetDefaultColoring(state):
90 """Set coloring behavior to |state|. 87 """Set coloring behavior to |state|.
91 88
92 This is useful for overriding config options via the command line. 89 This is useful for overriding config options via the command line.
93 """ 90 """
94 if state is None: 91 if state is None:
95 # Leave it alone -- return quick! 92 # Leave it alone -- return quick!
96 return 93 return
97 94
98 global DEFAULT 95 global DEFAULT
99 state = state.lower() 96 state = state.lower()
100 if state in ('auto',): 97 if state in ("auto",):
101 DEFAULT = state 98 DEFAULT = state
102 elif state in ('always', 'yes', 'true', True): 99 elif state in ("always", "yes", "true", True):
103 DEFAULT = 'always' 100 DEFAULT = "always"
104 elif state in ('never', 'no', 'false', False): 101 elif state in ("never", "no", "false", False):
105 DEFAULT = 'never' 102 DEFAULT = "never"
106 103
107 104
108class Coloring(object): 105class Coloring(object):
109 def __init__(self, config, section_type): 106 def __init__(self, config, section_type):
110 self._section = 'color.%s' % section_type 107 self._section = "color.%s" % section_type
111 self._config = config 108 self._config = config
112 self._out = sys.stdout 109 self._out = sys.stdout
113 110
114 on = DEFAULT 111 on = DEFAULT
115 if on is None: 112 if on is None:
116 on = self._config.GetString(self._section) 113 on = self._config.GetString(self._section)
117 if on is None: 114 if on is None:
118 on = self._config.GetString('color.ui') 115 on = self._config.GetString("color.ui")
119 116
120 if on == 'auto': 117 if on == "auto":
121 if pager.active or os.isatty(1): 118 if pager.active or os.isatty(1):
122 self._on = True 119 self._on = True
123 else: 120 else:
124 self._on = False 121 self._on = False
125 elif on in ('true', 'always'): 122 elif on in ("true", "always"):
126 self._on = True 123 self._on = True
127 else: 124 else:
128 self._on = False 125 self._on = False
129 126
130 def redirect(self, out): 127 def redirect(self, out):
131 self._out = out 128 self._out = out
132 129
133 @property 130 @property
134 def is_on(self): 131 def is_on(self):
135 return self._on 132 return self._on
136 133
137 def write(self, fmt, *args): 134 def write(self, fmt, *args):
138 self._out.write(fmt % args) 135 self._out.write(fmt % args)
139 136
140 def flush(self): 137 def flush(self):
141 self._out.flush() 138 self._out.flush()
142 139
143 def nl(self): 140 def nl(self):
144 self._out.write('\n') 141 self._out.write("\n")
145 142
146 def printer(self, opt=None, fg=None, bg=None, attr=None): 143 def printer(self, opt=None, fg=None, bg=None, attr=None):
147 s = self 144 s = self
148 c = self.colorer(opt, fg, bg, attr) 145 c = self.colorer(opt, fg, bg, attr)
149 146
150 def f(fmt, *args): 147 def f(fmt, *args):
151 s._out.write(c(fmt, *args)) 148 s._out.write(c(fmt, *args))
152 return f
153 149
154 def nofmt_printer(self, opt=None, fg=None, bg=None, attr=None): 150 return f
155 s = self
156 c = self.nofmt_colorer(opt, fg, bg, attr)
157 151
158 def f(fmt): 152 def nofmt_printer(self, opt=None, fg=None, bg=None, attr=None):
159 s._out.write(c(fmt)) 153 s = self
160 return f 154 c = self.nofmt_colorer(opt, fg, bg, attr)
161 155
162 def colorer(self, opt=None, fg=None, bg=None, attr=None): 156 def f(fmt):
163 if self._on: 157 s._out.write(c(fmt))
164 c = self._parse(opt, fg, bg, attr)
165 158
166 def f(fmt, *args): 159 return f
167 output = fmt % args
168 return ''.join([c, output, RESET])
169 return f
170 else:
171 160
172 def f(fmt, *args): 161 def colorer(self, opt=None, fg=None, bg=None, attr=None):
173 return fmt % args 162 if self._on:
174 return f 163 c = self._parse(opt, fg, bg, attr)
175 164
176 def nofmt_colorer(self, opt=None, fg=None, bg=None, attr=None): 165 def f(fmt, *args):
177 if self._on: 166 output = fmt % args
178 c = self._parse(opt, fg, bg, attr) 167 return "".join([c, output, RESET])
179 168
180 def f(fmt): 169 return f
181 return ''.join([c, fmt, RESET])
182 return f
183 else:
184 def f(fmt):
185 return fmt
186 return f
187
188 def _parse(self, opt, fg, bg, attr):
189 if not opt:
190 return _Color(fg, bg, attr)
191
192 v = self._config.GetString('%s.%s' % (self._section, opt))
193 if v is None:
194 return _Color(fg, bg, attr)
195
196 v = v.strip().lower()
197 if v == "reset":
198 return RESET
199 elif v == '':
200 return _Color(fg, bg, attr)
201
202 have_fg = False
203 for a in v.split(' '):
204 if is_color(a):
205 if have_fg:
206 bg = a
207 else: 170 else:
208 fg = a
209 elif is_attr(a):
210 attr = a
211 171
212 return _Color(fg, bg, attr) 172 def f(fmt, *args):
173 return fmt % args
174
175 return f
176
177 def nofmt_colorer(self, opt=None, fg=None, bg=None, attr=None):
178 if self._on:
179 c = self._parse(opt, fg, bg, attr)
180
181 def f(fmt):
182 return "".join([c, fmt, RESET])
183
184 return f
185 else:
186
187 def f(fmt):
188 return fmt
189
190 return f
191
192 def _parse(self, opt, fg, bg, attr):
193 if not opt:
194 return _Color(fg, bg, attr)
195
196 v = self._config.GetString("%s.%s" % (self._section, opt))
197 if v is None:
198 return _Color(fg, bg, attr)
199
200 v = v.strip().lower()
201 if v == "reset":
202 return RESET
203 elif v == "":
204 return _Color(fg, bg, attr)
205
206 have_fg = False
207 for a in v.split(" "):
208 if is_color(a):
209 if have_fg:
210 bg = a
211 else:
212 fg = a
213 elif is_attr(a):
214 attr = a
215
216 return _Color(fg, bg, attr)
diff --git a/command.py b/command.py
index 68f36f03..939a4630 100644
--- a/command.py
+++ b/command.py
@@ -25,7 +25,7 @@ import progress
25 25
26 26
27# Are we generating man-pages? 27# Are we generating man-pages?
28GENERATE_MANPAGES = os.environ.get('_REPO_GENERATE_MANPAGES_') == ' indeed! ' 28GENERATE_MANPAGES = os.environ.get("_REPO_GENERATE_MANPAGES_") == " indeed! "
29 29
30 30
31# Number of projects to submit to a single worker process at a time. 31# Number of projects to submit to a single worker process at a time.
@@ -43,403 +43,470 @@ DEFAULT_LOCAL_JOBS = min(os.cpu_count(), 8)
43 43
44 44
45class Command(object): 45class Command(object):
46 """Base class for any command line action in repo. 46 """Base class for any command line action in repo."""
47 """ 47
48 48 # Singleton for all commands to track overall repo command execution and
49 # Singleton for all commands to track overall repo command execution and 49 # provide event summary to callers. Only used by sync subcommand currently.
50 # provide event summary to callers. Only used by sync subcommand currently. 50 #
51 # 51 # NB: This is being replaced by git trace2 events. See git_trace2_event_log.
52 # NB: This is being replaced by git trace2 events. See git_trace2_event_log. 52 event_log = EventLog()
53 event_log = EventLog() 53
54 54 # Whether this command is a "common" one, i.e. whether the user would
55 # Whether this command is a "common" one, i.e. whether the user would commonly 55 # commonly use it or it's a more uncommon command. This is used by the help
56 # use it or it's a more uncommon command. This is used by the help command to 56 # command to show short-vs-full summaries.
57 # show short-vs-full summaries. 57 COMMON = False
58 COMMON = False 58
59 59 # Whether this command supports running in parallel. If greater than 0,
60 # Whether this command supports running in parallel. If greater than 0, 60 # it is the number of parallel jobs to default to.
61 # it is the number of parallel jobs to default to. 61 PARALLEL_JOBS = None
62 PARALLEL_JOBS = None 62
63 63 # Whether this command supports Multi-manifest. If False, then main.py will
64 # Whether this command supports Multi-manifest. If False, then main.py will 64 # iterate over the manifests and invoke the command once per (sub)manifest.
65 # iterate over the manifests and invoke the command once per (sub)manifest. 65 # This is only checked after calling ValidateOptions, so that partially
66 # This is only checked after calling ValidateOptions, so that partially 66 # migrated subcommands can set it to False.
67 # migrated subcommands can set it to False. 67 MULTI_MANIFEST_SUPPORT = True
68 MULTI_MANIFEST_SUPPORT = True 68
69 69 def __init__(
70 def __init__(self, repodir=None, client=None, manifest=None, gitc_manifest=None, 70 self,
71 git_event_log=None, outer_client=None, outer_manifest=None): 71 repodir=None,
72 self.repodir = repodir 72 client=None,
73 self.client = client 73 manifest=None,
74 self.outer_client = outer_client or client 74 gitc_manifest=None,
75 self.manifest = manifest 75 git_event_log=None,
76 self.gitc_manifest = gitc_manifest 76 outer_client=None,
77 self.git_event_log = git_event_log 77 outer_manifest=None,
78 self.outer_manifest = outer_manifest 78 ):
79 79 self.repodir = repodir
80 # Cache for the OptionParser property. 80 self.client = client
81 self._optparse = None 81 self.outer_client = outer_client or client
82 82 self.manifest = manifest
83 def WantPager(self, _opt): 83 self.gitc_manifest = gitc_manifest
84 return False 84 self.git_event_log = git_event_log
85 85 self.outer_manifest = outer_manifest
86 def ReadEnvironmentOptions(self, opts): 86
87 """ Set options from environment variables. """ 87 # Cache for the OptionParser property.
88 88 self._optparse = None
89 env_options = self._RegisteredEnvironmentOptions() 89
90 90 def WantPager(self, _opt):
91 for env_key, opt_key in env_options.items(): 91 return False
92 # Get the user-set option value if any 92
93 opt_value = getattr(opts, opt_key) 93 def ReadEnvironmentOptions(self, opts):
94 94 """Set options from environment variables."""
95 # If the value is set, it means the user has passed it as a command 95
96 # line option, and we should use that. Otherwise we can try to set it 96 env_options = self._RegisteredEnvironmentOptions()
97 # with the value from the corresponding environment variable. 97
98 if opt_value is not None: 98 for env_key, opt_key in env_options.items():
99 continue 99 # Get the user-set option value if any
100 100 opt_value = getattr(opts, opt_key)
101 env_value = os.environ.get(env_key) 101
102 if env_value is not None: 102 # If the value is set, it means the user has passed it as a command
103 setattr(opts, opt_key, env_value) 103 # line option, and we should use that. Otherwise we can try to set
104 104 # it with the value from the corresponding environment variable.
105 return opts 105 if opt_value is not None:
106 106 continue
107 @property 107
108 def OptionParser(self): 108 env_value = os.environ.get(env_key)
109 if self._optparse is None: 109 if env_value is not None:
110 try: 110 setattr(opts, opt_key, env_value)
111 me = 'repo %s' % self.NAME 111
112 usage = self.helpUsage.strip().replace('%prog', me) 112 return opts
113 except AttributeError: 113
114 usage = 'repo %s' % self.NAME 114 @property
115 epilog = 'Run `repo help %s` to view the detailed manual.' % self.NAME 115 def OptionParser(self):
116 self._optparse = optparse.OptionParser(usage=usage, epilog=epilog) 116 if self._optparse is None:
117 self._CommonOptions(self._optparse) 117 try:
118 self._Options(self._optparse) 118 me = "repo %s" % self.NAME
119 return self._optparse 119 usage = self.helpUsage.strip().replace("%prog", me)
120 120 except AttributeError:
121 def _CommonOptions(self, p, opt_v=True): 121 usage = "repo %s" % self.NAME
122 """Initialize the option parser with common options. 122 epilog = (
123 123 "Run `repo help %s` to view the detailed manual." % self.NAME
124 These will show up for *all* subcommands, so use sparingly. 124 )
125 NB: Keep in sync with repo:InitParser(). 125 self._optparse = optparse.OptionParser(usage=usage, epilog=epilog)
126 """ 126 self._CommonOptions(self._optparse)
127 g = p.add_option_group('Logging options') 127 self._Options(self._optparse)
128 opts = ['-v'] if opt_v else [] 128 return self._optparse
129 g.add_option(*opts, '--verbose', 129
130 dest='output_mode', action='store_true', 130 def _CommonOptions(self, p, opt_v=True):
131 help='show all output') 131 """Initialize the option parser with common options.
132 g.add_option('-q', '--quiet', 132
133 dest='output_mode', action='store_false', 133 These will show up for *all* subcommands, so use sparingly.
134 help='only show errors') 134 NB: Keep in sync with repo:InitParser().
135 135 """
136 if self.PARALLEL_JOBS is not None: 136 g = p.add_option_group("Logging options")
137 default = 'based on number of CPU cores' 137 opts = ["-v"] if opt_v else []
138 if not GENERATE_MANPAGES: 138 g.add_option(
139 # Only include active cpu count if we aren't generating man pages. 139 *opts,
140 default = f'%default; {default}' 140 "--verbose",
141 p.add_option( 141 dest="output_mode",
142 '-j', '--jobs', 142 action="store_true",
143 type=int, default=self.PARALLEL_JOBS, 143 help="show all output",
144 help=f'number of jobs to run in parallel (default: {default})') 144 )
145 145 g.add_option(
146 m = p.add_option_group('Multi-manifest options') 146 "-q",
147 m.add_option('--outer-manifest', action='store_true', default=None, 147 "--quiet",
148 help='operate starting at the outermost manifest') 148 dest="output_mode",
149 m.add_option('--no-outer-manifest', dest='outer_manifest', 149 action="store_false",
150 action='store_false', help='do not operate on outer manifests') 150 help="only show errors",
151 m.add_option('--this-manifest-only', action='store_true', default=None, 151 )
152 help='only operate on this (sub)manifest') 152
153 m.add_option('--no-this-manifest-only', '--all-manifests', 153 if self.PARALLEL_JOBS is not None:
154 dest='this_manifest_only', action='store_false', 154 default = "based on number of CPU cores"
155 help='operate on this manifest and its submanifests') 155 if not GENERATE_MANPAGES:
156 156 # Only include active cpu count if we aren't generating man
157 def _Options(self, p): 157 # pages.
158 """Initialize the option parser with subcommand-specific options.""" 158 default = f"%default; {default}"
159 159 p.add_option(
160 def _RegisteredEnvironmentOptions(self): 160 "-j",
161 """Get options that can be set from environment variables. 161 "--jobs",
162 162 type=int,
163 Return a dictionary mapping environment variable name 163 default=self.PARALLEL_JOBS,
164 to option key name that it can override. 164 help=f"number of jobs to run in parallel (default: {default})",
165 165 )
166 Example: {'REPO_MY_OPTION': 'my_option'} 166
167 167 m = p.add_option_group("Multi-manifest options")
168 Will allow the option with key value 'my_option' to be set 168 m.add_option(
169 from the value in the environment variable named 'REPO_MY_OPTION'. 169 "--outer-manifest",
170 170 action="store_true",
171 Note: This does not work properly for options that are explicitly 171 default=None,
172 set to None by the user, or options that are defined with a 172 help="operate starting at the outermost manifest",
173 default value other than None. 173 )
174 174 m.add_option(
175 """ 175 "--no-outer-manifest",
176 return {} 176 dest="outer_manifest",
177 177 action="store_false",
178 def Usage(self): 178 help="do not operate on outer manifests",
179 """Display usage and terminate. 179 )
180 """ 180 m.add_option(
181 self.OptionParser.print_usage() 181 "--this-manifest-only",
182 sys.exit(1) 182 action="store_true",
183 183 default=None,
184 def CommonValidateOptions(self, opt, args): 184 help="only operate on this (sub)manifest",
185 """Validate common options.""" 185 )
186 opt.quiet = opt.output_mode is False 186 m.add_option(
187 opt.verbose = opt.output_mode is True 187 "--no-this-manifest-only",
188 if opt.outer_manifest is None: 188 "--all-manifests",
189 # By default, treat multi-manifest instances as a single manifest from 189 dest="this_manifest_only",
190 # the user's perspective. 190 action="store_false",
191 opt.outer_manifest = True 191 help="operate on this manifest and its submanifests",
192 192 )
193 def ValidateOptions(self, opt, args): 193
194 """Validate the user options & arguments before executing. 194 def _Options(self, p):
195 195 """Initialize the option parser with subcommand-specific options."""
196 This is meant to help break the code up into logical steps. Some tips: 196
197 * Use self.OptionParser.error to display CLI related errors. 197 def _RegisteredEnvironmentOptions(self):
198 * Adjust opt member defaults as makes sense. 198 """Get options that can be set from environment variables.
199 * Adjust the args list, but do so inplace so the caller sees updates. 199
200 * Try to avoid updating self state. Leave that to Execute. 200 Return a dictionary mapping environment variable name
201 """ 201 to option key name that it can override.
202 202
203 def Execute(self, opt, args): 203 Example: {'REPO_MY_OPTION': 'my_option'}
204 """Perform the action, after option parsing is complete. 204
205 """ 205 Will allow the option with key value 'my_option' to be set
206 raise NotImplementedError 206 from the value in the environment variable named 'REPO_MY_OPTION'.
207 207
208 @staticmethod 208 Note: This does not work properly for options that are explicitly
209 def ExecuteInParallel(jobs, func, inputs, callback, output=None, ordered=False): 209 set to None by the user, or options that are defined with a
210 """Helper for managing parallel execution boiler plate. 210 default value other than None.
211 211
212 For subcommands that can easily split their work up. 212 """
213 213 return {}
214 Args: 214
215 jobs: How many parallel processes to use. 215 def Usage(self):
216 func: The function to apply to each of the |inputs|. Usually a 216 """Display usage and terminate."""
217 functools.partial for wrapping additional arguments. It will be run 217 self.OptionParser.print_usage()
218 in a separate process, so it must be pickalable, so nested functions 218 sys.exit(1)
219 won't work. Methods on the subcommand Command class should work. 219
220 inputs: The list of items to process. Must be a list. 220 def CommonValidateOptions(self, opt, args):
221 callback: The function to pass the results to for processing. It will be 221 """Validate common options."""
222 executed in the main thread and process the results of |func| as they 222 opt.quiet = opt.output_mode is False
223 become available. Thus it may be a local nested function. Its return 223 opt.verbose = opt.output_mode is True
224 value is passed back directly. It takes three arguments: 224 if opt.outer_manifest is None:
225 - The processing pool (or None with one job). 225 # By default, treat multi-manifest instances as a single manifest
226 - The |output| argument. 226 # from the user's perspective.
227 - An iterator for the results. 227 opt.outer_manifest = True
228 output: An output manager. May be progress.Progess or color.Coloring. 228
229 ordered: Whether the jobs should be processed in order. 229 def ValidateOptions(self, opt, args):
230 230 """Validate the user options & arguments before executing.
231 Returns: 231
232 The |callback| function's results are returned. 232 This is meant to help break the code up into logical steps. Some tips:
233 """ 233 * Use self.OptionParser.error to display CLI related errors.
234 try: 234 * Adjust opt member defaults as makes sense.
235 # NB: Multiprocessing is heavy, so don't spin it up for one job. 235 * Adjust the args list, but do so inplace so the caller sees updates.
236 if len(inputs) == 1 or jobs == 1: 236 * Try to avoid updating self state. Leave that to Execute.
237 return callback(None, output, (func(x) for x in inputs)) 237 """
238 else: 238
239 with multiprocessing.Pool(jobs) as pool: 239 def Execute(self, opt, args):
240 submit = pool.imap if ordered else pool.imap_unordered 240 """Perform the action, after option parsing is complete."""
241 return callback(pool, output, submit(func, inputs, chunksize=WORKER_BATCH_SIZE)) 241 raise NotImplementedError
242 finally: 242
243 if isinstance(output, progress.Progress): 243 @staticmethod
244 output.end() 244 def ExecuteInParallel(
245 245 jobs, func, inputs, callback, output=None, ordered=False
246 def _ResetPathToProjectMap(self, projects): 246 ):
247 self._by_path = dict((p.worktree, p) for p in projects) 247 """Helper for managing parallel execution boiler plate.
248 248
249 def _UpdatePathToProjectMap(self, project): 249 For subcommands that can easily split their work up.
250 self._by_path[project.worktree] = project 250
251 251 Args:
252 def _GetProjectByPath(self, manifest, path): 252 jobs: How many parallel processes to use.
253 project = None 253 func: The function to apply to each of the |inputs|. Usually a
254 if os.path.exists(path): 254 functools.partial for wrapping additional arguments. It will be
255 oldpath = None 255 run in a separate process, so it must be pickalable, so nested
256 while (path and 256 functions won't work. Methods on the subcommand Command class
257 path != oldpath and 257 should work.
258 path != manifest.topdir): 258 inputs: The list of items to process. Must be a list.
259 callback: The function to pass the results to for processing. It
260 will be executed in the main thread and process the results of
261 |func| as they become available. Thus it may be a local nested
262 function. Its return value is passed back directly. It takes
263 three arguments:
264 - The processing pool (or None with one job).
265 - The |output| argument.
266 - An iterator for the results.
267 output: An output manager. May be progress.Progess or
268 color.Coloring.
269 ordered: Whether the jobs should be processed in order.
270
271 Returns:
272 The |callback| function's results are returned.
273 """
259 try: 274 try:
260 project = self._by_path[path] 275 # NB: Multiprocessing is heavy, so don't spin it up for one job.
261 break 276 if len(inputs) == 1 or jobs == 1:
262 except KeyError: 277 return callback(None, output, (func(x) for x in inputs))
263 oldpath = path 278 else:
264 path = os.path.dirname(path) 279 with multiprocessing.Pool(jobs) as pool:
265 if not project and path == manifest.topdir: 280 submit = pool.imap if ordered else pool.imap_unordered
266 try: 281 return callback(
267 project = self._by_path[path] 282 pool,
268 except KeyError: 283 output,
269 pass 284 submit(func, inputs, chunksize=WORKER_BATCH_SIZE),
270 else: 285 )
271 try: 286 finally:
272 project = self._by_path[path] 287 if isinstance(output, progress.Progress):
273 except KeyError: 288 output.end()
274 pass 289
275 return project 290 def _ResetPathToProjectMap(self, projects):
276 291 self._by_path = dict((p.worktree, p) for p in projects)
277 def GetProjects(self, args, manifest=None, groups='', missing_ok=False, 292
278 submodules_ok=False, all_manifests=False): 293 def _UpdatePathToProjectMap(self, project):
279 """A list of projects that match the arguments. 294 self._by_path[project.worktree] = project
280 295
281 Args: 296 def _GetProjectByPath(self, manifest, path):
282 args: a list of (case-insensitive) strings, projects to search for. 297 project = None
283 manifest: an XmlManifest, the manifest to use, or None for default. 298 if os.path.exists(path):
284 groups: a string, the manifest groups in use. 299 oldpath = None
285 missing_ok: a boolean, whether to allow missing projects. 300 while path and path != oldpath and path != manifest.topdir:
286 submodules_ok: a boolean, whether to allow submodules. 301 try:
287 all_manifests: a boolean, if True then all manifests and submanifests are 302 project = self._by_path[path]
288 used. If False, then only the local (sub)manifest is used. 303 break
289 304 except KeyError:
290 Returns: 305 oldpath = path
291 A list of matching Project instances. 306 path = os.path.dirname(path)
292 """ 307 if not project and path == manifest.topdir:
293 if all_manifests: 308 try:
294 if not manifest: 309 project = self._by_path[path]
295 manifest = self.manifest.outer_client 310 except KeyError:
296 all_projects_list = manifest.all_projects 311 pass
297 else: 312 else:
298 if not manifest: 313 try:
299 manifest = self.manifest 314 project = self._by_path[path]
300 all_projects_list = manifest.projects 315 except KeyError:
301 result = [] 316 pass
302 317 return project
303 if not groups: 318
304 groups = manifest.GetGroupsStr() 319 def GetProjects(
305 groups = [x for x in re.split(r'[,\s]+', groups) if x] 320 self,
306 321 args,
307 if not args: 322 manifest=None,
308 derived_projects = {} 323 groups="",
309 for project in all_projects_list: 324 missing_ok=False,
310 if submodules_ok or project.sync_s: 325 submodules_ok=False,
311 derived_projects.update((p.name, p) 326 all_manifests=False,
312 for p in project.GetDerivedSubprojects()) 327 ):
313 all_projects_list.extend(derived_projects.values()) 328 """A list of projects that match the arguments.
314 for project in all_projects_list: 329
315 if (missing_ok or project.Exists) and project.MatchesGroups(groups): 330 Args:
316 result.append(project) 331 args: a list of (case-insensitive) strings, projects to search for.
317 else: 332 manifest: an XmlManifest, the manifest to use, or None for default.
318 self._ResetPathToProjectMap(all_projects_list) 333 groups: a string, the manifest groups in use.
319 334 missing_ok: a boolean, whether to allow missing projects.
320 for arg in args: 335 submodules_ok: a boolean, whether to allow submodules.
321 # We have to filter by manifest groups in case the requested project is 336 all_manifests: a boolean, if True then all manifests and
322 # checked out multiple times or differently based on them. 337 submanifests are used. If False, then only the local
323 projects = [project 338 (sub)manifest is used.
339
340 Returns:
341 A list of matching Project instances.
342 """
343 if all_manifests:
344 if not manifest:
345 manifest = self.manifest.outer_client
346 all_projects_list = manifest.all_projects
347 else:
348 if not manifest:
349 manifest = self.manifest
350 all_projects_list = manifest.projects
351 result = []
352
353 if not groups:
354 groups = manifest.GetGroupsStr()
355 groups = [x for x in re.split(r"[,\s]+", groups) if x]
356
357 if not args:
358 derived_projects = {}
359 for project in all_projects_list:
360 if submodules_ok or project.sync_s:
361 derived_projects.update(
362 (p.name, p) for p in project.GetDerivedSubprojects()
363 )
364 all_projects_list.extend(derived_projects.values())
365 for project in all_projects_list:
366 if (missing_ok or project.Exists) and project.MatchesGroups(
367 groups
368 ):
369 result.append(project)
370 else:
371 self._ResetPathToProjectMap(all_projects_list)
372
373 for arg in args:
374 # We have to filter by manifest groups in case the requested
375 # project is checked out multiple times or differently based on
376 # them.
377 projects = [
378 project
324 for project in manifest.GetProjectsWithName( 379 for project in manifest.GetProjectsWithName(
325 arg, all_manifests=all_manifests) 380 arg, all_manifests=all_manifests
326 if project.MatchesGroups(groups)] 381 )
327 382 if project.MatchesGroups(groups)
328 if not projects: 383 ]
329 path = os.path.abspath(arg).replace('\\', '/') 384
330 tree = manifest 385 if not projects:
331 if all_manifests: 386 path = os.path.abspath(arg).replace("\\", "/")
332 # Look for the deepest matching submanifest. 387 tree = manifest
333 for tree in reversed(list(manifest.all_manifests)): 388 if all_manifests:
334 if path.startswith(tree.topdir): 389 # Look for the deepest matching submanifest.
335 break 390 for tree in reversed(list(manifest.all_manifests)):
336 project = self._GetProjectByPath(tree, path) 391 if path.startswith(tree.topdir):
337 392 break
338 # If it's not a derived project, update path->project mapping and 393 project = self._GetProjectByPath(tree, path)
339 # search again, as arg might actually point to a derived subproject. 394
340 if (project and not project.Derived and (submodules_ok or 395 # If it's not a derived project, update path->project
341 project.sync_s)): 396 # mapping and search again, as arg might actually point to
342 search_again = False 397 # a derived subproject.
343 for subproject in project.GetDerivedSubprojects(): 398 if (
344 self._UpdatePathToProjectMap(subproject) 399 project
345 search_again = True 400 and not project.Derived
346 if search_again: 401 and (submodules_ok or project.sync_s)
347 project = self._GetProjectByPath(manifest, path) or project 402 ):
348 403 search_again = False
349 if project: 404 for subproject in project.GetDerivedSubprojects():
350 projects = [project] 405 self._UpdatePathToProjectMap(subproject)
351 406 search_again = True
352 if not projects: 407 if search_again:
353 raise NoSuchProjectError(arg) 408 project = (
354 409 self._GetProjectByPath(manifest, path)
355 for project in projects: 410 or project
356 if not missing_ok and not project.Exists: 411 )
357 raise NoSuchProjectError('%s (%s)' % ( 412
358 arg, project.RelPath(local=not all_manifests))) 413 if project:
359 if not project.MatchesGroups(groups): 414 projects = [project]
360 raise InvalidProjectGroupsError(arg) 415
361 416 if not projects:
362 result.extend(projects) 417 raise NoSuchProjectError(arg)
363 418
364 def _getpath(x): 419 for project in projects:
365 return x.relpath 420 if not missing_ok and not project.Exists:
366 result.sort(key=_getpath) 421 raise NoSuchProjectError(
367 return result 422 "%s (%s)"
368 423 % (arg, project.RelPath(local=not all_manifests))
369 def FindProjects(self, args, inverse=False, all_manifests=False): 424 )
370 """Find projects from command line arguments. 425 if not project.MatchesGroups(groups):
371 426 raise InvalidProjectGroupsError(arg)
372 Args: 427
373 args: a list of (case-insensitive) strings, projects to search for. 428 result.extend(projects)
374 inverse: a boolean, if True, then projects not matching any |args| are 429
375 returned. 430 def _getpath(x):
376 all_manifests: a boolean, if True then all manifests and submanifests are 431 return x.relpath
377 used. If False, then only the local (sub)manifest is used. 432
378 """ 433 result.sort(key=_getpath)
379 result = [] 434 return result
380 patterns = [re.compile(r'%s' % a, re.IGNORECASE) for a in args] 435
381 for project in self.GetProjects('', all_manifests=all_manifests): 436 def FindProjects(self, args, inverse=False, all_manifests=False):
382 paths = [project.name, project.RelPath(local=not all_manifests)] 437 """Find projects from command line arguments.
383 for pattern in patterns: 438
384 match = any(pattern.search(x) for x in paths) 439 Args:
385 if not inverse and match: 440 args: a list of (case-insensitive) strings, projects to search for.
386 result.append(project) 441 inverse: a boolean, if True, then projects not matching any |args|
387 break 442 are returned.
388 if inverse and match: 443 all_manifests: a boolean, if True then all manifests and
389 break 444 submanifests are used. If False, then only the local
390 else: 445 (sub)manifest is used.
391 if inverse: 446 """
392 result.append(project) 447 result = []
393 result.sort(key=lambda project: (project.manifest.path_prefix, 448 patterns = [re.compile(r"%s" % a, re.IGNORECASE) for a in args]
394 project.relpath)) 449 for project in self.GetProjects("", all_manifests=all_manifests):
395 return result 450 paths = [project.name, project.RelPath(local=not all_manifests)]
396 451 for pattern in patterns:
397 def ManifestList(self, opt): 452 match = any(pattern.search(x) for x in paths)
398 """Yields all of the manifests to traverse. 453 if not inverse and match:
399 454 result.append(project)
400 Args: 455 break
401 opt: The command options. 456 if inverse and match:
402 """ 457 break
403 top = self.outer_manifest 458 else:
404 if not opt.outer_manifest or opt.this_manifest_only: 459 if inverse:
405 top = self.manifest 460 result.append(project)
406 yield top 461 result.sort(
407 if not opt.this_manifest_only: 462 key=lambda project: (project.manifest.path_prefix, project.relpath)
408 for child in top.all_children: 463 )
409 yield child 464 return result
465
466 def ManifestList(self, opt):
467 """Yields all of the manifests to traverse.
468
469 Args:
470 opt: The command options.
471 """
472 top = self.outer_manifest
473 if not opt.outer_manifest or opt.this_manifest_only:
474 top = self.manifest
475 yield top
476 if not opt.this_manifest_only:
477 for child in top.all_children:
478 yield child
410 479
411 480
412class InteractiveCommand(Command): 481class InteractiveCommand(Command):
413 """Command which requires user interaction on the tty and 482 """Command which requires user interaction on the tty and must not run
414 must not run within a pager, even if the user asks to. 483 within a pager, even if the user asks to.
415 """ 484 """
416 485
417 def WantPager(self, _opt): 486 def WantPager(self, _opt):
418 return False 487 return False
419 488
420 489
421class PagedCommand(Command): 490class PagedCommand(Command):
422 """Command which defaults to output in a pager, as its 491 """Command which defaults to output in a pager, as its display tends to be
423 display tends to be larger than one screen full. 492 larger than one screen full.
424 """ 493 """
425 494
426 def WantPager(self, _opt): 495 def WantPager(self, _opt):
427 return True 496 return True
428 497
429 498
430class MirrorSafeCommand(object): 499class MirrorSafeCommand(object):
431 """Command permits itself to run within a mirror, 500 """Command permits itself to run within a mirror, and does not require a
432 and does not require a working directory. 501 working directory.
433 """ 502 """
434 503
435 504
436class GitcAvailableCommand(object): 505class GitcAvailableCommand(object):
437 """Command that requires GITC to be available, but does 506 """Command that requires GITC to be available, but does not require the
438 not require the local client to be a GITC client. 507 local client to be a GITC client.
439 """ 508 """
440 509
441 510
442class GitcClientCommand(object): 511class GitcClientCommand(object):
443 """Command that requires the local client to be a GITC 512 """Command that requires the local client to be a GITC client."""
444 client.
445 """
diff --git a/editor.py b/editor.py
index b84a42d4..96835aba 100644
--- a/editor.py
+++ b/editor.py
@@ -23,93 +23,99 @@ import platform_utils
23 23
24 24
25class Editor(object): 25class Editor(object):
26 """Manages the user's preferred text editor.""" 26 """Manages the user's preferred text editor."""
27 27
28 _editor = None 28 _editor = None
29 globalConfig = None 29 globalConfig = None
30 30
31 @classmethod 31 @classmethod
32 def _GetEditor(cls): 32 def _GetEditor(cls):
33 if cls._editor is None: 33 if cls._editor is None:
34 cls._editor = cls._SelectEditor() 34 cls._editor = cls._SelectEditor()
35 return cls._editor 35 return cls._editor
36 36
37 @classmethod 37 @classmethod
38 def _SelectEditor(cls): 38 def _SelectEditor(cls):
39 e = os.getenv('GIT_EDITOR') 39 e = os.getenv("GIT_EDITOR")
40 if e: 40 if e:
41 return e 41 return e
42 42
43 if cls.globalConfig: 43 if cls.globalConfig:
44 e = cls.globalConfig.GetString('core.editor') 44 e = cls.globalConfig.GetString("core.editor")
45 if e: 45 if e:
46 return e 46 return e
47 47
48 e = os.getenv('VISUAL') 48 e = os.getenv("VISUAL")
49 if e: 49 if e:
50 return e 50 return e
51 51
52 e = os.getenv('EDITOR') 52 e = os.getenv("EDITOR")
53 if e: 53 if e:
54 return e 54 return e
55 55
56 if os.getenv('TERM') == 'dumb': 56 if os.getenv("TERM") == "dumb":
57 print( 57 print(
58 """No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR. 58 """No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
59Tried to fall back to vi but terminal is dumb. Please configure at 59Tried to fall back to vi but terminal is dumb. Please configure at
60least one of these before using this command.""", file=sys.stderr) 60least one of these before using this command.""", # noqa: E501
61 sys.exit(1) 61 file=sys.stderr,
62 62 )
63 return 'vi' 63 sys.exit(1)
64 64
65 @classmethod 65 return "vi"
66 def EditString(cls, data): 66
67 """Opens an editor to edit the given content. 67 @classmethod
68 68 def EditString(cls, data):
69 Args: 69 """Opens an editor to edit the given content.
70 data: The text to edit. 70
71 71 Args:
72 Returns: 72 data: The text to edit.
73 New value of edited text. 73
74 74 Returns:
75 Raises: 75 New value of edited text.
76 EditorError: The editor failed to run. 76
77 """ 77 Raises:
78 editor = cls._GetEditor() 78 EditorError: The editor failed to run.
79 if editor == ':': 79 """
80 return data 80 editor = cls._GetEditor()
81 81 if editor == ":":
82 fd, path = tempfile.mkstemp() 82 return data
83 try: 83
84 os.write(fd, data.encode('utf-8')) 84 fd, path = tempfile.mkstemp()
85 os.close(fd) 85 try:
86 fd = None 86 os.write(fd, data.encode("utf-8"))
87 87 os.close(fd)
88 if platform_utils.isWindows(): 88 fd = None
89 # Split on spaces, respecting quoted strings 89
90 import shlex 90 if platform_utils.isWindows():
91 args = shlex.split(editor) 91 # Split on spaces, respecting quoted strings
92 shell = False 92 import shlex
93 elif re.compile("^.*[$ \t'].*$").match(editor): 93
94 args = [editor + ' "$@"', 'sh'] 94 args = shlex.split(editor)
95 shell = True 95 shell = False
96 else: 96 elif re.compile("^.*[$ \t'].*$").match(editor):
97 args = [editor] 97 args = [editor + ' "$@"', "sh"]
98 shell = False 98 shell = True
99 args.append(path) 99 else:
100 100 args = [editor]
101 try: 101 shell = False
102 rc = subprocess.Popen(args, shell=shell).wait() 102 args.append(path)
103 except OSError as e: 103
104 raise EditorError('editor failed, %s: %s %s' 104 try:
105 % (str(e), editor, path)) 105 rc = subprocess.Popen(args, shell=shell).wait()
106 if rc != 0: 106 except OSError as e:
107 raise EditorError('editor failed with exit status %d: %s %s' 107 raise EditorError(
108 % (rc, editor, path)) 108 "editor failed, %s: %s %s" % (str(e), editor, path)
109 109 )
110 with open(path, mode='rb') as fd2: 110 if rc != 0:
111 return fd2.read().decode('utf-8') 111 raise EditorError(
112 finally: 112 "editor failed with exit status %d: %s %s"
113 if fd: 113 % (rc, editor, path)
114 os.close(fd) 114 )
115 platform_utils.remove(path) 115
116 with open(path, mode="rb") as fd2:
117 return fd2.read().decode("utf-8")
118 finally:
119 if fd:
120 os.close(fd)
121 platform_utils.remove(path)
diff --git a/error.py b/error.py
index cbefcb7e..3cf34d54 100644
--- a/error.py
+++ b/error.py
@@ -14,122 +14,112 @@
14 14
15 15
16class ManifestParseError(Exception): 16class ManifestParseError(Exception):
17 """Failed to parse the manifest file. 17 """Failed to parse the manifest file."""
18 """
19 18
20 19
21class ManifestInvalidRevisionError(ManifestParseError): 20class ManifestInvalidRevisionError(ManifestParseError):
22 """The revision value in a project is incorrect. 21 """The revision value in a project is incorrect."""
23 """
24 22
25 23
26class ManifestInvalidPathError(ManifestParseError): 24class ManifestInvalidPathError(ManifestParseError):
27 """A path used in <copyfile> or <linkfile> is incorrect. 25 """A path used in <copyfile> or <linkfile> is incorrect."""
28 """
29 26
30 27
31class NoManifestException(Exception): 28class NoManifestException(Exception):
32 """The required manifest does not exist. 29 """The required manifest does not exist."""
33 """
34 30
35 def __init__(self, path, reason): 31 def __init__(self, path, reason):
36 super().__init__(path, reason) 32 super().__init__(path, reason)
37 self.path = path 33 self.path = path
38 self.reason = reason 34 self.reason = reason
39 35
40 def __str__(self): 36 def __str__(self):
41 return self.reason 37 return self.reason
42 38
43 39
44class EditorError(Exception): 40class EditorError(Exception):
45 """Unspecified error from the user's text editor. 41 """Unspecified error from the user's text editor."""
46 """
47 42
48 def __init__(self, reason): 43 def __init__(self, reason):
49 super().__init__(reason) 44 super().__init__(reason)
50 self.reason = reason 45 self.reason = reason
51 46
52 def __str__(self): 47 def __str__(self):
53 return self.reason 48 return self.reason
54 49
55 50
56class GitError(Exception): 51class GitError(Exception):
57 """Unspecified internal error from git. 52 """Unspecified internal error from git."""
58 """
59 53
60 def __init__(self, command): 54 def __init__(self, command):
61 super().__init__(command) 55 super().__init__(command)
62 self.command = command 56 self.command = command
63 57
64 def __str__(self): 58 def __str__(self):
65 return self.command 59 return self.command
66 60
67 61
68class UploadError(Exception): 62class UploadError(Exception):
69 """A bundle upload to Gerrit did not succeed. 63 """A bundle upload to Gerrit did not succeed."""
70 """
71 64
72 def __init__(self, reason): 65 def __init__(self, reason):
73 super().__init__(reason) 66 super().__init__(reason)
74 self.reason = reason 67 self.reason = reason
75 68
76 def __str__(self): 69 def __str__(self):
77 return self.reason 70 return self.reason
78 71
79 72
80class DownloadError(Exception): 73class DownloadError(Exception):
81 """Cannot download a repository. 74 """Cannot download a repository."""
82 """
83 75
84 def __init__(self, reason): 76 def __init__(self, reason):
85 super().__init__(reason) 77 super().__init__(reason)
86 self.reason = reason 78 self.reason = reason
87 79
88 def __str__(self): 80 def __str__(self):
89 return self.reason 81 return self.reason
90 82
91 83
92class NoSuchProjectError(Exception): 84class NoSuchProjectError(Exception):
93 """A specified project does not exist in the work tree. 85 """A specified project does not exist in the work tree."""
94 """
95 86
96 def __init__(self, name=None): 87 def __init__(self, name=None):
97 super().__init__(name) 88 super().__init__(name)
98 self.name = name 89 self.name = name
99 90
100 def __str__(self): 91 def __str__(self):
101 if self.name is None: 92 if self.name is None:
102 return 'in current directory' 93 return "in current directory"
103 return self.name 94 return self.name
104 95
105 96
106class InvalidProjectGroupsError(Exception): 97class InvalidProjectGroupsError(Exception):
107 """A specified project is not suitable for the specified groups 98 """A specified project is not suitable for the specified groups"""
108 """
109 99
110 def __init__(self, name=None): 100 def __init__(self, name=None):
111 super().__init__(name) 101 super().__init__(name)
112 self.name = name 102 self.name = name
113 103
114 def __str__(self): 104 def __str__(self):
115 if self.name is None: 105 if self.name is None:
116 return 'in current directory' 106 return "in current directory"
117 return self.name 107 return self.name
118 108
119 109
120class RepoChangedException(Exception): 110class RepoChangedException(Exception):
121 """Thrown if 'repo sync' results in repo updating its internal 111 """Thrown if 'repo sync' results in repo updating its internal
122 repo or manifest repositories. In this special case we must 112 repo or manifest repositories. In this special case we must
123 use exec to re-execute repo with the new code and manifest. 113 use exec to re-execute repo with the new code and manifest.
124 """ 114 """
125 115
126 def __init__(self, extra_args=None): 116 def __init__(self, extra_args=None):
127 super().__init__(extra_args) 117 super().__init__(extra_args)
128 self.extra_args = extra_args or [] 118 self.extra_args = extra_args or []
129 119
130 120
131class HookError(Exception): 121class HookError(Exception):
132 """Thrown if a 'repo-hook' could not be run. 122 """Thrown if a 'repo-hook' could not be run.
133 123
134 The common case is that the file wasn't present when we tried to run it. 124 The common case is that the file wasn't present when we tried to run it.
135 """ 125 """
diff --git a/event_log.py b/event_log.py
index c77c5648..b1f8bdf9 100644
--- a/event_log.py
+++ b/event_log.py
@@ -15,161 +15,169 @@
15import json 15import json
16import multiprocessing 16import multiprocessing
17 17
18TASK_COMMAND = 'command' 18TASK_COMMAND = "command"
19TASK_SYNC_NETWORK = 'sync-network' 19TASK_SYNC_NETWORK = "sync-network"
20TASK_SYNC_LOCAL = 'sync-local' 20TASK_SYNC_LOCAL = "sync-local"
21 21
22 22
23class EventLog(object): 23class EventLog(object):
24 """Event log that records events that occurred during a repo invocation. 24 """Event log that records events that occurred during a repo invocation.
25 25
26 Events are written to the log as a consecutive JSON entries, one per line. 26 Events are written to the log as a consecutive JSON entries, one per line.
27 Each entry contains the following keys: 27 Each entry contains the following keys:
28 - id: A ('RepoOp', ID) tuple, suitable for storing in a datastore. 28 - id: A ('RepoOp', ID) tuple, suitable for storing in a datastore.
29 The ID is only unique for the invocation of the repo command. 29 The ID is only unique for the invocation of the repo command.
30 - name: Name of the object being operated upon. 30 - name: Name of the object being operated upon.
31 - task_name: The task that was performed. 31 - task_name: The task that was performed.
32 - start: Timestamp of when the operation started. 32 - start: Timestamp of when the operation started.
33 - finish: Timestamp of when the operation finished. 33 - finish: Timestamp of when the operation finished.
34 - success: Boolean indicating if the operation was successful. 34 - success: Boolean indicating if the operation was successful.
35 - try_count: A counter indicating the try count of this task. 35 - try_count: A counter indicating the try count of this task.
36 36
37 Optionally: 37 Optionally:
38 - parent: A ('RepoOp', ID) tuple indicating the parent event for nested 38 - parent: A ('RepoOp', ID) tuple indicating the parent event for nested
39 events. 39 events.
40 40
41 Valid task_names include: 41 Valid task_names include:
42 - command: The invocation of a subcommand. 42 - command: The invocation of a subcommand.
43 - sync-network: The network component of a sync command. 43 - sync-network: The network component of a sync command.
44 - sync-local: The local component of a sync command. 44 - sync-local: The local component of a sync command.
45 45
46 Specific tasks may include additional informational properties. 46 Specific tasks may include additional informational properties.
47 """
48
49 def __init__(self):
50 """Initializes the event log."""
51 self._log = []
52 self._parent = None
53
54 def Add(self, name, task_name, start, finish=None, success=None,
55 try_count=1, kind='RepoOp'):
56 """Add an event to the log.
57
58 Args:
59 name: Name of the object being operated upon.
60 task_name: A sub-task that was performed for name.
61 start: Timestamp of when the operation started.
62 finish: Timestamp of when the operation finished.
63 success: Boolean indicating if the operation was successful.
64 try_count: A counter indicating the try count of this task.
65 kind: The kind of the object for the unique identifier.
66
67 Returns:
68 A dictionary of the event added to the log.
69 """ 47 """
70 event = {
71 'id': (kind, _NextEventId()),
72 'name': name,
73 'task_name': task_name,
74 'start_time': start,
75 'try': try_count,
76 }
77
78 if self._parent:
79 event['parent'] = self._parent['id']
80
81 if success is not None or finish is not None:
82 self.FinishEvent(event, finish, success)
83
84 self._log.append(event)
85 return event
86
87 def AddSync(self, project, task_name, start, finish, success):
88 """Add a event to the log for a sync command.
89
90 Args:
91 project: Project being synced.
92 task_name: A sub-task that was performed for name.
93 One of (TASK_SYNC_NETWORK, TASK_SYNC_LOCAL)
94 start: Timestamp of when the operation started.
95 finish: Timestamp of when the operation finished.
96 success: Boolean indicating if the operation was successful.
97 48
98 Returns: 49 def __init__(self):
99 A dictionary of the event added to the log. 50 """Initializes the event log."""
100 """ 51 self._log = []
101 event = self.Add(project.relpath, task_name, start, finish, success) 52 self._parent = None
102 if event is not None: 53
103 event['project'] = project.name 54 def Add(
104 if project.revisionExpr: 55 self,
105 event['revision'] = project.revisionExpr 56 name,
106 if project.remote.url: 57 task_name,
107 event['project_url'] = project.remote.url 58 start,
108 if project.remote.fetchUrl: 59 finish=None,
109 event['remote_url'] = project.remote.fetchUrl 60 success=None,
110 try: 61 try_count=1,
111 event['git_hash'] = project.GetCommitRevisionId() 62 kind="RepoOp",
112 except Exception: 63 ):
113 pass 64 """Add an event to the log.
114 return event 65
115 66 Args:
116 def GetStatusString(self, success): 67 name: Name of the object being operated upon.
117 """Converst a boolean success to a status string. 68 task_name: A sub-task that was performed for name.
118 69 start: Timestamp of when the operation started.
119 Args: 70 finish: Timestamp of when the operation finished.
120 success: Boolean indicating if the operation was successful. 71 success: Boolean indicating if the operation was successful.
121 72 try_count: A counter indicating the try count of this task.
122 Returns: 73 kind: The kind of the object for the unique identifier.
123 status string. 74
124 """ 75 Returns:
125 return 'pass' if success else 'fail' 76 A dictionary of the event added to the log.
126 77 """
127 def FinishEvent(self, event, finish, success): 78 event = {
128 """Finishes an incomplete event. 79 "id": (kind, _NextEventId()),
129 80 "name": name,
130 Args: 81 "task_name": task_name,
131 event: An event that has been added to the log. 82 "start_time": start,
132 finish: Timestamp of when the operation finished. 83 "try": try_count,
133 success: Boolean indicating if the operation was successful. 84 }
134 85
135 Returns: 86 if self._parent:
136 A dictionary of the event added to the log. 87 event["parent"] = self._parent["id"]
137 """ 88
138 event['status'] = self.GetStatusString(success) 89 if success is not None or finish is not None:
139 event['finish_time'] = finish 90 self.FinishEvent(event, finish, success)
140 return event 91
141 92 self._log.append(event)
142 def SetParent(self, event): 93 return event
143 """Set a parent event for all new entities. 94
144 95 def AddSync(self, project, task_name, start, finish, success):
145 Args: 96 """Add a event to the log for a sync command.
146 event: The event to use as a parent. 97
147 """ 98 Args:
148 self._parent = event 99 project: Project being synced.
149 100 task_name: A sub-task that was performed for name.
150 def Write(self, filename): 101 One of (TASK_SYNC_NETWORK, TASK_SYNC_LOCAL)
151 """Writes the log out to a file. 102 start: Timestamp of when the operation started.
152 103 finish: Timestamp of when the operation finished.
153 Args: 104 success: Boolean indicating if the operation was successful.
154 filename: The file to write the log to. 105
155 """ 106 Returns:
156 with open(filename, 'w+') as f: 107 A dictionary of the event added to the log.
157 for e in self._log: 108 """
158 json.dump(e, f, sort_keys=True) 109 event = self.Add(project.relpath, task_name, start, finish, success)
159 f.write('\n') 110 if event is not None:
111 event["project"] = project.name
112 if project.revisionExpr:
113 event["revision"] = project.revisionExpr
114 if project.remote.url:
115 event["project_url"] = project.remote.url
116 if project.remote.fetchUrl:
117 event["remote_url"] = project.remote.fetchUrl
118 try:
119 event["git_hash"] = project.GetCommitRevisionId()
120 except Exception:
121 pass
122 return event
123
124 def GetStatusString(self, success):
125 """Converst a boolean success to a status string.
126
127 Args:
128 success: Boolean indicating if the operation was successful.
129
130 Returns:
131 status string.
132 """
133 return "pass" if success else "fail"
134
135 def FinishEvent(self, event, finish, success):
136 """Finishes an incomplete event.
137
138 Args:
139 event: An event that has been added to the log.
140 finish: Timestamp of when the operation finished.
141 success: Boolean indicating if the operation was successful.
142
143 Returns:
144 A dictionary of the event added to the log.
145 """
146 event["status"] = self.GetStatusString(success)
147 event["finish_time"] = finish
148 return event
149
150 def SetParent(self, event):
151 """Set a parent event for all new entities.
152
153 Args:
154 event: The event to use as a parent.
155 """
156 self._parent = event
157
158 def Write(self, filename):
159 """Writes the log out to a file.
160
161 Args:
162 filename: The file to write the log to.
163 """
164 with open(filename, "w+") as f:
165 for e in self._log:
166 json.dump(e, f, sort_keys=True)
167 f.write("\n")
160 168
161 169
162# An integer id that is unique across this invocation of the program. 170# An integer id that is unique across this invocation of the program.
163_EVENT_ID = multiprocessing.Value('i', 1) 171_EVENT_ID = multiprocessing.Value("i", 1)
164 172
165 173
166def _NextEventId(): 174def _NextEventId():
167 """Helper function for grabbing the next unique id. 175 """Helper function for grabbing the next unique id.
168 176
169 Returns: 177 Returns:
170 A unique, to this invocation of the program, integer id. 178 A unique, to this invocation of the program, integer id.
171 """ 179 """
172 with _EVENT_ID.get_lock(): 180 with _EVENT_ID.get_lock():
173 val = _EVENT_ID.value 181 val = _EVENT_ID.value
174 _EVENT_ID.value += 1 182 _EVENT_ID.value += 1
175 return val 183 return val
diff --git a/fetch.py b/fetch.py
index c954a9c2..31f8152f 100644
--- a/fetch.py
+++ b/fetch.py
@@ -21,25 +21,29 @@ from urllib.request import urlopen
21 21
22 22
23def fetch_file(url, verbose=False): 23def fetch_file(url, verbose=False):
24 """Fetch a file from the specified source using the appropriate protocol. 24 """Fetch a file from the specified source using the appropriate protocol.
25 25
26 Returns: 26 Returns:
27 The contents of the file as bytes. 27 The contents of the file as bytes.
28 """ 28 """
29 scheme = urlparse(url).scheme 29 scheme = urlparse(url).scheme
30 if scheme == 'gs': 30 if scheme == "gs":
31 cmd = ['gsutil', 'cat', url] 31 cmd = ["gsutil", "cat", url]
32 try: 32 try:
33 result = subprocess.run( 33 result = subprocess.run(
34 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, 34 cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=True
35 check=True) 35 )
36 if result.stderr and verbose: 36 if result.stderr and verbose:
37 print('warning: non-fatal error running "gsutil": %s' % result.stderr, 37 print(
38 file=sys.stderr) 38 'warning: non-fatal error running "gsutil": %s'
39 return result.stdout 39 % result.stderr,
40 except subprocess.CalledProcessError as e: 40 file=sys.stderr,
41 print('fatal: error running "gsutil": %s' % e.stderr, 41 )
42 file=sys.stderr) 42 return result.stdout
43 sys.exit(1) 43 except subprocess.CalledProcessError as e:
44 with urlopen(url) as f: 44 print(
45 return f.read() 45 'fatal: error running "gsutil": %s' % e.stderr, file=sys.stderr
46 )
47 sys.exit(1)
48 with urlopen(url) as f:
49 return f.read()
diff --git a/git_command.py b/git_command.py
index d4d4bed4..c7245ade 100644
--- a/git_command.py
+++ b/git_command.py
@@ -24,7 +24,7 @@ import platform_utils
24from repo_trace import REPO_TRACE, IsTrace, Trace 24from repo_trace import REPO_TRACE, IsTrace, Trace
25from wrapper import Wrapper 25from wrapper import Wrapper
26 26
27GIT = 'git' 27GIT = "git"
28# NB: These do not need to be kept in sync with the repo launcher script. 28# NB: These do not need to be kept in sync with the repo launcher script.
29# These may be much newer as it allows the repo launcher to roll between 29# These may be much newer as it allows the repo launcher to roll between
30# different repo releases while source versions might require a newer git. 30# different repo releases while source versions might require a newer git.
@@ -36,126 +36,138 @@ GIT = 'git'
36# git-1.7 is in (EOL) Ubuntu Precise. git-1.9 is in Ubuntu Trusty. 36# git-1.7 is in (EOL) Ubuntu Precise. git-1.9 is in Ubuntu Trusty.
37MIN_GIT_VERSION_SOFT = (1, 9, 1) 37MIN_GIT_VERSION_SOFT = (1, 9, 1)
38MIN_GIT_VERSION_HARD = (1, 7, 2) 38MIN_GIT_VERSION_HARD = (1, 7, 2)
39GIT_DIR = 'GIT_DIR' 39GIT_DIR = "GIT_DIR"
40 40
41LAST_GITDIR = None 41LAST_GITDIR = None
42LAST_CWD = None 42LAST_CWD = None
43 43
44 44
45class _GitCall(object): 45class _GitCall(object):
46 @functools.lru_cache(maxsize=None) 46 @functools.lru_cache(maxsize=None)
47 def version_tuple(self): 47 def version_tuple(self):
48 ret = Wrapper().ParseGitVersion() 48 ret = Wrapper().ParseGitVersion()
49 if ret is None: 49 if ret is None:
50 print('fatal: unable to detect git version', file=sys.stderr) 50 print("fatal: unable to detect git version", file=sys.stderr)
51 sys.exit(1) 51 sys.exit(1)
52 return ret 52 return ret
53 53
54 def __getattr__(self, name): 54 def __getattr__(self, name):
55 name = name.replace('_', '-') 55 name = name.replace("_", "-")
56 56
57 def fun(*cmdv): 57 def fun(*cmdv):
58 command = [name] 58 command = [name]
59 command.extend(cmdv) 59 command.extend(cmdv)
60 return GitCommand(None, command).Wait() == 0 60 return GitCommand(None, command).Wait() == 0
61 return fun 61
62 return fun
62 63
63 64
64git = _GitCall() 65git = _GitCall()
65 66
66 67
67def RepoSourceVersion(): 68def RepoSourceVersion():
68 """Return the version of the repo.git tree.""" 69 """Return the version of the repo.git tree."""
69 ver = getattr(RepoSourceVersion, 'version', None) 70 ver = getattr(RepoSourceVersion, "version", None)
70 71
71 # We avoid GitCommand so we don't run into circular deps -- GitCommand needs 72 # We avoid GitCommand so we don't run into circular deps -- GitCommand needs
72 # to initialize version info we provide. 73 # to initialize version info we provide.
73 if ver is None: 74 if ver is None:
74 env = GitCommand._GetBasicEnv() 75 env = GitCommand._GetBasicEnv()
76
77 proj = os.path.dirname(os.path.abspath(__file__))
78 env[GIT_DIR] = os.path.join(proj, ".git")
79 result = subprocess.run(
80 [GIT, "describe", HEAD],
81 stdout=subprocess.PIPE,
82 stderr=subprocess.DEVNULL,
83 encoding="utf-8",
84 env=env,
85 check=False,
86 )
87 if result.returncode == 0:
88 ver = result.stdout.strip()
89 if ver.startswith("v"):
90 ver = ver[1:]
91 else:
92 ver = "unknown"
93 setattr(RepoSourceVersion, "version", ver)
94
95 return ver
75 96
76 proj = os.path.dirname(os.path.abspath(__file__))
77 env[GIT_DIR] = os.path.join(proj, '.git')
78 result = subprocess.run([GIT, 'describe', HEAD], stdout=subprocess.PIPE,
79 stderr=subprocess.DEVNULL, encoding='utf-8',
80 env=env, check=False)
81 if result.returncode == 0:
82 ver = result.stdout.strip()
83 if ver.startswith('v'):
84 ver = ver[1:]
85 else:
86 ver = 'unknown'
87 setattr(RepoSourceVersion, 'version', ver)
88 97
89 return ver 98class UserAgent(object):
99 """Mange User-Agent settings when talking to external services
90 100
101 We follow the style as documented here:
102 https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
103 """
91 104
92class UserAgent(object): 105 _os = None
93 """Mange User-Agent settings when talking to external services 106 _repo_ua = None
94 107 _git_ua = None
95 We follow the style as documented here: 108
96 https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent 109 @property
97 """ 110 def os(self):
98 111 """The operating system name."""
99 _os = None 112 if self._os is None:
100 _repo_ua = None 113 os_name = sys.platform
101 _git_ua = None 114 if os_name.lower().startswith("linux"):
102 115 os_name = "Linux"
103 @property 116 elif os_name == "win32":
104 def os(self): 117 os_name = "Win32"
105 """The operating system name.""" 118 elif os_name == "cygwin":
106 if self._os is None: 119 os_name = "Cygwin"
107 os_name = sys.platform 120 elif os_name == "darwin":
108 if os_name.lower().startswith('linux'): 121 os_name = "Darwin"
109 os_name = 'Linux' 122 self._os = os_name
110 elif os_name == 'win32': 123
111 os_name = 'Win32' 124 return self._os
112 elif os_name == 'cygwin': 125
113 os_name = 'Cygwin' 126 @property
114 elif os_name == 'darwin': 127 def repo(self):
115 os_name = 'Darwin' 128 """The UA when connecting directly from repo."""
116 self._os = os_name 129 if self._repo_ua is None:
117 130 py_version = sys.version_info
118 return self._os 131 self._repo_ua = "git-repo/%s (%s) git/%s Python/%d.%d.%d" % (
119 132 RepoSourceVersion(),
120 @property 133 self.os,
121 def repo(self): 134 git.version_tuple().full,
122 """The UA when connecting directly from repo.""" 135 py_version.major,
123 if self._repo_ua is None: 136 py_version.minor,
124 py_version = sys.version_info 137 py_version.micro,
125 self._repo_ua = 'git-repo/%s (%s) git/%s Python/%d.%d.%d' % ( 138 )
126 RepoSourceVersion(), 139
127 self.os, 140 return self._repo_ua
128 git.version_tuple().full, 141
129 py_version.major, py_version.minor, py_version.micro) 142 @property
130 143 def git(self):
131 return self._repo_ua 144 """The UA when running git."""
132 145 if self._git_ua is None:
133 @property 146 self._git_ua = "git/%s (%s) git-repo/%s" % (
134 def git(self): 147 git.version_tuple().full,
135 """The UA when running git.""" 148 self.os,
136 if self._git_ua is None: 149 RepoSourceVersion(),
137 self._git_ua = 'git/%s (%s) git-repo/%s' % ( 150 )
138 git.version_tuple().full, 151
139 self.os, 152 return self._git_ua
140 RepoSourceVersion())
141
142 return self._git_ua
143 153
144 154
145user_agent = UserAgent() 155user_agent = UserAgent()
146 156
147 157
148def git_require(min_version, fail=False, msg=''): 158def git_require(min_version, fail=False, msg=""):
149 git_version = git.version_tuple() 159 git_version = git.version_tuple()
150 if min_version <= git_version: 160 if min_version <= git_version:
151 return True 161 return True
152 if fail: 162 if fail:
153 need = '.'.join(map(str, min_version)) 163 need = ".".join(map(str, min_version))
154 if msg: 164 if msg:
155 msg = ' for ' + msg 165 msg = " for " + msg
156 print('fatal: git %s or later required%s' % (need, msg), file=sys.stderr) 166 print(
157 sys.exit(1) 167 "fatal: git %s or later required%s" % (need, msg), file=sys.stderr
158 return False 168 )
169 sys.exit(1)
170 return False
159 171
160 172
161def _build_env( 173def _build_env(
@@ -164,175 +176,194 @@ def _build_env(
164 disable_editor: Optional[bool] = False, 176 disable_editor: Optional[bool] = False,
165 ssh_proxy: Optional[Any] = None, 177 ssh_proxy: Optional[Any] = None,
166 gitdir: Optional[str] = None, 178 gitdir: Optional[str] = None,
167 objdir: Optional[str] = None 179 objdir: Optional[str] = None,
168): 180):
169 """Constucts an env dict for command execution.""" 181 """Constucts an env dict for command execution."""
170
171 assert _kwargs_only == (), '_build_env only accepts keyword arguments.'
172
173 env = GitCommand._GetBasicEnv()
174
175 if disable_editor:
176 env['GIT_EDITOR'] = ':'
177 if ssh_proxy:
178 env['REPO_SSH_SOCK'] = ssh_proxy.sock()
179 env['GIT_SSH'] = ssh_proxy.proxy
180 env['GIT_SSH_VARIANT'] = 'ssh'
181 if 'http_proxy' in env and 'darwin' == sys.platform:
182 s = "'http.proxy=%s'" % (env['http_proxy'],)
183 p = env.get('GIT_CONFIG_PARAMETERS')
184 if p is not None:
185 s = p + ' ' + s
186 env['GIT_CONFIG_PARAMETERS'] = s
187 if 'GIT_ALLOW_PROTOCOL' not in env:
188 env['GIT_ALLOW_PROTOCOL'] = (
189 'file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc')
190 env['GIT_HTTP_USER_AGENT'] = user_agent.git
191
192 if objdir:
193 # Set to the place we want to save the objects.
194 env['GIT_OBJECT_DIRECTORY'] = objdir
195
196 alt_objects = os.path.join(gitdir, 'objects') if gitdir else None
197 if alt_objects and os.path.realpath(alt_objects) != os.path.realpath(objdir):
198 # Allow git to search the original place in case of local or unique refs
199 # that git will attempt to resolve even if we aren't fetching them.
200 env['GIT_ALTERNATE_OBJECT_DIRECTORIES'] = alt_objects
201 if bare and gitdir is not None:
202 env[GIT_DIR] = gitdir
203
204 return env
205 182
183 assert _kwargs_only == (), "_build_env only accepts keyword arguments."
184
185 env = GitCommand._GetBasicEnv()
186
187 if disable_editor:
188 env["GIT_EDITOR"] = ":"
189 if ssh_proxy:
190 env["REPO_SSH_SOCK"] = ssh_proxy.sock()
191 env["GIT_SSH"] = ssh_proxy.proxy
192 env["GIT_SSH_VARIANT"] = "ssh"
193 if "http_proxy" in env and "darwin" == sys.platform:
194 s = "'http.proxy=%s'" % (env["http_proxy"],)
195 p = env.get("GIT_CONFIG_PARAMETERS")
196 if p is not None:
197 s = p + " " + s
198 env["GIT_CONFIG_PARAMETERS"] = s
199 if "GIT_ALLOW_PROTOCOL" not in env:
200 env[
201 "GIT_ALLOW_PROTOCOL"
202 ] = "file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc"
203 env["GIT_HTTP_USER_AGENT"] = user_agent.git
204
205 if objdir:
206 # Set to the place we want to save the objects.
207 env["GIT_OBJECT_DIRECTORY"] = objdir
208
209 alt_objects = os.path.join(gitdir, "objects") if gitdir else None
210 if alt_objects and os.path.realpath(alt_objects) != os.path.realpath(
211 objdir
212 ):
213 # Allow git to search the original place in case of local or unique
214 # refs that git will attempt to resolve even if we aren't fetching
215 # them.
216 env["GIT_ALTERNATE_OBJECT_DIRECTORIES"] = alt_objects
217 if bare and gitdir is not None:
218 env[GIT_DIR] = gitdir
206 219
207class GitCommand(object):
208 """Wrapper around a single git invocation."""
209
210 def __init__(self,
211 project,
212 cmdv,
213 bare=False,
214 input=None,
215 capture_stdout=False,
216 capture_stderr=False,
217 merge_output=False,
218 disable_editor=False,
219 ssh_proxy=None,
220 cwd=None,
221 gitdir=None,
222 objdir=None):
223
224 if project:
225 if not cwd:
226 cwd = project.worktree
227 if not gitdir:
228 gitdir = project.gitdir
229
230 # Git on Windows wants its paths only using / for reliability.
231 if platform_utils.isWindows():
232 if objdir:
233 objdir = objdir.replace('\\', '/')
234 if gitdir:
235 gitdir = gitdir.replace('\\', '/')
236
237 env = _build_env(
238 disable_editor=disable_editor,
239 ssh_proxy=ssh_proxy,
240 objdir=objdir,
241 gitdir=gitdir,
242 bare=bare,
243 )
244
245 command = [GIT]
246 if bare:
247 cwd = None
248 command.append(cmdv[0])
249 # Need to use the --progress flag for fetch/clone so output will be
250 # displayed as by default git only does progress output if stderr is a TTY.
251 if sys.stderr.isatty() and cmdv[0] in ('fetch', 'clone'):
252 if '--progress' not in cmdv and '--quiet' not in cmdv:
253 command.append('--progress')
254 command.extend(cmdv[1:])
255
256 stdin = subprocess.PIPE if input else None
257 stdout = subprocess.PIPE if capture_stdout else None
258 stderr = (subprocess.STDOUT if merge_output else
259 (subprocess.PIPE if capture_stderr else None))
260
261 dbg = ''
262 if IsTrace():
263 global LAST_CWD
264 global LAST_GITDIR
265
266 if cwd and LAST_CWD != cwd:
267 if LAST_GITDIR or LAST_CWD:
268 dbg += '\n'
269 dbg += ': cd %s\n' % cwd
270 LAST_CWD = cwd
271
272 if GIT_DIR in env and LAST_GITDIR != env[GIT_DIR]:
273 if LAST_GITDIR or LAST_CWD:
274 dbg += '\n'
275 dbg += ': export GIT_DIR=%s\n' % env[GIT_DIR]
276 LAST_GITDIR = env[GIT_DIR]
277
278 if 'GIT_OBJECT_DIRECTORY' in env:
279 dbg += ': export GIT_OBJECT_DIRECTORY=%s\n' % env['GIT_OBJECT_DIRECTORY']
280 if 'GIT_ALTERNATE_OBJECT_DIRECTORIES' in env:
281 dbg += ': export GIT_ALTERNATE_OBJECT_DIRECTORIES=%s\n' % (
282 env['GIT_ALTERNATE_OBJECT_DIRECTORIES'])
283
284 dbg += ': '
285 dbg += ' '.join(command)
286 if stdin == subprocess.PIPE:
287 dbg += ' 0<|'
288 if stdout == subprocess.PIPE:
289 dbg += ' 1>|'
290 if stderr == subprocess.PIPE:
291 dbg += ' 2>|'
292 elif stderr == subprocess.STDOUT:
293 dbg += ' 2>&1'
294
295 with Trace('git command %s %s with debug: %s', LAST_GITDIR, command, dbg):
296 try:
297 p = subprocess.Popen(command,
298 cwd=cwd,
299 env=env,
300 encoding='utf-8',
301 errors='backslashreplace',
302 stdin=stdin,
303 stdout=stdout,
304 stderr=stderr)
305 except Exception as e:
306 raise GitError('%s: %s' % (command[1], e))
307
308 if ssh_proxy:
309 ssh_proxy.add_client(p)
310
311 self.process = p
312
313 try:
314 self.stdout, self.stderr = p.communicate(input=input)
315 finally:
316 if ssh_proxy:
317 ssh_proxy.remove_client(p)
318 self.rc = p.wait()
319
320 @staticmethod
321 def _GetBasicEnv():
322 """Return a basic env for running git under.
323
324 This is guaranteed to be side-effect free.
325 """
326 env = os.environ.copy()
327 for key in (REPO_TRACE,
328 GIT_DIR,
329 'GIT_ALTERNATE_OBJECT_DIRECTORIES',
330 'GIT_OBJECT_DIRECTORY',
331 'GIT_WORK_TREE',
332 'GIT_GRAFT_FILE',
333 'GIT_INDEX_FILE'):
334 env.pop(key, None)
335 return env 220 return env
336 221
337 def Wait(self): 222
338 return self.rc 223class GitCommand(object):
224 """Wrapper around a single git invocation."""
225
226 def __init__(
227 self,
228 project,
229 cmdv,
230 bare=False,
231 input=None,
232 capture_stdout=False,
233 capture_stderr=False,
234 merge_output=False,
235 disable_editor=False,
236 ssh_proxy=None,
237 cwd=None,
238 gitdir=None,
239 objdir=None,
240 ):
241 if project:
242 if not cwd:
243 cwd = project.worktree
244 if not gitdir:
245 gitdir = project.gitdir
246
247 # Git on Windows wants its paths only using / for reliability.
248 if platform_utils.isWindows():
249 if objdir:
250 objdir = objdir.replace("\\", "/")
251 if gitdir:
252 gitdir = gitdir.replace("\\", "/")
253
254 env = _build_env(
255 disable_editor=disable_editor,
256 ssh_proxy=ssh_proxy,
257 objdir=objdir,
258 gitdir=gitdir,
259 bare=bare,
260 )
261
262 command = [GIT]
263 if bare:
264 cwd = None
265 command.append(cmdv[0])
266 # Need to use the --progress flag for fetch/clone so output will be
267 # displayed as by default git only does progress output if stderr is a
268 # TTY.
269 if sys.stderr.isatty() and cmdv[0] in ("fetch", "clone"):
270 if "--progress" not in cmdv and "--quiet" not in cmdv:
271 command.append("--progress")
272 command.extend(cmdv[1:])
273
274 stdin = subprocess.PIPE if input else None
275 stdout = subprocess.PIPE if capture_stdout else None
276 stderr = (
277 subprocess.STDOUT
278 if merge_output
279 else (subprocess.PIPE if capture_stderr else None)
280 )
281
282 dbg = ""
283 if IsTrace():
284 global LAST_CWD
285 global LAST_GITDIR
286
287 if cwd and LAST_CWD != cwd:
288 if LAST_GITDIR or LAST_CWD:
289 dbg += "\n"
290 dbg += ": cd %s\n" % cwd
291 LAST_CWD = cwd
292
293 if GIT_DIR in env and LAST_GITDIR != env[GIT_DIR]:
294 if LAST_GITDIR or LAST_CWD:
295 dbg += "\n"
296 dbg += ": export GIT_DIR=%s\n" % env[GIT_DIR]
297 LAST_GITDIR = env[GIT_DIR]
298
299 if "GIT_OBJECT_DIRECTORY" in env:
300 dbg += (
301 ": export GIT_OBJECT_DIRECTORY=%s\n"
302 % env["GIT_OBJECT_DIRECTORY"]
303 )
304 if "GIT_ALTERNATE_OBJECT_DIRECTORIES" in env:
305 dbg += ": export GIT_ALTERNATE_OBJECT_DIRECTORIES=%s\n" % (
306 env["GIT_ALTERNATE_OBJECT_DIRECTORIES"]
307 )
308
309 dbg += ": "
310 dbg += " ".join(command)
311 if stdin == subprocess.PIPE:
312 dbg += " 0<|"
313 if stdout == subprocess.PIPE:
314 dbg += " 1>|"
315 if stderr == subprocess.PIPE:
316 dbg += " 2>|"
317 elif stderr == subprocess.STDOUT:
318 dbg += " 2>&1"
319
320 with Trace(
321 "git command %s %s with debug: %s", LAST_GITDIR, command, dbg
322 ):
323 try:
324 p = subprocess.Popen(
325 command,
326 cwd=cwd,
327 env=env,
328 encoding="utf-8",
329 errors="backslashreplace",
330 stdin=stdin,
331 stdout=stdout,
332 stderr=stderr,
333 )
334 except Exception as e:
335 raise GitError("%s: %s" % (command[1], e))
336
337 if ssh_proxy:
338 ssh_proxy.add_client(p)
339
340 self.process = p
341
342 try:
343 self.stdout, self.stderr = p.communicate(input=input)
344 finally:
345 if ssh_proxy:
346 ssh_proxy.remove_client(p)
347 self.rc = p.wait()
348
349 @staticmethod
350 def _GetBasicEnv():
351 """Return a basic env for running git under.
352
353 This is guaranteed to be side-effect free.
354 """
355 env = os.environ.copy()
356 for key in (
357 REPO_TRACE,
358 GIT_DIR,
359 "GIT_ALTERNATE_OBJECT_DIRECTORIES",
360 "GIT_OBJECT_DIRECTORY",
361 "GIT_WORK_TREE",
362 "GIT_GRAFT_FILE",
363 "GIT_INDEX_FILE",
364 ):
365 env.pop(key, None)
366 return env
367
368 def Wait(self):
369 return self.rc
diff --git a/git_config.py b/git_config.py
index 9ad979ad..05b3c1ee 100644
--- a/git_config.py
+++ b/git_config.py
@@ -34,23 +34,23 @@ from git_refs import R_CHANGES, R_HEADS, R_TAGS
34 34
35# Prefix that is prepended to all the keys of SyncAnalysisState's data 35# Prefix that is prepended to all the keys of SyncAnalysisState's data
36# that is saved in the config. 36# that is saved in the config.
37SYNC_STATE_PREFIX = 'repo.syncstate.' 37SYNC_STATE_PREFIX = "repo.syncstate."
38 38
39ID_RE = re.compile(r'^[0-9a-f]{40}$') 39ID_RE = re.compile(r"^[0-9a-f]{40}$")
40 40
41REVIEW_CACHE = dict() 41REVIEW_CACHE = dict()
42 42
43 43
44def IsChange(rev): 44def IsChange(rev):
45 return rev.startswith(R_CHANGES) 45 return rev.startswith(R_CHANGES)
46 46
47 47
48def IsId(rev): 48def IsId(rev):
49 return ID_RE.match(rev) 49 return ID_RE.match(rev)
50 50
51 51
52def IsTag(rev): 52def IsTag(rev):
53 return rev.startswith(R_TAGS) 53 return rev.startswith(R_TAGS)
54 54
55 55
56def IsImmutable(rev): 56def IsImmutable(rev):
@@ -58,765 +58,785 @@ def IsImmutable(rev):
58 58
59 59
60def _key(name): 60def _key(name):
61 parts = name.split('.') 61 parts = name.split(".")
62 if len(parts) < 2: 62 if len(parts) < 2:
63 return name.lower() 63 return name.lower()
64 parts[0] = parts[0].lower() 64 parts[0] = parts[0].lower()
65 parts[-1] = parts[-1].lower() 65 parts[-1] = parts[-1].lower()
66 return '.'.join(parts) 66 return ".".join(parts)
67 67
68 68
69class GitConfig(object): 69class GitConfig(object):
70 _ForUser = None 70 _ForUser = None
71 71
72 _ForSystem = None 72 _ForSystem = None
73 _SYSTEM_CONFIG = '/etc/gitconfig' 73 _SYSTEM_CONFIG = "/etc/gitconfig"
74 74
75 @classmethod 75 @classmethod
76 def ForSystem(cls): 76 def ForSystem(cls):
77 if cls._ForSystem is None: 77 if cls._ForSystem is None:
78 cls._ForSystem = cls(configfile=cls._SYSTEM_CONFIG) 78 cls._ForSystem = cls(configfile=cls._SYSTEM_CONFIG)
79 return cls._ForSystem 79 return cls._ForSystem
80 80
81 @classmethod 81 @classmethod
82 def ForUser(cls): 82 def ForUser(cls):
83 if cls._ForUser is None: 83 if cls._ForUser is None:
84 cls._ForUser = cls(configfile=cls._getUserConfig()) 84 cls._ForUser = cls(configfile=cls._getUserConfig())
85 return cls._ForUser 85 return cls._ForUser
86 86
87 @staticmethod 87 @staticmethod
88 def _getUserConfig(): 88 def _getUserConfig():
89 return os.path.expanduser('~/.gitconfig') 89 return os.path.expanduser("~/.gitconfig")
90 90
91 @classmethod 91 @classmethod
92 def ForRepository(cls, gitdir, defaults=None): 92 def ForRepository(cls, gitdir, defaults=None):
93 return cls(configfile=os.path.join(gitdir, 'config'), 93 return cls(configfile=os.path.join(gitdir, "config"), defaults=defaults)
94 defaults=defaults) 94
95 95 def __init__(self, configfile, defaults=None, jsonFile=None):
96 def __init__(self, configfile, defaults=None, jsonFile=None): 96 self.file = configfile
97 self.file = configfile 97 self.defaults = defaults
98 self.defaults = defaults 98 self._cache_dict = None
99 self._cache_dict = None 99 self._section_dict = None
100 self._section_dict = None 100 self._remotes = {}
101 self._remotes = {} 101 self._branches = {}
102 self._branches = {} 102
103 103 self._json = jsonFile
104 self._json = jsonFile 104 if self._json is None:
105 if self._json is None: 105 self._json = os.path.join(
106 self._json = os.path.join( 106 os.path.dirname(self.file),
107 os.path.dirname(self.file), 107 ".repo_" + os.path.basename(self.file) + ".json",
108 '.repo_' + os.path.basename(self.file) + '.json') 108 )
109 109
110 def ClearCache(self): 110 def ClearCache(self):
111 """Clear the in-memory cache of config.""" 111 """Clear the in-memory cache of config."""
112 self._cache_dict = None 112 self._cache_dict = None
113 113
114 def Has(self, name, include_defaults=True): 114 def Has(self, name, include_defaults=True):
115 """Return true if this configuration file has the key. 115 """Return true if this configuration file has the key."""
116 """ 116 if _key(name) in self._cache:
117 if _key(name) in self._cache: 117 return True
118 return True 118 if include_defaults and self.defaults:
119 if include_defaults and self.defaults: 119 return self.defaults.Has(name, include_defaults=True)
120 return self.defaults.Has(name, include_defaults=True) 120 return False
121 return False 121
122 122 def GetInt(self, name: str) -> Union[int, None]:
123 def GetInt(self, name: str) -> Union[int, None]: 123 """Returns an integer from the configuration file.
124 """Returns an integer from the configuration file. 124
125 125 This follows the git config syntax.
126 This follows the git config syntax. 126
127 127 Args:
128 Args: 128 name: The key to lookup.
129 name: The key to lookup. 129
130 130 Returns:
131 Returns: 131 None if the value was not defined, or is not an int.
132 None if the value was not defined, or is not an int. 132 Otherwise, the number itself.
133 Otherwise, the number itself. 133 """
134 """ 134 v = self.GetString(name)
135 v = self.GetString(name) 135 if v is None:
136 if v is None: 136 return None
137 return None 137 v = v.strip()
138 v = v.strip() 138
139 139 mult = 1
140 mult = 1 140 if v.endswith("k"):
141 if v.endswith('k'): 141 v = v[:-1]
142 v = v[:-1] 142 mult = 1024
143 mult = 1024 143 elif v.endswith("m"):
144 elif v.endswith('m'): 144 v = v[:-1]
145 v = v[:-1] 145 mult = 1024 * 1024
146 mult = 1024 * 1024 146 elif v.endswith("g"):
147 elif v.endswith('g'): 147 v = v[:-1]
148 v = v[:-1] 148 mult = 1024 * 1024 * 1024
149 mult = 1024 * 1024 * 1024 149
150 150 base = 10
151 base = 10 151 if v.startswith("0x"):
152 if v.startswith('0x'): 152 base = 16
153 base = 16
154
155 try:
156 return int(v, base=base) * mult
157 except ValueError:
158 print(
159 f"warning: expected {name} to represent an integer, got {v} instead",
160 file=sys.stderr)
161 return None
162
163 def DumpConfigDict(self):
164 """Returns the current configuration dict.
165
166 Configuration data is information only (e.g. logging) and
167 should not be considered a stable data-source.
168
169 Returns:
170 dict of {<key>, <value>} for git configuration cache.
171 <value> are strings converted by GetString.
172 """
173 config_dict = {}
174 for key in self._cache:
175 config_dict[key] = self.GetString(key)
176 return config_dict
177
178 def GetBoolean(self, name: str) -> Union[str, None]:
179 """Returns a boolean from the configuration file.
180 None : The value was not defined, or is not a boolean.
181 True : The value was set to true or yes.
182 False: The value was set to false or no.
183 """
184 v = self.GetString(name)
185 if v is None:
186 return None
187 v = v.lower()
188 if v in ('true', 'yes'):
189 return True
190 if v in ('false', 'no'):
191 return False
192 print(f"warning: expected {name} to represent a boolean, got {v} instead",
193 file=sys.stderr)
194 return None
195 153
196 def SetBoolean(self, name, value): 154 try:
197 """Set the truthy value for a key.""" 155 return int(v, base=base) * mult
198 if value is not None: 156 except ValueError:
199 value = 'true' if value else 'false' 157 print(
200 self.SetString(name, value) 158 f"warning: expected {name} to represent an integer, got {v} "
159 "instead",
160 file=sys.stderr,
161 )
162 return None
163
164 def DumpConfigDict(self):
165 """Returns the current configuration dict.
166
167 Configuration data is information only (e.g. logging) and
168 should not be considered a stable data-source.
169
170 Returns:
171 dict of {<key>, <value>} for git configuration cache.
172 <value> are strings converted by GetString.
173 """
174 config_dict = {}
175 for key in self._cache:
176 config_dict[key] = self.GetString(key)
177 return config_dict
178
179 def GetBoolean(self, name: str) -> Union[str, None]:
180 """Returns a boolean from the configuration file.
181
182 Returns:
183 None: The value was not defined, or is not a boolean.
184 True: The value was set to true or yes.
185 False: The value was set to false or no.
186 """
187 v = self.GetString(name)
188 if v is None:
189 return None
190 v = v.lower()
191 if v in ("true", "yes"):
192 return True
193 if v in ("false", "no"):
194 return False
195 print(
196 f"warning: expected {name} to represent a boolean, got {v} instead",
197 file=sys.stderr,
198 )
199 return None
201 200
202 def GetString(self, name: str, all_keys: bool = False) -> Union[str, None]: 201 def SetBoolean(self, name, value):
203 """Get the first value for a key, or None if it is not defined. 202 """Set the truthy value for a key."""
203 if value is not None:
204 value = "true" if value else "false"
205 self.SetString(name, value)
204 206
205 This configuration file is used first, if the key is not 207 def GetString(self, name: str, all_keys: bool = False) -> Union[str, None]:
206 defined or all_keys = True then the defaults are also searched. 208 """Get the first value for a key, or None if it is not defined.
207 """
208 try:
209 v = self._cache[_key(name)]
210 except KeyError:
211 if self.defaults:
212 return self.defaults.GetString(name, all_keys=all_keys)
213 v = []
214
215 if not all_keys:
216 if v:
217 return v[0]
218 return None
219
220 r = []
221 r.extend(v)
222 if self.defaults:
223 r.extend(self.defaults.GetString(name, all_keys=True))
224 return r
225
226 def SetString(self, name, value):
227 """Set the value(s) for a key.
228 Only this configuration file is modified.
229
230 The supplied value should be either a string, or a list of strings (to
231 store multiple values), or None (to delete the key).
232 """
233 key = _key(name)
234 209
235 try: 210 This configuration file is used first, if the key is not
236 old = self._cache[key] 211 defined or all_keys = True then the defaults are also searched.
237 except KeyError: 212 """
238 old = [] 213 try:
214 v = self._cache[_key(name)]
215 except KeyError:
216 if self.defaults:
217 return self.defaults.GetString(name, all_keys=all_keys)
218 v = []
219
220 if not all_keys:
221 if v:
222 return v[0]
223 return None
224
225 r = []
226 r.extend(v)
227 if self.defaults:
228 r.extend(self.defaults.GetString(name, all_keys=True))
229 return r
230
231 def SetString(self, name, value):
232 """Set the value(s) for a key.
233 Only this configuration file is modified.
234
235 The supplied value should be either a string, or a list of strings (to
236 store multiple values), or None (to delete the key).
237 """
238 key = _key(name)
239 239
240 if value is None: 240 try:
241 if old: 241 old = self._cache[key]
242 del self._cache[key] 242 except KeyError:
243 self._do('--unset-all', name) 243 old = []
244
245 if value is None:
246 if old:
247 del self._cache[key]
248 self._do("--unset-all", name)
249
250 elif isinstance(value, list):
251 if len(value) == 0:
252 self.SetString(name, None)
253
254 elif len(value) == 1:
255 self.SetString(name, value[0])
256
257 elif old != value:
258 self._cache[key] = list(value)
259 self._do("--replace-all", name, value[0])
260 for i in range(1, len(value)):
261 self._do("--add", name, value[i])
262
263 elif len(old) != 1 or old[0] != value:
264 self._cache[key] = [value]
265 self._do("--replace-all", name, value)
266
267 def GetRemote(self, name):
268 """Get the remote.$name.* configuration values as an object."""
269 try:
270 r = self._remotes[name]
271 except KeyError:
272 r = Remote(self, name)
273 self._remotes[r.name] = r
274 return r
275
276 def GetBranch(self, name):
277 """Get the branch.$name.* configuration values as an object."""
278 try:
279 b = self._branches[name]
280 except KeyError:
281 b = Branch(self, name)
282 self._branches[b.name] = b
283 return b
284
285 def GetSyncAnalysisStateData(self):
286 """Returns data to be logged for the analysis of sync performance."""
287 return {
288 k: v
289 for k, v in self.DumpConfigDict().items()
290 if k.startswith(SYNC_STATE_PREFIX)
291 }
292
293 def UpdateSyncAnalysisState(self, options, superproject_logging_data):
294 """Update Config's SYNC_STATE_PREFIX* data with the latest sync data.
295
296 Args:
297 options: Options passed to sync returned from optparse. See
298 _Options().
299 superproject_logging_data: A dictionary of superproject data that is
300 to be logged.
301
302 Returns:
303 SyncAnalysisState object.
304 """
305 return SyncAnalysisState(self, options, superproject_logging_data)
306
307 def GetSubSections(self, section):
308 """List all subsection names matching $section.*.*"""
309 return self._sections.get(section, set())
310
311 def HasSection(self, section, subsection=""):
312 """Does at least one key in section.subsection exist?"""
313 try:
314 return subsection in self._sections[section]
315 except KeyError:
316 return False
317
318 def UrlInsteadOf(self, url):
319 """Resolve any url.*.insteadof references."""
320 for new_url in self.GetSubSections("url"):
321 for old_url in self.GetString("url.%s.insteadof" % new_url, True):
322 if old_url is not None and url.startswith(old_url):
323 return new_url + url[len(old_url) :]
324 return url
325
326 @property
327 def _sections(self):
328 d = self._section_dict
329 if d is None:
330 d = {}
331 for name in self._cache.keys():
332 p = name.split(".")
333 if 2 == len(p):
334 section = p[0]
335 subsect = ""
336 else:
337 section = p[0]
338 subsect = ".".join(p[1:-1])
339 if section not in d:
340 d[section] = set()
341 d[section].add(subsect)
342 self._section_dict = d
343 return d
344
345 @property
346 def _cache(self):
347 if self._cache_dict is None:
348 self._cache_dict = self._Read()
349 return self._cache_dict
350
351 def _Read(self):
352 d = self._ReadJson()
353 if d is None:
354 d = self._ReadGit()
355 self._SaveJson(d)
356 return d
357
358 def _ReadJson(self):
359 try:
360 if os.path.getmtime(self._json) <= os.path.getmtime(self.file):
361 platform_utils.remove(self._json)
362 return None
363 except OSError:
364 return None
365 try:
366 with Trace(": parsing %s", self.file):
367 with open(self._json) as fd:
368 return json.load(fd)
369 except (IOError, ValueError):
370 platform_utils.remove(self._json, missing_ok=True)
371 return None
372
373 def _SaveJson(self, cache):
374 try:
375 with open(self._json, "w") as fd:
376 json.dump(cache, fd, indent=2)
377 except (IOError, TypeError):
378 platform_utils.remove(self._json, missing_ok=True)
379
380 def _ReadGit(self):
381 """
382 Read configuration data from git.
383
384 This internal method populates the GitConfig cache.
385
386 """
387 c = {}
388 if not os.path.exists(self.file):
389 return c
390
391 d = self._do("--null", "--list")
392 for line in d.rstrip("\0").split("\0"):
393 if "\n" in line:
394 key, val = line.split("\n", 1)
395 else:
396 key = line
397 val = None
398
399 if key in c:
400 c[key].append(val)
401 else:
402 c[key] = [val]
403
404 return c
405
406 def _do(self, *args):
407 if self.file == self._SYSTEM_CONFIG:
408 command = ["config", "--system", "--includes"]
409 else:
410 command = ["config", "--file", self.file, "--includes"]
411 command.extend(args)
244 412
245 elif isinstance(value, list): 413 p = GitCommand(None, command, capture_stdout=True, capture_stderr=True)
246 if len(value) == 0: 414 if p.Wait() == 0:
247 self.SetString(name, None) 415 return p.stdout
416 else:
417 raise GitError("git config %s: %s" % (str(args), p.stderr))
248 418
249 elif len(value) == 1:
250 self.SetString(name, value[0])
251 419
252 elif old != value: 420class RepoConfig(GitConfig):
253 self._cache[key] = list(value) 421 """User settings for repo itself."""
254 self._do('--replace-all', name, value[0])
255 for i in range(1, len(value)):
256 self._do('--add', name, value[i])
257 422
258 elif len(old) != 1 or old[0] != value: 423 @staticmethod
259 self._cache[key] = [value] 424 def _getUserConfig():
260 self._do('--replace-all', name, value) 425 repo_config_dir = os.getenv("REPO_CONFIG_DIR", os.path.expanduser("~"))
426 return os.path.join(repo_config_dir, ".repoconfig/config")
261 427
262 def GetRemote(self, name):
263 """Get the remote.$name.* configuration values as an object.
264 """
265 try:
266 r = self._remotes[name]
267 except KeyError:
268 r = Remote(self, name)
269 self._remotes[r.name] = r
270 return r
271
272 def GetBranch(self, name):
273 """Get the branch.$name.* configuration values as an object.
274 """
275 try:
276 b = self._branches[name]
277 except KeyError:
278 b = Branch(self, name)
279 self._branches[b.name] = b
280 return b
281
282 def GetSyncAnalysisStateData(self):
283 """Returns data to be logged for the analysis of sync performance."""
284 return {k: v for k, v in self.DumpConfigDict().items() if k.startswith(SYNC_STATE_PREFIX)}
285
286 def UpdateSyncAnalysisState(self, options, superproject_logging_data):
287 """Update Config's SYNC_STATE_PREFIX* data with the latest sync data.
288
289 Args:
290 options: Options passed to sync returned from optparse. See _Options().
291 superproject_logging_data: A dictionary of superproject data that is to be logged.
292
293 Returns:
294 SyncAnalysisState object.
295 """
296 return SyncAnalysisState(self, options, superproject_logging_data)
297 428
298 def GetSubSections(self, section): 429class RefSpec(object):
299 """List all subsection names matching $section.*.* 430 """A Git refspec line, split into its components:
300 """
301 return self._sections.get(section, set())
302 431
303 def HasSection(self, section, subsection=''): 432 forced: True if the line starts with '+'
304 """Does at least one key in section.subsection exist? 433 src: Left side of the line
434 dst: Right side of the line
305 """ 435 """
306 try:
307 return subsection in self._sections[section]
308 except KeyError:
309 return False
310 436
311 def UrlInsteadOf(self, url): 437 @classmethod
312 """Resolve any url.*.insteadof references. 438 def FromString(cls, rs):
313 """ 439 lhs, rhs = rs.split(":", 2)
314 for new_url in self.GetSubSections('url'): 440 if lhs.startswith("+"):
315 for old_url in self.GetString('url.%s.insteadof' % new_url, True): 441 lhs = lhs[1:]
316 if old_url is not None and url.startswith(old_url): 442 forced = True
317 return new_url + url[len(old_url):]
318 return url
319
320 @property
321 def _sections(self):
322 d = self._section_dict
323 if d is None:
324 d = {}
325 for name in self._cache.keys():
326 p = name.split('.')
327 if 2 == len(p):
328 section = p[0]
329 subsect = ''
330 else: 443 else:
331 section = p[0] 444 forced = False
332 subsect = '.'.join(p[1:-1]) 445 return cls(forced, lhs, rhs)
333 if section not in d: 446
334 d[section] = set() 447 def __init__(self, forced, lhs, rhs):
335 d[section].add(subsect) 448 self.forced = forced
336 self._section_dict = d 449 self.src = lhs
337 return d 450 self.dst = rhs
338 451
339 @property 452 def SourceMatches(self, rev):
340 def _cache(self): 453 if self.src:
341 if self._cache_dict is None: 454 if rev == self.src:
342 self._cache_dict = self._Read() 455 return True
343 return self._cache_dict 456 if self.src.endswith("/*") and rev.startswith(self.src[:-1]):
344 457 return True
345 def _Read(self): 458 return False
346 d = self._ReadJson() 459
347 if d is None: 460 def DestMatches(self, ref):
348 d = self._ReadGit() 461 if self.dst:
349 self._SaveJson(d) 462 if ref == self.dst:
350 return d 463 return True
351 464 if self.dst.endswith("/*") and ref.startswith(self.dst[:-1]):
352 def _ReadJson(self): 465 return True
353 try: 466 return False
354 if os.path.getmtime(self._json) <= os.path.getmtime(self.file): 467
355 platform_utils.remove(self._json) 468 def MapSource(self, rev):
356 return None 469 if self.src.endswith("/*"):
357 except OSError: 470 return self.dst[:-1] + rev[len(self.src) - 1 :]
358 return None 471 return self.dst
359 try: 472
360 with Trace(': parsing %s', self.file): 473 def __str__(self):
361 with open(self._json) as fd: 474 s = ""
362 return json.load(fd) 475 if self.forced:
363 except (IOError, ValueError): 476 s += "+"
364 platform_utils.remove(self._json, missing_ok=True) 477 if self.src:
365 return None 478 s += self.src
366 479 if self.dst:
367 def _SaveJson(self, cache): 480 s += ":"
368 try: 481 s += self.dst
369 with open(self._json, 'w') as fd: 482 return s
370 json.dump(cache, fd, indent=2) 483
371 except (IOError, TypeError): 484
372 platform_utils.remove(self._json, missing_ok=True) 485URI_ALL = re.compile(r"^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/")
373
374 def _ReadGit(self):
375 """
376 Read configuration data from git.
377
378 This internal method populates the GitConfig cache.
379
380 """
381 c = {}
382 if not os.path.exists(self.file):
383 return c
384
385 d = self._do('--null', '--list')
386 for line in d.rstrip('\0').split('\0'):
387 if '\n' in line:
388 key, val = line.split('\n', 1)
389 else:
390 key = line
391 val = None
392
393 if key in c:
394 c[key].append(val)
395 else:
396 c[key] = [val]
397
398 return c
399
400 def _do(self, *args):
401 if self.file == self._SYSTEM_CONFIG:
402 command = ['config', '--system', '--includes']
403 else:
404 command = ['config', '--file', self.file, '--includes']
405 command.extend(args)
406
407 p = GitCommand(None,
408 command,
409 capture_stdout=True,
410 capture_stderr=True)
411 if p.Wait() == 0:
412 return p.stdout
413 else:
414 raise GitError('git config %s: %s' % (str(args), p.stderr))
415
416
417class RepoConfig(GitConfig):
418 """User settings for repo itself."""
419
420 @staticmethod
421 def _getUserConfig():
422 repo_config_dir = os.getenv('REPO_CONFIG_DIR', os.path.expanduser('~'))
423 return os.path.join(repo_config_dir, '.repoconfig/config')
424
425
class RefSpec(object):
    """A Git refspec line, split into its components:

    forced: True if the line starts with '+'
    src: Left side of the line
    dst: Right side of the line
    """

    @classmethod
    def FromString(cls, rs):
        """Parse a refspec string such as '+refs/heads/*:refs/remotes/o/*'.

        Args:
            rs: The refspec text: 'lhs:rhs' with an optional leading '+'.

        Returns:
            A new RefSpec instance.
        """
        # A refspec contains exactly one ':' separating source from
        # destination, so split at most once.  (The previous maxsplit of 2
        # made input like 'a:b:c' raise ValueError on unpacking instead of
        # keeping everything after the first ':' as the destination.)
        lhs, rhs = rs.split(':', 1)
        if lhs.startswith('+'):
            lhs = lhs[1:]
            forced = True
        else:
            forced = False
        return cls(forced, lhs, rhs)

    def __init__(self, forced, lhs, rhs):
        self.forced = forced  # True when the refspec began with '+'.
        self.src = lhs
        self.dst = rhs

    def SourceMatches(self, rev):
        """True if |rev| is covered by this refspec's source side."""
        if self.src:
            if rev == self.src:
                return True
            # A trailing '/*' matches any ref under that prefix.
            if self.src.endswith('/*') and rev.startswith(self.src[:-1]):
                return True
        return False

    def DestMatches(self, ref):
        """True if |ref| is covered by this refspec's destination side."""
        if self.dst:
            if ref == self.dst:
                return True
            if self.dst.endswith('/*') and ref.startswith(self.dst[:-1]):
                return True
        return False

    def MapSource(self, rev):
        """Map a source revision through this refspec to its destination."""
        if self.src.endswith('/*'):
            # Substitute the wildcard suffix of |rev| into the destination.
            return self.dst[:-1] + rev[len(self.src) - 1:]
        return self.dst

    def __str__(self):
        s = ''
        if self.forced:
            s += '+'
        if self.src:
            s += self.src
        if self.dst:
            s += ':'
            s += self.dst
        return s
480
481
# Matches 'scheme://[user@]host/...' and captures the scheme and authority.
URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')


def GetSchemeFromUrl(url):
    """Return the URI scheme of |url|, or None if it does not look like one."""
    m = URI_ALL.match(url)
    return m.group(1) if m else None
490 493
491 494
@contextlib.contextmanager
def GetUrlCookieFile(url, quiet):
    """Context manager yielding (cookiefile, proxy) to use for |url|.

    For 'persistent-' URLs, asks git-remote-persistent-https for its
    configuration; otherwise falls back to the user's http.cookiefile
    git setting.

    Args:
        url: The URL being fetched.
        quiet: Suppress helper error output when True.
    """
    if url.startswith('persistent-'):
        try:
            helper = subprocess.Popen(
                ['git-remote-persistent-https', '-print_config', url],
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
            )
            try:
                cookie_prefix = 'http.cookiefile='
                proxy_prefix = 'http.proxy='
                cookiefile = None
                proxy = None
                for raw in helper.stdout:
                    setting = raw.strip().decode('utf-8')
                    if setting.startswith(cookie_prefix):
                        cookiefile = os.path.expanduser(
                            setting[len(cookie_prefix):])
                    if setting.startswith(proxy_prefix):
                        proxy = setting[len(proxy_prefix):]
                # Leave subprocess open, as cookie file may be transient.
                if cookiefile or proxy:
                    yield cookiefile, proxy
                    return
            finally:
                helper.stdin.close()
                if helper.wait():
                    err_msg = helper.stderr.read().decode('utf-8')
                    if ' -print_config' in err_msg:
                        pass  # Persistent proxy doesn't support -print_config.
                    elif not quiet:
                        print(err_msg, file=sys.stderr)
        except OSError as e:
            if e.errno == errno.ENOENT:
                pass  # No persistent proxy.
            # NOTE(review): this re-raises even for ENOENT despite the
            # comment above — preserved as-is to keep behavior identical.
            raise
    cookiefile = GitConfig.ForUser().GetString('http.cookiefile')
    if cookiefile:
        cookiefile = os.path.expanduser(cookiefile)
    yield cookiefile, None
531 538
532 539
class Remote(object):
    """Configuration options related to a remote."""

    def __init__(self, config, name):
        self._config = config
        self.name = name
        self.url = self._Get('url')
        self.pushUrl = self._Get('pushurl')
        self.review = self._Get('review')
        self.projectname = self._Get('projectname')
        self.fetch = [RefSpec.FromString(spec)
                      for spec in self._Get('fetch', all_keys=True)]
        # Lazily resolved by ReviewUrl().
        self._review_url = None

    def _InsteadOf(self):
        """Apply the longest matching url.*.insteadOf rewrite to our URL."""
        user_cfg = GitConfig.ForUser()
        best_prefix = ''
        best_url = ''

        for section in user_cfg.GetSubSections('url'):
            key = 'url.' + section + '.insteadOf'
            for prefix in user_cfg.GetString(key, all_keys=True):
                # Prefer the longest matching prefix, as git does.
                if (self.url.startswith(prefix)
                        and len(prefix) > len(best_prefix)):
                    best_prefix = prefix
                    best_url = section

        if not best_prefix:
            return self.url

        return self.url.replace(best_prefix, best_url, 1)

    def PreConnectFetch(self, ssh_proxy):
        """Run any setup for this remote before we connect to it.

        In practice, if the remote is using SSH, we'll attempt to create a
        new SSH master session to it for reuse across projects.

        Args:
            ssh_proxy: The SSH settings for managing master sessions.

        Returns:
            Whether the preconnect phase for this remote was successful.
        """
        if not ssh_proxy:
            return True

        connection_url = self._InsteadOf()
        return ssh_proxy.preconnect(connection_url)

    def ReviewUrl(self, userEmail, validate_certs):
        """Resolve (and cache) the URL used for uploading reviews.

        Consults REVIEW_CACHE and the REPO_HOST_PORT_INFO /
        REPO_IGNORE_SSH_INFO environment overrides before probing the
        server's /ssh_info endpoint.

        Raises:
            UploadError: when the ssh_info probe fails.
        """
        if self._review_url is None:
            if self.review is None:
                return None

            u = self.review
            if u.startswith('persistent-'):
                u = u[len('persistent-'):]
            if u.split(':')[0] not in ('http', 'https', 'sso', 'ssh'):
                u = 'http://%s' % u
            if u.endswith('/Gerrit'):
                u = u[:len(u) - len('/Gerrit')]
            if u.endswith('/ssh_info'):
                u = u[:len(u) - len('/ssh_info')]
            if not u.endswith('/'):
                u += '/'
            http_url = u

            if u in REVIEW_CACHE:
                self._review_url = REVIEW_CACHE[u]
            elif 'REPO_HOST_PORT_INFO' in os.environ:
                host, port = os.environ['REPO_HOST_PORT_INFO'].split()
                self._review_url = self._SshReviewUrl(userEmail, host, port)
                REVIEW_CACHE[u] = self._review_url
            elif u.startswith('sso:') or u.startswith('ssh:'):
                self._review_url = u  # Assume it's right
                REVIEW_CACHE[u] = self._review_url
            elif 'REPO_IGNORE_SSH_INFO' in os.environ:
                self._review_url = http_url
                REVIEW_CACHE[u] = self._review_url
            else:
                try:
                    info_url = u + 'ssh_info'
                    if not validate_certs:
                        context = ssl._create_unverified_context()
                        info = urllib.request.urlopen(
                            info_url, context=context).read()
                    else:
                        info = urllib.request.urlopen(info_url).read()
                    if info == b'NOT_AVAILABLE' or b'<' in info:
                        # If `info` contains '<', we assume the server gave
                        # us some sort of HTML response back, like maybe a
                        # login page.
                        #
                        # Assume HTTP if SSH is not enabled or ssh_info
                        # doesn't look right.
                        self._review_url = http_url
                    else:
                        info = info.decode('utf-8')
                        host, port = info.split()
                        self._review_url = self._SshReviewUrl(
                            userEmail, host, port)
                except urllib.error.HTTPError as e:
                    raise UploadError('%s: %s' % (self.review, str(e)))
                except urllib.error.URLError as e:
                    raise UploadError('%s: %s' % (self.review, str(e)))
                except HTTPException as e:
                    raise UploadError(
                        '%s: %s' % (self.review, e.__class__.__name__))

                REVIEW_CACHE[u] = self._review_url
        return self._review_url + self.projectname

    def _SshReviewUrl(self, userEmail, host, port):
        """Build an ssh:// review URL for |host|:|port|."""
        username = self._config.GetString('review.%s.username' % self.review)
        if username is None:
            username = userEmail.split('@')[0]
        return 'ssh://%s@%s:%s/' % (username, host, port)

    def ToLocal(self, rev):
        """Convert a remote revision string to something we have locally."""
        if self.name == '.' or IsId(rev):
            return rev

        if not rev.startswith('refs/'):
            rev = R_HEADS + rev

        for spec in self.fetch:
            if spec.SourceMatches(rev):
                return spec.MapSource(rev)

        if not rev.startswith(R_HEADS):
            return rev

        raise GitError('%s: remote %s does not have %s'
                       % (self.projectname, self.name, rev))

    def WritesTo(self, ref):
        """True if the remote stores to the tracking ref."""
        return any(spec.DestMatches(ref) for spec in self.fetch)

    def ResetFetch(self, mirror=False):
        """Set the fetch refspec to its default value."""
        if mirror:
            dst = 'refs/heads/*'
        else:
            dst = 'refs/remotes/%s/*' % self.name
        self.fetch = [RefSpec(True, 'refs/heads/*', dst)]

    def Save(self):
        """Save this remote to the configuration."""
        self._Set('url', self.url)
        if self.pushUrl is not None:
            self._Set('pushurl', self.pushUrl + '/' + self.projectname)
        else:
            self._Set('pushurl', self.pushUrl)
        self._Set('review', self.review)
        self._Set('projectname', self.projectname)
        self._Set('fetch', list(map(str, self.fetch)))

    def _Set(self, key, value):
        key = 'remote.%s.%s' % (self.name, key)
        return self._config.SetString(key, value)

    def _Get(self, key, all_keys=False):
        key = 'remote.%s.%s' % (self.name, key)
        return self._config.GetString(key, all_keys=all_keys)
class Branch(object):
    """Configuration options related to a single branch."""

    def __init__(self, config, name):
        self._config = config
        self.name = name
        self.merge = self._Get('merge')

        remote_name = self._Get('remote')
        if remote_name:
            self.remote = self._config.GetRemote(remote_name)
        else:
            self.remote = None

    @property
    def LocalMerge(self):
        """Convert the merge spec to a local name."""
        if self.remote and self.merge:
            return self.remote.ToLocal(self.merge)
        return None

    def Save(self):
        """Save this branch back into the configuration."""
        if self._config.HasSection('branch', self.name):
            # Section already exists; update keys in place.
            self._Set('remote', self.remote.name if self.remote else None)
            self._Set('merge', self.merge)
        else:
            # Append a fresh section to the config file.
            with open(self._config.file, 'a') as fd:
                fd.write('[branch "%s"]\n' % self.name)
                if self.remote:
                    fd.write('\tremote = %s\n' % self.remote.name)
                if self.merge:
                    fd.write('\tmerge = %s\n' % self.merge)

    def _Set(self, key, value):
        key = 'branch.%s.%s' % (self.name, key)
        return self._config.SetString(key, value)

    def _Get(self, key, all_keys=False):
        key = 'branch.%s.%s' % (self.name, key)
        return self._config.GetString(key, all_keys=all_keys)
756 767
757 768
class SyncAnalysisState:
    """Configuration options related to logging of sync state for analysis.

    This object is versioned.
    """

    def __init__(self, config, options, superproject_logging_data):
        """Initializes SyncAnalysisState.

        Saves the following data into the |config| object.
        - sys.argv, options, superproject's logging data.
        - repo.*, branch.* and remote.* parameters from config object.
        - Current time as synctime.
        - Version number of the object.

        All the keys saved by this object are prepended with
        SYNC_STATE_PREFIX.

        Args:
            config: GitConfig object to store all options.
            options: Options passed to sync returned from optparse. See
                _Options().
            superproject_logging_data: A dictionary of superproject data
                that is to be logged.
        """
        self._config = config
        now = datetime.datetime.utcnow()
        self._Set('main.synctime', now.isoformat() + 'Z')
        self._Set('main.version', '1')
        self._Set('sys.argv', sys.argv)
        for key, value in superproject_logging_data.items():
            self._Set(f'superproject.{key}', value)
        for key, value in options.__dict__.items():
            self._Set(f'options.{key}', value)
        # Only mirror the namespaces useful for sync analysis, and never
        # re-save keys that are themselves sync state.
        wanted = {'repo', 'branch', 'remote'}
        self._SetDictionary({
            k: v
            for k, v in config.DumpConfigDict().items()
            if not k.startswith(SYNC_STATE_PREFIX)
            and k.split('.', 1)[0] in wanted
        })

    def _SetDictionary(self, data):
        """Save all key/value pairs of |data| dictionary.

        Args:
            data: A dictionary whose key/value are to be saved.
        """
        for key, value in data.items():
            self._Set(key, value)

    def _Set(self, key, value):
        """Set the |value| for a |key| in the |_config| member.

        |key| is prepended with the value of SYNC_STATE_PREFIX constant.

        Args:
            key: Name of the key.
            value: |value| could be of any type. If it is 'bool', it will
                be saved as a Boolean and for all other types, it will be
                saved as a String.
        """
        if value is None:
            return
        # Underscores are stripped so keys stay valid git config names.
        sync_key = f'{SYNC_STATE_PREFIX}{key}'.replace('_', '')
        if isinstance(value, str):
            self._config.SetString(sync_key, value)
        elif isinstance(value, bool):
            self._config.SetBoolean(sync_key, value)
        else:
            self._config.SetString(sync_key, str(value))
diff --git a/git_refs.py b/git_refs.py
index 300d2b30..aca1f90d 100644
--- a/git_refs.py
+++ b/git_refs.py
@@ -16,149 +16,150 @@ import os
16from repo_trace import Trace 16from repo_trace import Trace
17import platform_utils 17import platform_utils
18 18
# Well-known Git ref namespaces used throughout repo.
HEAD = 'HEAD'
R_CHANGES = 'refs/changes/'
R_HEADS = 'refs/heads/'
R_TAGS = 'refs/tags/'
R_PUB = 'refs/published/'
R_WORKTREE = 'refs/worktree/'
R_WORKTREE_M = R_WORKTREE + 'm/'
R_M = 'refs/remotes/m/'
27 27
28 28
class GitRefs(object):
    """Cache of the loose and packed refs of a single git directory."""

    def __init__(self, gitdir):
        self._gitdir = gitdir
        self._phyref = None  # ref name -> commit id (None until loaded)
        self._symref = None  # ref name -> target ref name
        self._mtime = {}     # watched path -> mtime at read time

    @property
    def all(self):
        """Dict of every known ref name to its commit id."""
        self._EnsureLoaded()
        return self._phyref

    def get(self, name):
        """Return the id for ref |name|, or '' when it does not exist."""
        try:
            return self.all[name]
        except KeyError:
            return ''

    def deleted(self, name):
        """Drop any cached state for the ref |name|."""
        if self._phyref is not None:
            if name in self._phyref:
                del self._phyref[name]

            if name in self._symref:
                del self._symref[name]

            if name in self._mtime:
                del self._mtime[name]

    def symref(self, name):
        """Return the target of symbolic ref |name|, or ''."""
        try:
            self._EnsureLoaded()
            return self._symref[name]
        except KeyError:
            return ''

    def _EnsureLoaded(self):
        if self._phyref is None or self._NeedUpdate():
            self._LoadAll()

    def _NeedUpdate(self):
        """True when any watched path's mtime changed since the last load."""
        with Trace(': scan refs %s', self._gitdir):
            for name, mtime in self._mtime.items():
                try:
                    if mtime != os.path.getmtime(
                            os.path.join(self._gitdir, name)):
                        return True
                except OSError:
                    # Path vanished or is unreadable; force a reload.
                    return True
            return False

    def _LoadAll(self):
        with Trace(': load refs %s', self._gitdir):
            self._phyref = {}
            self._symref = {}
            self._mtime = {}

            self._ReadPackedRefs()
            self._ReadLoose('refs/')
            self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)

            # Resolve symbolic refs to ids.  Capped at 5 rounds to tolerate
            # short chains without looping forever on a cycle.
            scan = self._symref
            attempts = 0
            while scan and attempts < 5:
                scan_next = {}
                for name, dest in scan.items():
                    if dest in self._phyref:
                        self._phyref[name] = self._phyref[dest]
                    else:
                        scan_next[name] = dest
                scan = scan_next
                attempts += 1

    def _ReadPackedRefs(self):
        """Parse $GIT_DIR/packed-refs into the physical ref map."""
        path = os.path.join(self._gitdir, 'packed-refs')
        try:
            fd = open(path, 'r')
            mtime = os.path.getmtime(path)
        except (IOError, OSError):
            return
        try:
            for line in fd:
                line = str(line)
                # '#' lines are comments; '^' lines are peeled tag ids.
                if line[0] == '#' or line[0] == '^':
                    continue

                parts = line[:-1].split(' ')
                self._phyref[parts[1]] = parts[0]
        finally:
            fd.close()
        self._mtime['packed-refs'] = mtime

    def _ReadLoose(self, prefix):
        """Recursively read loose refs under |prefix| (e.g. 'refs/')."""
        base = os.path.join(self._gitdir, prefix)
        for name in platform_utils.listdir(base):
            p = os.path.join(base, name)
            # We don't implement the full ref validation algorithm, just the
            # simple rules that would show up in local filesystems.
            # https://git-scm.com/docs/git-check-ref-format
            if name.startswith('.') or name.endswith('.lock'):
                pass
            elif platform_utils.isdir(p):
                self._mtime[prefix] = os.path.getmtime(base)
                self._ReadLoose(prefix + name + '/')
            else:
                self._ReadLoose1(p, prefix + name)

    def _ReadLoose1(self, path, name):
        """Read one loose ref file at |path| and record it as |name|."""
        try:
            with open(path) as fd:
                mtime = os.path.getmtime(path)
                ref_id = fd.readline()
        except (OSError, UnicodeError):
            return

        try:
            ref_id = ref_id.decode()
        except AttributeError:
            pass  # Already a str.
        if not ref_id:
            return
        ref_id = ref_id[:-1]

        if ref_id.startswith('ref: '):
            self._symref[name] = ref_id[5:]
        else:
            self._phyref[name] = ref_id
        self._mtime[name] = mtime
diff --git a/git_superproject.py b/git_superproject.py
index 69a4d1fe..f1b4f231 100644
--- a/git_superproject.py
+++ b/git_superproject.py
@@ -12,7 +12,7 @@
12# See the License for the specific language governing permissions and 12# See the License for the specific language governing permissions and
13# limitations under the License. 13# limitations under the License.
14 14
15"""Provide functionality to get all projects and their commit ids from Superproject. 15"""Provide functionality to get projects and their commit ids from Superproject.
16 16
17For more information on superproject, check out: 17For more information on superproject, check out:
18https://en.wikibooks.org/wiki/Git/Submodules_and_Superprojects 18https://en.wikibooks.org/wiki/Git/Submodules_and_Superprojects
@@ -33,434 +33,524 @@ from git_command import git_require, GitCommand
33from git_config import RepoConfig 33from git_config import RepoConfig
34from git_refs import GitRefs 34from git_refs import GitRefs
35 35
36_SUPERPROJECT_GIT_NAME = 'superproject.git' 36_SUPERPROJECT_GIT_NAME = "superproject.git"
37_SUPERPROJECT_MANIFEST_NAME = 'superproject_override.xml' 37_SUPERPROJECT_MANIFEST_NAME = "superproject_override.xml"
38 38
39 39
40class SyncResult(NamedTuple): 40class SyncResult(NamedTuple):
41 """Return the status of sync and whether caller should exit.""" 41 """Return the status of sync and whether caller should exit."""
42 42
43 # Whether the superproject sync was successful. 43 # Whether the superproject sync was successful.
44 success: bool 44 success: bool
45 # Whether the caller should exit. 45 # Whether the caller should exit.
46 fatal: bool 46 fatal: bool
47 47
48 48
49class CommitIdsResult(NamedTuple): 49class CommitIdsResult(NamedTuple):
50 """Return the commit ids and whether caller should exit.""" 50 """Return the commit ids and whether caller should exit."""
51 51
52 # A dictionary with the projects/commit ids on success, otherwise None. 52 # A dictionary with the projects/commit ids on success, otherwise None.
53 commit_ids: dict 53 commit_ids: dict
54 # Whether the caller should exit. 54 # Whether the caller should exit.
55 fatal: bool 55 fatal: bool
56 56
57 57
58class UpdateProjectsResult(NamedTuple): 58class UpdateProjectsResult(NamedTuple):
59 """Return the overriding manifest file and whether caller should exit.""" 59 """Return the overriding manifest file and whether caller should exit."""
60 60
61 # Path name of the overriding manifest file if successful, otherwise None. 61 # Path name of the overriding manifest file if successful, otherwise None.
62 manifest_path: str 62 manifest_path: str
63 # Whether the caller should exit. 63 # Whether the caller should exit.
64 fatal: bool 64 fatal: bool
65 65
66 66
67class Superproject(object): 67class Superproject(object):
68 """Get commit ids from superproject. 68 """Get commit ids from superproject.
69 69
70 Initializes a local copy of a superproject for the manifest. This allows 70 Initializes a local copy of a superproject for the manifest. This allows
71 lookup of commit ids for all projects. It contains _project_commit_ids which 71 lookup of commit ids for all projects. It contains _project_commit_ids which
72 is a dictionary with project/commit id entries. 72 is a dictionary with project/commit id entries.
73 """
74 def __init__(self, manifest, name, remote, revision,
75 superproject_dir='exp-superproject'):
76 """Initializes superproject.
77
78 Args:
79 manifest: A Manifest object that is to be written to a file.
80 name: The unique name of the superproject
81 remote: The RemoteSpec for the remote.
82 revision: The name of the git branch to track.
83 superproject_dir: Relative path under |manifest.subdir| to checkout
84 superproject.
85 """
86 self._project_commit_ids = None
87 self._manifest = manifest
88 self.name = name
89 self.remote = remote
90 self.revision = self._branch = revision
91 self._repodir = manifest.repodir
92 self._superproject_dir = superproject_dir
93 self._superproject_path = manifest.SubmanifestInfoDir(manifest.path_prefix,
94 superproject_dir)
95 self._manifest_path = os.path.join(self._superproject_path,
96 _SUPERPROJECT_MANIFEST_NAME)
97 git_name = hashlib.md5(remote.name.encode('utf8')).hexdigest() + '-'
98 self._remote_url = remote.url
99 self._work_git_name = git_name + _SUPERPROJECT_GIT_NAME
100 self._work_git = os.path.join(self._superproject_path, self._work_git_name)
101
102 # The following are command arguemnts, rather than superproject attributes,
103 # and were included here originally. They should eventually become
104 # arguments that are passed down from the public methods, instead of being
105 # treated as attributes.
106 self._git_event_log = None
107 self._quiet = False
108 self._print_messages = False
109
110 def SetQuiet(self, value):
111 """Set the _quiet attribute."""
112 self._quiet = value
113
114 def SetPrintMessages(self, value):
115 """Set the _print_messages attribute."""
116 self._print_messages = value
117
118 @property
119 def project_commit_ids(self):
120 """Returns a dictionary of projects and their commit ids."""
121 return self._project_commit_ids
122
123 @property
124 def manifest_path(self):
125 """Returns the manifest path if the path exists or None."""
126 return self._manifest_path if os.path.exists(self._manifest_path) else None
127
128 def _LogMessage(self, fmt, *inputs):
129 """Logs message to stderr and _git_event_log."""
130 message = f'{self._LogMessagePrefix()} {fmt.format(*inputs)}'
131 if self._print_messages:
132 print(message, file=sys.stderr)
133 self._git_event_log.ErrorEvent(message, fmt)
134
135 def _LogMessagePrefix(self):
136 """Returns the prefix string to be logged in each log message"""
137 return f'repo superproject branch: {self._branch} url: {self._remote_url}'
138
139 def _LogError(self, fmt, *inputs):
140 """Logs error message to stderr and _git_event_log."""
141 self._LogMessage(f'error: {fmt}', *inputs)
142
143 def _LogWarning(self, fmt, *inputs):
144 """Logs warning message to stderr and _git_event_log."""
145 self._LogMessage(f'warning: {fmt}', *inputs)
146
147 def _Init(self):
148 """Sets up a local Git repository to get a copy of a superproject.
149
150 Returns:
151 True if initialization is successful, or False.
152 """
153 if not os.path.exists(self._superproject_path):
154 os.mkdir(self._superproject_path)
155 if not self._quiet and not os.path.exists(self._work_git):
156 print('%s: Performing initial setup for superproject; this might take '
157 'several minutes.' % self._work_git)
158 cmd = ['init', '--bare', self._work_git_name]
159 p = GitCommand(None,
160 cmd,
161 cwd=self._superproject_path,
162 capture_stdout=True,
163 capture_stderr=True)
164 retval = p.Wait()
165 if retval:
166 self._LogWarning('git init call failed, command: git {}, '
167 'return code: {}, stderr: {}', cmd, retval, p.stderr)
168 return False
169 return True
170
171 def _Fetch(self):
172 """Fetches a local copy of a superproject for the manifest based on |_remote_url|.
173
174 Returns:
175 True if fetch is successful, or False.
176 """
177 if not os.path.exists(self._work_git):
178 self._LogWarning('git fetch missing directory: {}', self._work_git)
179 return False
180 if not git_require((2, 28, 0)):
181 self._LogWarning('superproject requires a git version 2.28 or later')
182 return False
183 cmd = ['fetch', self._remote_url, '--depth', '1', '--force', '--no-tags',
184 '--filter', 'blob:none']
185
186 # Check if there is a local ref that we can pass to --negotiation-tip.
187 # If this is the first fetch, it does not exist yet.
188 # We use --negotiation-tip to speed up the fetch. Superproject branches do
189 # not share commits. So this lets git know it only needs to send commits
190 # reachable from the specified local refs.
191 rev_commit = GitRefs(self._work_git).get(f'refs/heads/{self.revision}')
192 if rev_commit:
193 cmd.extend(['--negotiation-tip', rev_commit])
194
195 if self._branch:
196 cmd += [self._branch + ':' + self._branch]
197 p = GitCommand(None,
198 cmd,
199 cwd=self._work_git,
200 capture_stdout=True,
201 capture_stderr=True)
202 retval = p.Wait()
203 if retval:
204 self._LogWarning('git fetch call failed, command: git {}, '
205 'return code: {}, stderr: {}', cmd, retval, p.stderr)
206 return False
207 return True
208
209 def _LsTree(self):
210 """Gets the commit ids for all projects.
211
212 Works only in git repositories.
213
214 Returns:
215 data: data returned from 'git ls-tree ...' instead of None.
216 """
217 if not os.path.exists(self._work_git):
218 self._LogWarning('git ls-tree missing directory: {}', self._work_git)
219 return None
220 data = None
221 branch = 'HEAD' if not self._branch else self._branch
222 cmd = ['ls-tree', '-z', '-r', branch]
223
224 p = GitCommand(None,
225 cmd,
226 cwd=self._work_git,
227 capture_stdout=True,
228 capture_stderr=True)
229 retval = p.Wait()
230 if retval == 0:
231 data = p.stdout
232 else:
233 self._LogWarning('git ls-tree call failed, command: git {}, '
234 'return code: {}, stderr: {}', cmd, retval, p.stderr)
235 return data
236
237 def Sync(self, git_event_log):
238 """Gets a local copy of a superproject for the manifest.
239
240 Args:
241 git_event_log: an EventLog, for git tracing.
242
243 Returns:
244 SyncResult
245 """
246 self._git_event_log = git_event_log
247 if not self._manifest.superproject:
248 self._LogWarning('superproject tag is not defined in manifest: {}',
249 self._manifest.manifestFile)
250 return SyncResult(False, False)
251
252 _PrintBetaNotice()
253
254 should_exit = True
255 if not self._remote_url:
256 self._LogWarning('superproject URL is not defined in manifest: {}',
257 self._manifest.manifestFile)
258 return SyncResult(False, should_exit)
259
260 if not self._Init():
261 return SyncResult(False, should_exit)
262 if not self._Fetch():
263 return SyncResult(False, should_exit)
264 if not self._quiet:
265 print('%s: Initial setup for superproject completed.' % self._work_git)
266 return SyncResult(True, False)
267
268 def _GetAllProjectsCommitIds(self):
269 """Get commit ids for all projects from superproject and save them in _project_commit_ids.
270
271 Returns:
272 CommitIdsResult
273 """
274 sync_result = self.Sync(self._git_event_log)
275 if not sync_result.success:
276 return CommitIdsResult(None, sync_result.fatal)
277
278 data = self._LsTree()
279 if not data:
280 self._LogWarning('git ls-tree failed to return data for manifest: {}',
281 self._manifest.manifestFile)
282 return CommitIdsResult(None, True)
283
284 # Parse lines like the following to select lines starting with '160000' and
285 # build a dictionary with project path (last element) and its commit id (3rd element).
286 #
287 # 160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00
288 # 120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00
289 commit_ids = {}
290 for line in data.split('\x00'):
291 ls_data = line.split(None, 3)
292 if not ls_data:
293 break
294 if ls_data[0] == '160000':
295 commit_ids[ls_data[3]] = ls_data[2]
296
297 self._project_commit_ids = commit_ids
298 return CommitIdsResult(commit_ids, False)
299
300 def _WriteManifestFile(self):
301 """Writes manifest to a file.
302
303 Returns:
304 manifest_path: Path name of the file into which manifest is written instead of None.
305 """
306 if not os.path.exists(self._superproject_path):
307 self._LogWarning('missing superproject directory: {}', self._superproject_path)
308 return None
309 manifest_str = self._manifest.ToXml(groups=self._manifest.GetGroupsStr(),
310 omit_local=True).toxml()
311 manifest_path = self._manifest_path
312 try:
313 with open(manifest_path, 'w', encoding='utf-8') as fp:
314 fp.write(manifest_str)
315 except IOError as e:
316 self._LogError('cannot write manifest to : {} {}',
317 manifest_path, e)
318 return None
319 return manifest_path
320
321 def _SkipUpdatingProjectRevisionId(self, project):
322 """Checks if a project's revision id needs to be updated or not.
323
324 Revision id for projects from local manifest will not be updated.
325
326 Args:
327 project: project whose revision id is being updated.
328
329 Returns:
330 True if a project's revision id should not be updated, or False,
331 """ 73 """
332 path = project.relpath
333 if not path:
334 return True
335 # Skip the project with revisionId.
336 if project.revisionId:
337 return True
338 # Skip the project if it comes from the local manifest.
339 return project.manifest.IsFromLocalManifest(project)
340
341 def UpdateProjectsRevisionId(self, projects, git_event_log):
342 """Update revisionId of every project in projects with the commit id.
343
344 Args:
345 projects: a list of projects whose revisionId needs to be updated.
346 git_event_log: an EventLog, for git tracing.
347 74
348 Returns: 75 def __init__(
349 UpdateProjectsResult 76 self,
350 """ 77 manifest,
351 self._git_event_log = git_event_log 78 name,
352 commit_ids_result = self._GetAllProjectsCommitIds() 79 remote,
353 commit_ids = commit_ids_result.commit_ids 80 revision,
354 if not commit_ids: 81 superproject_dir="exp-superproject",
355 return UpdateProjectsResult(None, commit_ids_result.fatal) 82 ):
356 83 """Initializes superproject.
357 projects_missing_commit_ids = [] 84
358 for project in projects: 85 Args:
359 if self._SkipUpdatingProjectRevisionId(project): 86 manifest: A Manifest object that is to be written to a file.
360 continue 87 name: The unique name of the superproject
361 path = project.relpath 88 remote: The RemoteSpec for the remote.
362 commit_id = commit_ids.get(path) 89 revision: The name of the git branch to track.
363 if not commit_id: 90 superproject_dir: Relative path under |manifest.subdir| to checkout
364 projects_missing_commit_ids.append(path) 91 superproject.
365 92 """
366 # If superproject doesn't have a commit id for a project, then report an 93 self._project_commit_ids = None
367 # error event and continue as if do not use superproject is specified. 94 self._manifest = manifest
368 if projects_missing_commit_ids: 95 self.name = name
369 self._LogWarning('please file a bug using {} to report missing ' 96 self.remote = remote
370 'commit_ids for: {}', self._manifest.contactinfo.bugurl, 97 self.revision = self._branch = revision
371 projects_missing_commit_ids) 98 self._repodir = manifest.repodir
372 return UpdateProjectsResult(None, False) 99 self._superproject_dir = superproject_dir
373 100 self._superproject_path = manifest.SubmanifestInfoDir(
374 for project in projects: 101 manifest.path_prefix, superproject_dir
375 if not self._SkipUpdatingProjectRevisionId(project): 102 )
376 project.SetRevisionId(commit_ids.get(project.relpath)) 103 self._manifest_path = os.path.join(
377 104 self._superproject_path, _SUPERPROJECT_MANIFEST_NAME
378 manifest_path = self._WriteManifestFile() 105 )
379 return UpdateProjectsResult(manifest_path, False) 106 git_name = hashlib.md5(remote.name.encode("utf8")).hexdigest() + "-"
107 self._remote_url = remote.url
108 self._work_git_name = git_name + _SUPERPROJECT_GIT_NAME
109 self._work_git = os.path.join(
110 self._superproject_path, self._work_git_name
111 )
112
113 # The following are command arguemnts, rather than superproject
114 # attributes, and were included here originally. They should eventually
115 # become arguments that are passed down from the public methods, instead
116 # of being treated as attributes.
117 self._git_event_log = None
118 self._quiet = False
119 self._print_messages = False
120
121 def SetQuiet(self, value):
122 """Set the _quiet attribute."""
123 self._quiet = value
124
125 def SetPrintMessages(self, value):
126 """Set the _print_messages attribute."""
127 self._print_messages = value
128
129 @property
130 def project_commit_ids(self):
131 """Returns a dictionary of projects and their commit ids."""
132 return self._project_commit_ids
133
134 @property
135 def manifest_path(self):
136 """Returns the manifest path if the path exists or None."""
137 return (
138 self._manifest_path if os.path.exists(self._manifest_path) else None
139 )
140
141 def _LogMessage(self, fmt, *inputs):
142 """Logs message to stderr and _git_event_log."""
143 message = f"{self._LogMessagePrefix()} {fmt.format(*inputs)}"
144 if self._print_messages:
145 print(message, file=sys.stderr)
146 self._git_event_log.ErrorEvent(message, fmt)
147
148 def _LogMessagePrefix(self):
149 """Returns the prefix string to be logged in each log message"""
150 return (
151 f"repo superproject branch: {self._branch} url: {self._remote_url}"
152 )
153
154 def _LogError(self, fmt, *inputs):
155 """Logs error message to stderr and _git_event_log."""
156 self._LogMessage(f"error: {fmt}", *inputs)
157
158 def _LogWarning(self, fmt, *inputs):
159 """Logs warning message to stderr and _git_event_log."""
160 self._LogMessage(f"warning: {fmt}", *inputs)
161
162 def _Init(self):
163 """Sets up a local Git repository to get a copy of a superproject.
164
165 Returns:
166 True if initialization is successful, or False.
167 """
168 if not os.path.exists(self._superproject_path):
169 os.mkdir(self._superproject_path)
170 if not self._quiet and not os.path.exists(self._work_git):
171 print(
172 "%s: Performing initial setup for superproject; this might "
173 "take several minutes." % self._work_git
174 )
175 cmd = ["init", "--bare", self._work_git_name]
176 p = GitCommand(
177 None,
178 cmd,
179 cwd=self._superproject_path,
180 capture_stdout=True,
181 capture_stderr=True,
182 )
183 retval = p.Wait()
184 if retval:
185 self._LogWarning(
186 "git init call failed, command: git {}, "
187 "return code: {}, stderr: {}",
188 cmd,
189 retval,
190 p.stderr,
191 )
192 return False
193 return True
194
195 def _Fetch(self):
196 """Fetches a superproject for the manifest based on |_remote_url|.
197
198 This runs git fetch which stores a local copy the superproject.
199
200 Returns:
201 True if fetch is successful, or False.
202 """
203 if not os.path.exists(self._work_git):
204 self._LogWarning("git fetch missing directory: {}", self._work_git)
205 return False
206 if not git_require((2, 28, 0)):
207 self._LogWarning(
208 "superproject requires a git version 2.28 or later"
209 )
210 return False
211 cmd = [
212 "fetch",
213 self._remote_url,
214 "--depth",
215 "1",
216 "--force",
217 "--no-tags",
218 "--filter",
219 "blob:none",
220 ]
221
222 # Check if there is a local ref that we can pass to --negotiation-tip.
223 # If this is the first fetch, it does not exist yet.
224 # We use --negotiation-tip to speed up the fetch. Superproject branches
225 # do not share commits. So this lets git know it only needs to send
226 # commits reachable from the specified local refs.
227 rev_commit = GitRefs(self._work_git).get(f"refs/heads/{self.revision}")
228 if rev_commit:
229 cmd.extend(["--negotiation-tip", rev_commit])
230
231 if self._branch:
232 cmd += [self._branch + ":" + self._branch]
233 p = GitCommand(
234 None,
235 cmd,
236 cwd=self._work_git,
237 capture_stdout=True,
238 capture_stderr=True,
239 )
240 retval = p.Wait()
241 if retval:
242 self._LogWarning(
243 "git fetch call failed, command: git {}, "
244 "return code: {}, stderr: {}",
245 cmd,
246 retval,
247 p.stderr,
248 )
249 return False
250 return True
251
252 def _LsTree(self):
253 """Gets the commit ids for all projects.
254
255 Works only in git repositories.
256
257 Returns:
258 data: data returned from 'git ls-tree ...' instead of None.
259 """
260 if not os.path.exists(self._work_git):
261 self._LogWarning(
262 "git ls-tree missing directory: {}", self._work_git
263 )
264 return None
265 data = None
266 branch = "HEAD" if not self._branch else self._branch
267 cmd = ["ls-tree", "-z", "-r", branch]
268
269 p = GitCommand(
270 None,
271 cmd,
272 cwd=self._work_git,
273 capture_stdout=True,
274 capture_stderr=True,
275 )
276 retval = p.Wait()
277 if retval == 0:
278 data = p.stdout
279 else:
280 self._LogWarning(
281 "git ls-tree call failed, command: git {}, "
282 "return code: {}, stderr: {}",
283 cmd,
284 retval,
285 p.stderr,
286 )
287 return data
288
289 def Sync(self, git_event_log):
290 """Gets a local copy of a superproject for the manifest.
291
292 Args:
293 git_event_log: an EventLog, for git tracing.
294
295 Returns:
296 SyncResult
297 """
298 self._git_event_log = git_event_log
299 if not self._manifest.superproject:
300 self._LogWarning(
301 "superproject tag is not defined in manifest: {}",
302 self._manifest.manifestFile,
303 )
304 return SyncResult(False, False)
305
306 _PrintBetaNotice()
307
308 should_exit = True
309 if not self._remote_url:
310 self._LogWarning(
311 "superproject URL is not defined in manifest: {}",
312 self._manifest.manifestFile,
313 )
314 return SyncResult(False, should_exit)
315
316 if not self._Init():
317 return SyncResult(False, should_exit)
318 if not self._Fetch():
319 return SyncResult(False, should_exit)
320 if not self._quiet:
321 print(
322 "%s: Initial setup for superproject completed." % self._work_git
323 )
324 return SyncResult(True, False)
325
326 def _GetAllProjectsCommitIds(self):
327 """Get commit ids for all projects from superproject and save them.
328
329 Commit ids are saved in _project_commit_ids.
330
331 Returns:
332 CommitIdsResult
333 """
334 sync_result = self.Sync(self._git_event_log)
335 if not sync_result.success:
336 return CommitIdsResult(None, sync_result.fatal)
337
338 data = self._LsTree()
339 if not data:
340 self._LogWarning(
341 "git ls-tree failed to return data for manifest: {}",
342 self._manifest.manifestFile,
343 )
344 return CommitIdsResult(None, True)
345
346 # Parse lines like the following to select lines starting with '160000'
347 # and build a dictionary with project path (last element) and its commit
348 # id (3rd element).
349 #
350 # 160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00
351 # 120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00 # noqa: E501
352 commit_ids = {}
353 for line in data.split("\x00"):
354 ls_data = line.split(None, 3)
355 if not ls_data:
356 break
357 if ls_data[0] == "160000":
358 commit_ids[ls_data[3]] = ls_data[2]
359
360 self._project_commit_ids = commit_ids
361 return CommitIdsResult(commit_ids, False)
362
363 def _WriteManifestFile(self):
364 """Writes manifest to a file.
365
366 Returns:
367 manifest_path: Path name of the file into which manifest is written
368 instead of None.
369 """
370 if not os.path.exists(self._superproject_path):
371 self._LogWarning(
372 "missing superproject directory: {}", self._superproject_path
373 )
374 return None
375 manifest_str = self._manifest.ToXml(
376 groups=self._manifest.GetGroupsStr(), omit_local=True
377 ).toxml()
378 manifest_path = self._manifest_path
379 try:
380 with open(manifest_path, "w", encoding="utf-8") as fp:
381 fp.write(manifest_str)
382 except IOError as e:
383 self._LogError("cannot write manifest to : {} {}", manifest_path, e)
384 return None
385 return manifest_path
386
387 def _SkipUpdatingProjectRevisionId(self, project):
388 """Checks if a project's revision id needs to be updated or not.
389
390 Revision id for projects from local manifest will not be updated.
391
392 Args:
393 project: project whose revision id is being updated.
394
395 Returns:
396 True if a project's revision id should not be updated, or False,
397 """
398 path = project.relpath
399 if not path:
400 return True
401 # Skip the project with revisionId.
402 if project.revisionId:
403 return True
404 # Skip the project if it comes from the local manifest.
405 return project.manifest.IsFromLocalManifest(project)
406
407 def UpdateProjectsRevisionId(self, projects, git_event_log):
408 """Update revisionId of every project in projects with the commit id.
409
410 Args:
411 projects: a list of projects whose revisionId needs to be updated.
412 git_event_log: an EventLog, for git tracing.
413
414 Returns:
415 UpdateProjectsResult
416 """
417 self._git_event_log = git_event_log
418 commit_ids_result = self._GetAllProjectsCommitIds()
419 commit_ids = commit_ids_result.commit_ids
420 if not commit_ids:
421 return UpdateProjectsResult(None, commit_ids_result.fatal)
422
423 projects_missing_commit_ids = []
424 for project in projects:
425 if self._SkipUpdatingProjectRevisionId(project):
426 continue
427 path = project.relpath
428 commit_id = commit_ids.get(path)
429 if not commit_id:
430 projects_missing_commit_ids.append(path)
431
432 # If superproject doesn't have a commit id for a project, then report an
433 # error event and continue as if do not use superproject is specified.
434 if projects_missing_commit_ids:
435 self._LogWarning(
436 "please file a bug using {} to report missing "
437 "commit_ids for: {}",
438 self._manifest.contactinfo.bugurl,
439 projects_missing_commit_ids,
440 )
441 return UpdateProjectsResult(None, False)
442
443 for project in projects:
444 if not self._SkipUpdatingProjectRevisionId(project):
445 project.SetRevisionId(commit_ids.get(project.relpath))
446
447 manifest_path = self._WriteManifestFile()
448 return UpdateProjectsResult(manifest_path, False)
380 449
381 450
382@functools.lru_cache(maxsize=10) 451@functools.lru_cache(maxsize=10)
383def _PrintBetaNotice(): 452def _PrintBetaNotice():
384 """Print the notice of beta status.""" 453 """Print the notice of beta status."""
385 print('NOTICE: --use-superproject is in beta; report any issues to the ' 454 print(
386 'address described in `repo version`', file=sys.stderr) 455 "NOTICE: --use-superproject is in beta; report any issues to the "
456 "address described in `repo version`",
457 file=sys.stderr,
458 )
387 459
388 460
389@functools.lru_cache(maxsize=None) 461@functools.lru_cache(maxsize=None)
390def _UseSuperprojectFromConfiguration(): 462def _UseSuperprojectFromConfiguration():
391 """Returns the user choice of whether to use superproject.""" 463 """Returns the user choice of whether to use superproject."""
392 user_cfg = RepoConfig.ForUser() 464 user_cfg = RepoConfig.ForUser()
393 time_now = int(time.time()) 465 time_now = int(time.time())
394 466
395 user_value = user_cfg.GetBoolean('repo.superprojectChoice') 467 user_value = user_cfg.GetBoolean("repo.superprojectChoice")
396 if user_value is not None: 468 if user_value is not None:
397 user_expiration = user_cfg.GetInt('repo.superprojectChoiceExpire') 469 user_expiration = user_cfg.GetInt("repo.superprojectChoiceExpire")
398 if user_expiration is None or user_expiration <= 0 or user_expiration >= time_now: 470 if (
399 # TODO(b/190688390) - Remove prompt when we are comfortable with the new 471 user_expiration is None
400 # default value. 472 or user_expiration <= 0
401 if user_value: 473 or user_expiration >= time_now
402 print(('You are currently enrolled in Git submodules experiment ' 474 ):
403 '(go/android-submodules-quickstart). Use --no-use-superproject ' 475 # TODO(b/190688390) - Remove prompt when we are comfortable with the
404 'to override.\n'), file=sys.stderr) 476 # new default value.
405 else: 477 if user_value:
406 print(('You are not currently enrolled in Git submodules experiment ' 478 print(
407 '(go/android-submodules-quickstart). Use --use-superproject ' 479 (
408 'to override.\n'), file=sys.stderr) 480 "You are currently enrolled in Git submodules "
409 return user_value 481 "experiment (go/android-submodules-quickstart). Use "
410 482 "--no-use-superproject to override.\n"
411 # We don't have an unexpired choice, ask for one. 483 ),
412 system_cfg = RepoConfig.ForSystem() 484 file=sys.stderr,
413 system_value = system_cfg.GetBoolean('repo.superprojectChoice') 485 )
414 if system_value: 486 else:
415 # The system configuration is proposing that we should enable the 487 print(
416 # use of superproject. Treat the user as enrolled for two weeks. 488 (
417 # 489 "You are not currently enrolled in Git submodules "
418 # TODO(b/190688390) - Remove prompt when we are comfortable with the new 490 "experiment (go/android-submodules-quickstart). Use "
419 # default value. 491 "--use-superproject to override.\n"
420 userchoice = True 492 ),
421 time_choiceexpire = time_now + (86400 * 14) 493 file=sys.stderr,
422 user_cfg.SetString('repo.superprojectChoiceExpire', str(time_choiceexpire)) 494 )
423 user_cfg.SetBoolean('repo.superprojectChoice', userchoice) 495 return user_value
424 print('You are automatically enrolled in Git submodules experiment ' 496
425 '(go/android-submodules-quickstart) for another two weeks.\n', 497 # We don't have an unexpired choice, ask for one.
426 file=sys.stderr) 498 system_cfg = RepoConfig.ForSystem()
427 return True 499 system_value = system_cfg.GetBoolean("repo.superprojectChoice")
428 500 if system_value:
429 # For all other cases, we would not use superproject by default. 501 # The system configuration is proposing that we should enable the
430 return False 502 # use of superproject. Treat the user as enrolled for two weeks.
503 #
504 # TODO(b/190688390) - Remove prompt when we are comfortable with the new
505 # default value.
506 userchoice = True
507 time_choiceexpire = time_now + (86400 * 14)
508 user_cfg.SetString(
509 "repo.superprojectChoiceExpire", str(time_choiceexpire)
510 )
511 user_cfg.SetBoolean("repo.superprojectChoice", userchoice)
512 print(
513 "You are automatically enrolled in Git submodules experiment "
514 "(go/android-submodules-quickstart) for another two weeks.\n",
515 file=sys.stderr,
516 )
517 return True
518
519 # For all other cases, we would not use superproject by default.
520 return False
431 521
432 522
433def PrintMessages(use_superproject, manifest): 523def PrintMessages(use_superproject, manifest):
434 """Returns a boolean if error/warning messages are to be printed. 524 """Returns a boolean if error/warning messages are to be printed.
435 525
436 Args: 526 Args:
437 use_superproject: option value from optparse. 527 use_superproject: option value from optparse.
438 manifest: manifest to use. 528 manifest: manifest to use.
439 """ 529 """
440 return use_superproject is not None or bool(manifest.superproject) 530 return use_superproject is not None or bool(manifest.superproject)
441 531
442 532
443def UseSuperproject(use_superproject, manifest): 533def UseSuperproject(use_superproject, manifest):
444 """Returns a boolean if use-superproject option is enabled. 534 """Returns a boolean if use-superproject option is enabled.
445 535
446 Args: 536 Args:
447 use_superproject: option value from optparse. 537 use_superproject: option value from optparse.
448 manifest: manifest to use. 538 manifest: manifest to use.
449 539
450 Returns: 540 Returns:
451 Whether the superproject should be used. 541 Whether the superproject should be used.
452 """ 542 """
453 543
454 if not manifest.superproject: 544 if not manifest.superproject:
455 # This (sub) manifest does not have a superproject definition. 545 # This (sub) manifest does not have a superproject definition.
456 return False 546 return False
457 elif use_superproject is not None: 547 elif use_superproject is not None:
458 return use_superproject 548 return use_superproject
459 else:
460 client_value = manifest.manifestProject.use_superproject
461 if client_value is not None:
462 return client_value
463 elif manifest.superproject:
464 return _UseSuperprojectFromConfiguration()
465 else: 549 else:
466 return False 550 client_value = manifest.manifestProject.use_superproject
551 if client_value is not None:
552 return client_value
553 elif manifest.superproject:
554 return _UseSuperprojectFromConfiguration()
555 else:
556 return False
diff --git a/git_trace2_event_log.py b/git_trace2_event_log.py
index 2edab0e1..d90e9039 100644
--- a/git_trace2_event_log.py
+++ b/git_trace2_event_log.py
@@ -41,291 +41,330 @@ from git_command import GitCommand, RepoSourceVersion
41 41
42 42
43class EventLog(object): 43class EventLog(object):
44 """Event log that records events that occurred during a repo invocation. 44 """Event log that records events that occurred during a repo invocation.
45
46 Events are written to the log as a consecutive JSON entries, one per line.
47 Entries follow the git trace2 EVENT format.
48
49 Each entry contains the following common keys:
50 - event: The event name
51 - sid: session-id - Unique string to allow process instance to be identified.
52 - thread: The thread name.
53 - time: is the UTC time of the event.
54
55 Valid 'event' names and event specific fields are documented here:
56 https://git-scm.com/docs/api-trace2#_event_format
57 """
58
59 def __init__(self, env=None):
60 """Initializes the event log."""
61 self._log = []
62 # Try to get session-id (sid) from environment (setup in repo launcher).
63 KEY = 'GIT_TRACE2_PARENT_SID'
64 if env is None:
65 env = os.environ
66
67 now = datetime.datetime.utcnow()
68
69 # Save both our sid component and the complete sid.
70 # We use our sid component (self._sid) as the unique filename prefix and
71 # the full sid (self._full_sid) in the log itself.
72 self._sid = 'repo-%s-P%08x' % (now.strftime('%Y%m%dT%H%M%SZ'), os.getpid())
73 parent_sid = env.get(KEY)
74 # Append our sid component to the parent sid (if it exists).
75 if parent_sid is not None:
76 self._full_sid = parent_sid + '/' + self._sid
77 else:
78 self._full_sid = self._sid
79
80 # Set/update the environment variable.
81 # Environment handling across systems is messy.
82 try:
83 env[KEY] = self._full_sid
84 except UnicodeEncodeError:
85 env[KEY] = self._full_sid.encode()
86
87 # Add a version event to front of the log.
88 self._AddVersionEvent()
89
90 @property
91 def full_sid(self):
92 return self._full_sid
93
94 def _AddVersionEvent(self):
95 """Adds a 'version' event at the beginning of current log."""
96 version_event = self._CreateEventDict('version')
97 version_event['evt'] = "2"
98 version_event['exe'] = RepoSourceVersion()
99 self._log.insert(0, version_event)
100
101 def _CreateEventDict(self, event_name):
102 """Returns a dictionary with the common keys/values for git trace2 events.
103
104 Args:
105 event_name: The event name.
106
107 Returns:
108 Dictionary with the common event fields populated.
109 """
110 return {
111 'event': event_name,
112 'sid': self._full_sid,
113 'thread': threading.current_thread().name,
114 'time': datetime.datetime.utcnow().isoformat() + 'Z',
115 }
116
117 def StartEvent(self):
118 """Append a 'start' event to the current log."""
119 start_event = self._CreateEventDict('start')
120 start_event['argv'] = sys.argv
121 self._log.append(start_event)
122
123 def ExitEvent(self, result):
124 """Append an 'exit' event to the current log.
125
126 Args:
127 result: Exit code of the event
128 """
129 exit_event = self._CreateEventDict('exit')
130
131 # Consider 'None' success (consistent with event_log result handling).
132 if result is None:
133 result = 0
134 exit_event['code'] = result
135 self._log.append(exit_event)
136
137 def CommandEvent(self, name, subcommands):
138 """Append a 'command' event to the current log.
139
140 Args:
141 name: Name of the primary command (ex: repo, git)
142 subcommands: List of the sub-commands (ex: version, init, sync)
143 """
144 command_event = self._CreateEventDict('command')
145 command_event['name'] = name
146 command_event['subcommands'] = subcommands
147 self._log.append(command_event)
148 45
149 def LogConfigEvents(self, config, event_dict_name): 46 Events are written to the log as a consecutive JSON entries, one per line.
150 """Append a |event_dict_name| event for each config key in |config|. 47 Entries follow the git trace2 EVENT format.
151 48
152 Args: 49 Each entry contains the following common keys:
153 config: Configuration dictionary. 50 - event: The event name
154 event_dict_name: Name of the event dictionary for items to be logged under. 51 - sid: session-id - Unique string to allow process instance to be
155 """ 52 identified.
156 for param, value in config.items(): 53 - thread: The thread name.
157 event = self._CreateEventDict(event_dict_name) 54 - time: is the UTC time of the event.
158 event['param'] = param
159 event['value'] = value
160 self._log.append(event)
161
162 def DefParamRepoEvents(self, config):
163 """Append a 'def_param' event for each repo.* config key to the current log.
164 55
165 Args: 56 Valid 'event' names and event specific fields are documented here:
166 config: Repo configuration dictionary 57 https://git-scm.com/docs/api-trace2#_event_format
167 """ 58 """
168 # Only output the repo.* config parameters.
169 repo_config = {k: v for k, v in config.items() if k.startswith('repo.')}
170 self.LogConfigEvents(repo_config, 'def_param')
171
172 def GetDataEventName(self, value):
173 """Returns 'data-json' if the value is an array else returns 'data'."""
174 return 'data-json' if value[0] == '[' and value[-1] == ']' else 'data'
175 59
176 def LogDataConfigEvents(self, config, prefix): 60 def __init__(self, env=None):
177 """Append a 'data' event for each config key/value in |config| to the current log. 61 """Initializes the event log."""
178 62 self._log = []
179 For each keyX and valueX of the config, "key" field of the event is '|prefix|/keyX' 63 # Try to get session-id (sid) from environment (setup in repo launcher).
180 and the "value" of the "key" field is valueX. 64 KEY = "GIT_TRACE2_PARENT_SID"
181 65 if env is None:
182 Args: 66 env = os.environ
183 config: Configuration dictionary. 67
184 prefix: Prefix for each key that is logged. 68 now = datetime.datetime.utcnow()
185 """ 69
186 for key, value in config.items(): 70 # Save both our sid component and the complete sid.
187 event = self._CreateEventDict(self.GetDataEventName(value)) 71 # We use our sid component (self._sid) as the unique filename prefix and
188 event['key'] = f'{prefix}/{key}' 72 # the full sid (self._full_sid) in the log itself.
189 event['value'] = value 73 self._sid = "repo-%s-P%08x" % (
190 self._log.append(event) 74 now.strftime("%Y%m%dT%H%M%SZ"),
191 75 os.getpid(),
192 def ErrorEvent(self, msg, fmt): 76 )
193 """Append a 'error' event to the current log.""" 77 parent_sid = env.get(KEY)
194 error_event = self._CreateEventDict('error') 78 # Append our sid component to the parent sid (if it exists).
195 error_event['msg'] = msg 79 if parent_sid is not None:
196 error_event['fmt'] = fmt 80 self._full_sid = parent_sid + "/" + self._sid
197 self._log.append(error_event) 81 else:
198 82 self._full_sid = self._sid
199 def _GetEventTargetPath(self): 83
200 """Get the 'trace2.eventtarget' path from git configuration. 84 # Set/update the environment variable.
201 85 # Environment handling across systems is messy.
202 Returns: 86 try:
203 path: git config's 'trace2.eventtarget' path if it exists, or None 87 env[KEY] = self._full_sid
204 """ 88 except UnicodeEncodeError:
205 path = None 89 env[KEY] = self._full_sid.encode()
206 cmd = ['config', '--get', 'trace2.eventtarget'] 90
207 # TODO(https://crbug.com/gerrit/13706): Use GitConfig when it supports 91 # Add a version event to front of the log.
208 # system git config variables. 92 self._AddVersionEvent()
209 p = GitCommand(None, cmd, capture_stdout=True, capture_stderr=True, 93
210 bare=True) 94 @property
211 retval = p.Wait() 95 def full_sid(self):
212 if retval == 0: 96 return self._full_sid
213 # Strip trailing carriage-return in path. 97
214 path = p.stdout.rstrip('\n') 98 def _AddVersionEvent(self):
215 elif retval != 1: 99 """Adds a 'version' event at the beginning of current log."""
216 # `git config --get` is documented to produce an exit status of `1` if 100 version_event = self._CreateEventDict("version")
217 # the requested variable is not present in the configuration. Report any 101 version_event["evt"] = "2"
218 # other return value as an error. 102 version_event["exe"] = RepoSourceVersion()
219 print("repo: error: 'git config --get' call failed with return code: %r, stderr: %r" % ( 103 self._log.insert(0, version_event)
220 retval, p.stderr), file=sys.stderr) 104
221 return path 105 def _CreateEventDict(self, event_name):
222 106 """Returns a dictionary with common keys/values for git trace2 events.
223 def _WriteLog(self, write_fn): 107
224 """Writes the log out using a provided writer function. 108 Args:
225 109 event_name: The event name.
226 Generate compact JSON output for each item in the log, and write it using 110
227 write_fn. 111 Returns:
228 112 Dictionary with the common event fields populated.
229 Args: 113 """
230 write_fn: A function that accepts byts and writes them to a destination. 114 return {
231 """ 115 "event": event_name,
232 116 "sid": self._full_sid,
233 for e in self._log: 117 "thread": threading.current_thread().name,
234 # Dump in compact encoding mode. 118 "time": datetime.datetime.utcnow().isoformat() + "Z",
235 # See 'Compact encoding' in Python docs: 119 }
236 # https://docs.python.org/3/library/json.html#module-json 120
237 write_fn(json.dumps(e, indent=None, separators=(',', ':')).encode('utf-8') + b'\n') 121 def StartEvent(self):
238 122 """Append a 'start' event to the current log."""
239 def Write(self, path=None): 123 start_event = self._CreateEventDict("start")
240 """Writes the log out to a file or socket. 124 start_event["argv"] = sys.argv
241 125 self._log.append(start_event)
242 Log is only written if 'path' or 'git config --get trace2.eventtarget' 126
243 provide a valid path (or socket) to write logs to. 127 def ExitEvent(self, result):
128 """Append an 'exit' event to the current log.
129
130 Args:
131 result: Exit code of the event
132 """
133 exit_event = self._CreateEventDict("exit")
134
135 # Consider 'None' success (consistent with event_log result handling).
136 if result is None:
137 result = 0
138 exit_event["code"] = result
139 self._log.append(exit_event)
140
141 def CommandEvent(self, name, subcommands):
142 """Append a 'command' event to the current log.
143
144 Args:
145 name: Name of the primary command (ex: repo, git)
146 subcommands: List of the sub-commands (ex: version, init, sync)
147 """
148 command_event = self._CreateEventDict("command")
149 command_event["name"] = name
150 command_event["subcommands"] = subcommands
151 self._log.append(command_event)
152
153 def LogConfigEvents(self, config, event_dict_name):
154 """Append a |event_dict_name| event for each config key in |config|.
155
156 Args:
157 config: Configuration dictionary.
158 event_dict_name: Name of the event dictionary for items to be logged
159 under.
160 """
161 for param, value in config.items():
162 event = self._CreateEventDict(event_dict_name)
163 event["param"] = param
164 event["value"] = value
165 self._log.append(event)
166
167 def DefParamRepoEvents(self, config):
168 """Append 'def_param' events for repo config keys to the current log.
169
170 This appends one event for each repo.* config key.
171
172 Args:
173 config: Repo configuration dictionary
174 """
175 # Only output the repo.* config parameters.
176 repo_config = {k: v for k, v in config.items() if k.startswith("repo.")}
177 self.LogConfigEvents(repo_config, "def_param")
178
179 def GetDataEventName(self, value):
180 """Returns 'data-json' if the value is an array else returns 'data'."""
181 return "data-json" if value[0] == "[" and value[-1] == "]" else "data"
182
183 def LogDataConfigEvents(self, config, prefix):
184 """Append a 'data' event for each entry in |config| to the current log.
185
186 For each keyX and valueX of the config, "key" field of the event is
187 '|prefix|/keyX' and the "value" of the "key" field is valueX.
188
189 Args:
190 config: Configuration dictionary.
191 prefix: Prefix for each key that is logged.
192 """
193 for key, value in config.items():
194 event = self._CreateEventDict(self.GetDataEventName(value))
195 event["key"] = f"{prefix}/{key}"
196 event["value"] = value
197 self._log.append(event)
198
199 def ErrorEvent(self, msg, fmt):
200 """Append a 'error' event to the current log."""
201 error_event = self._CreateEventDict("error")
202 error_event["msg"] = msg
203 error_event["fmt"] = fmt
204 self._log.append(error_event)
205
206 def _GetEventTargetPath(self):
207 """Get the 'trace2.eventtarget' path from git configuration.
208
209 Returns:
210 path: git config's 'trace2.eventtarget' path if it exists, or None
211 """
212 path = None
213 cmd = ["config", "--get", "trace2.eventtarget"]
214 # TODO(https://crbug.com/gerrit/13706): Use GitConfig when it supports
215 # system git config variables.
216 p = GitCommand(
217 None, cmd, capture_stdout=True, capture_stderr=True, bare=True
218 )
219 retval = p.Wait()
220 if retval == 0:
221 # Strip trailing carriage-return in path.
222 path = p.stdout.rstrip("\n")
223 elif retval != 1:
224 # `git config --get` is documented to produce an exit status of `1`
225 # if the requested variable is not present in the configuration.
226 # Report any other return value as an error.
227 print(
228 "repo: error: 'git config --get' call failed with return code: "
229 "%r, stderr: %r" % (retval, p.stderr),
230 file=sys.stderr,
231 )
232 return path
233
234 def _WriteLog(self, write_fn):
235 """Writes the log out using a provided writer function.
236
237 Generate compact JSON output for each item in the log, and write it
238 using write_fn.
239
240 Args:
241 write_fn: A function that accepts byts and writes them to a
242 destination.
243 """
244
245 for e in self._log:
246 # Dump in compact encoding mode.
247 # See 'Compact encoding' in Python docs:
248 # https://docs.python.org/3/library/json.html#module-json
249 write_fn(
250 json.dumps(e, indent=None, separators=(",", ":")).encode(
251 "utf-8"
252 )
253 + b"\n"
254 )
255
256 def Write(self, path=None):
257 """Writes the log out to a file or socket.
258
259 Log is only written if 'path' or 'git config --get trace2.eventtarget'
260 provide a valid path (or socket) to write logs to.
261
262 Logging filename format follows the git trace2 style of being a unique
263 (exclusive writable) file.
264
265 Args:
266 path: Path to where logs should be written. The path may have a
267 prefix of the form "af_unix:[{stream|dgram}:]", in which case
268 the path is treated as a Unix domain socket. See
269 https://git-scm.com/docs/api-trace2#_enabling_a_target for
270 details.
271
272 Returns:
273 log_path: Path to the log file or socket if log is written,
274 otherwise None
275 """
276 log_path = None
277 # If no logging path is specified, get the path from
278 # 'trace2.eventtarget'.
279 if path is None:
280 path = self._GetEventTargetPath()
281
282 # If no logging path is specified, exit.
283 if path is None:
284 return None
244 285
245 Logging filename format follows the git trace2 style of being a unique 286 path_is_socket = False
246 (exclusive writable) file. 287 socket_type = None
288 if isinstance(path, str):
289 parts = path.split(":", 1)
290 if parts[0] == "af_unix" and len(parts) == 2:
291 path_is_socket = True
292 path = parts[1]
293 parts = path.split(":", 1)
294 if parts[0] == "stream" and len(parts) == 2:
295 socket_type = socket.SOCK_STREAM
296 path = parts[1]
297 elif parts[0] == "dgram" and len(parts) == 2:
298 socket_type = socket.SOCK_DGRAM
299 path = parts[1]
300 else:
301 # Get absolute path.
302 path = os.path.abspath(os.path.expanduser(path))
303 else:
304 raise TypeError("path: str required but got %s." % type(path))
305
306 # Git trace2 requires a directory to write log to.
307
308 # TODO(https://crbug.com/gerrit/13706): Support file (append) mode also.
309 if not (path_is_socket or os.path.isdir(path)):
310 return None
247 311
248 Args: 312 if path_is_socket:
249 path: Path to where logs should be written. The path may have a prefix of 313 if socket_type == socket.SOCK_STREAM or socket_type is None:
250 the form "af_unix:[{stream|dgram}:]", in which case the path is 314 try:
251 treated as a Unix domain socket. See 315 with socket.socket(
252 https://git-scm.com/docs/api-trace2#_enabling_a_target for details. 316 socket.AF_UNIX, socket.SOCK_STREAM
317 ) as sock:
318 sock.connect(path)
319 self._WriteLog(sock.sendall)
320 return f"af_unix:stream:{path}"
321 except OSError as err:
322 # If we tried to connect to a DGRAM socket using STREAM,
323 # ignore the attempt and continue to DGRAM below. Otherwise,
324 # issue a warning.
325 if err.errno != errno.EPROTOTYPE:
326 print(
327 f"repo: warning: git trace2 logging failed: {err}",
328 file=sys.stderr,
329 )
330 return None
331 if socket_type == socket.SOCK_DGRAM or socket_type is None:
332 try:
333 with socket.socket(
334 socket.AF_UNIX, socket.SOCK_DGRAM
335 ) as sock:
336 self._WriteLog(lambda bs: sock.sendto(bs, path))
337 return f"af_unix:dgram:{path}"
338 except OSError as err:
339 print(
340 f"repo: warning: git trace2 logging failed: {err}",
341 file=sys.stderr,
342 )
343 return None
344 # Tried to open a socket but couldn't connect (SOCK_STREAM) or write
345 # (SOCK_DGRAM).
346 print(
347 "repo: warning: git trace2 logging failed: could not write to "
348 "socket",
349 file=sys.stderr,
350 )
351 return None
253 352
254 Returns: 353 # Path is an absolute path
255 log_path: Path to the log file or socket if log is written, otherwise None 354 # Use NamedTemporaryFile to generate a unique filename as required by
256 """ 355 # git trace2.
257 log_path = None
258 # If no logging path is specified, get the path from 'trace2.eventtarget'.
259 if path is None:
260 path = self._GetEventTargetPath()
261
262 # If no logging path is specified, exit.
263 if path is None:
264 return None
265
266 path_is_socket = False
267 socket_type = None
268 if isinstance(path, str):
269 parts = path.split(':', 1)
270 if parts[0] == 'af_unix' and len(parts) == 2:
271 path_is_socket = True
272 path = parts[1]
273 parts = path.split(':', 1)
274 if parts[0] == 'stream' and len(parts) == 2:
275 socket_type = socket.SOCK_STREAM
276 path = parts[1]
277 elif parts[0] == 'dgram' and len(parts) == 2:
278 socket_type = socket.SOCK_DGRAM
279 path = parts[1]
280 else:
281 # Get absolute path.
282 path = os.path.abspath(os.path.expanduser(path))
283 else:
284 raise TypeError('path: str required but got %s.' % type(path))
285
286 # Git trace2 requires a directory to write log to.
287
288 # TODO(https://crbug.com/gerrit/13706): Support file (append) mode also.
289 if not (path_is_socket or os.path.isdir(path)):
290 return None
291
292 if path_is_socket:
293 if socket_type == socket.SOCK_STREAM or socket_type is None:
294 try: 356 try:
295 with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock: 357 with tempfile.NamedTemporaryFile(
296 sock.connect(path) 358 mode="xb", prefix=self._sid, dir=path, delete=False
297 self._WriteLog(sock.sendall) 359 ) as f:
298 return f'af_unix:stream:{path}' 360 # TODO(https://crbug.com/gerrit/13706): Support writing events
299 except OSError as err: 361 # as they occur.
300 # If we tried to connect to a DGRAM socket using STREAM, ignore the 362 self._WriteLog(f.write)
301 # attempt and continue to DGRAM below. Otherwise, issue a warning. 363 log_path = f.name
302 if err.errno != errno.EPROTOTYPE: 364 except FileExistsError as err:
303 print(f'repo: warning: git trace2 logging failed: {err}', file=sys.stderr) 365 print(
366 "repo: warning: git trace2 logging failed: %r" % err,
367 file=sys.stderr,
368 )
304 return None 369 return None
305 if socket_type == socket.SOCK_DGRAM or socket_type is None: 370 return log_path
306 try:
307 with socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) as sock:
308 self._WriteLog(lambda bs: sock.sendto(bs, path))
309 return f'af_unix:dgram:{path}'
310 except OSError as err:
311 print(f'repo: warning: git trace2 logging failed: {err}', file=sys.stderr)
312 return None
313 # Tried to open a socket but couldn't connect (SOCK_STREAM) or write
314 # (SOCK_DGRAM).
315 print('repo: warning: git trace2 logging failed: could not write to socket', file=sys.stderr)
316 return None
317
318 # Path is an absolute path
319 # Use NamedTemporaryFile to generate a unique filename as required by git trace2.
320 try:
321 with tempfile.NamedTemporaryFile(mode='xb', prefix=self._sid, dir=path,
322 delete=False) as f:
323 # TODO(https://crbug.com/gerrit/13706): Support writing events as they
324 # occur.
325 self._WriteLog(f.write)
326 log_path = f.name
327 except FileExistsError as err:
328 print('repo: warning: git trace2 logging failed: %r' % err,
329 file=sys.stderr)
330 return None
331 return log_path
diff --git a/gitc_utils.py b/gitc_utils.py
index dfcfd2a4..7b72048f 100644
--- a/gitc_utils.py
+++ b/gitc_utils.py
@@ -28,128 +28,139 @@ NUM_BATCH_RETRIEVE_REVISIONID = 32
28 28
29 29
30def get_gitc_manifest_dir(): 30def get_gitc_manifest_dir():
31 return wrapper.Wrapper().get_gitc_manifest_dir() 31 return wrapper.Wrapper().get_gitc_manifest_dir()
32 32
33 33
34def parse_clientdir(gitc_fs_path): 34def parse_clientdir(gitc_fs_path):
35 return wrapper.Wrapper().gitc_parse_clientdir(gitc_fs_path) 35 return wrapper.Wrapper().gitc_parse_clientdir(gitc_fs_path)
36 36
37 37
38def _get_project_revision(args): 38def _get_project_revision(args):
39 """Worker for _set_project_revisions to lookup one project remote.""" 39 """Worker for _set_project_revisions to lookup one project remote."""
40 (i, url, expr) = args 40 (i, url, expr) = args
41 gitcmd = git_command.GitCommand( 41 gitcmd = git_command.GitCommand(
42 None, ['ls-remote', url, expr], capture_stdout=True, cwd='/tmp') 42 None, ["ls-remote", url, expr], capture_stdout=True, cwd="/tmp"
43 rc = gitcmd.Wait() 43 )
44 return (i, rc, gitcmd.stdout.split('\t', 1)[0]) 44 rc = gitcmd.Wait()
45 return (i, rc, gitcmd.stdout.split("\t", 1)[0])
45 46
46 47
47def _set_project_revisions(projects): 48def _set_project_revisions(projects):
48 """Sets the revisionExpr for a list of projects. 49 """Sets the revisionExpr for a list of projects.
49 50
50 Because of the limit of open file descriptors allowed, length of projects 51 Because of the limit of open file descriptors allowed, length of projects
51 should not be overly large. Recommend calling this function multiple times 52 should not be overly large. Recommend calling this function multiple times
52 with each call not exceeding NUM_BATCH_RETRIEVE_REVISIONID projects. 53 with each call not exceeding NUM_BATCH_RETRIEVE_REVISIONID projects.
53 54
54 Args: 55 Args:
55 projects: List of project objects to set the revionExpr for. 56 projects: List of project objects to set the revionExpr for.
56 """ 57 """
57 # Retrieve the commit id for each project based off of it's current 58 # Retrieve the commit id for each project based off of its current
58 # revisionExpr and it is not already a commit id. 59 # revisionExpr and it is not already a commit id.
59 with multiprocessing.Pool(NUM_BATCH_RETRIEVE_REVISIONID) as pool: 60 with multiprocessing.Pool(NUM_BATCH_RETRIEVE_REVISIONID) as pool:
60 results_iter = pool.imap_unordered( 61 results_iter = pool.imap_unordered(
61 _get_project_revision, 62 _get_project_revision,
62 ((i, project.remote.url, project.revisionExpr) 63 (
63 for i, project in enumerate(projects) 64 (i, project.remote.url, project.revisionExpr)
64 if not git_config.IsId(project.revisionExpr)), 65 for i, project in enumerate(projects)
65 chunksize=8) 66 if not git_config.IsId(project.revisionExpr)
66 for (i, rc, revisionExpr) in results_iter: 67 ),
67 project = projects[i] 68 chunksize=8,
68 if rc: 69 )
69 print('FATAL: Failed to retrieve revisionExpr for %s' % project.name) 70 for i, rc, revisionExpr in results_iter:
70 pool.terminate() 71 project = projects[i]
71 sys.exit(1) 72 if rc:
72 if not revisionExpr: 73 print(
73 pool.terminate() 74 "FATAL: Failed to retrieve revisionExpr for %s"
74 raise ManifestParseError('Invalid SHA-1 revision project %s (%s)' % 75 % project.name
75 (project.remote.url, project.revisionExpr)) 76 )
76 project.revisionExpr = revisionExpr 77 pool.terminate()
78 sys.exit(1)
79 if not revisionExpr:
80 pool.terminate()
81 raise ManifestParseError(
82 "Invalid SHA-1 revision project %s (%s)"
83 % (project.remote.url, project.revisionExpr)
84 )
85 project.revisionExpr = revisionExpr
77 86
78 87
79def generate_gitc_manifest(gitc_manifest, manifest, paths=None): 88def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
80 """Generate a manifest for shafsd to use for this GITC client. 89 """Generate a manifest for shafsd to use for this GITC client.
81 90
82 Args: 91 Args:
83 gitc_manifest: Current gitc manifest, or None if there isn't one yet. 92 gitc_manifest: Current gitc manifest, or None if there isn't one yet.
84 manifest: A GitcManifest object loaded with the current repo manifest. 93 manifest: A GitcManifest object loaded with the current repo manifest.
85 paths: List of project paths we want to update. 94 paths: List of project paths we want to update.
86 """ 95 """
87 96
88 print('Generating GITC Manifest by fetching revision SHAs for each ' 97 print(
89 'project.') 98 "Generating GITC Manifest by fetching revision SHAs for each "
90 if paths is None: 99 "project."
91 paths = list(manifest.paths.keys()) 100 )
92 101 if paths is None:
93 groups = [x for x in re.split(r'[,\s]+', manifest.GetGroupsStr()) if x] 102 paths = list(manifest.paths.keys())
94 103
95 # Convert the paths to projects, and filter them to the matched groups. 104 groups = [x for x in re.split(r"[,\s]+", manifest.GetGroupsStr()) if x]
96 projects = [manifest.paths[p] for p in paths] 105
97 projects = [p for p in projects if p.MatchesGroups(groups)] 106 # Convert the paths to projects, and filter them to the matched groups.
98 107 projects = [manifest.paths[p] for p in paths]
99 if gitc_manifest is not None: 108 projects = [p for p in projects if p.MatchesGroups(groups)]
100 for path, proj in manifest.paths.items(): 109
101 if not proj.MatchesGroups(groups): 110 if gitc_manifest is not None:
102 continue 111 for path, proj in manifest.paths.items():
103 112 if not proj.MatchesGroups(groups):
104 if not proj.upstream and not git_config.IsId(proj.revisionExpr): 113 continue
105 proj.upstream = proj.revisionExpr 114
106 115 if not proj.upstream and not git_config.IsId(proj.revisionExpr):
107 if path not in gitc_manifest.paths: 116 proj.upstream = proj.revisionExpr
108 # Any new projects need their first revision, even if we weren't asked 117
109 # for them. 118 if path not in gitc_manifest.paths:
110 projects.append(proj) 119 # Any new projects need their first revision, even if we weren't
111 elif path not in paths: 120 # asked for them.
112 # And copy revisions from the previous manifest if we're not updating 121 projects.append(proj)
113 # them now. 122 elif path not in paths:
114 gitc_proj = gitc_manifest.paths[path] 123 # And copy revisions from the previous manifest if we're not
115 if gitc_proj.old_revision: 124 # updating them now.
116 proj.revisionExpr = None 125 gitc_proj = gitc_manifest.paths[path]
117 proj.old_revision = gitc_proj.old_revision 126 if gitc_proj.old_revision:
118 else: 127 proj.revisionExpr = None
119 proj.revisionExpr = gitc_proj.revisionExpr 128 proj.old_revision = gitc_proj.old_revision
120 129 else:
121 _set_project_revisions(projects) 130 proj.revisionExpr = gitc_proj.revisionExpr
122 131
123 if gitc_manifest is not None: 132 _set_project_revisions(projects)
124 for path, proj in gitc_manifest.paths.items(): 133
125 if proj.old_revision and path in paths: 134 if gitc_manifest is not None:
126 # If we updated a project that has been started, keep the old-revision 135 for path, proj in gitc_manifest.paths.items():
127 # updated. 136 if proj.old_revision and path in paths:
128 repo_proj = manifest.paths[path] 137 # If we updated a project that has been started, keep the
129 repo_proj.old_revision = repo_proj.revisionExpr 138 # old-revision updated.
130 repo_proj.revisionExpr = None 139 repo_proj = manifest.paths[path]
131 140 repo_proj.old_revision = repo_proj.revisionExpr
132 # Convert URLs from relative to absolute. 141 repo_proj.revisionExpr = None
133 for _name, remote in manifest.remotes.items(): 142
134 remote.fetchUrl = remote.resolvedFetchUrl 143 # Convert URLs from relative to absolute.
135 144 for _name, remote in manifest.remotes.items():
136 # Save the manifest. 145 remote.fetchUrl = remote.resolvedFetchUrl
137 save_manifest(manifest) 146
147 # Save the manifest.
148 save_manifest(manifest)
138 149
139 150
140def save_manifest(manifest, client_dir=None): 151def save_manifest(manifest, client_dir=None):
141 """Save the manifest file in the client_dir. 152 """Save the manifest file in the client_dir.
142 153
143 Args: 154 Args:
144 manifest: Manifest object to save. 155 manifest: Manifest object to save.
145 client_dir: Client directory to save the manifest in. 156 client_dir: Client directory to save the manifest in.
146 """ 157 """
147 if not client_dir: 158 if not client_dir:
148 manifest_file = manifest.manifestFile 159 manifest_file = manifest.manifestFile
149 else: 160 else:
150 manifest_file = os.path.join(client_dir, '.manifest') 161 manifest_file = os.path.join(client_dir, ".manifest")
151 with open(manifest_file, 'w') as f: 162 with open(manifest_file, "w") as f:
152 manifest.Save(f, groups=manifest.GetGroupsStr()) 163 manifest.Save(f, groups=manifest.GetGroupsStr())
153 # TODO(sbasi/jorg): Come up with a solution to remove the sleep below. 164 # TODO(sbasi/jorg): Come up with a solution to remove the sleep below.
154 # Give the GITC filesystem time to register the manifest changes. 165 # Give the GITC filesystem time to register the manifest changes.
155 time.sleep(3) 166 time.sleep(3)
diff --git a/hooks.py b/hooks.py
index 67c21a25..decf0699 100644
--- a/hooks.py
+++ b/hooks.py
@@ -26,271 +26,293 @@ from git_refs import HEAD
26 26
27 27
28class RepoHook(object): 28class RepoHook(object):
29 """A RepoHook contains information about a script to run as a hook. 29 """A RepoHook contains information about a script to run as a hook.
30 30
31 Hooks are used to run a python script before running an upload (for instance, 31 Hooks are used to run a python script before running an upload (for
32 to run presubmit checks). Eventually, we may have hooks for other actions. 32 instance, to run presubmit checks). Eventually, we may have hooks for other
33 33 actions.
34 This shouldn't be confused with files in the 'repo/hooks' directory. Those 34
35 files are copied into each '.git/hooks' folder for each project. Repo-level 35 This shouldn't be confused with files in the 'repo/hooks' directory. Those
36 hooks are associated instead with repo actions. 36 files are copied into each '.git/hooks' folder for each project. Repo-level
37 37 hooks are associated instead with repo actions.
38 Hooks are always python. When a hook is run, we will load the hook into the 38
39 interpreter and execute its main() function. 39 Hooks are always python. When a hook is run, we will load the hook into the
40 40 interpreter and execute its main() function.
41 Combinations of hook option flags: 41
42 - no-verify=False, verify=False (DEFAULT): 42 Combinations of hook option flags:
43 If stdout is a tty, can prompt about running hooks if needed. 43 - no-verify=False, verify=False (DEFAULT):
44 If user denies running hooks, the action is cancelled. If stdout is 44 If stdout is a tty, can prompt about running hooks if needed.
45 not a tty and we would need to prompt about hooks, action is 45 If user denies running hooks, the action is cancelled. If stdout is
46 cancelled. 46 not a tty and we would need to prompt about hooks, action is
47 - no-verify=False, verify=True: 47 cancelled.
48 Always run hooks with no prompt. 48 - no-verify=False, verify=True:
49 - no-verify=True, verify=False: 49 Always run hooks with no prompt.
50 Never run hooks, but run action anyway (AKA bypass hooks). 50 - no-verify=True, verify=False:
51 - no-verify=True, verify=True: 51 Never run hooks, but run action anyway (AKA bypass hooks).
52 Invalid 52 - no-verify=True, verify=True:
53 """ 53 Invalid
54
55 def __init__(self,
56 hook_type,
57 hooks_project,
58 repo_topdir,
59 manifest_url,
60 bypass_hooks=False,
61 allow_all_hooks=False,
62 ignore_hooks=False,
63 abort_if_user_denies=False):
64 """RepoHook constructor.
65
66 Params:
67 hook_type: A string representing the type of hook. This is also used
68 to figure out the name of the file containing the hook. For
69 example: 'pre-upload'.
70 hooks_project: The project containing the repo hooks.
71 If you have a manifest, this is manifest.repo_hooks_project.
72 OK if this is None, which will make the hook a no-op.
73 repo_topdir: The top directory of the repo client checkout.
74 This is the one containing the .repo directory. Scripts will
75 run with CWD as this directory.
76 If you have a manifest, this is manifest.topdir.
77 manifest_url: The URL to the manifest git repo.
78 bypass_hooks: If True, then 'Do not run the hook'.
79 allow_all_hooks: If True, then 'Run the hook without prompting'.
80 ignore_hooks: If True, then 'Do not abort action if hooks fail'.
81 abort_if_user_denies: If True, we'll abort running the hook if the user
82 doesn't allow us to run the hook.
83 """ 54 """
84 self._hook_type = hook_type
85 self._hooks_project = hooks_project
86 self._repo_topdir = repo_topdir
87 self._manifest_url = manifest_url
88 self._bypass_hooks = bypass_hooks
89 self._allow_all_hooks = allow_all_hooks
90 self._ignore_hooks = ignore_hooks
91 self._abort_if_user_denies = abort_if_user_denies
92
93 # Store the full path to the script for convenience.
94 if self._hooks_project:
95 self._script_fullpath = os.path.join(self._hooks_project.worktree,
96 self._hook_type + '.py')
97 else:
98 self._script_fullpath = None
99
100 def _GetHash(self):
101 """Return a hash of the contents of the hooks directory.
102
103 We'll just use git to do this. This hash has the property that if anything
104 changes in the directory we will return a different has.
105
106 SECURITY CONSIDERATION:
107 This hash only represents the contents of files in the hook directory, not
108 any other files imported or called by hooks. Changes to imported files
109 can change the script behavior without affecting the hash.
110
111 Returns:
112 A string representing the hash. This will always be ASCII so that it can
113 be printed to the user easily.
114 """
115 assert self._hooks_project, "Must have hooks to calculate their hash."
116
117 # We will use the work_git object rather than just calling GetRevisionId().
118 # That gives us a hash of the latest checked in version of the files that
119 # the user will actually be executing. Specifically, GetRevisionId()
120 # doesn't appear to change even if a user checks out a different version
121 # of the hooks repo (via git checkout) nor if a user commits their own revs.
122 #
123 # NOTE: Local (non-committed) changes will not be factored into this hash.
124 # I think this is OK, since we're really only worried about warning the user
125 # about upstream changes.
126 return self._hooks_project.work_git.rev_parse(HEAD)
127
128 def _GetMustVerb(self):
129 """Return 'must' if the hook is required; 'should' if not."""
130 if self._abort_if_user_denies:
131 return 'must'
132 else:
133 return 'should'
134
135 def _CheckForHookApproval(self):
136 """Check to see whether this hook has been approved.
137
138 We'll accept approval of manifest URLs if they're using secure transports.
139 This way the user can say they trust the manifest hoster. For insecure
140 hosts, we fall back to checking the hash of the hooks repo.
141
142 Note that we ask permission for each individual hook even though we use
143 the hash of all hooks when detecting changes. We'd like the user to be
144 able to approve / deny each hook individually. We only use the hash of all
145 hooks because there is no other easy way to detect changes to local imports.
146
147 Returns:
148 True if this hook is approved to run; False otherwise.
149
150 Raises:
151 HookError: Raised if the user doesn't approve and abort_if_user_denies
152 was passed to the consturctor.
153 """
154 if self._ManifestUrlHasSecureScheme():
155 return self._CheckForHookApprovalManifest()
156 else:
157 return self._CheckForHookApprovalHash()
158
159 def _CheckForHookApprovalHelper(self, subkey, new_val, main_prompt,
160 changed_prompt):
161 """Check for approval for a particular attribute and hook.
162
163 Args:
164 subkey: The git config key under [repo.hooks.<hook_type>] to store the
165 last approved string.
166 new_val: The new value to compare against the last approved one.
167 main_prompt: Message to display to the user to ask for approval.
168 changed_prompt: Message explaining why we're re-asking for approval.
169
170 Returns:
171 True if this hook is approved to run; False otherwise.
172
173 Raises:
174 HookError: Raised if the user doesn't approve and abort_if_user_denies
175 was passed to the consturctor.
176 """
177 hooks_config = self._hooks_project.config
178 git_approval_key = 'repo.hooks.%s.%s' % (self._hook_type, subkey)
179
180 # Get the last value that the user approved for this hook; may be None.
181 old_val = hooks_config.GetString(git_approval_key)
182
183 if old_val is not None:
184 # User previously approved hook and asked not to be prompted again.
185 if new_val == old_val:
186 # Approval matched. We're done.
187 return True
188 else:
189 # Give the user a reason why we're prompting, since they last told
190 # us to "never ask again".
191 prompt = 'WARNING: %s\n\n' % (changed_prompt,)
192 else:
193 prompt = ''
194
195 # Prompt the user if we're not on a tty; on a tty we'll assume "no".
196 if sys.stdout.isatty():
197 prompt += main_prompt + ' (yes/always/NO)? '
198 response = input(prompt).lower()
199 print()
200
201 # User is doing a one-time approval.
202 if response in ('y', 'yes'):
203 return True
204 elif response == 'always':
205 hooks_config.SetString(git_approval_key, new_val)
206 return True
207
208 # For anything else, we'll assume no approval.
209 if self._abort_if_user_denies:
210 raise HookError('You must allow the %s hook or use --no-verify.' %
211 self._hook_type)
212
213 return False
214
215 def _ManifestUrlHasSecureScheme(self):
216 """Check if the URI for the manifest is a secure transport."""
217 secure_schemes = ('file', 'https', 'ssh', 'persistent-https', 'sso', 'rpc')
218 parse_results = urllib.parse.urlparse(self._manifest_url)
219 return parse_results.scheme in secure_schemes
220
221 def _CheckForHookApprovalManifest(self):
222 """Check whether the user has approved this manifest host.
223
224 Returns:
225 True if this hook is approved to run; False otherwise.
226 """
227 return self._CheckForHookApprovalHelper(
228 'approvedmanifest',
229 self._manifest_url,
230 'Run hook scripts from %s' % (self._manifest_url,),
231 'Manifest URL has changed since %s was allowed.' % (self._hook_type,))
232
233 def _CheckForHookApprovalHash(self):
234 """Check whether the user has approved the hooks repo.
235
236 Returns:
237 True if this hook is approved to run; False otherwise.
238 """
239 prompt = ('Repo %s run the script:\n'
240 ' %s\n'
241 '\n'
242 'Do you want to allow this script to run')
243 return self._CheckForHookApprovalHelper(
244 'approvedhash',
245 self._GetHash(),
246 prompt % (self._GetMustVerb(), self._script_fullpath),
247 'Scripts have changed since %s was allowed.' % (self._hook_type,))
248
249 @staticmethod
250 def _ExtractInterpFromShebang(data):
251 """Extract the interpreter used in the shebang.
252
253 Try to locate the interpreter the script is using (ignoring `env`).
254
255 Args:
256 data: The file content of the script.
257
258 Returns:
259 The basename of the main script interpreter, or None if a shebang is not
260 used or could not be parsed out.
261 """
262 firstline = data.splitlines()[:1]
263 if not firstline:
264 return None
265
266 # The format here can be tricky.
267 shebang = firstline[0].strip()
268 m = re.match(r'^#!\s*([^\s]+)(?:\s+([^\s]+))?', shebang)
269 if not m:
270 return None
271
272 # If the using `env`, find the target program.
273 interp = m.group(1)
274 if os.path.basename(interp) == 'env':
275 interp = m.group(2)
276
277 return interp
278
279 def _ExecuteHookViaReexec(self, interp, context, **kwargs):
280 """Execute the hook script through |interp|.
281 55
282 Note: Support for this feature should be dropped ~Jun 2021. 56 def __init__(
283 57 self,
284 Args: 58 hook_type,
285 interp: The Python program to run. 59 hooks_project,
286 context: Basic Python context to execute the hook inside. 60 repo_topdir,
287 kwargs: Arbitrary arguments to pass to the hook script. 61 manifest_url,
288 62 bypass_hooks=False,
289 Raises: 63 allow_all_hooks=False,
290 HookError: When the hooks failed for any reason. 64 ignore_hooks=False,
291 """ 65 abort_if_user_denies=False,
292 # This logic needs to be kept in sync with _ExecuteHookViaImport below. 66 ):
293 script = """ 67 """RepoHook constructor.
68
69 Params:
70 hook_type: A string representing the type of hook. This is also used
71 to figure out the name of the file containing the hook. For
72 example: 'pre-upload'.
73 hooks_project: The project containing the repo hooks.
74 If you have a manifest, this is manifest.repo_hooks_project.
75 OK if this is None, which will make the hook a no-op.
76 repo_topdir: The top directory of the repo client checkout.
77 This is the one containing the .repo directory. Scripts will
78 run with CWD as this directory.
79 If you have a manifest, this is manifest.topdir.
80 manifest_url: The URL to the manifest git repo.
81 bypass_hooks: If True, then 'Do not run the hook'.
82 allow_all_hooks: If True, then 'Run the hook without prompting'.
83 ignore_hooks: If True, then 'Do not abort action if hooks fail'.
84 abort_if_user_denies: If True, we'll abort running the hook if the
85 user doesn't allow us to run the hook.
86 """
87 self._hook_type = hook_type
88 self._hooks_project = hooks_project
89 self._repo_topdir = repo_topdir
90 self._manifest_url = manifest_url
91 self._bypass_hooks = bypass_hooks
92 self._allow_all_hooks = allow_all_hooks
93 self._ignore_hooks = ignore_hooks
94 self._abort_if_user_denies = abort_if_user_denies
95
96 # Store the full path to the script for convenience.
97 if self._hooks_project:
98 self._script_fullpath = os.path.join(
99 self._hooks_project.worktree, self._hook_type + ".py"
100 )
101 else:
102 self._script_fullpath = None
103
104 def _GetHash(self):
105 """Return a hash of the contents of the hooks directory.
106
107 We'll just use git to do this. This hash has the property that if
108 anything changes in the directory we will return a different has.
109
110 SECURITY CONSIDERATION:
111 This hash only represents the contents of files in the hook
112 directory, not any other files imported or called by hooks. Changes
113 to imported files can change the script behavior without affecting
114 the hash.
115
116 Returns:
117 A string representing the hash. This will always be ASCII so that
118 it can be printed to the user easily.
119 """
120 assert self._hooks_project, "Must have hooks to calculate their hash."
121
122 # We will use the work_git object rather than just calling
123 # GetRevisionId(). That gives us a hash of the latest checked in version
124 # of the files that the user will actually be executing. Specifically,
125 # GetRevisionId() doesn't appear to change even if a user checks out a
126 # different version of the hooks repo (via git checkout) nor if a user
127 # commits their own revs.
128 #
129 # NOTE: Local (non-committed) changes will not be factored into this
130 # hash. I think this is OK, since we're really only worried about
131 # warning the user about upstream changes.
132 return self._hooks_project.work_git.rev_parse(HEAD)
133
134 def _GetMustVerb(self):
135 """Return 'must' if the hook is required; 'should' if not."""
136 if self._abort_if_user_denies:
137 return "must"
138 else:
139 return "should"
140
141 def _CheckForHookApproval(self):
142 """Check to see whether this hook has been approved.
143
144 We'll accept approval of manifest URLs if they're using secure
145 transports. This way the user can say they trust the manifest hoster.
146 For insecure hosts, we fall back to checking the hash of the hooks repo.
147
148 Note that we ask permission for each individual hook even though we use
149 the hash of all hooks when detecting changes. We'd like the user to be
150 able to approve / deny each hook individually. We only use the hash of
151 all hooks because there is no other easy way to detect changes to local
152 imports.
153
154 Returns:
155 True if this hook is approved to run; False otherwise.
156
157 Raises:
158 HookError: Raised if the user doesn't approve and
159 abort_if_user_denies was passed to the consturctor.
160 """
161 if self._ManifestUrlHasSecureScheme():
162 return self._CheckForHookApprovalManifest()
163 else:
164 return self._CheckForHookApprovalHash()
165
166 def _CheckForHookApprovalHelper(
167 self, subkey, new_val, main_prompt, changed_prompt
168 ):
169 """Check for approval for a particular attribute and hook.
170
171 Args:
172 subkey: The git config key under [repo.hooks.<hook_type>] to store
173 the last approved string.
174 new_val: The new value to compare against the last approved one.
175 main_prompt: Message to display to the user to ask for approval.
176 changed_prompt: Message explaining why we're re-asking for approval.
177
178 Returns:
179 True if this hook is approved to run; False otherwise.
180
181 Raises:
182 HookError: Raised if the user doesn't approve and
183 abort_if_user_denies was passed to the consturctor.
184 """
185 hooks_config = self._hooks_project.config
186 git_approval_key = "repo.hooks.%s.%s" % (self._hook_type, subkey)
187
188 # Get the last value that the user approved for this hook; may be None.
189 old_val = hooks_config.GetString(git_approval_key)
190
191 if old_val is not None:
192 # User previously approved hook and asked not to be prompted again.
193 if new_val == old_val:
194 # Approval matched. We're done.
195 return True
196 else:
197 # Give the user a reason why we're prompting, since they last
198 # told us to "never ask again".
199 prompt = "WARNING: %s\n\n" % (changed_prompt,)
200 else:
201 prompt = ""
202
203 # Prompt the user if we're not on a tty; on a tty we'll assume "no".
204 if sys.stdout.isatty():
205 prompt += main_prompt + " (yes/always/NO)? "
206 response = input(prompt).lower()
207 print()
208
209 # User is doing a one-time approval.
210 if response in ("y", "yes"):
211 return True
212 elif response == "always":
213 hooks_config.SetString(git_approval_key, new_val)
214 return True
215
216 # For anything else, we'll assume no approval.
217 if self._abort_if_user_denies:
218 raise HookError(
219 "You must allow the %s hook or use --no-verify."
220 % self._hook_type
221 )
222
223 return False
224
225 def _ManifestUrlHasSecureScheme(self):
226 """Check if the URI for the manifest is a secure transport."""
227 secure_schemes = (
228 "file",
229 "https",
230 "ssh",
231 "persistent-https",
232 "sso",
233 "rpc",
234 )
235 parse_results = urllib.parse.urlparse(self._manifest_url)
236 return parse_results.scheme in secure_schemes
237
238 def _CheckForHookApprovalManifest(self):
239 """Check whether the user has approved this manifest host.
240
241 Returns:
242 True if this hook is approved to run; False otherwise.
243 """
244 return self._CheckForHookApprovalHelper(
245 "approvedmanifest",
246 self._manifest_url,
247 "Run hook scripts from %s" % (self._manifest_url,),
248 "Manifest URL has changed since %s was allowed."
249 % (self._hook_type,),
250 )
251
252 def _CheckForHookApprovalHash(self):
253 """Check whether the user has approved the hooks repo.
254
255 Returns:
256 True if this hook is approved to run; False otherwise.
257 """
258 prompt = (
259 "Repo %s run the script:\n"
260 " %s\n"
261 "\n"
262 "Do you want to allow this script to run"
263 )
264 return self._CheckForHookApprovalHelper(
265 "approvedhash",
266 self._GetHash(),
267 prompt % (self._GetMustVerb(), self._script_fullpath),
268 "Scripts have changed since %s was allowed." % (self._hook_type,),
269 )
270
271 @staticmethod
272 def _ExtractInterpFromShebang(data):
273 """Extract the interpreter used in the shebang.
274
275 Try to locate the interpreter the script is using (ignoring `env`).
276
277 Args:
278 data: The file content of the script.
279
280 Returns:
281 The basename of the main script interpreter, or None if a shebang is
282 not used or could not be parsed out.
283 """
284 firstline = data.splitlines()[:1]
285 if not firstline:
286 return None
287
288 # The format here can be tricky.
289 shebang = firstline[0].strip()
290 m = re.match(r"^#!\s*([^\s]+)(?:\s+([^\s]+))?", shebang)
291 if not m:
292 return None
293
294 # If the using `env`, find the target program.
295 interp = m.group(1)
296 if os.path.basename(interp) == "env":
297 interp = m.group(2)
298
299 return interp
300
301 def _ExecuteHookViaReexec(self, interp, context, **kwargs):
302 """Execute the hook script through |interp|.
303
304 Note: Support for this feature should be dropped ~Jun 2021.
305
306 Args:
307 interp: The Python program to run.
308 context: Basic Python context to execute the hook inside.
309 kwargs: Arbitrary arguments to pass to the hook script.
310
311 Raises:
312 HookError: When the hooks failed for any reason.
313 """
314 # This logic needs to be kept in sync with _ExecuteHookViaImport below.
315 script = """
294import json, os, sys 316import json, os, sys
295path = '''%(path)s''' 317path = '''%(path)s'''
296kwargs = json.loads('''%(kwargs)s''') 318kwargs = json.loads('''%(kwargs)s''')
@@ -300,210 +322,240 @@ data = open(path).read()
300exec(compile(data, path, 'exec'), context) 322exec(compile(data, path, 'exec'), context)
301context['main'](**kwargs) 323context['main'](**kwargs)
302""" % { 324""" % {
303 'path': self._script_fullpath, 325 "path": self._script_fullpath,
304 'kwargs': json.dumps(kwargs), 326 "kwargs": json.dumps(kwargs),
305 'context': json.dumps(context), 327 "context": json.dumps(context),
306 } 328 }
307 329
308 # We pass the script via stdin to avoid OS argv limits. It also makes 330 # We pass the script via stdin to avoid OS argv limits. It also makes
309 # unhandled exception tracebacks less verbose/confusing for users. 331 # unhandled exception tracebacks less verbose/confusing for users.
310 cmd = [interp, '-c', 'import sys; exec(sys.stdin.read())'] 332 cmd = [interp, "-c", "import sys; exec(sys.stdin.read())"]
311 proc = subprocess.Popen(cmd, stdin=subprocess.PIPE) 333 proc = subprocess.Popen(cmd, stdin=subprocess.PIPE)
312 proc.communicate(input=script.encode('utf-8')) 334 proc.communicate(input=script.encode("utf-8"))
313 if proc.returncode: 335 if proc.returncode:
314 raise HookError('Failed to run %s hook.' % (self._hook_type,)) 336 raise HookError("Failed to run %s hook." % (self._hook_type,))
315 337
316 def _ExecuteHookViaImport(self, data, context, **kwargs): 338 def _ExecuteHookViaImport(self, data, context, **kwargs):
317 """Execute the hook code in |data| directly. 339 """Execute the hook code in |data| directly.
318 340
319 Args: 341 Args:
320 data: The code of the hook to execute. 342 data: The code of the hook to execute.
321 context: Basic Python context to execute the hook inside. 343 context: Basic Python context to execute the hook inside.
322 kwargs: Arbitrary arguments to pass to the hook script. 344 kwargs: Arbitrary arguments to pass to the hook script.
323 345
324 Raises: 346 Raises:
325 HookError: When the hooks failed for any reason. 347 HookError: When the hooks failed for any reason.
326 """ 348 """
327 # Exec, storing global context in the context dict. We catch exceptions 349 # Exec, storing global context in the context dict. We catch exceptions
328 # and convert to a HookError w/ just the failing traceback. 350 # and convert to a HookError w/ just the failing traceback.
329 try: 351 try:
330 exec(compile(data, self._script_fullpath, 'exec'), context) 352 exec(compile(data, self._script_fullpath, "exec"), context)
331 except Exception: 353 except Exception:
332 raise HookError('%s\nFailed to import %s hook; see traceback above.' % 354 raise HookError(
333 (traceback.format_exc(), self._hook_type)) 355 "%s\nFailed to import %s hook; see traceback above."
334 356 % (traceback.format_exc(), self._hook_type)
335 # Running the script should have defined a main() function. 357 )
336 if 'main' not in context: 358
337 raise HookError('Missing main() in: "%s"' % self._script_fullpath) 359 # Running the script should have defined a main() function.
338 360 if "main" not in context:
339 # Call the main function in the hook. If the hook should cause the 361 raise HookError('Missing main() in: "%s"' % self._script_fullpath)
340 # build to fail, it will raise an Exception. We'll catch that convert 362
341 # to a HookError w/ just the failing traceback. 363 # Call the main function in the hook. If the hook should cause the
342 try: 364 # build to fail, it will raise an Exception. We'll catch that convert
343 context['main'](**kwargs) 365 # to a HookError w/ just the failing traceback.
344 except Exception:
345 raise HookError('%s\nFailed to run main() for %s hook; see traceback '
346 'above.' % (traceback.format_exc(), self._hook_type))
347
348 def _ExecuteHook(self, **kwargs):
349 """Actually execute the given hook.
350
351 This will run the hook's 'main' function in our python interpreter.
352
353 Args:
354 kwargs: Keyword arguments to pass to the hook. These are often specific
355 to the hook type. For instance, pre-upload hooks will contain
356 a project_list.
357 """
358 # Keep sys.path and CWD stashed away so that we can always restore them
359 # upon function exit.
360 orig_path = os.getcwd()
361 orig_syspath = sys.path
362
363 try:
364 # Always run hooks with CWD as topdir.
365 os.chdir(self._repo_topdir)
366
367 # Put the hook dir as the first item of sys.path so hooks can do
368 # relative imports. We want to replace the repo dir as [0] so
369 # hooks can't import repo files.
370 sys.path = [os.path.dirname(self._script_fullpath)] + sys.path[1:]
371
372 # Initial global context for the hook to run within.
373 context = {'__file__': self._script_fullpath}
374
375 # Add 'hook_should_take_kwargs' to the arguments to be passed to main.
376 # We don't actually want hooks to define their main with this argument--
377 # it's there to remind them that their hook should always take **kwargs.
378 # For instance, a pre-upload hook should be defined like:
379 # def main(project_list, **kwargs):
380 #
381 # This allows us to later expand the API without breaking old hooks.
382 kwargs = kwargs.copy()
383 kwargs['hook_should_take_kwargs'] = True
384
385 # See what version of python the hook has been written against.
386 data = open(self._script_fullpath).read()
387 interp = self._ExtractInterpFromShebang(data)
388 reexec = False
389 if interp:
390 prog = os.path.basename(interp)
391 if prog.startswith('python2') and sys.version_info.major != 2:
392 reexec = True
393 elif prog.startswith('python3') and sys.version_info.major == 2:
394 reexec = True
395
396 # Attempt to execute the hooks through the requested version of Python.
397 if reexec:
398 try: 366 try:
399 self._ExecuteHookViaReexec(interp, context, **kwargs) 367 context["main"](**kwargs)
400 except OSError as e: 368 except Exception:
401 if e.errno == errno.ENOENT: 369 raise HookError(
402 # We couldn't find the interpreter, so fallback to importing. 370 "%s\nFailed to run main() for %s hook; see traceback "
371 "above." % (traceback.format_exc(), self._hook_type)
372 )
373
374 def _ExecuteHook(self, **kwargs):
375 """Actually execute the given hook.
376
377 This will run the hook's 'main' function in our python interpreter.
378
379 Args:
380 kwargs: Keyword arguments to pass to the hook. These are often
381 specific to the hook type. For instance, pre-upload hooks will
382 contain a project_list.
383 """
384 # Keep sys.path and CWD stashed away so that we can always restore them
385 # upon function exit.
386 orig_path = os.getcwd()
387 orig_syspath = sys.path
388
389 try:
390 # Always run hooks with CWD as topdir.
391 os.chdir(self._repo_topdir)
392
393 # Put the hook dir as the first item of sys.path so hooks can do
394 # relative imports. We want to replace the repo dir as [0] so
395 # hooks can't import repo files.
396 sys.path = [os.path.dirname(self._script_fullpath)] + sys.path[1:]
397
398 # Initial global context for the hook to run within.
399 context = {"__file__": self._script_fullpath}
400
401 # Add 'hook_should_take_kwargs' to the arguments to be passed to
402 # main. We don't actually want hooks to define their main with this
403 # argument--it's there to remind them that their hook should always
404 # take **kwargs.
405 # For instance, a pre-upload hook should be defined like:
406 # def main(project_list, **kwargs):
407 #
408 # This allows us to later expand the API without breaking old hooks.
409 kwargs = kwargs.copy()
410 kwargs["hook_should_take_kwargs"] = True
411
412 # See what version of python the hook has been written against.
413 data = open(self._script_fullpath).read()
414 interp = self._ExtractInterpFromShebang(data)
403 reexec = False 415 reexec = False
404 else: 416 if interp:
405 raise 417 prog = os.path.basename(interp)
406 418 if prog.startswith("python2") and sys.version_info.major != 2:
407 # Run the hook by importing directly. 419 reexec = True
408 if not reexec: 420 elif prog.startswith("python3") and sys.version_info.major == 2:
409 self._ExecuteHookViaImport(data, context, **kwargs) 421 reexec = True
410 finally: 422
411 # Restore sys.path and CWD. 423 # Attempt to execute the hooks through the requested version of
412 sys.path = orig_syspath 424 # Python.
413 os.chdir(orig_path) 425 if reexec:
414 426 try:
415 def _CheckHook(self): 427 self._ExecuteHookViaReexec(interp, context, **kwargs)
416 # Bail with a nice error if we can't find the hook. 428 except OSError as e:
417 if not os.path.isfile(self._script_fullpath): 429 if e.errno == errno.ENOENT:
418 raise HookError('Couldn\'t find repo hook: %s' % self._script_fullpath) 430 # We couldn't find the interpreter, so fallback to
419 431 # importing.
420 def Run(self, **kwargs): 432 reexec = False
421 """Run the hook. 433 else:
422 434 raise
423 If the hook doesn't exist (because there is no hooks project or because 435
424 this particular hook is not enabled), this is a no-op. 436 # Run the hook by importing directly.
425 437 if not reexec:
426 Args: 438 self._ExecuteHookViaImport(data, context, **kwargs)
427 user_allows_all_hooks: If True, we will never prompt about running the 439 finally:
428 hook--we'll just assume it's OK to run it. 440 # Restore sys.path and CWD.
429 kwargs: Keyword arguments to pass to the hook. These are often specific 441 sys.path = orig_syspath
430 to the hook type. For instance, pre-upload hooks will contain 442 os.chdir(orig_path)
431 a project_list. 443
432 444 def _CheckHook(self):
433 Returns: 445 # Bail with a nice error if we can't find the hook.
434 True: On success or ignore hooks by user-request 446 if not os.path.isfile(self._script_fullpath):
435 False: The hook failed. The caller should respond with aborting the action. 447 raise HookError(
436 Some examples in which False is returned: 448 "Couldn't find repo hook: %s" % self._script_fullpath
437 * Finding the hook failed while it was enabled, or 449 )
438 * the user declined to run a required hook (from _CheckForHookApproval) 450
439 In all these cases the user did not pass the proper arguments to 451 def Run(self, **kwargs):
440 ignore the result through the option combinations as listed in 452 """Run the hook.
441 AddHookOptionGroup(). 453
442 """ 454 If the hook doesn't exist (because there is no hooks project or because
443 # Do not do anything in case bypass_hooks is set, or 455 this particular hook is not enabled), this is a no-op.
444 # no-op if there is no hooks project or if hook is disabled. 456
445 if (self._bypass_hooks or 457 Args:
446 not self._hooks_project or 458 user_allows_all_hooks: If True, we will never prompt about running
447 self._hook_type not in self._hooks_project.enabled_repo_hooks): 459 the hook--we'll just assume it's OK to run it.
448 return True 460 kwargs: Keyword arguments to pass to the hook. These are often
449 461 specific to the hook type. For instance, pre-upload hooks will
450 passed = True 462 contain a project_list.
451 try: 463
452 self._CheckHook() 464 Returns:
453 465 True: On success or ignore hooks by user-request
454 # Make sure the user is OK with running the hook. 466 False: The hook failed. The caller should respond with aborting the
455 if self._allow_all_hooks or self._CheckForHookApproval(): 467 action. Some examples in which False is returned:
456 # Run the hook with the same version of python we're using. 468 * Finding the hook failed while it was enabled, or
457 self._ExecuteHook(**kwargs) 469 * the user declined to run a required hook (from
458 except SystemExit as e: 470 _CheckForHookApproval)
459 passed = False 471 In all these cases the user did not pass the proper arguments to
460 print('ERROR: %s hooks exited with exit code: %s' % (self._hook_type, str(e)), 472 ignore the result through the option combinations as listed in
461 file=sys.stderr) 473 AddHookOptionGroup().
462 except HookError as e: 474 """
463 passed = False 475 # Do not do anything in case bypass_hooks is set, or
464 print('ERROR: %s' % str(e), file=sys.stderr) 476 # no-op if there is no hooks project or if hook is disabled.
465 477 if (
466 if not passed and self._ignore_hooks: 478 self._bypass_hooks
467 print('\nWARNING: %s hooks failed, but continuing anyways.' % self._hook_type, 479 or not self._hooks_project
468 file=sys.stderr) 480 or self._hook_type not in self._hooks_project.enabled_repo_hooks
469 passed = True 481 ):
470 482 return True
471 return passed 483
472 484 passed = True
473 @classmethod 485 try:
474 def FromSubcmd(cls, manifest, opt, *args, **kwargs): 486 self._CheckHook()
475 """Method to construct the repo hook class 487
476 488 # Make sure the user is OK with running the hook.
477 Args: 489 if self._allow_all_hooks or self._CheckForHookApproval():
478 manifest: The current active manifest for this command from which we 490 # Run the hook with the same version of python we're using.
479 extract a couple of fields. 491 self._ExecuteHook(**kwargs)
480 opt: Contains the commandline options for the action of this hook. 492 except SystemExit as e:
481 It should contain the options added by AddHookOptionGroup() in which 493 passed = False
482 we are interested in RepoHook execution. 494 print(
483 """ 495 "ERROR: %s hooks exited with exit code: %s"
484 for key in ('bypass_hooks', 'allow_all_hooks', 'ignore_hooks'): 496 % (self._hook_type, str(e)),
485 kwargs.setdefault(key, getattr(opt, key)) 497 file=sys.stderr,
486 kwargs.update({ 498 )
487 'hooks_project': manifest.repo_hooks_project, 499 except HookError as e:
488 'repo_topdir': manifest.topdir, 500 passed = False
489 'manifest_url': manifest.manifestProject.GetRemote('origin').url, 501 print("ERROR: %s" % str(e), file=sys.stderr)
490 }) 502
491 return cls(*args, **kwargs) 503 if not passed and self._ignore_hooks:
492 504 print(
493 @staticmethod 505 "\nWARNING: %s hooks failed, but continuing anyways."
494 def AddOptionGroup(parser, name): 506 % self._hook_type,
495 """Help options relating to the various hooks.""" 507 file=sys.stderr,
496 508 )
497 # Note that verify and no-verify are NOT opposites of each other, which 509 passed = True
498 # is why they store to different locations. We are using them to match 510
499 # 'git commit' syntax. 511 return passed
500 group = parser.add_option_group(name + ' hooks') 512
501 group.add_option('--no-verify', 513 @classmethod
502 dest='bypass_hooks', action='store_true', 514 def FromSubcmd(cls, manifest, opt, *args, **kwargs):
503 help='Do not run the %s hook.' % name) 515 """Method to construct the repo hook class
504 group.add_option('--verify', 516
505 dest='allow_all_hooks', action='store_true', 517 Args:
506 help='Run the %s hook without prompting.' % name) 518 manifest: The current active manifest for this command from which we
507 group.add_option('--ignore-hooks', 519 extract a couple of fields.
508 action='store_true', 520 opt: Contains the commandline options for the action of this hook.
509 help='Do not abort if %s hooks fail.' % name) 521 It should contain the options added by AddHookOptionGroup() in
522 which we are interested in RepoHook execution.
523 """
524 for key in ("bypass_hooks", "allow_all_hooks", "ignore_hooks"):
525 kwargs.setdefault(key, getattr(opt, key))
526 kwargs.update(
527 {
528 "hooks_project": manifest.repo_hooks_project,
529 "repo_topdir": manifest.topdir,
530 "manifest_url": manifest.manifestProject.GetRemote(
531 "origin"
532 ).url,
533 }
534 )
535 return cls(*args, **kwargs)
536
537 @staticmethod
538 def AddOptionGroup(parser, name):
539 """Help options relating to the various hooks."""
540
541 # Note that verify and no-verify are NOT opposites of each other, which
542 # is why they store to different locations. We are using them to match
543 # 'git commit' syntax.
544 group = parser.add_option_group(name + " hooks")
545 group.add_option(
546 "--no-verify",
547 dest="bypass_hooks",
548 action="store_true",
549 help="Do not run the %s hook." % name,
550 )
551 group.add_option(
552 "--verify",
553 dest="allow_all_hooks",
554 action="store_true",
555 help="Run the %s hook without prompting." % name,
556 )
557 group.add_option(
558 "--ignore-hooks",
559 action="store_true",
560 help="Do not abort if %s hooks fail." % name,
561 )
diff --git a/main.py b/main.py
index f4b6e7ac..6dcb66f6 100755
--- a/main.py
+++ b/main.py
@@ -31,9 +31,9 @@ import time
31import urllib.request 31import urllib.request
32 32
33try: 33try:
34 import kerberos 34 import kerberos
35except ImportError: 35except ImportError:
36 kerberos = None 36 kerberos = None
37 37
38from color import SetDefaultColoring 38from color import SetDefaultColoring
39import event_log 39import event_log
@@ -74,347 +74,442 @@ MIN_PYTHON_VERSION_SOFT = (3, 6)
74MIN_PYTHON_VERSION_HARD = (3, 6) 74MIN_PYTHON_VERSION_HARD = (3, 6)
75 75
76if sys.version_info.major < 3: 76if sys.version_info.major < 3:
77 print('repo: error: Python 2 is no longer supported; ' 77 print(
78 'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION_SOFT), 78 "repo: error: Python 2 is no longer supported; "
79 file=sys.stderr) 79 "Please upgrade to Python {}.{}+.".format(*MIN_PYTHON_VERSION_SOFT),
80 sys.exit(1) 80 file=sys.stderr,
81else: 81 )
82 if sys.version_info < MIN_PYTHON_VERSION_HARD:
83 print('repo: error: Python 3 version is too old; '
84 'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION_SOFT),
85 file=sys.stderr)
86 sys.exit(1) 82 sys.exit(1)
87 elif sys.version_info < MIN_PYTHON_VERSION_SOFT: 83else:
88 print('repo: warning: your Python 3 version is no longer supported; ' 84 if sys.version_info < MIN_PYTHON_VERSION_HARD:
89 'Please upgrade to Python {}.{}+.'.format(*MIN_PYTHON_VERSION_SOFT), 85 print(
90 file=sys.stderr) 86 "repo: error: Python 3 version is too old; "
87 "Please upgrade to Python {}.{}+.".format(*MIN_PYTHON_VERSION_SOFT),
88 file=sys.stderr,
89 )
90 sys.exit(1)
91 elif sys.version_info < MIN_PYTHON_VERSION_SOFT:
92 print(
93 "repo: warning: your Python 3 version is no longer supported; "
94 "Please upgrade to Python {}.{}+.".format(*MIN_PYTHON_VERSION_SOFT),
95 file=sys.stderr,
96 )
91 97
92 98
93global_options = optparse.OptionParser( 99global_options = optparse.OptionParser(
94 usage='repo [-p|--paginate|--no-pager] COMMAND [ARGS]', 100 usage="repo [-p|--paginate|--no-pager] COMMAND [ARGS]",
95 add_help_option=False) 101 add_help_option=False,
96global_options.add_option('-h', '--help', action='store_true', 102)
97 help='show this help message and exit') 103global_options.add_option(
98global_options.add_option('--help-all', action='store_true', 104 "-h", "--help", action="store_true", help="show this help message and exit"
99 help='show this help message with all subcommands and exit') 105)
100global_options.add_option('-p', '--paginate', 106global_options.add_option(
101 dest='pager', action='store_true', 107 "--help-all",
102 help='display command output in the pager') 108 action="store_true",
103global_options.add_option('--no-pager', 109 help="show this help message with all subcommands and exit",
104 dest='pager', action='store_false', 110)
105 help='disable the pager') 111global_options.add_option(
106global_options.add_option('--color', 112 "-p",
107 choices=('auto', 'always', 'never'), default=None, 113 "--paginate",
108 help='control color usage: auto, always, never') 114 dest="pager",
109global_options.add_option('--trace', 115 action="store_true",
110 dest='trace', action='store_true', 116 help="display command output in the pager",
111 help='trace git command execution (REPO_TRACE=1)') 117)
112global_options.add_option('--trace-to-stderr', 118global_options.add_option(
113 dest='trace_to_stderr', action='store_true', 119 "--no-pager", dest="pager", action="store_false", help="disable the pager"
114 help='trace outputs go to stderr in addition to .repo/TRACE_FILE') 120)
115global_options.add_option('--trace-python', 121global_options.add_option(
116 dest='trace_python', action='store_true', 122 "--color",
117 help='trace python command execution') 123 choices=("auto", "always", "never"),
118global_options.add_option('--time', 124 default=None,
119 dest='time', action='store_true', 125 help="control color usage: auto, always, never",
120 help='time repo command execution') 126)
121global_options.add_option('--version', 127global_options.add_option(
122 dest='show_version', action='store_true', 128 "--trace",
123 help='display this version of repo') 129 dest="trace",
124global_options.add_option('--show-toplevel', 130 action="store_true",
125 action='store_true', 131 help="trace git command execution (REPO_TRACE=1)",
126 help='display the path of the top-level directory of ' 132)
127 'the repo client checkout') 133global_options.add_option(
128global_options.add_option('--event-log', 134 "--trace-to-stderr",
129 dest='event_log', action='store', 135 dest="trace_to_stderr",
130 help='filename of event log to append timeline to') 136 action="store_true",
131global_options.add_option('--git-trace2-event-log', action='store', 137 help="trace outputs go to stderr in addition to .repo/TRACE_FILE",
132 help='directory to write git trace2 event log to') 138)
133global_options.add_option('--submanifest-path', action='store', 139global_options.add_option(
134 metavar='REL_PATH', help='submanifest path') 140 "--trace-python",
141 dest="trace_python",
142 action="store_true",
143 help="trace python command execution",
144)
145global_options.add_option(
146 "--time",
147 dest="time",
148 action="store_true",
149 help="time repo command execution",
150)
151global_options.add_option(
152 "--version",
153 dest="show_version",
154 action="store_true",
155 help="display this version of repo",
156)
157global_options.add_option(
158 "--show-toplevel",
159 action="store_true",
160 help="display the path of the top-level directory of "
161 "the repo client checkout",
162)
163global_options.add_option(
164 "--event-log",
165 dest="event_log",
166 action="store",
167 help="filename of event log to append timeline to",
168)
169global_options.add_option(
170 "--git-trace2-event-log",
171 action="store",
172 help="directory to write git trace2 event log to",
173)
174global_options.add_option(
175 "--submanifest-path",
176 action="store",
177 metavar="REL_PATH",
178 help="submanifest path",
179)
135 180
136 181
137class _Repo(object): 182class _Repo(object):
138 def __init__(self, repodir): 183 def __init__(self, repodir):
139 self.repodir = repodir 184 self.repodir = repodir
140 self.commands = all_commands 185 self.commands = all_commands
141 186
142 def _PrintHelp(self, short: bool = False, all_commands: bool = False): 187 def _PrintHelp(self, short: bool = False, all_commands: bool = False):
143 """Show --help screen.""" 188 """Show --help screen."""
144 global_options.print_help() 189 global_options.print_help()
145 print() 190 print()
146 if short: 191 if short:
147 commands = ' '.join(sorted(self.commands)) 192 commands = " ".join(sorted(self.commands))
148 wrapped_commands = textwrap.wrap(commands, width=77) 193 wrapped_commands = textwrap.wrap(commands, width=77)
149 print('Available commands:\n %s' % ('\n '.join(wrapped_commands),)) 194 print(
150 print('\nRun `repo help <command>` for command-specific details.') 195 "Available commands:\n %s" % ("\n ".join(wrapped_commands),)
151 print('Bug reports:', Wrapper().BUG_URL) 196 )
152 else: 197 print("\nRun `repo help <command>` for command-specific details.")
153 cmd = self.commands['help']() 198 print("Bug reports:", Wrapper().BUG_URL)
154 if all_commands:
155 cmd.PrintAllCommandsBody()
156 else:
157 cmd.PrintCommonCommandsBody()
158
159 def _ParseArgs(self, argv):
160 """Parse the main `repo` command line options."""
161 for i, arg in enumerate(argv):
162 if not arg.startswith('-'):
163 name = arg
164 glob = argv[:i]
165 argv = argv[i + 1:]
166 break
167 else:
168 name = None
169 glob = argv
170 argv = []
171 gopts, _gargs = global_options.parse_args(glob)
172
173 if name:
174 name, alias_args = self._ExpandAlias(name)
175 argv = alias_args + argv
176
177 return (name, gopts, argv)
178
179 def _ExpandAlias(self, name):
180 """Look up user registered aliases."""
181 # We don't resolve aliases for existing subcommands. This matches git.
182 if name in self.commands:
183 return name, []
184
185 key = 'alias.%s' % (name,)
186 alias = RepoConfig.ForRepository(self.repodir).GetString(key)
187 if alias is None:
188 alias = RepoConfig.ForUser().GetString(key)
189 if alias is None:
190 return name, []
191
192 args = alias.strip().split(' ', 1)
193 name = args[0]
194 if len(args) == 2:
195 args = shlex.split(args[1])
196 else:
197 args = []
198 return name, args
199
200 def _Run(self, name, gopts, argv):
201 """Execute the requested subcommand."""
202 result = 0
203
204 # Handle options that terminate quickly first.
205 if gopts.help or gopts.help_all:
206 self._PrintHelp(short=False, all_commands=gopts.help_all)
207 return 0
208 elif gopts.show_version:
209 # Always allow global --version regardless of subcommand validity.
210 name = 'version'
211 elif gopts.show_toplevel:
212 print(os.path.dirname(self.repodir))
213 return 0
214 elif not name:
215 # No subcommand specified, so show the help/subcommand.
216 self._PrintHelp(short=True)
217 return 1
218
219 run = lambda: self._RunLong(name, gopts, argv) or 0
220 with Trace('starting new command: %s', ', '.join([name] + argv),
221 first_trace=True):
222 if gopts.trace_python:
223 import trace
224 tracer = trace.Trace(count=False, trace=True, timing=True,
225 ignoredirs=set(sys.path[1:]))
226 result = tracer.runfunc(run)
227 else:
228 result = run()
229 return result
230
231 def _RunLong(self, name, gopts, argv):
232 """Execute the (longer running) requested subcommand."""
233 result = 0
234 SetDefaultColoring(gopts.color)
235
236 git_trace2_event_log = EventLog()
237 outer_client = RepoClient(self.repodir)
238 repo_client = outer_client
239 if gopts.submanifest_path:
240 repo_client = RepoClient(self.repodir,
241 submanifest_path=gopts.submanifest_path,
242 outer_client=outer_client)
243 gitc_manifest = None
244 gitc_client_name = gitc_utils.parse_clientdir(os.getcwd())
245 if gitc_client_name:
246 gitc_manifest = GitcClient(self.repodir, gitc_client_name)
247 repo_client.isGitcClient = True
248
249 try:
250 cmd = self.commands[name](
251 repodir=self.repodir,
252 client=repo_client,
253 manifest=repo_client.manifest,
254 outer_client=outer_client,
255 outer_manifest=outer_client.manifest,
256 gitc_manifest=gitc_manifest,
257 git_event_log=git_trace2_event_log)
258 except KeyError:
259 print("repo: '%s' is not a repo command. See 'repo help'." % name,
260 file=sys.stderr)
261 return 1
262
263 Editor.globalConfig = cmd.client.globalConfig
264
265 if not isinstance(cmd, MirrorSafeCommand) and cmd.manifest.IsMirror:
266 print("fatal: '%s' requires a working directory" % name,
267 file=sys.stderr)
268 return 1
269
270 if isinstance(cmd, GitcAvailableCommand) and not gitc_utils.get_gitc_manifest_dir():
271 print("fatal: '%s' requires GITC to be available" % name,
272 file=sys.stderr)
273 return 1
274
275 if isinstance(cmd, GitcClientCommand) and not gitc_client_name:
276 print("fatal: '%s' requires a GITC client" % name,
277 file=sys.stderr)
278 return 1
279
280 try:
281 copts, cargs = cmd.OptionParser.parse_args(argv)
282 copts = cmd.ReadEnvironmentOptions(copts)
283 except NoManifestException as e:
284 print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
285 file=sys.stderr)
286 print('error: manifest missing or unreadable -- please run init',
287 file=sys.stderr)
288 return 1
289
290 if gopts.pager is not False and not isinstance(cmd, InteractiveCommand):
291 config = cmd.client.globalConfig
292 if gopts.pager:
293 use_pager = True
294 else:
295 use_pager = config.GetBoolean('pager.%s' % name)
296 if use_pager is None:
297 use_pager = cmd.WantPager(copts)
298 if use_pager:
299 RunPager(config)
300
301 start = time.time()
302 cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start)
303 cmd.event_log.SetParent(cmd_event)
304 git_trace2_event_log.StartEvent()
305 git_trace2_event_log.CommandEvent(name='repo', subcommands=[name])
306
307 try:
308 cmd.CommonValidateOptions(copts, cargs)
309 cmd.ValidateOptions(copts, cargs)
310
311 this_manifest_only = copts.this_manifest_only
312 outer_manifest = copts.outer_manifest
313 if cmd.MULTI_MANIFEST_SUPPORT or this_manifest_only:
314 result = cmd.Execute(copts, cargs)
315 elif outer_manifest and repo_client.manifest.is_submanifest:
316 # The command does not support multi-manifest, we are using a
317 # submanifest, and the command line is for the outermost manifest.
318 # Re-run using the outermost manifest, which will recurse through the
319 # submanifests.
320 gopts.submanifest_path = ''
321 result = self._Run(name, gopts, argv)
322 else:
323 # No multi-manifest support. Run the command in the current
324 # (sub)manifest, and then any child submanifests.
325 result = cmd.Execute(copts, cargs)
326 for submanifest in repo_client.manifest.submanifests.values():
327 spec = submanifest.ToSubmanifestSpec()
328 gopts.submanifest_path = submanifest.repo_client.path_prefix
329 child_argv = argv[:]
330 child_argv.append('--no-outer-manifest')
331 # Not all subcommands support the 3 manifest options, so only add them
332 # if the original command includes them.
333 if hasattr(copts, 'manifest_url'):
334 child_argv.extend(['--manifest-url', spec.manifestUrl])
335 if hasattr(copts, 'manifest_name'):
336 child_argv.extend(['--manifest-name', spec.manifestName])
337 if hasattr(copts, 'manifest_branch'):
338 child_argv.extend(['--manifest-branch', spec.revision])
339 result = self._Run(name, gopts, child_argv) or result
340 except (DownloadError, ManifestInvalidRevisionError,
341 NoManifestException) as e:
342 print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
343 file=sys.stderr)
344 if isinstance(e, NoManifestException):
345 print('error: manifest missing or unreadable -- please run init',
346 file=sys.stderr)
347 result = 1
348 except NoSuchProjectError as e:
349 if e.name:
350 print('error: project %s not found' % e.name, file=sys.stderr)
351 else:
352 print('error: no project in current directory', file=sys.stderr)
353 result = 1
354 except InvalidProjectGroupsError as e:
355 if e.name:
356 print('error: project group must be enabled for project %s' % e.name, file=sys.stderr)
357 else:
358 print('error: project group must be enabled for the project in the current directory',
359 file=sys.stderr)
360 result = 1
361 except SystemExit as e:
362 if e.code:
363 result = e.code
364 raise
365 finally:
366 finish = time.time()
367 elapsed = finish - start
368 hours, remainder = divmod(elapsed, 3600)
369 minutes, seconds = divmod(remainder, 60)
370 if gopts.time:
371 if hours == 0:
372 print('real\t%dm%.3fs' % (minutes, seconds), file=sys.stderr)
373 else: 199 else:
374 print('real\t%dh%dm%.3fs' % (hours, minutes, seconds), 200 cmd = self.commands["help"]()
375 file=sys.stderr) 201 if all_commands:
376 202 cmd.PrintAllCommandsBody()
377 cmd.event_log.FinishEvent(cmd_event, finish, 203 else:
378 result is None or result == 0) 204 cmd.PrintCommonCommandsBody()
379 git_trace2_event_log.DefParamRepoEvents( 205
380 cmd.manifest.manifestProject.config.DumpConfigDict()) 206 def _ParseArgs(self, argv):
381 git_trace2_event_log.ExitEvent(result) 207 """Parse the main `repo` command line options."""
382 208 for i, arg in enumerate(argv):
383 if gopts.event_log: 209 if not arg.startswith("-"):
384 cmd.event_log.Write(os.path.abspath( 210 name = arg
385 os.path.expanduser(gopts.event_log))) 211 glob = argv[:i]
386 212 argv = argv[i + 1 :]
387 git_trace2_event_log.Write(gopts.git_trace2_event_log) 213 break
388 return result 214 else:
215 name = None
216 glob = argv
217 argv = []
218 gopts, _gargs = global_options.parse_args(glob)
219
220 if name:
221 name, alias_args = self._ExpandAlias(name)
222 argv = alias_args + argv
223
224 return (name, gopts, argv)
225
226 def _ExpandAlias(self, name):
227 """Look up user registered aliases."""
228 # We don't resolve aliases for existing subcommands. This matches git.
229 if name in self.commands:
230 return name, []
231
232 key = "alias.%s" % (name,)
233 alias = RepoConfig.ForRepository(self.repodir).GetString(key)
234 if alias is None:
235 alias = RepoConfig.ForUser().GetString(key)
236 if alias is None:
237 return name, []
238
239 args = alias.strip().split(" ", 1)
240 name = args[0]
241 if len(args) == 2:
242 args = shlex.split(args[1])
243 else:
244 args = []
245 return name, args
246
247 def _Run(self, name, gopts, argv):
248 """Execute the requested subcommand."""
249 result = 0
250
251 # Handle options that terminate quickly first.
252 if gopts.help or gopts.help_all:
253 self._PrintHelp(short=False, all_commands=gopts.help_all)
254 return 0
255 elif gopts.show_version:
256 # Always allow global --version regardless of subcommand validity.
257 name = "version"
258 elif gopts.show_toplevel:
259 print(os.path.dirname(self.repodir))
260 return 0
261 elif not name:
262 # No subcommand specified, so show the help/subcommand.
263 self._PrintHelp(short=True)
264 return 1
265
266 run = lambda: self._RunLong(name, gopts, argv) or 0
267 with Trace(
268 "starting new command: %s",
269 ", ".join([name] + argv),
270 first_trace=True,
271 ):
272 if gopts.trace_python:
273 import trace
274
275 tracer = trace.Trace(
276 count=False,
277 trace=True,
278 timing=True,
279 ignoredirs=set(sys.path[1:]),
280 )
281 result = tracer.runfunc(run)
282 else:
283 result = run()
284 return result
285
286 def _RunLong(self, name, gopts, argv):
287 """Execute the (longer running) requested subcommand."""
288 result = 0
289 SetDefaultColoring(gopts.color)
290
291 git_trace2_event_log = EventLog()
292 outer_client = RepoClient(self.repodir)
293 repo_client = outer_client
294 if gopts.submanifest_path:
295 repo_client = RepoClient(
296 self.repodir,
297 submanifest_path=gopts.submanifest_path,
298 outer_client=outer_client,
299 )
300 gitc_manifest = None
301 gitc_client_name = gitc_utils.parse_clientdir(os.getcwd())
302 if gitc_client_name:
303 gitc_manifest = GitcClient(self.repodir, gitc_client_name)
304 repo_client.isGitcClient = True
305
306 try:
307 cmd = self.commands[name](
308 repodir=self.repodir,
309 client=repo_client,
310 manifest=repo_client.manifest,
311 outer_client=outer_client,
312 outer_manifest=outer_client.manifest,
313 gitc_manifest=gitc_manifest,
314 git_event_log=git_trace2_event_log,
315 )
316 except KeyError:
317 print(
318 "repo: '%s' is not a repo command. See 'repo help'." % name,
319 file=sys.stderr,
320 )
321 return 1
322
323 Editor.globalConfig = cmd.client.globalConfig
324
325 if not isinstance(cmd, MirrorSafeCommand) and cmd.manifest.IsMirror:
326 print(
327 "fatal: '%s' requires a working directory" % name,
328 file=sys.stderr,
329 )
330 return 1
331
332 if (
333 isinstance(cmd, GitcAvailableCommand)
334 and not gitc_utils.get_gitc_manifest_dir()
335 ):
336 print(
337 "fatal: '%s' requires GITC to be available" % name,
338 file=sys.stderr,
339 )
340 return 1
341
342 if isinstance(cmd, GitcClientCommand) and not gitc_client_name:
343 print("fatal: '%s' requires a GITC client" % name, file=sys.stderr)
344 return 1
345
346 try:
347 copts, cargs = cmd.OptionParser.parse_args(argv)
348 copts = cmd.ReadEnvironmentOptions(copts)
349 except NoManifestException as e:
350 print(
351 "error: in `%s`: %s" % (" ".join([name] + argv), str(e)),
352 file=sys.stderr,
353 )
354 print(
355 "error: manifest missing or unreadable -- please run init",
356 file=sys.stderr,
357 )
358 return 1
359
360 if gopts.pager is not False and not isinstance(cmd, InteractiveCommand):
361 config = cmd.client.globalConfig
362 if gopts.pager:
363 use_pager = True
364 else:
365 use_pager = config.GetBoolean("pager.%s" % name)
366 if use_pager is None:
367 use_pager = cmd.WantPager(copts)
368 if use_pager:
369 RunPager(config)
370
371 start = time.time()
372 cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start)
373 cmd.event_log.SetParent(cmd_event)
374 git_trace2_event_log.StartEvent()
375 git_trace2_event_log.CommandEvent(name="repo", subcommands=[name])
376
377 try:
378 cmd.CommonValidateOptions(copts, cargs)
379 cmd.ValidateOptions(copts, cargs)
380
381 this_manifest_only = copts.this_manifest_only
382 outer_manifest = copts.outer_manifest
383 if cmd.MULTI_MANIFEST_SUPPORT or this_manifest_only:
384 result = cmd.Execute(copts, cargs)
385 elif outer_manifest and repo_client.manifest.is_submanifest:
386 # The command does not support multi-manifest, we are using a
387 # submanifest, and the command line is for the outermost
388 # manifest. Re-run using the outermost manifest, which will
389 # recurse through the submanifests.
390 gopts.submanifest_path = ""
391 result = self._Run(name, gopts, argv)
392 else:
393 # No multi-manifest support. Run the command in the current
394 # (sub)manifest, and then any child submanifests.
395 result = cmd.Execute(copts, cargs)
396 for submanifest in repo_client.manifest.submanifests.values():
397 spec = submanifest.ToSubmanifestSpec()
398 gopts.submanifest_path = submanifest.repo_client.path_prefix
399 child_argv = argv[:]
400 child_argv.append("--no-outer-manifest")
401 # Not all subcommands support the 3 manifest options, so
402 # only add them if the original command includes them.
403 if hasattr(copts, "manifest_url"):
404 child_argv.extend(["--manifest-url", spec.manifestUrl])
405 if hasattr(copts, "manifest_name"):
406 child_argv.extend(
407 ["--manifest-name", spec.manifestName]
408 )
409 if hasattr(copts, "manifest_branch"):
410 child_argv.extend(["--manifest-branch", spec.revision])
411 result = self._Run(name, gopts, child_argv) or result
412 except (
413 DownloadError,
414 ManifestInvalidRevisionError,
415 NoManifestException,
416 ) as e:
417 print(
418 "error: in `%s`: %s" % (" ".join([name] + argv), str(e)),
419 file=sys.stderr,
420 )
421 if isinstance(e, NoManifestException):
422 print(
423 "error: manifest missing or unreadable -- please run init",
424 file=sys.stderr,
425 )
426 result = 1
427 except NoSuchProjectError as e:
428 if e.name:
429 print("error: project %s not found" % e.name, file=sys.stderr)
430 else:
431 print("error: no project in current directory", file=sys.stderr)
432 result = 1
433 except InvalidProjectGroupsError as e:
434 if e.name:
435 print(
436 "error: project group must be enabled for project %s"
437 % e.name,
438 file=sys.stderr,
439 )
440 else:
441 print(
442 "error: project group must be enabled for the project in "
443 "the current directory",
444 file=sys.stderr,
445 )
446 result = 1
447 except SystemExit as e:
448 if e.code:
449 result = e.code
450 raise
451 finally:
452 finish = time.time()
453 elapsed = finish - start
454 hours, remainder = divmod(elapsed, 3600)
455 minutes, seconds = divmod(remainder, 60)
456 if gopts.time:
457 if hours == 0:
458 print(
459 "real\t%dm%.3fs" % (minutes, seconds), file=sys.stderr
460 )
461 else:
462 print(
463 "real\t%dh%dm%.3fs" % (hours, minutes, seconds),
464 file=sys.stderr,
465 )
466
467 cmd.event_log.FinishEvent(
468 cmd_event, finish, result is None or result == 0
469 )
470 git_trace2_event_log.DefParamRepoEvents(
471 cmd.manifest.manifestProject.config.DumpConfigDict()
472 )
473 git_trace2_event_log.ExitEvent(result)
474
475 if gopts.event_log:
476 cmd.event_log.Write(
477 os.path.abspath(os.path.expanduser(gopts.event_log))
478 )
479
480 git_trace2_event_log.Write(gopts.git_trace2_event_log)
481 return result
389 482
390 483
391def _CheckWrapperVersion(ver_str, repo_path): 484def _CheckWrapperVersion(ver_str, repo_path):
392 """Verify the repo launcher is new enough for this checkout. 485 """Verify the repo launcher is new enough for this checkout.
393 486
394 Args: 487 Args:
395 ver_str: The version string passed from the repo launcher when it ran us. 488 ver_str: The version string passed from the repo launcher when it ran
396 repo_path: The path to the repo launcher that loaded us. 489 us.
397 """ 490 repo_path: The path to the repo launcher that loaded us.
398 # Refuse to work with really old wrapper versions. We don't test these, 491 """
399 # so might as well require a somewhat recent sane version. 492 # Refuse to work with really old wrapper versions. We don't test these,
400 # v1.15 of the repo launcher was released in ~Mar 2012. 493 # so might as well require a somewhat recent sane version.
401 MIN_REPO_VERSION = (1, 15) 494 # v1.15 of the repo launcher was released in ~Mar 2012.
402 min_str = '.'.join(str(x) for x in MIN_REPO_VERSION) 495 MIN_REPO_VERSION = (1, 15)
403 496 min_str = ".".join(str(x) for x in MIN_REPO_VERSION)
404 if not repo_path: 497
405 repo_path = '~/bin/repo' 498 if not repo_path:
406 499 repo_path = "~/bin/repo"
407 if not ver_str: 500
408 print('no --wrapper-version argument', file=sys.stderr) 501 if not ver_str:
409 sys.exit(1) 502 print("no --wrapper-version argument", file=sys.stderr)
410 503 sys.exit(1)
411 # Pull out the version of the repo launcher we know about to compare. 504
412 exp = Wrapper().VERSION 505 # Pull out the version of the repo launcher we know about to compare.
413 ver = tuple(map(int, ver_str.split('.'))) 506 exp = Wrapper().VERSION
414 507 ver = tuple(map(int, ver_str.split(".")))
415 exp_str = '.'.join(map(str, exp)) 508
416 if ver < MIN_REPO_VERSION: 509 exp_str = ".".join(map(str, exp))
417 print(""" 510 if ver < MIN_REPO_VERSION:
511 print(
512 """
418repo: error: 513repo: error:
419!!! Your version of repo %s is too old. 514!!! Your version of repo %s is too old.
420!!! We need at least version %s. 515!!! We need at least version %s.
@@ -422,284 +517,321 @@ repo: error:
422!!! You must upgrade before you can continue: 517!!! You must upgrade before you can continue:
423 518
424 cp %s %s 519 cp %s %s
425""" % (ver_str, min_str, exp_str, WrapperPath(), repo_path), file=sys.stderr) 520"""
426 sys.exit(1) 521 % (ver_str, min_str, exp_str, WrapperPath(), repo_path),
427 522 file=sys.stderr,
428 if exp > ver: 523 )
429 print('\n... A new version of repo (%s) is available.' % (exp_str,), 524 sys.exit(1)
430 file=sys.stderr) 525
431 if os.access(repo_path, os.W_OK): 526 if exp > ver:
432 print("""\ 527 print(
528 "\n... A new version of repo (%s) is available." % (exp_str,),
529 file=sys.stderr,
530 )
531 if os.access(repo_path, os.W_OK):
532 print(
533 """\
433... You should upgrade soon: 534... You should upgrade soon:
434 cp %s %s 535 cp %s %s
435""" % (WrapperPath(), repo_path), file=sys.stderr) 536"""
436 else: 537 % (WrapperPath(), repo_path),
437 print("""\ 538 file=sys.stderr,
539 )
540 else:
541 print(
542 """\
438... New version is available at: %s 543... New version is available at: %s
439... The launcher is run from: %s 544... The launcher is run from: %s
440!!! The launcher is not writable. Please talk to your sysadmin or distro 545!!! The launcher is not writable. Please talk to your sysadmin or distro
441!!! to get an update installed. 546!!! to get an update installed.
442""" % (WrapperPath(), repo_path), file=sys.stderr) 547"""
548 % (WrapperPath(), repo_path),
549 file=sys.stderr,
550 )
443 551
444 552
445def _CheckRepoDir(repo_dir): 553def _CheckRepoDir(repo_dir):
446 if not repo_dir: 554 if not repo_dir:
447 print('no --repo-dir argument', file=sys.stderr) 555 print("no --repo-dir argument", file=sys.stderr)
448 sys.exit(1) 556 sys.exit(1)
449 557
450 558
451def _PruneOptions(argv, opt): 559def _PruneOptions(argv, opt):
452 i = 0 560 i = 0
453 while i < len(argv): 561 while i < len(argv):
454 a = argv[i] 562 a = argv[i]
455 if a == '--': 563 if a == "--":
456 break 564 break
457 if a.startswith('--'): 565 if a.startswith("--"):
458 eq = a.find('=') 566 eq = a.find("=")
459 if eq > 0: 567 if eq > 0:
460 a = a[0:eq] 568 a = a[0:eq]
461 if not opt.has_option(a): 569 if not opt.has_option(a):
462 del argv[i] 570 del argv[i]
463 continue 571 continue
464 i += 1 572 i += 1
465 573
466 574
467class _UserAgentHandler(urllib.request.BaseHandler): 575class _UserAgentHandler(urllib.request.BaseHandler):
468 def http_request(self, req): 576 def http_request(self, req):
469 req.add_header('User-Agent', user_agent.repo) 577 req.add_header("User-Agent", user_agent.repo)
470 return req 578 return req
471 579
472 def https_request(self, req): 580 def https_request(self, req):
473 req.add_header('User-Agent', user_agent.repo) 581 req.add_header("User-Agent", user_agent.repo)
474 return req 582 return req
475 583
476 584
477def _AddPasswordFromUserInput(handler, msg, req): 585def _AddPasswordFromUserInput(handler, msg, req):
478 # If repo could not find auth info from netrc, try to get it from user input 586 # If repo could not find auth info from netrc, try to get it from user input
479 url = req.get_full_url() 587 url = req.get_full_url()
480 user, password = handler.passwd.find_user_password(None, url) 588 user, password = handler.passwd.find_user_password(None, url)
481 if user is None: 589 if user is None:
482 print(msg) 590 print(msg)
483 try: 591 try:
484 user = input('User: ') 592 user = input("User: ")
485 password = getpass.getpass() 593 password = getpass.getpass()
486 except KeyboardInterrupt: 594 except KeyboardInterrupt:
487 return 595 return
488 handler.passwd.add_password(None, url, user, password) 596 handler.passwd.add_password(None, url, user, password)
489 597
490 598
491class _BasicAuthHandler(urllib.request.HTTPBasicAuthHandler): 599class _BasicAuthHandler(urllib.request.HTTPBasicAuthHandler):
492 def http_error_401(self, req, fp, code, msg, headers): 600 def http_error_401(self, req, fp, code, msg, headers):
493 _AddPasswordFromUserInput(self, msg, req) 601 _AddPasswordFromUserInput(self, msg, req)
494 return urllib.request.HTTPBasicAuthHandler.http_error_401( 602 return urllib.request.HTTPBasicAuthHandler.http_error_401(
495 self, req, fp, code, msg, headers) 603 self, req, fp, code, msg, headers
496 604 )
497 def http_error_auth_reqed(self, authreq, host, req, headers): 605
498 try: 606 def http_error_auth_reqed(self, authreq, host, req, headers):
499 old_add_header = req.add_header 607 try:
500 608 old_add_header = req.add_header
501 def _add_header(name, val): 609
502 val = val.replace('\n', '') 610 def _add_header(name, val):
503 old_add_header(name, val) 611 val = val.replace("\n", "")
504 req.add_header = _add_header 612 old_add_header(name, val)
505 return urllib.request.AbstractBasicAuthHandler.http_error_auth_reqed( 613
506 self, authreq, host, req, headers) 614 req.add_header = _add_header
507 except Exception: 615 return (
508 reset = getattr(self, 'reset_retry_count', None) 616 urllib.request.AbstractBasicAuthHandler.http_error_auth_reqed(
509 if reset is not None: 617 self, authreq, host, req, headers
510 reset() 618 )
511 elif getattr(self, 'retried', None): 619 )
512 self.retried = 0 620 except Exception:
513 raise 621 reset = getattr(self, "reset_retry_count", None)
622 if reset is not None:
623 reset()
624 elif getattr(self, "retried", None):
625 self.retried = 0
626 raise
514 627
515 628
516class _DigestAuthHandler(urllib.request.HTTPDigestAuthHandler): 629class _DigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
517 def http_error_401(self, req, fp, code, msg, headers): 630 def http_error_401(self, req, fp, code, msg, headers):
518 _AddPasswordFromUserInput(self, msg, req) 631 _AddPasswordFromUserInput(self, msg, req)
519 return urllib.request.HTTPDigestAuthHandler.http_error_401( 632 return urllib.request.HTTPDigestAuthHandler.http_error_401(
520 self, req, fp, code, msg, headers) 633 self, req, fp, code, msg, headers
634 )
635
636 def http_error_auth_reqed(self, auth_header, host, req, headers):
637 try:
638 old_add_header = req.add_header
639
640 def _add_header(name, val):
641 val = val.replace("\n", "")
642 old_add_header(name, val)
643
644 req.add_header = _add_header
645 return (
646 urllib.request.AbstractDigestAuthHandler.http_error_auth_reqed(
647 self, auth_header, host, req, headers
648 )
649 )
650 except Exception:
651 reset = getattr(self, "reset_retry_count", None)
652 if reset is not None:
653 reset()
654 elif getattr(self, "retried", None):
655 self.retried = 0
656 raise
521 657
522 def http_error_auth_reqed(self, auth_header, host, req, headers): 658
523 try: 659class _KerberosAuthHandler(urllib.request.BaseHandler):
524 old_add_header = req.add_header 660 def __init__(self):
525 661 self.retried = 0
526 def _add_header(name, val): 662 self.context = None
527 val = val.replace('\n', '') 663 self.handler_order = urllib.request.BaseHandler.handler_order - 50
528 old_add_header(name, val) 664
529 req.add_header = _add_header 665 def http_error_401(self, req, fp, code, msg, headers):
530 return urllib.request.AbstractDigestAuthHandler.http_error_auth_reqed( 666 host = req.get_host()
531 self, auth_header, host, req, headers) 667 retry = self.http_error_auth_reqed(
532 except Exception: 668 "www-authenticate", host, req, headers
533 reset = getattr(self, 'reset_retry_count', None) 669 )
534 if reset is not None: 670 return retry
535 reset() 671
536 elif getattr(self, 'retried', None): 672 def http_error_auth_reqed(self, auth_header, host, req, headers):
673 try:
674 spn = "HTTP@%s" % host
675 authdata = self._negotiate_get_authdata(auth_header, headers)
676
677 if self.retried > 3:
678 raise urllib.request.HTTPError(
679 req.get_full_url(),
680 401,
681 "Negotiate auth failed",
682 headers,
683 None,
684 )
685 else:
686 self.retried += 1
687
688 neghdr = self._negotiate_get_svctk(spn, authdata)
689 if neghdr is None:
690 return None
691
692 req.add_unredirected_header("Authorization", neghdr)
693 response = self.parent.open(req)
694
695 srvauth = self._negotiate_get_authdata(auth_header, response.info())
696 if self._validate_response(srvauth):
697 return response
698 except kerberos.GSSError:
699 return None
700 except Exception:
701 self.reset_retry_count()
702 raise
703 finally:
704 self._clean_context()
705
706 def reset_retry_count(self):
537 self.retried = 0 707 self.retried = 0
538 raise
539 708
709 def _negotiate_get_authdata(self, auth_header, headers):
710 authhdr = headers.get(auth_header, None)
711 if authhdr is not None:
712 for mech_tuple in authhdr.split(","):
713 mech, __, authdata = mech_tuple.strip().partition(" ")
714 if mech.lower() == "negotiate":
715 return authdata.strip()
716 return None
540 717
541class _KerberosAuthHandler(urllib.request.BaseHandler): 718 def _negotiate_get_svctk(self, spn, authdata):
542 def __init__(self): 719 if authdata is None:
543 self.retried = 0 720 return None
544 self.context = None
545 self.handler_order = urllib.request.BaseHandler.handler_order - 50
546 721
547 def http_error_401(self, req, fp, code, msg, headers): 722 result, self.context = kerberos.authGSSClientInit(spn)
548 host = req.get_host() 723 if result < kerberos.AUTH_GSS_COMPLETE:
549 retry = self.http_error_auth_reqed('www-authenticate', host, req, headers) 724 return None
550 return retry
551 725
552 def http_error_auth_reqed(self, auth_header, host, req, headers): 726 result = kerberos.authGSSClientStep(self.context, authdata)
553 try: 727 if result < kerberos.AUTH_GSS_CONTINUE:
554 spn = "HTTP@%s" % host 728 return None
555 authdata = self._negotiate_get_authdata(auth_header, headers)
556 729
557 if self.retried > 3: 730 response = kerberos.authGSSClientResponse(self.context)
558 raise urllib.request.HTTPError(req.get_full_url(), 401, 731 return "Negotiate %s" % response
559 "Negotiate auth failed", headers, None)
560 else:
561 self.retried += 1
562 732
563 neghdr = self._negotiate_get_svctk(spn, authdata) 733 def _validate_response(self, authdata):
564 if neghdr is None: 734 if authdata is None:
735 return None
736 result = kerberos.authGSSClientStep(self.context, authdata)
737 if result == kerberos.AUTH_GSS_COMPLETE:
738 return True
565 return None 739 return None
566 740
567 req.add_unredirected_header('Authorization', neghdr) 741 def _clean_context(self):
568 response = self.parent.open(req) 742 if self.context is not None:
569 743 kerberos.authGSSClientClean(self.context)
570 srvauth = self._negotiate_get_authdata(auth_header, response.info()) 744 self.context = None
571 if self._validate_response(srvauth):
572 return response
573 except kerberos.GSSError:
574 return None
575 except Exception:
576 self.reset_retry_count()
577 raise
578 finally:
579 self._clean_context()
580
581 def reset_retry_count(self):
582 self.retried = 0
583
584 def _negotiate_get_authdata(self, auth_header, headers):
585 authhdr = headers.get(auth_header, None)
586 if authhdr is not None:
587 for mech_tuple in authhdr.split(","):
588 mech, __, authdata = mech_tuple.strip().partition(" ")
589 if mech.lower() == "negotiate":
590 return authdata.strip()
591 return None
592
593 def _negotiate_get_svctk(self, spn, authdata):
594 if authdata is None:
595 return None
596
597 result, self.context = kerberos.authGSSClientInit(spn)
598 if result < kerberos.AUTH_GSS_COMPLETE:
599 return None
600
601 result = kerberos.authGSSClientStep(self.context, authdata)
602 if result < kerberos.AUTH_GSS_CONTINUE:
603 return None
604
605 response = kerberos.authGSSClientResponse(self.context)
606 return "Negotiate %s" % response
607
608 def _validate_response(self, authdata):
609 if authdata is None:
610 return None
611 result = kerberos.authGSSClientStep(self.context, authdata)
612 if result == kerberos.AUTH_GSS_COMPLETE:
613 return True
614 return None
615
616 def _clean_context(self):
617 if self.context is not None:
618 kerberos.authGSSClientClean(self.context)
619 self.context = None
620 745
621 746
622def init_http(): 747def init_http():
623 handlers = [_UserAgentHandler()] 748 handlers = [_UserAgentHandler()]
624 749
625 mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm() 750 mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
626 try: 751 try:
627 n = netrc.netrc() 752 n = netrc.netrc()
628 for host in n.hosts: 753 for host in n.hosts:
629 p = n.hosts[host] 754 p = n.hosts[host]
630 mgr.add_password(p[1], 'http://%s/' % host, p[0], p[2]) 755 mgr.add_password(p[1], "http://%s/" % host, p[0], p[2])
631 mgr.add_password(p[1], 'https://%s/' % host, p[0], p[2]) 756 mgr.add_password(p[1], "https://%s/" % host, p[0], p[2])
632 except netrc.NetrcParseError: 757 except netrc.NetrcParseError:
633 pass 758 pass
634 except IOError: 759 except IOError:
635 pass 760 pass
636 handlers.append(_BasicAuthHandler(mgr)) 761 handlers.append(_BasicAuthHandler(mgr))
637 handlers.append(_DigestAuthHandler(mgr)) 762 handlers.append(_DigestAuthHandler(mgr))
638 if kerberos: 763 if kerberos:
639 handlers.append(_KerberosAuthHandler()) 764 handlers.append(_KerberosAuthHandler())
640 765
641 if 'http_proxy' in os.environ: 766 if "http_proxy" in os.environ:
642 url = os.environ['http_proxy'] 767 url = os.environ["http_proxy"]
643 handlers.append(urllib.request.ProxyHandler({'http': url, 'https': url})) 768 handlers.append(
644 if 'REPO_CURL_VERBOSE' in os.environ: 769 urllib.request.ProxyHandler({"http": url, "https": url})
645 handlers.append(urllib.request.HTTPHandler(debuglevel=1)) 770 )
646 handlers.append(urllib.request.HTTPSHandler(debuglevel=1)) 771 if "REPO_CURL_VERBOSE" in os.environ:
647 urllib.request.install_opener(urllib.request.build_opener(*handlers)) 772 handlers.append(urllib.request.HTTPHandler(debuglevel=1))
773 handlers.append(urllib.request.HTTPSHandler(debuglevel=1))
774 urllib.request.install_opener(urllib.request.build_opener(*handlers))
648 775
649 776
650def _Main(argv): 777def _Main(argv):
651 result = 0 778 result = 0
652 779
653 opt = optparse.OptionParser(usage="repo wrapperinfo -- ...") 780 opt = optparse.OptionParser(usage="repo wrapperinfo -- ...")
654 opt.add_option("--repo-dir", dest="repodir", 781 opt.add_option("--repo-dir", dest="repodir", help="path to .repo/")
655 help="path to .repo/") 782 opt.add_option(
656 opt.add_option("--wrapper-version", dest="wrapper_version", 783 "--wrapper-version",
657 help="version of the wrapper script") 784 dest="wrapper_version",
658 opt.add_option("--wrapper-path", dest="wrapper_path", 785 help="version of the wrapper script",
659 help="location of the wrapper script") 786 )
660 _PruneOptions(argv, opt) 787 opt.add_option(
661 opt, argv = opt.parse_args(argv) 788 "--wrapper-path",
662 789 dest="wrapper_path",
663 _CheckWrapperVersion(opt.wrapper_version, opt.wrapper_path) 790 help="location of the wrapper script",
664 _CheckRepoDir(opt.repodir) 791 )
665 792 _PruneOptions(argv, opt)
666 Version.wrapper_version = opt.wrapper_version 793 opt, argv = opt.parse_args(argv)
667 Version.wrapper_path = opt.wrapper_path 794
668 795 _CheckWrapperVersion(opt.wrapper_version, opt.wrapper_path)
669 repo = _Repo(opt.repodir) 796 _CheckRepoDir(opt.repodir)
670 797
671 try: 798 Version.wrapper_version = opt.wrapper_version
672 init_http() 799 Version.wrapper_path = opt.wrapper_path
673 name, gopts, argv = repo._ParseArgs(argv) 800
674 801 repo = _Repo(opt.repodir)
675 if gopts.trace: 802
676 SetTrace()
677
678 if gopts.trace_to_stderr:
679 SetTraceToStderr()
680
681 result = repo._Run(name, gopts, argv) or 0
682 except KeyboardInterrupt:
683 print('aborted by user', file=sys.stderr)
684 result = 1
685 except ManifestParseError as mpe:
686 print('fatal: %s' % mpe, file=sys.stderr)
687 result = 1
688 except RepoChangedException as rce:
689 # If repo changed, re-exec ourselves.
690 #
691 argv = list(sys.argv)
692 argv.extend(rce.extra_args)
693 try: 803 try:
694 os.execv(sys.executable, [__file__] + argv) 804 init_http()
695 except OSError as e: 805 name, gopts, argv = repo._ParseArgs(argv)
696 print('fatal: cannot restart repo after upgrade', file=sys.stderr)
697 print('fatal: %s' % e, file=sys.stderr)
698 result = 128
699 806
700 TerminatePager() 807 if gopts.trace:
701 sys.exit(result) 808 SetTrace()
702 809
810 if gopts.trace_to_stderr:
811 SetTraceToStderr()
703 812
704if __name__ == '__main__': 813 result = repo._Run(name, gopts, argv) or 0
705 _Main(sys.argv[1:]) 814 except KeyboardInterrupt:
815 print("aborted by user", file=sys.stderr)
816 result = 1
817 except ManifestParseError as mpe:
818 print("fatal: %s" % mpe, file=sys.stderr)
819 result = 1
820 except RepoChangedException as rce:
821 # If repo changed, re-exec ourselves.
822 #
823 argv = list(sys.argv)
824 argv.extend(rce.extra_args)
825 try:
826 os.execv(sys.executable, [__file__] + argv)
827 except OSError as e:
828 print("fatal: cannot restart repo after upgrade", file=sys.stderr)
829 print("fatal: %s" % e, file=sys.stderr)
830 result = 128
831
832 TerminatePager()
833 sys.exit(result)
834
835
836if __name__ == "__main__":
837 _Main(sys.argv[1:])
diff --git a/manifest_xml.py b/manifest_xml.py
index 5b83f368..9603906f 100644
--- a/manifest_xml.py
+++ b/manifest_xml.py
@@ -26,415 +26,452 @@ from git_config import GitConfig
26from git_refs import R_HEADS, HEAD 26from git_refs import R_HEADS, HEAD
27from git_superproject import Superproject 27from git_superproject import Superproject
28import platform_utils 28import platform_utils
29from project import (Annotation, RemoteSpec, Project, RepoProject, 29from project import (
30 ManifestProject) 30 Annotation,
31from error import (ManifestParseError, ManifestInvalidPathError, 31 RemoteSpec,
32 ManifestInvalidRevisionError) 32 Project,
33 RepoProject,
34 ManifestProject,
35)
36from error import (
37 ManifestParseError,
38 ManifestInvalidPathError,
39 ManifestInvalidRevisionError,
40)
33from wrapper import Wrapper 41from wrapper import Wrapper
34 42
35MANIFEST_FILE_NAME = 'manifest.xml' 43MANIFEST_FILE_NAME = "manifest.xml"
36LOCAL_MANIFEST_NAME = 'local_manifest.xml' 44LOCAL_MANIFEST_NAME = "local_manifest.xml"
37LOCAL_MANIFESTS_DIR_NAME = 'local_manifests' 45LOCAL_MANIFESTS_DIR_NAME = "local_manifests"
38SUBMANIFEST_DIR = 'submanifests' 46SUBMANIFEST_DIR = "submanifests"
39# Limit submanifests to an arbitrary depth for loop detection. 47# Limit submanifests to an arbitrary depth for loop detection.
40MAX_SUBMANIFEST_DEPTH = 8 48MAX_SUBMANIFEST_DEPTH = 8
41# Add all projects from sub manifest into a group. 49# Add all projects from sub manifest into a group.
42SUBMANIFEST_GROUP_PREFIX = 'submanifest:' 50SUBMANIFEST_GROUP_PREFIX = "submanifest:"
43 51
44# Add all projects from local manifest into a group. 52# Add all projects from local manifest into a group.
45LOCAL_MANIFEST_GROUP_PREFIX = 'local:' 53LOCAL_MANIFEST_GROUP_PREFIX = "local:"
46 54
47# ContactInfo has the self-registered bug url, supplied by the manifest authors. 55# ContactInfo has the self-registered bug url, supplied by the manifest authors.
48ContactInfo = collections.namedtuple('ContactInfo', 'bugurl') 56ContactInfo = collections.namedtuple("ContactInfo", "bugurl")
49 57
50# urljoin gets confused if the scheme is not known. 58# urljoin gets confused if the scheme is not known.
51urllib.parse.uses_relative.extend([ 59urllib.parse.uses_relative.extend(
52 'ssh', 60 ["ssh", "git", "persistent-https", "sso", "rpc"]
53 'git', 61)
54 'persistent-https', 62urllib.parse.uses_netloc.extend(
55 'sso', 63 ["ssh", "git", "persistent-https", "sso", "rpc"]
56 'rpc']) 64)
57urllib.parse.uses_netloc.extend([
58 'ssh',
59 'git',
60 'persistent-https',
61 'sso',
62 'rpc'])
63 65
64 66
65def XmlBool(node, attr, default=None): 67def XmlBool(node, attr, default=None):
66 """Determine boolean value of |node|'s |attr|. 68 """Determine boolean value of |node|'s |attr|.
67 69
68 Invalid values will issue a non-fatal warning. 70 Invalid values will issue a non-fatal warning.
69 71
70 Args: 72 Args:
71 node: XML node whose attributes we access. 73 node: XML node whose attributes we access.
72 attr: The attribute to access. 74 attr: The attribute to access.
73 default: If the attribute is not set (value is empty), then use this. 75 default: If the attribute is not set (value is empty), then use this.
74 76
75 Returns: 77 Returns:
76 True if the attribute is a valid string representing true. 78 True if the attribute is a valid string representing true.
77 False if the attribute is a valid string representing false. 79 False if the attribute is a valid string representing false.
78 |default| otherwise. 80 |default| otherwise.
79 """ 81 """
80 value = node.getAttribute(attr) 82 value = node.getAttribute(attr)
81 s = value.lower() 83 s = value.lower()
82 if s == '': 84 if s == "":
83 return default 85 return default
84 elif s in {'yes', 'true', '1'}: 86 elif s in {"yes", "true", "1"}:
85 return True 87 return True
86 elif s in {'no', 'false', '0'}: 88 elif s in {"no", "false", "0"}:
87 return False 89 return False
88 else: 90 else:
89 print('warning: manifest: %s="%s": ignoring invalid XML boolean' % 91 print(
90 (attr, value), file=sys.stderr) 92 'warning: manifest: %s="%s": ignoring invalid XML boolean'
91 return default 93 % (attr, value),
94 file=sys.stderr,
95 )
96 return default
92 97
93 98
94def XmlInt(node, attr, default=None): 99def XmlInt(node, attr, default=None):
95 """Determine integer value of |node|'s |attr|. 100 """Determine integer value of |node|'s |attr|.
96 101
97 Args: 102 Args:
98 node: XML node whose attributes we access. 103 node: XML node whose attributes we access.
99 attr: The attribute to access. 104 attr: The attribute to access.
100 default: If the attribute is not set (value is empty), then use this. 105 default: If the attribute is not set (value is empty), then use this.
101 106
102 Returns: 107 Returns:
103 The number if the attribute is a valid number. 108 The number if the attribute is a valid number.
104 109
105 Raises: 110 Raises:
106 ManifestParseError: The number is invalid. 111 ManifestParseError: The number is invalid.
107 """ 112 """
108 value = node.getAttribute(attr) 113 value = node.getAttribute(attr)
109 if not value: 114 if not value:
110 return default 115 return default
111 116
112 try: 117 try:
113 return int(value) 118 return int(value)
114 except ValueError: 119 except ValueError:
115 raise ManifestParseError('manifest: invalid %s="%s" integer' % 120 raise ManifestParseError(
116 (attr, value)) 121 'manifest: invalid %s="%s" integer' % (attr, value)
122 )
117 123
118 124
119class _Default(object): 125class _Default(object):
120 """Project defaults within the manifest.""" 126 """Project defaults within the manifest."""
121 127
122 revisionExpr = None 128 revisionExpr = None
123 destBranchExpr = None 129 destBranchExpr = None
124 upstreamExpr = None 130 upstreamExpr = None
125 remote = None 131 remote = None
126 sync_j = None 132 sync_j = None
127 sync_c = False 133 sync_c = False
128 sync_s = False 134 sync_s = False
129 sync_tags = True 135 sync_tags = True
130 136
131 def __eq__(self, other): 137 def __eq__(self, other):
132 if not isinstance(other, _Default): 138 if not isinstance(other, _Default):
133 return False 139 return False
134 return self.__dict__ == other.__dict__ 140 return self.__dict__ == other.__dict__
135 141
136 def __ne__(self, other): 142 def __ne__(self, other):
137 if not isinstance(other, _Default): 143 if not isinstance(other, _Default):
138 return True 144 return True
139 return self.__dict__ != other.__dict__ 145 return self.__dict__ != other.__dict__
140 146
141 147
142class _XmlRemote(object): 148class _XmlRemote(object):
143 def __init__(self, 149 def __init__(
144 name, 150 self,
145 alias=None, 151 name,
146 fetch=None, 152 alias=None,
147 pushUrl=None, 153 fetch=None,
148 manifestUrl=None, 154 pushUrl=None,
149 review=None, 155 manifestUrl=None,
150 revision=None): 156 review=None,
151 self.name = name 157 revision=None,
152 self.fetchUrl = fetch 158 ):
153 self.pushUrl = pushUrl 159 self.name = name
154 self.manifestUrl = manifestUrl 160 self.fetchUrl = fetch
155 self.remoteAlias = alias 161 self.pushUrl = pushUrl
156 self.reviewUrl = review 162 self.manifestUrl = manifestUrl
157 self.revision = revision 163 self.remoteAlias = alias
158 self.resolvedFetchUrl = self._resolveFetchUrl() 164 self.reviewUrl = review
159 self.annotations = [] 165 self.revision = revision
160 166 self.resolvedFetchUrl = self._resolveFetchUrl()
161 def __eq__(self, other): 167 self.annotations = []
162 if not isinstance(other, _XmlRemote): 168
163 return False 169 def __eq__(self, other):
164 return (sorted(self.annotations) == sorted(other.annotations) and 170 if not isinstance(other, _XmlRemote):
165 self.name == other.name and self.fetchUrl == other.fetchUrl and 171 return False
166 self.pushUrl == other.pushUrl and self.remoteAlias == other.remoteAlias 172 return (
167 and self.reviewUrl == other.reviewUrl and self.revision == other.revision) 173 sorted(self.annotations) == sorted(other.annotations)
168 174 and self.name == other.name
169 def __ne__(self, other): 175 and self.fetchUrl == other.fetchUrl
170 return not self.__eq__(other) 176 and self.pushUrl == other.pushUrl
171 177 and self.remoteAlias == other.remoteAlias
172 def _resolveFetchUrl(self): 178 and self.reviewUrl == other.reviewUrl
173 if self.fetchUrl is None: 179 and self.revision == other.revision
174 return '' 180 )
175 url = self.fetchUrl.rstrip('/') 181
176 manifestUrl = self.manifestUrl.rstrip('/') 182 def __ne__(self, other):
177 # urljoin will gets confused over quite a few things. The ones we care 183 return not self.__eq__(other)
178 # about here are: 184
179 # * no scheme in the base url, like <hostname:port> 185 def _resolveFetchUrl(self):
180 # We handle no scheme by replacing it with an obscure protocol, gopher 186 if self.fetchUrl is None:
181 # and then replacing it with the original when we are done. 187 return ""
182 188 url = self.fetchUrl.rstrip("/")
183 if manifestUrl.find(':') != manifestUrl.find('/') - 1: 189 manifestUrl = self.manifestUrl.rstrip("/")
184 url = urllib.parse.urljoin('gopher://' + manifestUrl, url) 190 # urljoin will gets confused over quite a few things. The ones we care
185 url = re.sub(r'^gopher://', '', url) 191 # about here are:
186 else: 192 # * no scheme in the base url, like <hostname:port>
187 url = urllib.parse.urljoin(manifestUrl, url) 193 # We handle no scheme by replacing it with an obscure protocol, gopher
188 return url 194 # and then replacing it with the original when we are done.
189 195
190 def ToRemoteSpec(self, projectName): 196 if manifestUrl.find(":") != manifestUrl.find("/") - 1:
191 fetchUrl = self.resolvedFetchUrl.rstrip('/') 197 url = urllib.parse.urljoin("gopher://" + manifestUrl, url)
192 url = fetchUrl + '/' + projectName 198 url = re.sub(r"^gopher://", "", url)
193 remoteName = self.name 199 else:
194 if self.remoteAlias: 200 url = urllib.parse.urljoin(manifestUrl, url)
195 remoteName = self.remoteAlias 201 return url
196 return RemoteSpec(remoteName, 202
197 url=url, 203 def ToRemoteSpec(self, projectName):
198 pushUrl=self.pushUrl, 204 fetchUrl = self.resolvedFetchUrl.rstrip("/")
199 review=self.reviewUrl, 205 url = fetchUrl + "/" + projectName
200 orig_name=self.name, 206 remoteName = self.name
201 fetchUrl=self.fetchUrl) 207 if self.remoteAlias:
202 208 remoteName = self.remoteAlias
203 def AddAnnotation(self, name, value, keep): 209 return RemoteSpec(
204 self.annotations.append(Annotation(name, value, keep)) 210 remoteName,
211 url=url,
212 pushUrl=self.pushUrl,
213 review=self.reviewUrl,
214 orig_name=self.name,
215 fetchUrl=self.fetchUrl,
216 )
217
218 def AddAnnotation(self, name, value, keep):
219 self.annotations.append(Annotation(name, value, keep))
205 220
206 221
207class _XmlSubmanifest: 222class _XmlSubmanifest:
208 """Manage the <submanifest> element specified in the manifest. 223 """Manage the <submanifest> element specified in the manifest.
209 224
210 Attributes: 225 Attributes:
211 name: a string, the name for this submanifest. 226 name: a string, the name for this submanifest.
212 remote: a string, the remote.name for this submanifest. 227 remote: a string, the remote.name for this submanifest.
213 project: a string, the name of the manifest project. 228 project: a string, the name of the manifest project.
214 revision: a string, the commitish. 229 revision: a string, the commitish.
215 manifestName: a string, the submanifest file name. 230 manifestName: a string, the submanifest file name.
216 groups: a list of strings, the groups to add to all projects in the submanifest. 231 groups: a list of strings, the groups to add to all projects in the
217 default_groups: a list of strings, the default groups to sync. 232 submanifest.
218 path: a string, the relative path for the submanifest checkout. 233 default_groups: a list of strings, the default groups to sync.
219 parent: an XmlManifest, the parent manifest. 234 path: a string, the relative path for the submanifest checkout.
220 annotations: (derived) a list of annotations. 235 parent: an XmlManifest, the parent manifest.
221 present: (derived) a boolean, whether the sub manifest file is present. 236 annotations: (derived) a list of annotations.
222 """ 237 present: (derived) a boolean, whether the sub manifest file is present.
223 def __init__(self, 238 """
224 name,
225 remote=None,
226 project=None,
227 revision=None,
228 manifestName=None,
229 groups=None,
230 default_groups=None,
231 path=None,
232 parent=None):
233 self.name = name
234 self.remote = remote
235 self.project = project
236 self.revision = revision
237 self.manifestName = manifestName
238 self.groups = groups
239 self.default_groups = default_groups
240 self.path = path
241 self.parent = parent
242 self.annotations = []
243 outer_client = parent._outer_client or parent
244 if self.remote and not self.project:
245 raise ManifestParseError(
246 f'Submanifest {name}: must specify project when remote is given.')
247 # Construct the absolute path to the manifest file using the parent's
248 # method, so that we can correctly create our repo_client.
249 manifestFile = parent.SubmanifestInfoDir(
250 os.path.join(parent.path_prefix, self.relpath),
251 os.path.join('manifests', manifestName or 'default.xml'))
252 linkFile = parent.SubmanifestInfoDir(
253 os.path.join(parent.path_prefix, self.relpath), MANIFEST_FILE_NAME)
254 rc = self.repo_client = RepoClient(
255 parent.repodir, linkFile, parent_groups=','.join(groups) or '',
256 submanifest_path=self.relpath, outer_client=outer_client,
257 default_groups=default_groups)
258
259 self.present = os.path.exists(manifestFile)
260
261 def __eq__(self, other):
262 if not isinstance(other, _XmlSubmanifest):
263 return False
264 return (
265 self.name == other.name and
266 self.remote == other.remote and
267 self.project == other.project and
268 self.revision == other.revision and
269 self.manifestName == other.manifestName and
270 self.groups == other.groups and
271 self.default_groups == other.default_groups and
272 self.path == other.path and
273 sorted(self.annotations) == sorted(other.annotations))
274
275 def __ne__(self, other):
276 return not self.__eq__(other)
277
278 def ToSubmanifestSpec(self):
279 """Return a SubmanifestSpec object, populating attributes"""
280 mp = self.parent.manifestProject
281 remote = self.parent.remotes[self.remote or self.parent.default.remote.name]
282 # If a project was given, generate the url from the remote and project.
283 # If not, use this manifestProject's url.
284 if self.project:
285 manifestUrl = remote.ToRemoteSpec(self.project).url
286 else:
287 manifestUrl = mp.GetRemote().url
288 manifestName = self.manifestName or 'default.xml'
289 revision = self.revision or self.name
290 path = self.path or revision.split('/')[-1]
291 groups = self.groups or []
292 default_groups = self.default_groups or []
293 239
294 return SubmanifestSpec(self.name, manifestUrl, manifestName, revision, path, 240 def __init__(
295 groups) 241 self,
242 name,
243 remote=None,
244 project=None,
245 revision=None,
246 manifestName=None,
247 groups=None,
248 default_groups=None,
249 path=None,
250 parent=None,
251 ):
252 self.name = name
253 self.remote = remote
254 self.project = project
255 self.revision = revision
256 self.manifestName = manifestName
257 self.groups = groups
258 self.default_groups = default_groups
259 self.path = path
260 self.parent = parent
261 self.annotations = []
262 outer_client = parent._outer_client or parent
263 if self.remote and not self.project:
264 raise ManifestParseError(
265 f"Submanifest {name}: must specify project when remote is "
266 "given."
267 )
268 # Construct the absolute path to the manifest file using the parent's
269 # method, so that we can correctly create our repo_client.
270 manifestFile = parent.SubmanifestInfoDir(
271 os.path.join(parent.path_prefix, self.relpath),
272 os.path.join("manifests", manifestName or "default.xml"),
273 )
274 linkFile = parent.SubmanifestInfoDir(
275 os.path.join(parent.path_prefix, self.relpath), MANIFEST_FILE_NAME
276 )
277 self.repo_client = RepoClient(
278 parent.repodir,
279 linkFile,
280 parent_groups=",".join(groups) or "",
281 submanifest_path=self.relpath,
282 outer_client=outer_client,
283 default_groups=default_groups,
284 )
285
286 self.present = os.path.exists(manifestFile)
287
288 def __eq__(self, other):
289 if not isinstance(other, _XmlSubmanifest):
290 return False
291 return (
292 self.name == other.name
293 and self.remote == other.remote
294 and self.project == other.project
295 and self.revision == other.revision
296 and self.manifestName == other.manifestName
297 and self.groups == other.groups
298 and self.default_groups == other.default_groups
299 and self.path == other.path
300 and sorted(self.annotations) == sorted(other.annotations)
301 )
302
303 def __ne__(self, other):
304 return not self.__eq__(other)
305
306 def ToSubmanifestSpec(self):
307 """Return a SubmanifestSpec object, populating attributes"""
308 mp = self.parent.manifestProject
309 remote = self.parent.remotes[
310 self.remote or self.parent.default.remote.name
311 ]
312 # If a project was given, generate the url from the remote and project.
313 # If not, use this manifestProject's url.
314 if self.project:
315 manifestUrl = remote.ToRemoteSpec(self.project).url
316 else:
317 manifestUrl = mp.GetRemote().url
318 manifestName = self.manifestName or "default.xml"
319 revision = self.revision or self.name
320 path = self.path or revision.split("/")[-1]
321 groups = self.groups or []
296 322
297 @property 323 return SubmanifestSpec(
298 def relpath(self): 324 self.name, manifestUrl, manifestName, revision, path, groups
299 """The path of this submanifest relative to the parent manifest.""" 325 )
300 revision = self.revision or self.name
301 return self.path or revision.split('/')[-1]
302 326
303 def GetGroupsStr(self): 327 @property
304 """Returns the `groups` given for this submanifest.""" 328 def relpath(self):
305 if self.groups: 329 """The path of this submanifest relative to the parent manifest."""
306 return ','.join(self.groups) 330 revision = self.revision or self.name
307 return '' 331 return self.path or revision.split("/")[-1]
308 332
309 def GetDefaultGroupsStr(self): 333 def GetGroupsStr(self):
310 """Returns the `default-groups` given for this submanifest.""" 334 """Returns the `groups` given for this submanifest."""
311 return ','.join(self.default_groups or []) 335 if self.groups:
336 return ",".join(self.groups)
337 return ""
312 338
313 def AddAnnotation(self, name, value, keep): 339 def GetDefaultGroupsStr(self):
314 """Add annotations to the submanifest.""" 340 """Returns the `default-groups` given for this submanifest."""
315 self.annotations.append(Annotation(name, value, keep)) 341 return ",".join(self.default_groups or [])
316 342
343 def AddAnnotation(self, name, value, keep):
344 """Add annotations to the submanifest."""
345 self.annotations.append(Annotation(name, value, keep))
317 346
class SubmanifestSpec:
  """The submanifest element, with all fields expanded."""

  def __init__(self, name, manifestUrl, manifestName, revision, path, groups):
    """Store the fully-expanded submanifest attributes.

    Args:
      name: The name of the submanifest.
      manifestUrl: The URL of the manifest project to fetch from.
      manifestName: The manifest file to use within that project.
      revision: The revision to check out.
      path: The checkout path, relative to the parent manifest.
      groups: A list of groups; any falsy value becomes an empty list.
    """
    self.name = name
    self.manifestUrl = manifestUrl
    self.manifestName = manifestName
    self.revision = revision
    self.path = path
    self.groups = groups or []
334 347
348class SubmanifestSpec:
349 """The submanifest element, with all fields expanded."""
335 350
336class XmlManifest(object): 351 def __init__(self, name, manifestUrl, manifestName, revision, path, groups):
337 """manages the repo configuration file""" 352 self.name = name
353 self.manifestUrl = manifestUrl
354 self.manifestName = manifestName
355 self.revision = revision
356 self.path = path
357 self.groups = groups or []
338 358
339 def __init__(self, repodir, manifest_file, local_manifests=None,
340 outer_client=None, parent_groups='', submanifest_path='',
341 default_groups=None):
342 """Initialize.
343 359
344 Args: 360class XmlManifest(object):
345 repodir: Path to the .repo/ dir for holding all internal checkout state. 361 """manages the repo configuration file"""
346 It must be in the top directory of the repo client checkout. 362
347 manifest_file: Full path to the manifest file to parse. This will usually 363 def __init__(
348 be |repodir|/|MANIFEST_FILE_NAME|. 364 self,
349 local_manifests: Full path to the directory of local override manifests. 365 repodir,
350 This will usually be |repodir|/|LOCAL_MANIFESTS_DIR_NAME|. 366 manifest_file,
351 outer_client: RepoClient of the outer manifest. 367 local_manifests=None,
352 parent_groups: a string, the groups to apply to this projects. 368 outer_client=None,
353 submanifest_path: The submanifest root relative to the repo root. 369 parent_groups="",
354 default_groups: a string, the default manifest groups to use. 370 submanifest_path="",
355 """ 371 default_groups=None,
356 # TODO(vapier): Move this out of this class. 372 ):
357 self.globalConfig = GitConfig.ForUser() 373 """Initialize.
358 374
359 self.repodir = os.path.abspath(repodir) 375 Args:
360 self._CheckLocalPath(submanifest_path) 376 repodir: Path to the .repo/ dir for holding all internal checkout
361 self.topdir = os.path.dirname(self.repodir) 377 state. It must be in the top directory of the repo client
362 if submanifest_path: 378 checkout.
363 # This avoids a trailing os.path.sep when submanifest_path is empty. 379 manifest_file: Full path to the manifest file to parse. This will
364 self.topdir = os.path.join(self.topdir, submanifest_path) 380 usually be |repodir|/|MANIFEST_FILE_NAME|.
365 if manifest_file != os.path.abspath(manifest_file): 381 local_manifests: Full path to the directory of local override
366 raise ManifestParseError('manifest_file must be abspath') 382 manifests. This will usually be
367 self.manifestFile = manifest_file 383 |repodir|/|LOCAL_MANIFESTS_DIR_NAME|.
368 if not outer_client or outer_client == self: 384 outer_client: RepoClient of the outer manifest.
369 # manifestFileOverrides only exists in the outer_client's manifest, since 385 parent_groups: a string, the groups to apply to this projects.
370 # that is the only instance left when Unload() is called on the outer 386 submanifest_path: The submanifest root relative to the repo root.
371 # manifest. 387 default_groups: a string, the default manifest groups to use.
372 self.manifestFileOverrides = {} 388 """
373 self.local_manifests = local_manifests 389 # TODO(vapier): Move this out of this class.
374 self._load_local_manifests = True 390 self.globalConfig = GitConfig.ForUser()
375 self.parent_groups = parent_groups 391
376 self.default_groups = default_groups 392 self.repodir = os.path.abspath(repodir)
377 393 self._CheckLocalPath(submanifest_path)
378 if outer_client and self.isGitcClient: 394 self.topdir = os.path.dirname(self.repodir)
379 raise ManifestParseError('Multi-manifest is incompatible with `gitc-init`') 395 if submanifest_path:
380 396 # This avoids a trailing os.path.sep when submanifest_path is empty.
381 if submanifest_path and not outer_client: 397 self.topdir = os.path.join(self.topdir, submanifest_path)
382 # If passing a submanifest_path, there must be an outer_client. 398 if manifest_file != os.path.abspath(manifest_file):
383 raise ManifestParseError(f'Bad call to {self.__class__.__name__}') 399 raise ManifestParseError("manifest_file must be abspath")
384 400 self.manifestFile = manifest_file
385 # If self._outer_client is None, this is not a checkout that supports 401 if not outer_client or outer_client == self:
386 # multi-tree. 402 # manifestFileOverrides only exists in the outer_client's manifest,
387 self._outer_client = outer_client or self 403 # since that is the only instance left when Unload() is called on
388 404 # the outer manifest.
389 self.repoProject = RepoProject(self, 'repo', 405 self.manifestFileOverrides = {}
390 gitdir=os.path.join(repodir, 'repo/.git'), 406 self.local_manifests = local_manifests
391 worktree=os.path.join(repodir, 'repo')) 407 self._load_local_manifests = True
392 408 self.parent_groups = parent_groups
393 mp = self.SubmanifestProject(self.path_prefix) 409 self.default_groups = default_groups
394 self.manifestProject = mp 410
395 411 if outer_client and self.isGitcClient:
396 # This is a bit hacky, but we're in a chicken & egg situation: all the 412 raise ManifestParseError(
397 # normal repo settings live in the manifestProject which we just setup 413 "Multi-manifest is incompatible with `gitc-init`"
398 # above, so we couldn't easily query before that. We assume Project() 414 )
399 # init doesn't care if this changes afterwards. 415
400 if os.path.exists(mp.gitdir) and mp.use_worktree: 416 if submanifest_path and not outer_client:
401 mp.use_git_worktrees = True 417 # If passing a submanifest_path, there must be an outer_client.
402 418 raise ManifestParseError(f"Bad call to {self.__class__.__name__}")
403 self.Unload() 419
404 420 # If self._outer_client is None, this is not a checkout that supports
405 def Override(self, name, load_local_manifests=True): 421 # multi-tree.
406 """Use a different manifest, just for the current instantiation. 422 self._outer_client = outer_client or self
407 """ 423
408 path = None 424 self.repoProject = RepoProject(
409 425 self,
410 # Look for a manifest by path in the filesystem (including the cwd). 426 "repo",
411 if not load_local_manifests: 427 gitdir=os.path.join(repodir, "repo/.git"),
412 local_path = os.path.abspath(name) 428 worktree=os.path.join(repodir, "repo"),
413 if os.path.isfile(local_path): 429 )
414 path = local_path 430
415 431 mp = self.SubmanifestProject(self.path_prefix)
416 # Look for manifests by name from the manifests repo. 432 self.manifestProject = mp
417 if path is None: 433
418 path = os.path.join(self.manifestProject.worktree, name) 434 # This is a bit hacky, but we're in a chicken & egg situation: all the
419 if not os.path.isfile(path): 435 # normal repo settings live in the manifestProject which we just setup
420 raise ManifestParseError('manifest %s not found' % name) 436 # above, so we couldn't easily query before that. We assume Project()
421 437 # init doesn't care if this changes afterwards.
422 self._load_local_manifests = load_local_manifests 438 if os.path.exists(mp.gitdir) and mp.use_worktree:
423 self._outer_client.manifestFileOverrides[self.path_prefix] = path 439 mp.use_git_worktrees = True
424 self.Unload() 440
425 self._Load() 441 self.Unload()
426 442
427 def Link(self, name): 443 def Override(self, name, load_local_manifests=True):
428 """Update the repo metadata to use a different manifest. 444 """Use a different manifest, just for the current instantiation."""
429 """ 445 path = None
430 self.Override(name) 446
431 447 # Look for a manifest by path in the filesystem (including the cwd).
432 # Old versions of repo would generate symlinks we need to clean up. 448 if not load_local_manifests:
433 platform_utils.remove(self.manifestFile, missing_ok=True) 449 local_path = os.path.abspath(name)
434 # This file is interpreted as if it existed inside the manifest repo. 450 if os.path.isfile(local_path):
435 # That allows us to use <include> with the relative file name. 451 path = local_path
436 with open(self.manifestFile, 'w') as fp: 452
437 fp.write("""<?xml version="1.0" encoding="UTF-8"?> 453 # Look for manifests by name from the manifests repo.
454 if path is None:
455 path = os.path.join(self.manifestProject.worktree, name)
456 if not os.path.isfile(path):
457 raise ManifestParseError("manifest %s not found" % name)
458
459 self._load_local_manifests = load_local_manifests
460 self._outer_client.manifestFileOverrides[self.path_prefix] = path
461 self.Unload()
462 self._Load()
463
464 def Link(self, name):
465 """Update the repo metadata to use a different manifest."""
466 self.Override(name)
467
468 # Old versions of repo would generate symlinks we need to clean up.
469 platform_utils.remove(self.manifestFile, missing_ok=True)
470 # This file is interpreted as if it existed inside the manifest repo.
471 # That allows us to use <include> with the relative file name.
472 with open(self.manifestFile, "w") as fp:
473 fp.write(
474 """<?xml version="1.0" encoding="UTF-8"?>
438<!-- 475<!--
439DO NOT EDIT THIS FILE! It is generated by repo and changes will be discarded. 476DO NOT EDIT THIS FILE! It is generated by repo and changes will be discarded.
440If you want to use a different manifest, use `repo init -m <file>` instead. 477If you want to use a different manifest, use `repo init -m <file>` instead.
@@ -448,1591 +485,1803 @@ https://gerrit.googlesource.com/git-repo/+/HEAD/docs/manifest-format.md
448<manifest> 485<manifest>
449 <include name="%s" /> 486 <include name="%s" />
450</manifest> 487</manifest>
451""" % (name,)) 488"""
452 489 % (name,)
453 def _RemoteToXml(self, r, doc, root): 490 )
454 e = doc.createElement('remote') 491
455 root.appendChild(e) 492 def _RemoteToXml(self, r, doc, root):
456 e.setAttribute('name', r.name) 493 e = doc.createElement("remote")
457 e.setAttribute('fetch', r.fetchUrl) 494 root.appendChild(e)
458 if r.pushUrl is not None: 495 e.setAttribute("name", r.name)
459 e.setAttribute('pushurl', r.pushUrl) 496 e.setAttribute("fetch", r.fetchUrl)
460 if r.remoteAlias is not None: 497 if r.pushUrl is not None:
461 e.setAttribute('alias', r.remoteAlias) 498 e.setAttribute("pushurl", r.pushUrl)
462 if r.reviewUrl is not None: 499 if r.remoteAlias is not None:
463 e.setAttribute('review', r.reviewUrl) 500 e.setAttribute("alias", r.remoteAlias)
464 if r.revision is not None: 501 if r.reviewUrl is not None:
465 e.setAttribute('revision', r.revision) 502 e.setAttribute("review", r.reviewUrl)
466 503 if r.revision is not None:
467 for a in r.annotations: 504 e.setAttribute("revision", r.revision)
468 if a.keep == 'true': 505
469 ae = doc.createElement('annotation') 506 for a in r.annotations:
470 ae.setAttribute('name', a.name) 507 if a.keep == "true":
471 ae.setAttribute('value', a.value) 508 ae = doc.createElement("annotation")
472 e.appendChild(ae) 509 ae.setAttribute("name", a.name)
473 510 ae.setAttribute("value", a.value)
474 def _SubmanifestToXml(self, r, doc, root): 511 e.appendChild(ae)
475 """Generate XML <submanifest/> node.""" 512
476 e = doc.createElement('submanifest') 513 def _SubmanifestToXml(self, r, doc, root):
477 root.appendChild(e) 514 """Generate XML <submanifest/> node."""
478 e.setAttribute('name', r.name) 515 e = doc.createElement("submanifest")
479 if r.remote is not None: 516 root.appendChild(e)
480 e.setAttribute('remote', r.remote) 517 e.setAttribute("name", r.name)
481 if r.project is not None: 518 if r.remote is not None:
482 e.setAttribute('project', r.project) 519 e.setAttribute("remote", r.remote)
483 if r.manifestName is not None: 520 if r.project is not None:
484 e.setAttribute('manifest-name', r.manifestName) 521 e.setAttribute("project", r.project)
485 if r.revision is not None: 522 if r.manifestName is not None:
486 e.setAttribute('revision', r.revision) 523 e.setAttribute("manifest-name", r.manifestName)
487 if r.path is not None: 524 if r.revision is not None:
488 e.setAttribute('path', r.path) 525 e.setAttribute("revision", r.revision)
489 if r.groups: 526 if r.path is not None:
490 e.setAttribute('groups', r.GetGroupsStr()) 527 e.setAttribute("path", r.path)
491 if r.default_groups: 528 if r.groups:
492 e.setAttribute('default-groups', r.GetDefaultGroupsStr()) 529 e.setAttribute("groups", r.GetGroupsStr())
493 530 if r.default_groups:
494 for a in r.annotations: 531 e.setAttribute("default-groups", r.GetDefaultGroupsStr())
495 if a.keep == 'true': 532
496 ae = doc.createElement('annotation') 533 for a in r.annotations:
497 ae.setAttribute('name', a.name) 534 if a.keep == "true":
498 ae.setAttribute('value', a.value) 535 ae = doc.createElement("annotation")
499 e.appendChild(ae) 536 ae.setAttribute("name", a.name)
500 537 ae.setAttribute("value", a.value)
501 def _ParseList(self, field): 538 e.appendChild(ae)
502 """Parse fields that contain flattened lists. 539
503 540 def _ParseList(self, field):
504 These are whitespace & comma separated. Empty elements will be discarded. 541 """Parse fields that contain flattened lists.
505 """ 542
506 return [x for x in re.split(r'[,\s]+', field) if x] 543 These are whitespace & comma separated. Empty elements will be
507 544 discarded.
508 def ToXml(self, peg_rev=False, peg_rev_upstream=True, 545 """
509 peg_rev_dest_branch=True, groups=None, omit_local=False): 546 return [x for x in re.split(r"[,\s]+", field) if x]
510 """Return the current manifest XML.""" 547
511 mp = self.manifestProject 548 def ToXml(
512 549 self,
513 if groups is None: 550 peg_rev=False,
514 groups = mp.manifest_groups 551 peg_rev_upstream=True,
515 if groups: 552 peg_rev_dest_branch=True,
516 groups = self._ParseList(groups) 553 groups=None,
517 554 omit_local=False,
518 doc = xml.dom.minidom.Document() 555 ):
519 root = doc.createElement('manifest') 556 """Return the current manifest XML."""
520 if self.is_submanifest: 557 mp = self.manifestProject
521 root.setAttribute('path', self.path_prefix) 558
522 doc.appendChild(root) 559 if groups is None:
523 560 groups = mp.manifest_groups
524 # Save out the notice. There's a little bit of work here to give it the 561 if groups:
525 # right whitespace, which assumes that the notice is automatically indented 562 groups = self._ParseList(groups)
526 # by 4 by minidom. 563
527 if self.notice: 564 doc = xml.dom.minidom.Document()
528 notice_element = root.appendChild(doc.createElement('notice')) 565 root = doc.createElement("manifest")
529 notice_lines = self.notice.splitlines() 566 if self.is_submanifest:
530 indented_notice = ('\n'.join(" " * 4 + line for line in notice_lines))[4:] 567 root.setAttribute("path", self.path_prefix)
531 notice_element.appendChild(doc.createTextNode(indented_notice)) 568 doc.appendChild(root)
532 569
533 d = self.default 570 # Save out the notice. There's a little bit of work here to give it the
534 571 # right whitespace, which assumes that the notice is automatically
535 for r in sorted(self.remotes): 572 # indented by 4 by minidom.
536 self._RemoteToXml(self.remotes[r], doc, root) 573 if self.notice:
537 if self.remotes: 574 notice_element = root.appendChild(doc.createElement("notice"))
538 root.appendChild(doc.createTextNode('')) 575 notice_lines = self.notice.splitlines()
539 576 indented_notice = (
540 have_default = False 577 "\n".join(" " * 4 + line for line in notice_lines)
541 e = doc.createElement('default') 578 )[4:]
542 if d.remote: 579 notice_element.appendChild(doc.createTextNode(indented_notice))
543 have_default = True 580
544 e.setAttribute('remote', d.remote.name) 581 d = self.default
545 if d.revisionExpr: 582
546 have_default = True 583 for r in sorted(self.remotes):
547 e.setAttribute('revision', d.revisionExpr) 584 self._RemoteToXml(self.remotes[r], doc, root)
548 if d.destBranchExpr: 585 if self.remotes:
549 have_default = True 586 root.appendChild(doc.createTextNode(""))
550 e.setAttribute('dest-branch', d.destBranchExpr) 587
551 if d.upstreamExpr: 588 have_default = False
552 have_default = True 589 e = doc.createElement("default")
553 e.setAttribute('upstream', d.upstreamExpr) 590 if d.remote:
554 if d.sync_j is not None: 591 have_default = True
555 have_default = True 592 e.setAttribute("remote", d.remote.name)
556 e.setAttribute('sync-j', '%d' % d.sync_j) 593 if d.revisionExpr:
557 if d.sync_c: 594 have_default = True
558 have_default = True 595 e.setAttribute("revision", d.revisionExpr)
559 e.setAttribute('sync-c', 'true') 596 if d.destBranchExpr:
560 if d.sync_s: 597 have_default = True
561 have_default = True 598 e.setAttribute("dest-branch", d.destBranchExpr)
562 e.setAttribute('sync-s', 'true') 599 if d.upstreamExpr:
563 if not d.sync_tags: 600 have_default = True
564 have_default = True 601 e.setAttribute("upstream", d.upstreamExpr)
565 e.setAttribute('sync-tags', 'false') 602 if d.sync_j is not None:
566 if have_default: 603 have_default = True
567 root.appendChild(e) 604 e.setAttribute("sync-j", "%d" % d.sync_j)
568 root.appendChild(doc.createTextNode('')) 605 if d.sync_c:
569 606 have_default = True
570 if self._manifest_server: 607 e.setAttribute("sync-c", "true")
571 e = doc.createElement('manifest-server') 608 if d.sync_s:
572 e.setAttribute('url', self._manifest_server) 609 have_default = True
573 root.appendChild(e) 610 e.setAttribute("sync-s", "true")
574 root.appendChild(doc.createTextNode('')) 611 if not d.sync_tags:
575 612 have_default = True
576 for r in sorted(self.submanifests): 613 e.setAttribute("sync-tags", "false")
577 self._SubmanifestToXml(self.submanifests[r], doc, root) 614 if have_default:
578 if self.submanifests: 615 root.appendChild(e)
579 root.appendChild(doc.createTextNode('')) 616 root.appendChild(doc.createTextNode(""))
580 617
581 def output_projects(parent, parent_node, projects): 618 if self._manifest_server:
582 for project_name in projects: 619 e = doc.createElement("manifest-server")
583 for project in self._projects[project_name]: 620 e.setAttribute("url", self._manifest_server)
584 output_project(parent, parent_node, project) 621 root.appendChild(e)
585 622 root.appendChild(doc.createTextNode(""))
586 def output_project(parent, parent_node, p): 623
587 if not p.MatchesGroups(groups): 624 for r in sorted(self.submanifests):
588 return 625 self._SubmanifestToXml(self.submanifests[r], doc, root)
589 626 if self.submanifests:
590 if omit_local and self.IsFromLocalManifest(p): 627 root.appendChild(doc.createTextNode(""))
591 return 628
592 629 def output_projects(parent, parent_node, projects):
593 name = p.name 630 for project_name in projects:
594 relpath = p.relpath 631 for project in self._projects[project_name]:
595 if parent: 632 output_project(parent, parent_node, project)
596 name = self._UnjoinName(parent.name, name) 633
597 relpath = self._UnjoinRelpath(parent.relpath, relpath) 634 def output_project(parent, parent_node, p):
598 635 if not p.MatchesGroups(groups):
599 e = doc.createElement('project') 636 return
600 parent_node.appendChild(e) 637
601 e.setAttribute('name', name) 638 if omit_local and self.IsFromLocalManifest(p):
602 if relpath != name: 639 return
603 e.setAttribute('path', relpath) 640
604 remoteName = None 641 name = p.name
605 if d.remote: 642 relpath = p.relpath
606 remoteName = d.remote.name 643 if parent:
607 if not d.remote or p.remote.orig_name != remoteName: 644 name = self._UnjoinName(parent.name, name)
608 remoteName = p.remote.orig_name 645 relpath = self._UnjoinRelpath(parent.relpath, relpath)
609 e.setAttribute('remote', remoteName) 646
610 if peg_rev: 647 e = doc.createElement("project")
611 if self.IsMirror: 648 parent_node.appendChild(e)
612 value = p.bare_git.rev_parse(p.revisionExpr + '^0') 649 e.setAttribute("name", name)
613 else: 650 if relpath != name:
614 value = p.work_git.rev_parse(HEAD + '^0') 651 e.setAttribute("path", relpath)
615 e.setAttribute('revision', value) 652 remoteName = None
616 if peg_rev_upstream: 653 if d.remote:
617 if p.upstream: 654 remoteName = d.remote.name
618 e.setAttribute('upstream', p.upstream) 655 if not d.remote or p.remote.orig_name != remoteName:
619 elif value != p.revisionExpr: 656 remoteName = p.remote.orig_name
620 # Only save the origin if the origin is not a sha1, and the default 657 e.setAttribute("remote", remoteName)
621 # isn't our value 658 if peg_rev:
622 e.setAttribute('upstream', p.revisionExpr) 659 if self.IsMirror:
623 660 value = p.bare_git.rev_parse(p.revisionExpr + "^0")
624 if peg_rev_dest_branch: 661 else:
625 if p.dest_branch: 662 value = p.work_git.rev_parse(HEAD + "^0")
626 e.setAttribute('dest-branch', p.dest_branch) 663 e.setAttribute("revision", value)
627 elif value != p.revisionExpr: 664 if peg_rev_upstream:
628 e.setAttribute('dest-branch', p.revisionExpr) 665 if p.upstream:
629 666 e.setAttribute("upstream", p.upstream)
630 else: 667 elif value != p.revisionExpr:
631 revision = self.remotes[p.remote.orig_name].revision or d.revisionExpr 668 # Only save the origin if the origin is not a sha1, and
632 if not revision or revision != p.revisionExpr: 669 # the default isn't our value
633 e.setAttribute('revision', p.revisionExpr) 670 e.setAttribute("upstream", p.revisionExpr)
634 elif p.revisionId: 671
635 e.setAttribute('revision', p.revisionId) 672 if peg_rev_dest_branch:
636 if (p.upstream and (p.upstream != p.revisionExpr or 673 if p.dest_branch:
637 p.upstream != d.upstreamExpr)): 674 e.setAttribute("dest-branch", p.dest_branch)
638 e.setAttribute('upstream', p.upstream) 675 elif value != p.revisionExpr:
639 676 e.setAttribute("dest-branch", p.revisionExpr)
640 if p.dest_branch and p.dest_branch != d.destBranchExpr: 677
641 e.setAttribute('dest-branch', p.dest_branch) 678 else:
642 679 revision = (
643 for c in p.copyfiles: 680 self.remotes[p.remote.orig_name].revision or d.revisionExpr
644 ce = doc.createElement('copyfile') 681 )
645 ce.setAttribute('src', c.src) 682 if not revision or revision != p.revisionExpr:
646 ce.setAttribute('dest', c.dest) 683 e.setAttribute("revision", p.revisionExpr)
647 e.appendChild(ce) 684 elif p.revisionId:
648 685 e.setAttribute("revision", p.revisionId)
649 for l in p.linkfiles: 686 if p.upstream and (
650 le = doc.createElement('linkfile') 687 p.upstream != p.revisionExpr or p.upstream != d.upstreamExpr
651 le.setAttribute('src', l.src) 688 ):
652 le.setAttribute('dest', l.dest) 689 e.setAttribute("upstream", p.upstream)
653 e.appendChild(le) 690
654 691 if p.dest_branch and p.dest_branch != d.destBranchExpr:
655 default_groups = ['all', 'name:%s' % p.name, 'path:%s' % p.relpath] 692 e.setAttribute("dest-branch", p.dest_branch)
656 egroups = [g for g in p.groups if g not in default_groups] 693
657 if egroups: 694 for c in p.copyfiles:
658 e.setAttribute('groups', ','.join(egroups)) 695 ce = doc.createElement("copyfile")
659 696 ce.setAttribute("src", c.src)
660 for a in p.annotations: 697 ce.setAttribute("dest", c.dest)
661 if a.keep == "true": 698 e.appendChild(ce)
662 ae = doc.createElement('annotation') 699
663 ae.setAttribute('name', a.name) 700 for lf in p.linkfiles:
664 ae.setAttribute('value', a.value) 701 le = doc.createElement("linkfile")
665 e.appendChild(ae) 702 le.setAttribute("src", lf.src)
666 703 le.setAttribute("dest", lf.dest)
667 if p.sync_c: 704 e.appendChild(le)
668 e.setAttribute('sync-c', 'true') 705
669 706 default_groups = ["all", "name:%s" % p.name, "path:%s" % p.relpath]
670 if p.sync_s: 707 egroups = [g for g in p.groups if g not in default_groups]
671 e.setAttribute('sync-s', 'true') 708 if egroups:
672 709 e.setAttribute("groups", ",".join(egroups))
673 if not p.sync_tags: 710
674 e.setAttribute('sync-tags', 'false') 711 for a in p.annotations:
675 712 if a.keep == "true":
676 if p.clone_depth: 713 ae = doc.createElement("annotation")
677 e.setAttribute('clone-depth', str(p.clone_depth)) 714 ae.setAttribute("name", a.name)
678 715 ae.setAttribute("value", a.value)
679 self._output_manifest_project_extras(p, e) 716 e.appendChild(ae)
680 717
681 if p.subprojects: 718 if p.sync_c:
682 subprojects = set(subp.name for subp in p.subprojects) 719 e.setAttribute("sync-c", "true")
683 output_projects(p, e, list(sorted(subprojects))) 720
684 721 if p.sync_s:
685 projects = set(p.name for p in self._paths.values() if not p.parent) 722 e.setAttribute("sync-s", "true")
686 output_projects(None, root, list(sorted(projects))) 723
687 724 if not p.sync_tags:
688 if self._repo_hooks_project: 725 e.setAttribute("sync-tags", "false")
689 root.appendChild(doc.createTextNode('')) 726
690 e = doc.createElement('repo-hooks') 727 if p.clone_depth:
691 e.setAttribute('in-project', self._repo_hooks_project.name) 728 e.setAttribute("clone-depth", str(p.clone_depth))
692 e.setAttribute('enabled-list', 729
693 ' '.join(self._repo_hooks_project.enabled_repo_hooks)) 730 self._output_manifest_project_extras(p, e)
694 root.appendChild(e) 731
695 732 if p.subprojects:
696 if self._superproject: 733 subprojects = set(subp.name for subp in p.subprojects)
697 root.appendChild(doc.createTextNode('')) 734 output_projects(p, e, list(sorted(subprojects)))
698 e = doc.createElement('superproject') 735
699 e.setAttribute('name', self._superproject.name) 736 projects = set(p.name for p in self._paths.values() if not p.parent)
700 remoteName = None 737 output_projects(None, root, list(sorted(projects)))
701 if d.remote: 738
702 remoteName = d.remote.name 739 if self._repo_hooks_project:
703 remote = self._superproject.remote 740 root.appendChild(doc.createTextNode(""))
704 if not d.remote or remote.orig_name != remoteName: 741 e = doc.createElement("repo-hooks")
705 remoteName = remote.orig_name 742 e.setAttribute("in-project", self._repo_hooks_project.name)
706 e.setAttribute('remote', remoteName) 743 e.setAttribute(
707 revision = remote.revision or d.revisionExpr 744 "enabled-list",
708 if not revision or revision != self._superproject.revision: 745 " ".join(self._repo_hooks_project.enabled_repo_hooks),
709 e.setAttribute('revision', self._superproject.revision) 746 )
710 root.appendChild(e) 747 root.appendChild(e)
711
712 if self._contactinfo.bugurl != Wrapper().BUG_URL:
713 root.appendChild(doc.createTextNode(''))
714 e = doc.createElement('contactinfo')
715 e.setAttribute('bugurl', self._contactinfo.bugurl)
716 root.appendChild(e)
717
718 return doc
719
720 def ToDict(self, **kwargs):
721 """Return the current manifest as a dictionary."""
722 # Elements that may only appear once.
723 SINGLE_ELEMENTS = {
724 'notice',
725 'default',
726 'manifest-server',
727 'repo-hooks',
728 'superproject',
729 'contactinfo',
730 }
731 # Elements that may be repeated.
732 MULTI_ELEMENTS = {
733 'remote',
734 'remove-project',
735 'project',
736 'extend-project',
737 'include',
738 'submanifest',
739 # These are children of 'project' nodes.
740 'annotation',
741 'project',
742 'copyfile',
743 'linkfile',
744 }
745
746 doc = self.ToXml(**kwargs)
747 ret = {}
748
749 def append_children(ret, node):
750 for child in node.childNodes:
751 if child.nodeType == xml.dom.Node.ELEMENT_NODE:
752 attrs = child.attributes
753 element = dict((attrs.item(i).localName, attrs.item(i).value)
754 for i in range(attrs.length))
755 if child.nodeName in SINGLE_ELEMENTS:
756 ret[child.nodeName] = element
757 elif child.nodeName in MULTI_ELEMENTS:
758 ret.setdefault(child.nodeName, []).append(element)
759 else:
760 raise ManifestParseError('Unhandled element "%s"' % (child.nodeName,))
761
762 append_children(element, child)
763
764 append_children(ret, doc.firstChild)
765
766 return ret
767
768 def Save(self, fd, **kwargs):
769 """Write the current manifest out to the given file descriptor."""
770 doc = self.ToXml(**kwargs)
771 doc.writexml(fd, '', ' ', '\n', 'UTF-8')
772
773 def _output_manifest_project_extras(self, p, e):
774 """Manifests can modify e if they support extra project attributes."""
775
@property
def is_multimanifest(self):
    """Whether this is a multimanifest checkout.

    Safe to use once the outermost manifest XML has been parsed.
    """
    return bool(self._outer_client._submanifests)

@property
def is_submanifest(self):
    """Whether this manifest is a submanifest.

    Safe to use once the outermost manifest XML has been parsed.
    """
    return self._outer_client and self._outer_client != self

@property
def outer_client(self):
    """The instance of the outermost manifest client."""
    self._Load()
    return self._outer_client

@property
def all_manifests(self):
    """Generator yielding all (sub)manifests, in depth-first order."""
    self._Load()
    outer = self._outer_client
    yield outer
    yield from outer.all_children

@property
def all_children(self):
    """Generator yielding all (present) child submanifests."""
    self._Load()
    for child in self._submanifests.values():
        if child.repo_client:
            yield child.repo_client
            yield from child.repo_client.all_children

@property
def path_prefix(self):
    """The path of this submanifest, relative to the outermost manifest."""
    if not self._outer_client or self == self._outer_client:
        return ""
    return os.path.relpath(self.topdir, self._outer_client.topdir)
823
@property
def all_paths(self):
    """All project paths for all (sub)manifests.

    See also `paths`.

    Returns:
        {path: Project()} where `path` is relative to the outer manifest.
    """
    ret = {}
    for tree in self.all_manifests:
        prefix = tree.path_prefix
        for relpath, proj in tree.paths.items():
            ret[os.path.join(prefix, relpath)] = proj
    return ret

@property
def all_projects(self):
    """All projects for all (sub)manifests. See `projects`."""
    return [p for m in self.all_manifests for p in m._paths.values()]

@property
def paths(self):
    """Return {path: Project()} for this manifest.

    Paths are relative to this manifest.
    """
    self._Load()
    return self._paths

@property
def projects(self):
    """Return a list of all Projects in this manifest."""
    self._Load()
    return list(self._paths.values())
860
@property
def remotes(self):
    """Return a list of remotes for this manifest."""
    self._Load()
    return self._remotes

@property
def default(self):
    """Return default values for this manifest."""
    self._Load()
    return self._default

@property
def submanifests(self):
    """All submanifests in this manifest."""
    self._Load()
    return self._submanifests

@property
def repo_hooks_project(self):
    """The parsed repo-hooks project, if any."""
    self._Load()
    return self._repo_hooks_project

@property
def superproject(self):
    """The parsed superproject element, if any."""
    self._Load()
    return self._superproject

@property
def contactinfo(self):
    """The parsed contactinfo element."""
    self._Load()
    return self._contactinfo

@property
def notice(self):
    """The parsed notice element, if any."""
    self._Load()
    return self._notice

@property
def manifest_server(self):
    """The manifest-server URL, if any."""
    self._Load()
    return self._manifest_server

@property
def CloneBundle(self):
    """Whether to use clone bundles; defaults to off for partial clones."""
    clone_bundle = self.manifestProject.clone_bundle
    if clone_bundle is None:
        return not self.manifestProject.partial_clone
    return clone_bundle
911
@property
def CloneFilter(self):
    """The partial-clone filter, or None when partial clone is off."""
    if self.manifestProject.partial_clone:
        return self.manifestProject.clone_filter
    return None

@property
def PartialCloneExclude(self):
    """Set of names excluded from partial clone (comma-separated config)."""
    exclude = self.manifest.manifestProject.partial_clone_exclude or ""
    return {x.strip() for x in exclude.split(",")}

def SetManifestOverride(self, path):
    """Override manifestFile. The caller must call Unload()"""
    self._outer_client.manifest.manifestFileOverrides[self.path_prefix] = path

@property
def UseLocalManifests(self):
    """Whether loading of local manifests is enabled."""
    return self._load_local_manifests

def SetUseLocalManifests(self, value):
    """Enable or disable loading of local manifests."""
    self._load_local_manifests = value

@property
def HasLocalManifests(self):
    """Whether local manifests are enabled and present."""
    return self._load_local_manifests and self.local_manifests

def IsFromLocalManifest(self, project):
    """Is the project from a local manifest?"""
    return any(
        x.startswith(LOCAL_MANIFEST_GROUP_PREFIX) for x in project.groups
    )
942
@property
def IsMirror(self):
    """Forwards manifestProject.mirror."""
    return self.manifestProject.mirror

@property
def UseGitWorktrees(self):
    """Forwards manifestProject.use_worktree."""
    return self.manifestProject.use_worktree

@property
def IsArchive(self):
    """Forwards manifestProject.archive."""
    return self.manifestProject.archive

@property
def HasSubmodules(self):
    """Forwards manifestProject.submodules."""
    return self.manifestProject.submodules

@property
def EnableGitLfs(self):
    """Forwards manifestProject.git_lfs."""
    return self.manifestProject.git_lfs
962
def FindManifestByPath(self, path):
    """Returns the manifest containing path."""
    path = os.path.abspath(path)
    manifest = self._outer_client or self
    previous = None
    # Descend into whichever submanifest tree contains `path` until no
    # deeper match is found.
    while manifest._submanifests and manifest != previous:
        previous = manifest
        for tree in manifest._submanifests.values():
            if path.startswith(tree.repo_client.manifest.topdir):
                manifest = tree.repo_client
                break
    return manifest
976
@property
def subdir(self):
    """Returns the path for per-submanifest objects for this manifest."""
    return self.SubmanifestInfoDir(self.path_prefix)

def SubmanifestInfoDir(self, submanifest_path, object_path=""):
    """Return the path to submanifest-specific info for a submanifest.

    Args:
        submanifest_path: path of the submanifest relative to the
            outermost topdir; if empty, repodir itself is used.
        object_path: relative path appended to the info directory.
    """
    if not submanifest_path:
        return os.path.join(self.repodir, object_path)
    return os.path.join(
        self.repodir, SUBMANIFEST_DIR, submanifest_path, object_path
    )
998
def SubmanifestProject(self, submanifest_path):
    """Return a manifestProject for a submanifest."""
    subdir = self.SubmanifestInfoDir(submanifest_path)
    return ManifestProject(
        self,
        "manifests",
        gitdir=os.path.join(subdir, "manifests.git"),
        worktree=os.path.join(subdir, "manifests"),
    )
1006
def GetDefaultGroupsStr(self, with_platform=True):
    """Returns the default group string to use.

    Args:
        with_platform: whether to append the group for the underlying
            platform (e.g. 'platform-linux').
    """
    parts = list(self.default_groups or ["default"])
    if with_platform:
        parts.append(f"platform-{platform.system().lower()}")
    return ",".join(parts)

def GetGroupsStr(self):
    """Returns the manifest group string that should be synced."""
    return self.manifestProject.manifest_groups or self.GetDefaultGroupsStr()
1022
def Unload(self):
    """Unload the manifest.

    If the manifest files changed since Load() was called, the next
    load picks up the new/updated manifest.
    """
    self._loaded = False
    self.branch = None
    self._projects = {}
    self._paths = {}
    self._remotes = {}
    self._submanifests = {}
    self._default = None
    self._notice = None
    self._manifest_server = None
    self._repo_hooks_project = None
    self._superproject = None
    self._contactinfo = ContactInfo(Wrapper().BUG_URL)
1042
def Load(self):
    """Read the manifest into memory.

    Public wrapper that hides _Load()'s internal arguments.
    """
    self._Load()
1047
1048 def _Load(self, initial_client=None, submanifest_depth=0):
1049 if submanifest_depth > MAX_SUBMANIFEST_DEPTH:
1050 raise ManifestParseError('maximum submanifest depth %d exceeded.' %
1051 MAX_SUBMANIFEST_DEPTH)
1052 if not self._loaded:
1053 if self._outer_client and self._outer_client != self:
1054 # This will load all clients.
1055 self._outer_client._Load(initial_client=self)
1056
1057 savedManifestFile = self.manifestFile
1058 override = self._outer_client.manifestFileOverrides.get(self.path_prefix)
1059 if override:
1060 self.manifestFile = override
1061
1062 try:
1063 m = self.manifestProject
1064 b = m.GetBranch(m.CurrentBranch).merge
1065 if b is not None and b.startswith(R_HEADS):
1066 b = b[len(R_HEADS):]
1067 self.branch = b
1068
1069 parent_groups = self.parent_groups
1070 if self.path_prefix:
1071 parent_groups = f'{SUBMANIFEST_GROUP_PREFIX}:path:{self.path_prefix},{parent_groups}'
1072
1073 # The manifestFile was specified by the user which is why we allow include
1074 # paths to point anywhere.
1075 nodes = []
1076 nodes.append(self._ParseManifestXml(
1077 self.manifestFile, self.manifestProject.worktree,
1078 parent_groups=parent_groups, restrict_includes=False))
1079
1080 if self._load_local_manifests and self.local_manifests:
1081 try:
1082 for local_file in sorted(platform_utils.listdir(self.local_manifests)):
1083 if local_file.endswith('.xml'):
1084 local = os.path.join(self.local_manifests, local_file)
1085 # Since local manifests are entirely managed by the user, allow
1086 # them to point anywhere the user wants.
1087 local_group = f'{LOCAL_MANIFEST_GROUP_PREFIX}:{local_file[:-4]}'
1088 nodes.append(self._ParseManifestXml(
1089 local, self.subdir,
1090 parent_groups=f'{local_group},{parent_groups}',
1091 restrict_includes=False))
1092 except OSError:
1093 pass
1094 748
749 if self._superproject:
750 root.appendChild(doc.createTextNode(""))
751 e = doc.createElement("superproject")
752 e.setAttribute("name", self._superproject.name)
753 remoteName = None
754 if d.remote:
755 remoteName = d.remote.name
756 remote = self._superproject.remote
757 if not d.remote or remote.orig_name != remoteName:
758 remoteName = remote.orig_name
759 e.setAttribute("remote", remoteName)
760 revision = remote.revision or d.revisionExpr
761 if not revision or revision != self._superproject.revision:
762 e.setAttribute("revision", self._superproject.revision)
763 root.appendChild(e)
764
765 if self._contactinfo.bugurl != Wrapper().BUG_URL:
766 root.appendChild(doc.createTextNode(""))
767 e = doc.createElement("contactinfo")
768 e.setAttribute("bugurl", self._contactinfo.bugurl)
769 root.appendChild(e)
770
771 return doc
772
def ToDict(self, **kwargs):
    """Return the current manifest as a dictionary.

    Serializes via ToXml() and flattens the DOM: each element becomes a
    dict of its attributes, with child elements nested inside it.

    Raises:
        ManifestParseError: if an element outside the known sets appears.
    """
    # Elements that may only appear once.
    SINGLE_ELEMENTS = {
        "notice",
        "default",
        "manifest-server",
        "repo-hooks",
        "superproject",
        "contactinfo",
    }
    # Elements that may be repeated.  This includes children of 'project'
    # nodes (annotation/project/copyfile/linkfile); the original listed
    # "project" twice, which is redundant in a set.
    MULTI_ELEMENTS = {
        "remote",
        "remove-project",
        "project",
        "extend-project",
        "include",
        "submanifest",
        "annotation",
        "copyfile",
        "linkfile",
    }

    doc = self.ToXml(**kwargs)
    ret = {}

    def append_children(ret, node):
        for child in node.childNodes:
            if child.nodeType == xml.dom.Node.ELEMENT_NODE:
                attrs = child.attributes
                element = {
                    attrs.item(i).localName: attrs.item(i).value
                    for i in range(attrs.length)
                }
                if child.nodeName in SINGLE_ELEMENTS:
                    ret[child.nodeName] = element
                elif child.nodeName in MULTI_ELEMENTS:
                    ret.setdefault(child.nodeName, []).append(element)
                else:
                    raise ManifestParseError(
                        'Unhandled element "%s"' % (child.nodeName,)
                    )
                # Recurse so children of this element are captured too.
                append_children(element, child)

    append_children(ret, doc.firstChild)
    return ret
824
def Save(self, fd, **kwargs):
    """Write the current manifest out to the given file descriptor."""
    self.ToXml(**kwargs).writexml(fd, "", " ", "\n", "UTF-8")

def _output_manifest_project_extras(self, p, e):
    """Hook: subclasses may modify e to emit extra project attributes."""
832
@property
def is_multimanifest(self):
    """Whether this is a multimanifest checkout.

    Safe to use once the outermost manifest XML has been parsed.
    """
    return bool(self._outer_client._submanifests)

@property
def is_submanifest(self):
    """Whether this manifest is a submanifest.

    Safe to use once the outermost manifest XML has been parsed.
    """
    return self._outer_client and self._outer_client != self

@property
def outer_client(self):
    """The instance of the outermost manifest client."""
    self._Load()
    return self._outer_client

@property
def all_manifests(self):
    """Generator yielding all (sub)manifests, in depth-first order."""
    self._Load()
    outer = self._outer_client
    yield outer
    yield from outer.all_children

@property
def all_children(self):
    """Generator yielding all (present) child submanifests."""
    self._Load()
    for child in self._submanifests.values():
        if child.repo_client:
            yield child.repo_client
            yield from child.repo_client.all_children

@property
def path_prefix(self):
    """The path of this submanifest, relative to the outermost manifest."""
    if not self._outer_client or self == self._outer_client:
        return ""
    return os.path.relpath(self.topdir, self._outer_client.topdir)
882
@property
def all_paths(self):
    """All project paths for all (sub)manifests.

    See also `paths`.

    Returns:
        {path: Project()} where `path` is relative to the outer manifest.
    """
    ret = {}
    for tree in self.all_manifests:
        prefix = tree.path_prefix
        for relpath, proj in tree.paths.items():
            ret[os.path.join(prefix, relpath)] = proj
    return ret

@property
def all_projects(self):
    """All projects for all (sub)manifests. See `projects`."""
    return [p for m in self.all_manifests for p in m._paths.values()]

@property
def paths(self):
    """Return {path: Project()} for this manifest.

    Paths are relative to this manifest.
    """
    self._Load()
    return self._paths

@property
def projects(self):
    """Return a list of all Projects in this manifest."""
    self._Load()
    return list(self._paths.values())
926
@property
def remotes(self):
    """Return a list of remotes for this manifest."""
    self._Load()
    return self._remotes

@property
def default(self):
    """Return default values for this manifest."""
    self._Load()
    return self._default

@property
def submanifests(self):
    """All submanifests in this manifest."""
    self._Load()
    return self._submanifests

@property
def repo_hooks_project(self):
    """The parsed repo-hooks project, if any."""
    self._Load()
    return self._repo_hooks_project

@property
def superproject(self):
    """The parsed superproject element, if any."""
    self._Load()
    return self._superproject

@property
def contactinfo(self):
    """The parsed contactinfo element."""
    self._Load()
    return self._contactinfo

@property
def notice(self):
    """The parsed notice element, if any."""
    self._Load()
    return self._notice

@property
def manifest_server(self):
    """The manifest-server URL, if any."""
    self._Load()
    return self._manifest_server

@property
def CloneBundle(self):
    """Whether to use clone bundles; defaults to off for partial clones."""
    clone_bundle = self.manifestProject.clone_bundle
    if clone_bundle is None:
        return not self.manifestProject.partial_clone
    return clone_bundle
977
@property
def CloneFilter(self):
    """The partial-clone filter, or None when partial clone is off."""
    if self.manifestProject.partial_clone:
        return self.manifestProject.clone_filter
    return None

@property
def PartialCloneExclude(self):
    """Set of names excluded from partial clone (comma-separated config)."""
    exclude = self.manifest.manifestProject.partial_clone_exclude or ""
    return {x.strip() for x in exclude.split(",")}

def SetManifestOverride(self, path):
    """Override manifestFile. The caller must call Unload()"""
    self._outer_client.manifest.manifestFileOverrides[self.path_prefix] = path

@property
def UseLocalManifests(self):
    """Whether loading of local manifests is enabled."""
    return self._load_local_manifests

def SetUseLocalManifests(self, value):
    """Enable or disable loading of local manifests."""
    self._load_local_manifests = value

@property
def HasLocalManifests(self):
    """Whether local manifests are enabled and present."""
    return self._load_local_manifests and self.local_manifests

def IsFromLocalManifest(self, project):
    """Is the project from a local manifest?"""
    return any(
        x.startswith(LOCAL_MANIFEST_GROUP_PREFIX) for x in project.groups
    )
1011
@property
def IsMirror(self):
    """Forwards manifestProject.mirror."""
    return self.manifestProject.mirror

@property
def UseGitWorktrees(self):
    """Forwards manifestProject.use_worktree."""
    return self.manifestProject.use_worktree

@property
def IsArchive(self):
    """Forwards manifestProject.archive."""
    return self.manifestProject.archive

@property
def HasSubmodules(self):
    """Forwards manifestProject.submodules."""
    return self.manifestProject.submodules

@property
def EnableGitLfs(self):
    """Forwards manifestProject.git_lfs."""
    return self.manifestProject.git_lfs
1031
def FindManifestByPath(self, path):
    """Returns the manifest containing path."""
    path = os.path.abspath(path)
    manifest = self._outer_client or self
    previous = None
    # Descend into whichever submanifest tree contains `path` until no
    # deeper match is found.
    while manifest._submanifests and manifest != previous:
        previous = manifest
        for tree in manifest._submanifests.values():
            if path.startswith(tree.repo_client.manifest.topdir):
                manifest = tree.repo_client
                break
    return manifest
1045
@property
def subdir(self):
    """Returns the path for per-submanifest objects for this manifest."""
    return self.SubmanifestInfoDir(self.path_prefix)

def SubmanifestInfoDir(self, submanifest_path, object_path=""):
    """Return the path to submanifest-specific info for a submanifest.

    Args:
        submanifest_path: path of the submanifest relative to the
            outermost topdir; if empty, repodir itself is used.
        object_path: relative path appended to the info directory.
    """
    if not submanifest_path:
        return os.path.join(self.repodir, object_path)
    return os.path.join(
        self.repodir, SUBMANIFEST_DIR, submanifest_path, object_path
    )
1069
def SubmanifestProject(self, submanifest_path):
    """Return a manifestProject for a submanifest."""
    subdir = self.SubmanifestInfoDir(submanifest_path)
    return ManifestProject(
        self,
        "manifests",
        gitdir=os.path.join(subdir, "manifests.git"),
        worktree=os.path.join(subdir, "manifests"),
    )
1080
def GetDefaultGroupsStr(self, with_platform=True):
    """Returns the default group string to use.

    Args:
        with_platform: whether to append the group for the underlying
            platform (e.g. 'platform-linux').
    """
    parts = list(self.default_groups or ["default"])
    if with_platform:
        parts.append(f"platform-{platform.system().lower()}")
    return ",".join(parts)

def GetGroupsStr(self):
    """Returns the manifest group string that should be synced."""
    return self.manifestProject.manifest_groups or self.GetDefaultGroupsStr()
1098
def Unload(self):
    """Unload the manifest.

    If the manifest files changed since Load() was called, the next
    load picks up the new/updated manifest.
    """
    self._loaded = False
    self.branch = None
    self._projects = {}
    self._paths = {}
    self._remotes = {}
    self._submanifests = {}
    self._default = None
    self._notice = None
    self._manifest_server = None
    self._repo_hooks_project = None
    self._superproject = None
    self._contactinfo = ContactInfo(Wrapper().BUG_URL)
1118
def Load(self):
    """Read the manifest into memory.

    Public wrapper that hides _Load()'s internal arguments.
    """
    self._Load()
1123
def _Load(self, initial_client=None, submanifest_depth=0):
    """Load this manifest, and recursively any present submanifests.

    Args:
        initial_client: the client that initiated this load chain.
        submanifest_depth: recursion depth, bounded by
            MAX_SUBMANIFEST_DEPTH.
    """
    if submanifest_depth > MAX_SUBMANIFEST_DEPTH:
        raise ManifestParseError(
            "maximum submanifest depth %d exceeded." % MAX_SUBMANIFEST_DEPTH
        )
    if not self._loaded:
        if self._outer_client and self._outer_client != self:
            # This will load all clients.
            self._outer_client._Load(initial_client=self)

        # Honor a per-submanifest manifest-file override for the duration
        # of the parse, restoring the original afterwards.
        savedManifestFile = self.manifestFile
        override = self._outer_client.manifestFileOverrides.get(
            self.path_prefix
        )
        if override:
            self.manifestFile = override

        try:
            m = self.manifestProject
            b = m.GetBranch(m.CurrentBranch).merge
            if b is not None and b.startswith(R_HEADS):
                b = b[len(R_HEADS):]
            self.branch = b

            parent_groups = self.parent_groups
            if self.path_prefix:
                parent_groups = (
                    f"{SUBMANIFEST_GROUP_PREFIX}:path:"
                    f"{self.path_prefix},{parent_groups}"
                )

            # The manifestFile was specified by the user which is why we
            # allow include paths to point anywhere.
            nodes = [
                self._ParseManifestXml(
                    self.manifestFile,
                    self.manifestProject.worktree,
                    parent_groups=parent_groups,
                    restrict_includes=False,
                )
            ]

            if self._load_local_manifests and self.local_manifests:
                try:
                    for local_file in sorted(
                        platform_utils.listdir(self.local_manifests)
                    ):
                        if not local_file.endswith(".xml"):
                            continue
                        local = os.path.join(
                            self.local_manifests, local_file
                        )
                        # Since local manifests are entirely managed by
                        # the user, allow them to point anywhere the user
                        # wants.
                        local_group = (
                            f"{LOCAL_MANIFEST_GROUP_PREFIX}:"
                            f"{local_file[:-4]}"
                        )
                        nodes.append(
                            self._ParseManifestXml(
                                local,
                                self.subdir,
                                parent_groups=(
                                    f"{local_group},{parent_groups}"
                                ),
                                restrict_includes=False,
                            )
                        )
                except OSError:
                    pass

            try:
                self._ParseManifest(nodes)
            except ManifestParseError as e:
                # Unload so a retry starts from a clean slate and shows
                # the same (correct) error.
                self.Unload()
                raise e

            if self.IsMirror:
                self._AddMetaProjectMirror(self.repoProject)
                self._AddMetaProjectMirror(self.manifestProject)

            self._loaded = True
        finally:
            if override:
                self.manifestFile = savedManifestFile

        # Now that this manifest is loaded, load any submanifests as
        # well.  This must happen after self._loaded is set to avoid
        # looping.
        for name in self._submanifests:
            tree = self._submanifests[name]
            tree.ToSubmanifestSpec()
            present = os.path.exists(
                os.path.join(self.subdir, MANIFEST_FILE_NAME)
            )
            if present and tree.present and not tree.repo_client:
                if initial_client and initial_client.topdir == self.topdir:
                    tree.repo_client = self
                    tree.present = present
            elif not os.path.exists(self.subdir):
                tree.present = False
            if present and tree.present:
                tree.repo_client._Load(
                    initial_client=initial_client,
                    submanifest_depth=submanifest_depth + 1,
                )
1235 def _ParseManifestXml(
1236 self, path, include_root, parent_groups="", restrict_includes=True
1237 ):
1238 """Parse a manifest XML and return the computed nodes.
1239
1240 Args:
1241 path: The XML file to read & parse.
1242 include_root: The path to interpret include "name"s relative to.
1243 parent_groups: The groups to apply to this projects.
1244 restrict_includes: Whether to constrain the "name" attribute of
1245 includes.
1246
1247 Returns:
1248 List of XML nodes.
1249 """
1095 try: 1250 try:
1096 self._ParseManifest(nodes) 1251 root = xml.dom.minidom.parse(path)
1097 except ManifestParseError as e: 1252 except (OSError, xml.parsers.expat.ExpatError) as e:
1098 # There was a problem parsing, unload ourselves in case they catch 1253 raise ManifestParseError(
1099 # this error and try again later, we will show the correct error 1254 "error parsing manifest %s: %s" % (path, e)
1100 self.Unload() 1255 )
1101 raise e 1256
1102 1257 if not root or not root.childNodes:
1103 if self.IsMirror: 1258 raise ManifestParseError("no root node in %s" % (path,))
1104 self._AddMetaProjectMirror(self.repoProject) 1259
1105 self._AddMetaProjectMirror(self.manifestProject) 1260 for manifest in root.childNodes:
1106 1261 if manifest.nodeName == "manifest":
1107 self._loaded = True 1262 break
1108 finally: 1263 else:
1109 if override: 1264 raise ManifestParseError("no <manifest> in %s" % (path,))
1110 self.manifestFile = savedManifestFile
1111
1112 # Now that we have loaded this manifest, load any submanifests as well.
1113 # We need to do this after self._loaded is set to avoid looping.
1114 for name in self._submanifests:
1115 tree = self._submanifests[name]
1116 spec = tree.ToSubmanifestSpec()
1117 present = os.path.exists(os.path.join(self.subdir, MANIFEST_FILE_NAME))
1118 if present and tree.present and not tree.repo_client:
1119 if initial_client and initial_client.topdir == self.topdir:
1120 tree.repo_client = self
1121 tree.present = present
1122 elif not os.path.exists(self.subdir):
1123 tree.present = False
1124 if present and tree.present:
1125 tree.repo_client._Load(initial_client=initial_client,
1126 submanifest_depth=submanifest_depth + 1)
1127
1128 def _ParseManifestXml(self, path, include_root, parent_groups='',
1129 restrict_includes=True):
1130 """Parse a manifest XML and return the computed nodes.
1131
1132 Args:
1133 path: The XML file to read & parse.
1134 include_root: The path to interpret include "name"s relative to.
1135 parent_groups: The groups to apply to this projects.
1136 restrict_includes: Whether to constrain the "name" attribute of includes.
1137
1138 Returns:
1139 List of XML nodes.
1140 """
1141 try:
1142 root = xml.dom.minidom.parse(path)
1143 except (OSError, xml.parsers.expat.ExpatError) as e:
1144 raise ManifestParseError("error parsing manifest %s: %s" % (path, e))
1145 1265
1146 if not root or not root.childNodes: 1266 nodes = []
1147 raise ManifestParseError("no root node in %s" % (path,)) 1267 for node in manifest.childNodes:
1268 if node.nodeName == "include":
1269 name = self._reqatt(node, "name")
1270 if restrict_includes:
1271 msg = self._CheckLocalPath(name)
1272 if msg:
1273 raise ManifestInvalidPathError(
1274 '<include> invalid "name": %s: %s' % (name, msg)
1275 )
1276 include_groups = ""
1277 if parent_groups:
1278 include_groups = parent_groups
1279 if node.hasAttribute("groups"):
1280 include_groups = (
1281 node.getAttribute("groups") + "," + include_groups
1282 )
1283 fp = os.path.join(include_root, name)
1284 if not os.path.isfile(fp):
1285 raise ManifestParseError(
1286 "include [%s/]%s doesn't exist or isn't a file"
1287 % (include_root, name)
1288 )
1289 try:
1290 nodes.extend(
1291 self._ParseManifestXml(fp, include_root, include_groups)
1292 )
1293 # should isolate this to the exact exception, but that's
1294 # tricky. actual parsing implementation may vary.
1295 except (
1296 KeyboardInterrupt,
1297 RuntimeError,
1298 SystemExit,
1299 ManifestParseError,
1300 ):
1301 raise
1302 except Exception as e:
1303 raise ManifestParseError(
1304 "failed parsing included manifest %s: %s" % (name, e)
1305 )
1306 else:
1307 if parent_groups and node.nodeName == "project":
1308 nodeGroups = parent_groups
1309 if node.hasAttribute("groups"):
1310 nodeGroups = (
1311 node.getAttribute("groups") + "," + nodeGroups
1312 )
1313 node.setAttribute("groups", nodeGroups)
1314 nodes.append(node)
1315 return nodes
1316
1317 def _ParseManifest(self, node_list):
1318 for node in itertools.chain(*node_list):
1319 if node.nodeName == "remote":
1320 remote = self._ParseRemote(node)
1321 if remote:
1322 if remote.name in self._remotes:
1323 if remote != self._remotes[remote.name]:
1324 raise ManifestParseError(
1325 "remote %s already exists with different "
1326 "attributes" % (remote.name)
1327 )
1328 else:
1329 self._remotes[remote.name] = remote
1330
1331 for node in itertools.chain(*node_list):
1332 if node.nodeName == "default":
1333 new_default = self._ParseDefault(node)
1334 emptyDefault = (
1335 not node.hasAttributes() and not node.hasChildNodes()
1336 )
1337 if self._default is None:
1338 self._default = new_default
1339 elif not emptyDefault and new_default != self._default:
1340 raise ManifestParseError(
1341 "duplicate default in %s" % (self.manifestFile)
1342 )
1148 1343
1149 for manifest in root.childNodes:
1150 if manifest.nodeName == 'manifest':
1151 break
1152 else:
1153 raise ManifestParseError("no <manifest> in %s" % (path,))
1154
1155 nodes = []
1156 for node in manifest.childNodes:
1157 if node.nodeName == 'include':
1158 name = self._reqatt(node, 'name')
1159 if restrict_includes:
1160 msg = self._CheckLocalPath(name)
1161 if msg:
1162 raise ManifestInvalidPathError(
1163 '<include> invalid "name": %s: %s' % (name, msg))
1164 include_groups = ''
1165 if parent_groups:
1166 include_groups = parent_groups
1167 if node.hasAttribute('groups'):
1168 include_groups = node.getAttribute('groups') + ',' + include_groups
1169 fp = os.path.join(include_root, name)
1170 if not os.path.isfile(fp):
1171 raise ManifestParseError("include [%s/]%s doesn't exist or isn't a file"
1172 % (include_root, name))
1173 try:
1174 nodes.extend(self._ParseManifestXml(fp, include_root, include_groups))
1175 # should isolate this to the exact exception, but that's
1176 # tricky. actual parsing implementation may vary.
1177 except (KeyboardInterrupt, RuntimeError, SystemExit, ManifestParseError):
1178 raise
1179 except Exception as e:
1180 raise ManifestParseError(
1181 "failed parsing included manifest %s: %s" % (name, e))
1182 else:
1183 if parent_groups and node.nodeName == 'project':
1184 nodeGroups = parent_groups
1185 if node.hasAttribute('groups'):
1186 nodeGroups = node.getAttribute('groups') + ',' + nodeGroups
1187 node.setAttribute('groups', nodeGroups)
1188 nodes.append(node)
1189 return nodes
1190
def _ParseManifest(self, node_list):
    """Populate manifest state from the parsed XML node lists.

    Walks the nodes several times, one element kind per pass, so that
    <remote> and <default> definitions are registered before any element
    that refers to them.  Populates self._remotes, self._default,
    self._submanifests, self._notice, self._manifest_server,
    self._projects/self._paths, self._superproject, self._contactinfo
    and self._repo_hooks_project.

    Raises:
        ManifestParseError: on duplicate or inconsistent elements, or
            references to non-existent projects.
    """
    # Pass 1: remotes.  A re-declaration is tolerated only when it is
    # byte-for-byte equivalent to the existing one.
    for node in itertools.chain(*node_list):
        if node.nodeName == "remote":
            remote = self._ParseRemote(node)
            if remote:
                if remote.name in self._remotes:
                    if remote != self._remotes[remote.name]:
                        raise ManifestParseError(
                            "remote %s already exists with different "
                            "attributes" % (remote.name)
                        )
                else:
                    self._remotes[remote.name] = remote

    # Pass 2: the <default> element.  An attribute-less, child-less
    # <default/> is treated as a harmless no-op duplicate.
    for node in itertools.chain(*node_list):
        if node.nodeName == "default":
            new_default = self._ParseDefault(node)
            emptyDefault = (
                not node.hasAttributes() and not node.hasChildNodes()
            )
            if self._default is None:
                self._default = new_default
            elif not emptyDefault and new_default != self._default:
                raise ManifestParseError(
                    "duplicate default in %s" % (self.manifestFile)
                )

    if self._default is None:
        self._default = _Default()

    # Pass 3: submanifests.  Their checkout paths are collected so later
    # <project> paths can be checked for conflicts.
    submanifest_paths = set()
    for node in itertools.chain(*node_list):
        if node.nodeName == "submanifest":
            submanifest = self._ParseSubmanifest(node)
            if submanifest:
                if submanifest.name in self._submanifests:
                    if submanifest != self._submanifests[submanifest.name]:
                        raise ManifestParseError(
                            "submanifest %s already exists with different "
                            "attributes" % (submanifest.name)
                        )
                else:
                    self._submanifests[submanifest.name] = submanifest
                    submanifest_paths.add(submanifest.relpath)

    # Pass 4: at most one <notice>.
    for node in itertools.chain(*node_list):
        if node.nodeName == "notice":
            if self._notice is not None:
                raise ManifestParseError(
                    "duplicate notice in %s" % (self.manifestFile)
                )
            self._notice = self._ParseNotice(node)

    # Pass 5: at most one <manifest-server>.
    for node in itertools.chain(*node_list):
        if node.nodeName == "manifest-server":
            url = self._reqatt(node, "url")
            if self._manifest_server is not None:
                raise ManifestParseError(
                    "duplicate manifest-server in %s" % (self.manifestFile)
                )
            self._manifest_server = url

    def recursively_add_projects(project):
        # Register |project| and all of its nested subprojects, enforcing
        # unique checkout paths and no overlap with submanifest trees.
        projects = self._projects.setdefault(project.name, [])
        if project.relpath is None:
            raise ManifestParseError(
                "missing path for %s in %s"
                % (project.name, self.manifestFile)
            )
        if project.relpath in self._paths:
            raise ManifestParseError(
                "duplicate path %s in %s"
                % (project.relpath, self.manifestFile)
            )
        for tree in submanifest_paths:
            if project.relpath.startswith(tree):
                raise ManifestParseError(
                    "project %s conflicts with submanifest path %s"
                    % (project.relpath, tree)
                )
        self._paths[project.relpath] = project
        projects.append(project)
        for subproject in project.subprojects:
            recursively_add_projects(subproject)

    # Pass 6: everything that depends on the project table.
    repo_hooks_project = None
    enabled_repo_hooks = None
    for node in itertools.chain(*node_list):
        if node.nodeName == "project":
            project = self._ParseProject(node)
            recursively_add_projects(project)
        if node.nodeName == "extend-project":
            name = self._reqatt(node, "name")

            if name not in self._projects:
                raise ManifestParseError(
                    "extend-project element specifies non-existent "
                    "project: %s" % name
                )

            path = node.getAttribute("path")
            dest_path = node.getAttribute("dest-path")
            groups = node.getAttribute("groups")
            if groups:
                groups = self._ParseList(groups)
            revision = node.getAttribute("revision")
            remote_name = node.getAttribute("remote")
            if not remote_name:
                remote = self._default.remote
            else:
                remote = self._get_remote(node)
            dest_branch = node.getAttribute("dest-branch")
            upstream = node.getAttribute("upstream")

            named_projects = self._projects[name]
            if dest_path and not path and len(named_projects) > 1:
                raise ManifestParseError(
                    "extend-project cannot use dest-path when "
                    "matching multiple projects: %s" % name
                )
            for p in self._projects[name]:
                if path and p.relpath != path:
                    continue
                if groups:
                    p.groups.extend(groups)
                if revision:
                    p.SetRevision(revision)

                if remote_name:
                    p.remote = remote.ToRemoteSpec(name)
                if dest_branch:
                    p.dest_branch = dest_branch
                if upstream:
                    p.upstream = upstream

                if dest_path:
                    # Re-home the project: drop the old path entry and
                    # recompute all of its on-disk locations.
                    del self._paths[p.relpath]
                    (
                        relpath,
                        worktree,
                        gitdir,
                        objdir,
                        _,
                    ) = self.GetProjectPaths(name, dest_path, remote.name)
                    p.UpdatePaths(relpath, worktree, gitdir, objdir)
                    self._paths[p.relpath] = p

        if node.nodeName == "repo-hooks":
            # Only one project can be the hooks project.
            if repo_hooks_project is not None:
                raise ManifestParseError(
                    "duplicate repo-hooks in %s" % (self.manifestFile)
                )

            # Remember the project name and the (space-separated) list of
            # enabled hooks; resolved to a Project object below.
            repo_hooks_project = self._reqatt(node, "in-project")
            enabled_repo_hooks = self._ParseList(
                self._reqatt(node, "enabled-list")
            )
        if node.nodeName == "superproject":
            name = self._reqatt(node, "name")
            # There can only be one superproject.
            if self._superproject:
                raise ManifestParseError(
                    "duplicate superproject in %s" % (self.manifestFile)
                )
            remote_name = node.getAttribute("remote")
            if not remote_name:
                remote = self._default.remote
            else:
                remote = self._get_remote(node)
            if remote is None:
                raise ManifestParseError(
                    "no remote for superproject %s within %s"
                    % (name, self.manifestFile)
                )
            revision = node.getAttribute("revision") or remote.revision
            if not revision:
                revision = self._default.revisionExpr
            if not revision:
                raise ManifestParseError(
                    "no revision for superproject %s within %s"
                    % (name, self.manifestFile)
                )
            self._superproject = Superproject(
                self,
                name=name,
                remote=remote.ToRemoteSpec(name),
                revision=revision,
            )
        if node.nodeName == "contactinfo":
            bugurl = self._reqatt(node, "bugurl")
            # This element can be repeated; later entries clobber earlier
            # ones.
            self._contactinfo = ContactInfo(bugurl)

        if node.nodeName == "remove-project":
            name = self._reqatt(node, "name")

            if name in self._projects:
                for p in self._projects[name]:
                    del self._paths[p.relpath]
                del self._projects[name]

                # If the manifest removes the hooks project, treat it as
                # if it deleted the repo-hooks element too.
                if repo_hooks_project == name:
                    repo_hooks_project = None
            elif not XmlBool(node, "optional", False):
                raise ManifestParseError(
                    "remove-project element specifies non-existent "
                    "project: %s" % name
                )

    # Store repo hooks project information.
    if repo_hooks_project:
        # Store a reference to the Project.
        try:
            repo_hooks_projects = self._projects[repo_hooks_project]
        except KeyError:
            raise ManifestParseError(
                "project %s not found for repo-hooks" % (repo_hooks_project)
            )

        if len(repo_hooks_projects) != 1:
            raise ManifestParseError(
                "internal error parsing repo-hooks in %s"
                % (self.manifestFile)
            )
        self._repo_hooks_project = repo_hooks_projects[0]
        # Store the enabled hooks in the Project object.
        self._repo_hooks_project.enabled_repo_hooks = enabled_repo_hooks
1551
def _AddMetaProjectMirror(self, m):
    """Register meta project |m| (e.g. the manifest repo) as a mirror entry.

    Derives a project name from |m|'s remote URL — reusing the default
    remote when the URL lives under its fetch URL, otherwise synthesizing
    an "origin" remote — and adds a bare (worktree-less) Project for it.

    Raises:
        ManifestParseError: if the URL ends in "/.git" and so cannot be
            mirrored.
    """
    name = None
    mirror_url = m.GetRemote().url
    if mirror_url.endswith("/.git"):
        raise ManifestParseError("refusing to mirror %s" % mirror_url)

    # Prefer the default remote when the mirror URL is under its fetch URL;
    # the project name is then the URL suffix.
    if self._default and self._default.remote:
        base = self._default.remote.resolvedFetchUrl
        if not base.endswith("/"):
            base += "/"
        if mirror_url.startswith(base):
            remote = self._default.remote
            name = mirror_url[len(base):]

    # Otherwise synthesize an "origin" remote from the URL's dirname.
    if name is None:
        cut = mirror_url.rindex("/") + 1
        manifestUrl = self.manifestProject.config.GetString(
            "remote.origin.url"
        )
        remote = _XmlRemote(
            "origin", fetch=mirror_url[:cut], manifestUrl=manifestUrl
        )
        name = mirror_url[cut:]

    if name.endswith(".git"):
        name = name[:-4]

    if name not in self._projects:
        m.PreSync()
        gitdir = os.path.join(self.topdir, "%s.git" % name)
        project = Project(
            manifest=self,
            name=name,
            remote=remote.ToRemoteSpec(name),
            gitdir=gitdir,
            objdir=gitdir,
            worktree=None,
            relpath=name or None,
            revisionExpr=m.revisionExpr,
            revisionId=None,
        )
        self._projects[project.name] = [project]
        self._paths[project.relpath] = project
1595
def _ParseRemote(self, node):
    """Read one <remote> element and return the resulting _XmlRemote.

    "name" and "fetch" are required attributes; "alias", "pushurl",
    "review" and "revision" default to None when absent or empty.
    Child <annotation> elements are attached to the remote.
    """
    name = self._reqatt(node, "name")
    fetch = self._reqatt(node, "fetch")
    # getAttribute() returns "" for missing attributes; treat that as None.
    alias = node.getAttribute("alias") or None
    pushUrl = node.getAttribute("pushurl") or None
    review = node.getAttribute("review") or None
    revision = node.getAttribute("revision") or None
    manifestUrl = self.manifestProject.config.GetString("remote.origin.url")

    remote = _XmlRemote(
        name, alias, fetch, pushUrl, manifestUrl, review, revision
    )

    for child in node.childNodes:
        if child.nodeName == "annotation":
            self._ParseAnnotation(remote, child)

    return remote
1625
def _ParseDefault(self, node):
    """Read one <default> element and return a populated _Default.

    Raises:
        ManifestParseError: if "sync-j" is present but not positive.
    """
    d = _Default()
    d.remote = self._get_remote(node)
    # Empty attribute strings collapse to None.
    d.revisionExpr = node.getAttribute("revision") or None
    d.destBranchExpr = node.getAttribute("dest-branch") or None
    d.upstreamExpr = node.getAttribute("upstream") or None

    d.sync_j = XmlInt(node, "sync-j", None)
    if d.sync_j is not None and d.sync_j <= 0:
        raise ManifestParseError(
            '%s: sync-j must be greater than 0, not "%s"'
            % (self.manifestFile, d.sync_j)
        )

    d.sync_c = XmlBool(node, "sync-c", False)
    d.sync_s = XmlBool(node, "sync-s", False)
    d.sync_tags = XmlBool(node, "sync-tags", True)
    return d
1650
1651 def _ParseNotice(self, node):
1652 """
1653 reads a <notice> element from the manifest file
1654
1655 The <notice> element is distinct from other tags in the XML in that the
1656 data is conveyed between the start and end tag (it's not an
1657 empty-element tag).
1658
1659 The white space (carriage returns, indentation) for the notice element
1660 is relevant and is parsed in a way that is based on how python
1661 docstrings work. In fact, the code is remarkably similar to here:
1662 http://www.python.org/dev/peps/pep-0257/
1663 """
1664 # Get the data out of the node...
1665 notice = node.childNodes[0].data
1666
1667 # Figure out minimum indentation, skipping the first line (the same line
1668 # as the <notice> tag)...
1669 minIndent = sys.maxsize
1670 lines = notice.splitlines()
1671 for line in lines[1:]:
1672 lstrippedLine = line.lstrip()
1673 if lstrippedLine:
1674 indent = len(line) - len(lstrippedLine)
1675 minIndent = min(indent, minIndent)
1676
1677 # Strip leading / trailing blank lines and also indentation.
1678 cleanLines = [lines[0].strip()]
1679 for line in lines[1:]:
1680 cleanLines.append(line[minIndent:].rstrip())
1681
1682 # Clear completely blank lines from front and back...
1683 while cleanLines and not cleanLines[0]:
1684 del cleanLines[0]
1685 while cleanLines and not cleanLines[-1]:
1686 del cleanLines[-1]
1687
1688 return "\n".join(cleanLines)
1689
def _ParseSubmanifest(self, node):
    """Reads a <submanifest> element from the manifest file.

    "name" is required; "remote", "project", "revision", "manifest-name"
    and "path" default to None when absent or empty.  When no explicit
    path is given, the checkout location derives from the revision's last
    component (or the name), so that value is path-validated instead.

    Raises:
        ManifestInvalidPathError: if the derived or explicit checkout
            path fails _CheckLocalPath().
    """
    name = self._reqatt(node, "name")
    # getAttribute() returns "" for missing attributes; map that to None.
    remote = node.getAttribute("remote") or None
    project = node.getAttribute("project") or None
    revision = node.getAttribute("revision") or None
    manifestName = node.getAttribute("manifest-name") or None
    groups = self._ParseList(node.getAttribute("groups"))
    default_groups = self._ParseList(node.getAttribute("default-groups"))
    path = node.getAttribute("path") or None
    if path is None:
        # No explicit path: validate whatever the checkout directory will
        # be derived from.
        if revision:
            msg = self._CheckLocalPath(revision.split("/")[-1])
            if msg:
                raise ManifestInvalidPathError(
                    '<submanifest> invalid "revision": %s: %s'
                    % (revision, msg)
                )
        else:
            msg = self._CheckLocalPath(name)
            if msg:
                raise ManifestInvalidPathError(
                    '<submanifest> invalid "name": %s: %s' % (name, msg)
                )
    else:
        msg = self._CheckLocalPath(path)
        if msg:
            raise ManifestInvalidPathError(
                '<submanifest> invalid "path": %s: %s' % (path, msg)
            )

    submanifest = _XmlSubmanifest(
        name,
        remote,
        project,
        revision,
        manifestName,
        groups,
        default_groups,
        path,
        self,
    )

    for child in node.childNodes:
        if child.nodeName == "annotation":
            self._ParseAnnotation(submanifest, child)

    return submanifest
1750
1751 def _JoinName(self, parent_name, name):
1752 return os.path.join(parent_name, name)
1753
1754 def _UnjoinName(self, parent_name, name):
1755 return os.path.relpath(name, parent_name)
1756
def _ParseProject(self, node, parent=None, **extra_proj_attrs):
    """Read one <project> element and return the resulting Project.

    Args:
        node: the <project> DOM node.
        parent: the enclosing Project when this is a nested <project>
            (subproject); None for a top-level project.
        **extra_proj_attrs: extra keyword arguments forwarded to the
            Project constructor.

    Raises:
        ManifestInvalidPathError: for an invalid "name" or "path".
        ManifestParseError: when no remote or revision can be resolved,
            or "clone-depth" is not positive.
    """
    name = self._reqatt(node, "name")
    msg = self._CheckLocalPath(name, dir_ok=True)
    if msg:
        raise ManifestInvalidPathError(
            '<project> invalid "name": %s: %s' % (name, msg)
        )
    if parent:
        name = self._JoinName(parent.name, name)

    # Remote: explicit on the element, else the manifest default.
    remote = self._get_remote(node)
    if remote is None:
        remote = self._default.remote
    if remote is None:
        raise ManifestParseError(
            "no remote for project %s within %s" % (name, self.manifestFile)
        )

    # Revision: element attribute, then the remote's, then the default's.
    revisionExpr = node.getAttribute("revision") or remote.revision
    if not revisionExpr:
        revisionExpr = self._default.revisionExpr
    if not revisionExpr:
        raise ManifestParseError(
            "no revision for project %s within %s"
            % (name, self.manifestFile)
        )

    path = node.getAttribute("path")
    if not path:
        path = name
    else:
        # NB: The "." project is handled specially in
        # Project.Sync_LocalHalf.
        msg = self._CheckLocalPath(path, dir_ok=True, cwd_dot_ok=True)
        if msg:
            raise ManifestInvalidPathError(
                '<project> invalid "path": %s: %s' % (path, msg)
            )

    rebase = XmlBool(node, "rebase", True)
    sync_c = XmlBool(node, "sync-c", False)
    sync_s = XmlBool(node, "sync-s", self._default.sync_s)
    sync_tags = XmlBool(node, "sync-tags", self._default.sync_tags)

    clone_depth = XmlInt(node, "clone-depth")
    if clone_depth is not None and clone_depth <= 0:
        raise ManifestParseError(
            '%s: clone-depth must be greater than 0, not "%s"'
            % (self.manifestFile, clone_depth)
        )

    dest_branch = (
        node.getAttribute("dest-branch") or self._default.destBranchExpr
    )

    upstream = node.getAttribute("upstream") or self._default.upstreamExpr

    groups = self._ParseList(node.getAttribute("groups"))

    if parent is None:
        (
            relpath,
            worktree,
            gitdir,
            objdir,
            use_git_worktrees,
        ) = self.GetProjectPaths(name, path, remote.name)
    else:
        use_git_worktrees = False
        relpath, worktree, gitdir, objdir = self.GetSubprojectPaths(
            parent, name, path
        )

    # Every project is implicitly a member of these groups.
    default_groups = ["all", "name:%s" % name, "path:%s" % relpath]
    groups.extend(set(default_groups).difference(groups))

    if self.IsMirror and node.hasAttribute("force-path"):
        if XmlBool(node, "force-path", False):
            gitdir = os.path.join(self.topdir, "%s.git" % path)

    project = Project(
        manifest=self,
        name=name,
        remote=remote.ToRemoteSpec(name),
        gitdir=gitdir,
        objdir=objdir,
        worktree=worktree,
        relpath=relpath,
        revisionExpr=revisionExpr,
        revisionId=None,
        rebase=rebase,
        groups=groups,
        sync_c=sync_c,
        sync_s=sync_s,
        sync_tags=sync_tags,
        clone_depth=clone_depth,
        upstream=upstream,
        parent=parent,
        dest_branch=dest_branch,
        use_git_worktrees=use_git_worktrees,
        **extra_proj_attrs,
    )

    for child in node.childNodes:
        if child.nodeName == "copyfile":
            self._ParseCopyFile(project, child)
        if child.nodeName == "linkfile":
            self._ParseLinkFile(project, child)
        if child.nodeName == "annotation":
            self._ParseAnnotation(project, child)
        if child.nodeName == "project":
            project.subprojects.append(
                self._ParseProject(child, parent=project)
            )

    return project
def GetProjectPaths(self, name, path, remote):
    """Return the paths for a project.

    Args:
        name: a string, the name of the project.
        path: a string, the path of the project.
        remote: a string, the remote.name of the project.

    Returns:
        A tuple of (relpath, worktree, gitdir, objdir, use_git_worktrees)
        for the project with |name| and |path|.
    """
    # The manifest entries might have trailing slashes. Normalize them to
    # avoid unexpected filesystem behavior since we do string concatenation
    # below.
    path = path.rstrip("/")
    name = name.rstrip("/")
    remote = remote.rstrip("/")

    use_git_worktrees = False
    use_remote_name = self.is_multimanifest
    relpath = path

    if self.IsMirror:
        # Mirrors keep only the bare repository, named after the project.
        return (
            relpath,
            None,
            os.path.join(self.topdir, "%s.git" % name),
            os.path.join(self.topdir, "%s.git" % name),
            use_git_worktrees,
        )

    # Multi-manifest checkouts namespace the storage dirs by remote name.
    namepath = (
        os.path.join(remote, f"{name}.git")
        if use_remote_name
        else f"{name}.git"
    )
    worktree = os.path.join(self.topdir, path).replace("\\", "/")
    gitdir = os.path.join(self.subdir, "projects", "%s.git" % path)
    # We allow people to mix git worktrees & non-git worktrees for now.
    # This allows for in situ migration of repo clients.
    if os.path.exists(gitdir) or not self.UseGitWorktrees:
        objdir = os.path.join(self.repodir, "project-objects", namepath)
    else:
        use_git_worktrees = True
        gitdir = os.path.join(self.repodir, "worktrees", namepath)
        objdir = gitdir
    return relpath, worktree, gitdir, objdir, use_git_worktrees
1432 1921
def GetProjectsWithName(self, name, all_manifests=False):
    """All projects with |name|.

    Args:
        name: a string, the name of the project.
        all_manifests: a boolean, if True, then all manifests are searched.
            If False, then only this manifest is searched.

    Returns:
        A list of Project instances with name |name|.
    """
    if not all_manifests:
        return self._projects.get(name, [])
    # Concatenate the matches from every manifest in order.
    matches = []
    for manifest in self.all_manifests:
        matches.extend(manifest._projects.get(name, []))
    return matches
1451 manifestUrl = self.manifestProject.config.GetString('remote.origin.url') 1940
def GetSubprojectName(self, parent, submodule_path):
    """Return the full name for a submodule nested under |parent|."""
    return os.path.join(parent.name, submodule_path)
1454 1943
1455 for n in node.childNodes: 1944 def _JoinRelpath(self, parent_relpath, relpath):
1456 if n.nodeName == 'annotation': 1945 return os.path.join(parent_relpath, relpath)
1457 self._ParseAnnotation(remote, n) 1946
1458 1947 def _UnjoinRelpath(self, parent_relpath, relpath):
1459 return remote 1948 return os.path.relpath(relpath, parent_relpath)
1460 1949
def GetSubprojectPaths(self, parent, name, path):
    """Return (relpath, worktree, gitdir, objdir) for a subproject.

    The git dirs nest under the parent's gitdir; the worktree nests under
    the parent's worktree (None when mirroring).
    """
    # The manifest entries might have trailing slashes. Normalize them to
    # avoid unexpected filesystem behavior since we do string concatenation
    # below.
    path = path.rstrip("/")
    name = name.rstrip("/")

    relpath = self._JoinRelpath(parent.relpath, path)
    gitdir = os.path.join(parent.gitdir, "subprojects", "%s.git" % path)
    objdir = os.path.join(
        parent.gitdir, "subproject-objects", "%s.git" % name
    )
    worktree = (
        None
        if self.IsMirror
        else os.path.join(parent.worktree, path).replace("\\", "/")
    )
    return relpath, worktree, gitdir, objdir
1477 (self.manifestFile, d.sync_j)) 1966
1478 1967 @staticmethod
1479 d.sync_c = XmlBool(node, 'sync-c', False) 1968 def _CheckLocalPath(path, dir_ok=False, cwd_dot_ok=False):
1480 d.sync_s = XmlBool(node, 'sync-s', False) 1969 """Verify |path| is reasonable for use in filesystem paths.
1481 d.sync_tags = XmlBool(node, 'sync-tags', True) 1970
1482 return d 1971 Used with <copyfile> & <linkfile> & <project> elements.
1483 1972
1484 def _ParseNotice(self, node): 1973 This only validates the |path| in isolation: it does not check against
1485 """ 1974 the current filesystem state. Thus it is suitable as a first-past in a
1486 reads a <notice> element from the manifest file 1975 parser.
1487 1976
1488 The <notice> element is distinct from other tags in the XML in that the 1977 It enforces a number of constraints:
1489 data is conveyed between the start and end tag (it's not an empty-element 1978 * No empty paths.
1490 tag). 1979 * No "~" in paths.
1491 1980 * No Unicode codepoints that filesystems might elide when normalizing.
1492 The white space (carriage returns, indentation) for the notice element is 1981 * No relative path components like "." or "..".
1493 relevant and is parsed in a way that is based on how python docstrings work. 1982 * No absolute paths.
1494 In fact, the code is remarkably similar to here: 1983 * No ".git" or ".repo*" path components.
1495 http://www.python.org/dev/peps/pep-0257/ 1984
1496 """ 1985 Args:
1497 # Get the data out of the node... 1986 path: The path name to validate.
1498 notice = node.childNodes[0].data 1987 dir_ok: Whether |path| may force a directory (e.g. end in a /).
1499 1988 cwd_dot_ok: Whether |path| may be just ".".
1500 # Figure out minimum indentation, skipping the first line (the same line 1989
1501 # as the <notice> tag)... 1990 Returns:
1502 minIndent = sys.maxsize 1991 None if |path| is OK, a failure message otherwise.
1503 lines = notice.splitlines() 1992 """
1504 for line in lines[1:]: 1993 if not path:
1505 lstrippedLine = line.lstrip() 1994 return "empty paths not allowed"
1506 if lstrippedLine: 1995
1507 indent = len(line) - len(lstrippedLine) 1996 if "~" in path:
1508 minIndent = min(indent, minIndent) 1997 return "~ not allowed (due to 8.3 filenames on Windows filesystems)"
1509 1998
1510 # Strip leading / trailing blank lines and also indentation. 1999 path_codepoints = set(path)
1511 cleanLines = [lines[0].strip()] 2000
1512 for line in lines[1:]: 2001 # Some filesystems (like Apple's HFS+) try to normalize Unicode
1513 cleanLines.append(line[minIndent:].rstrip()) 2002 # codepoints which means there are alternative names for ".git". Reject
1514 2003 # paths with these in it as there shouldn't be any reasonable need for
1515 # Clear completely blank lines from front and back... 2004 # them here. The set of codepoints here was cribbed from jgit's
1516 while cleanLines and not cleanLines[0]: 2005 # implementation:
1517 del cleanLines[0] 2006 # https://eclipse.googlesource.com/jgit/jgit/+/9110037e3e9461ff4dac22fee84ef3694ed57648/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java#884
1518 while cleanLines and not cleanLines[-1]: 2007 BAD_CODEPOINTS = {
1519 del cleanLines[-1] 2008 "\u200C", # ZERO WIDTH NON-JOINER
1520 2009 "\u200D", # ZERO WIDTH JOINER
1521 return '\n'.join(cleanLines) 2010 "\u200E", # LEFT-TO-RIGHT MARK
1522 2011 "\u200F", # RIGHT-TO-LEFT MARK
1523 def _ParseSubmanifest(self, node): 2012 "\u202A", # LEFT-TO-RIGHT EMBEDDING
1524 """Reads a <submanifest> element from the manifest file.""" 2013 "\u202B", # RIGHT-TO-LEFT EMBEDDING
1525 name = self._reqatt(node, 'name') 2014 "\u202C", # POP DIRECTIONAL FORMATTING
1526 remote = node.getAttribute('remote') 2015 "\u202D", # LEFT-TO-RIGHT OVERRIDE
1527 if remote == '': 2016 "\u202E", # RIGHT-TO-LEFT OVERRIDE
1528 remote = None 2017 "\u206A", # INHIBIT SYMMETRIC SWAPPING
1529 project = node.getAttribute('project') 2018 "\u206B", # ACTIVATE SYMMETRIC SWAPPING
1530 if project == '': 2019 "\u206C", # INHIBIT ARABIC FORM SHAPING
1531 project = None 2020 "\u206D", # ACTIVATE ARABIC FORM SHAPING
1532 revision = node.getAttribute('revision') 2021 "\u206E", # NATIONAL DIGIT SHAPES
1533 if revision == '': 2022 "\u206F", # NOMINAL DIGIT SHAPES
1534 revision = None 2023 "\uFEFF", # ZERO WIDTH NO-BREAK SPACE
1535 manifestName = node.getAttribute('manifest-name') 2024 }
1536 if manifestName == '': 2025 if BAD_CODEPOINTS & path_codepoints:
1537 manifestName = None 2026 # This message is more expansive than reality, but should be fine.
1538 groups = '' 2027 return "Unicode combining characters not allowed"
1539 if node.hasAttribute('groups'): 2028
1540 groups = node.getAttribute('groups') 2029 # Reject newlines as there shouldn't be any legitmate use for them,
1541 groups = self._ParseList(groups) 2030 # they'll be confusing to users, and they can easily break tools that
1542 default_groups = self._ParseList(node.getAttribute('default-groups')) 2031 # expect to be able to iterate over newline delimited lists. This even
1543 path = node.getAttribute('path') 2032 # applies to our own code like .repo/project.list.
1544 if path == '': 2033 if {"\r", "\n"} & path_codepoints:
1545 path = None 2034 return "Newlines not allowed"
1546 if revision: 2035
1547 msg = self._CheckLocalPath(revision.split('/')[-1]) 2036 # Assume paths might be used on case-insensitive filesystems.
2037 path = path.lower()
2038
2039 # Split up the path by its components. We can't use os.path.sep
2040 # exclusively as some platforms (like Windows) will convert / to \ and
2041 # that bypasses all our constructed logic here. Especially since
2042 # manifest authors only use / in their paths.
2043 resep = re.compile(r"[/%s]" % re.escape(os.path.sep))
2044 # Strip off trailing slashes as those only produce '' elements, and we
2045 # use parts to look for individual bad components.
2046 parts = resep.split(path.rstrip("/"))
2047
2048 # Some people use src="." to create stable links to projects. Lets
2049 # allow that but reject all other uses of "." to keep things simple.
2050 if not cwd_dot_ok or parts != ["."]:
2051 for part in set(parts):
2052 if part in {".", "..", ".git"} or part.startswith(".repo"):
2053 return "bad component: %s" % (part,)
2054
2055 if not dir_ok and resep.match(path[-1]):
2056 return "dirs not allowed"
2057
2058 # NB: The two abspath checks here are to handle platforms with multiple
2059 # filesystem path styles (e.g. Windows).
2060 norm = os.path.normpath(path)
2061 if (
2062 norm == ".."
2063 or (
2064 len(norm) >= 3
2065 and norm.startswith("..")
2066 and resep.match(norm[0])
2067 )
2068 or os.path.isabs(norm)
2069 or norm.startswith("/")
2070 ):
2071 return "path cannot be outside"
2072
2073 @classmethod
2074 def _ValidateFilePaths(cls, element, src, dest):
2075 """Verify |src| & |dest| are reasonable for <copyfile> & <linkfile>.
2076
2077 We verify the path independent of any filesystem state as we won't have
2078 a checkout available to compare to. i.e. This is for parsing validation
2079 purposes only.
2080
2081 We'll do full/live sanity checking before we do the actual filesystem
2082 modifications in _CopyFile/_LinkFile/etc...
2083 """
2084 # |dest| is the file we write to or symlink we create.
2085 # It is relative to the top of the repo client checkout.
2086 msg = cls._CheckLocalPath(dest)
1548 if msg: 2087 if msg:
1549 raise ManifestInvalidPathError( 2088 raise ManifestInvalidPathError(
1550 '<submanifest> invalid "revision": %s: %s' % (revision, msg)) 2089 '<%s> invalid "dest": %s: %s' % (element, dest, msg)
1551 else: 2090 )
1552 msg = self._CheckLocalPath(name) 2091
2092 # |src| is the file we read from or path we point to for symlinks.
2093 # It is relative to the top of the git project checkout.
2094 is_linkfile = element == "linkfile"
2095 msg = cls._CheckLocalPath(
2096 src, dir_ok=is_linkfile, cwd_dot_ok=is_linkfile
2097 )
1553 if msg: 2098 if msg:
1554 raise ManifestInvalidPathError( 2099 raise ManifestInvalidPathError(
1555 '<submanifest> invalid "name": %s: %s' % (name, msg)) 2100 '<%s> invalid "src": %s: %s' % (element, src, msg)
1556 else: 2101 )
1557 msg = self._CheckLocalPath(path) 2102
1558 if msg: 2103 def _ParseCopyFile(self, project, node):
1559 raise ManifestInvalidPathError( 2104 src = self._reqatt(node, "src")
1560 '<submanifest> invalid "path": %s: %s' % (path, msg)) 2105 dest = self._reqatt(node, "dest")
1561 2106 if not self.IsMirror:
1562 submanifest = _XmlSubmanifest(name, remote, project, revision, manifestName, 2107 # src is project relative;
1563 groups, default_groups, path, self) 2108 # dest is relative to the top of the tree.
1564 2109 # We only validate paths if we actually plan to process them.
1565 for n in node.childNodes: 2110 self._ValidateFilePaths("copyfile", src, dest)
1566 if n.nodeName == 'annotation': 2111 project.AddCopyFile(src, dest, self.topdir)
1567 self._ParseAnnotation(submanifest, n) 2112
1568 2113 def _ParseLinkFile(self, project, node):
1569 return submanifest 2114 src = self._reqatt(node, "src")
1570 2115 dest = self._reqatt(node, "dest")
1571 def _JoinName(self, parent_name, name): 2116 if not self.IsMirror:
1572 return os.path.join(parent_name, name) 2117 # src is project relative;
1573 2118 # dest is relative to the top of the tree.
1574 def _UnjoinName(self, parent_name, name): 2119 # We only validate paths if we actually plan to process them.
1575 return os.path.relpath(name, parent_name) 2120 self._ValidateFilePaths("linkfile", src, dest)
1576 2121 project.AddLinkFile(src, dest, self.topdir)
1577 def _ParseProject(self, node, parent=None, **extra_proj_attrs): 2122
1578 """ 2123 def _ParseAnnotation(self, element, node):
1579 reads a <project> element from the manifest file 2124 name = self._reqatt(node, "name")
1580 """ 2125 value = self._reqatt(node, "value")
1581 name = self._reqatt(node, 'name')
1582 msg = self._CheckLocalPath(name, dir_ok=True)
1583 if msg:
1584 raise ManifestInvalidPathError(
1585 '<project> invalid "name": %s: %s' % (name, msg))
1586 if parent:
1587 name = self._JoinName(parent.name, name)
1588
1589 remote = self._get_remote(node)
1590 if remote is None:
1591 remote = self._default.remote
1592 if remote is None:
1593 raise ManifestParseError("no remote for project %s within %s" %
1594 (name, self.manifestFile))
1595
1596 revisionExpr = node.getAttribute('revision') or remote.revision
1597 if not revisionExpr:
1598 revisionExpr = self._default.revisionExpr
1599 if not revisionExpr:
1600 raise ManifestParseError("no revision for project %s within %s" %
1601 (name, self.manifestFile))
1602
1603 path = node.getAttribute('path')
1604 if not path:
1605 path = name
1606 else:
1607 # NB: The "." project is handled specially in Project.Sync_LocalHalf.
1608 msg = self._CheckLocalPath(path, dir_ok=True, cwd_dot_ok=True)
1609 if msg:
1610 raise ManifestInvalidPathError(
1611 '<project> invalid "path": %s: %s' % (path, msg))
1612
1613 rebase = XmlBool(node, 'rebase', True)
1614 sync_c = XmlBool(node, 'sync-c', False)
1615 sync_s = XmlBool(node, 'sync-s', self._default.sync_s)
1616 sync_tags = XmlBool(node, 'sync-tags', self._default.sync_tags)
1617
1618 clone_depth = XmlInt(node, 'clone-depth')
1619 if clone_depth is not None and clone_depth <= 0:
1620 raise ManifestParseError('%s: clone-depth must be greater than 0, not "%s"' %
1621 (self.manifestFile, clone_depth))
1622
1623 dest_branch = node.getAttribute('dest-branch') or self._default.destBranchExpr
1624
1625 upstream = node.getAttribute('upstream') or self._default.upstreamExpr
1626
1627 groups = ''
1628 if node.hasAttribute('groups'):
1629 groups = node.getAttribute('groups')
1630 groups = self._ParseList(groups)
1631
1632 if parent is None:
1633 relpath, worktree, gitdir, objdir, use_git_worktrees = \
1634 self.GetProjectPaths(name, path, remote.name)
1635 else:
1636 use_git_worktrees = False
1637 relpath, worktree, gitdir, objdir = \
1638 self.GetSubprojectPaths(parent, name, path)
1639
1640 default_groups = ['all', 'name:%s' % name, 'path:%s' % relpath]
1641 groups.extend(set(default_groups).difference(groups))
1642
1643 if self.IsMirror and node.hasAttribute('force-path'):
1644 if XmlBool(node, 'force-path', False):
1645 gitdir = os.path.join(self.topdir, '%s.git' % path)
1646
1647 project = Project(manifest=self,
1648 name=name,
1649 remote=remote.ToRemoteSpec(name),
1650 gitdir=gitdir,
1651 objdir=objdir,
1652 worktree=worktree,
1653 relpath=relpath,
1654 revisionExpr=revisionExpr,
1655 revisionId=None,
1656 rebase=rebase,
1657 groups=groups,
1658 sync_c=sync_c,
1659 sync_s=sync_s,
1660 sync_tags=sync_tags,
1661 clone_depth=clone_depth,
1662 upstream=upstream,
1663 parent=parent,
1664 dest_branch=dest_branch,
1665 use_git_worktrees=use_git_worktrees,
1666 **extra_proj_attrs)
1667
1668 for n in node.childNodes:
1669 if n.nodeName == 'copyfile':
1670 self._ParseCopyFile(project, n)
1671 if n.nodeName == 'linkfile':
1672 self._ParseLinkFile(project, n)
1673 if n.nodeName == 'annotation':
1674 self._ParseAnnotation(project, n)
1675 if n.nodeName == 'project':
1676 project.subprojects.append(self._ParseProject(n, parent=project))
1677
1678 return project
1679
1680 def GetProjectPaths(self, name, path, remote):
1681 """Return the paths for a project.
1682
1683 Args:
1684 name: a string, the name of the project.
1685 path: a string, the path of the project.
1686 remote: a string, the remote.name of the project.
1687
1688 Returns:
1689 A tuple of (relpath, worktree, gitdir, objdir, use_git_worktrees) for the
1690 project with |name| and |path|.
1691 """
1692 # The manifest entries might have trailing slashes. Normalize them to avoid
1693 # unexpected filesystem behavior since we do string concatenation below.
1694 path = path.rstrip('/')
1695 name = name.rstrip('/')
1696 remote = remote.rstrip('/')
1697 use_git_worktrees = False
1698 use_remote_name = self.is_multimanifest
1699 relpath = path
1700 if self.IsMirror:
1701 worktree = None
1702 gitdir = os.path.join(self.topdir, '%s.git' % name)
1703 objdir = gitdir
1704 else:
1705 if use_remote_name:
1706 namepath = os.path.join(remote, f'{name}.git')
1707 else:
1708 namepath = f'{name}.git'
1709 worktree = os.path.join(self.topdir, path).replace('\\', '/')
1710 gitdir = os.path.join(self.subdir, 'projects', '%s.git' % path)
1711 # We allow people to mix git worktrees & non-git worktrees for now.
1712 # This allows for in situ migration of repo clients.
1713 if os.path.exists(gitdir) or not self.UseGitWorktrees:
1714 objdir = os.path.join(self.repodir, 'project-objects', namepath)
1715 else:
1716 use_git_worktrees = True
1717 gitdir = os.path.join(self.repodir, 'worktrees', namepath)
1718 objdir = gitdir
1719 return relpath, worktree, gitdir, objdir, use_git_worktrees
1720
1721 def GetProjectsWithName(self, name, all_manifests=False):
1722 """All projects with |name|.
1723
1724 Args:
1725 name: a string, the name of the project.
1726 all_manifests: a boolean, if True, then all manifests are searched. If
1727 False, then only this manifest is searched.
1728
1729 Returns:
1730 A list of Project instances with name |name|.
1731 """
1732 if all_manifests:
1733 return list(itertools.chain.from_iterable(
1734 x._projects.get(name, []) for x in self.all_manifests))
1735 return self._projects.get(name, [])
1736
1737 def GetSubprojectName(self, parent, submodule_path):
1738 return os.path.join(parent.name, submodule_path)
1739
1740 def _JoinRelpath(self, parent_relpath, relpath):
1741 return os.path.join(parent_relpath, relpath)
1742
1743 def _UnjoinRelpath(self, parent_relpath, relpath):
1744 return os.path.relpath(relpath, parent_relpath)
1745
1746 def GetSubprojectPaths(self, parent, name, path):
1747 # The manifest entries might have trailing slashes. Normalize them to avoid
1748 # unexpected filesystem behavior since we do string concatenation below.
1749 path = path.rstrip('/')
1750 name = name.rstrip('/')
1751 relpath = self._JoinRelpath(parent.relpath, path)
1752 gitdir = os.path.join(parent.gitdir, 'subprojects', '%s.git' % path)
1753 objdir = os.path.join(parent.gitdir, 'subproject-objects', '%s.git' % name)
1754 if self.IsMirror:
1755 worktree = None
1756 else:
1757 worktree = os.path.join(parent.worktree, path).replace('\\', '/')
1758 return relpath, worktree, gitdir, objdir
1759
1760 @staticmethod
1761 def _CheckLocalPath(path, dir_ok=False, cwd_dot_ok=False):
1762 """Verify |path| is reasonable for use in filesystem paths.
1763
1764 Used with <copyfile> & <linkfile> & <project> elements.
1765
1766 This only validates the |path| in isolation: it does not check against the
1767 current filesystem state. Thus it is suitable as a first-past in a parser.
1768
1769 It enforces a number of constraints:
1770 * No empty paths.
1771 * No "~" in paths.
1772 * No Unicode codepoints that filesystems might elide when normalizing.
1773 * No relative path components like "." or "..".
1774 * No absolute paths.
1775 * No ".git" or ".repo*" path components.
1776
1777 Args:
1778 path: The path name to validate.
1779 dir_ok: Whether |path| may force a directory (e.g. end in a /).
1780 cwd_dot_ok: Whether |path| may be just ".".
1781
1782 Returns:
1783 None if |path| is OK, a failure message otherwise.
1784 """
1785 if not path:
1786 return 'empty paths not allowed'
1787
1788 if '~' in path:
1789 return '~ not allowed (due to 8.3 filenames on Windows filesystems)'
1790
1791 path_codepoints = set(path)
1792
1793 # Some filesystems (like Apple's HFS+) try to normalize Unicode codepoints
1794 # which means there are alternative names for ".git". Reject paths with
1795 # these in it as there shouldn't be any reasonable need for them here.
1796 # The set of codepoints here was cribbed from jgit's implementation:
1797 # https://eclipse.googlesource.com/jgit/jgit/+/9110037e3e9461ff4dac22fee84ef3694ed57648/org.eclipse.jgit/src/org/eclipse/jgit/lib/ObjectChecker.java#884
1798 BAD_CODEPOINTS = {
1799 u'\u200C', # ZERO WIDTH NON-JOINER
1800 u'\u200D', # ZERO WIDTH JOINER
1801 u'\u200E', # LEFT-TO-RIGHT MARK
1802 u'\u200F', # RIGHT-TO-LEFT MARK
1803 u'\u202A', # LEFT-TO-RIGHT EMBEDDING
1804 u'\u202B', # RIGHT-TO-LEFT EMBEDDING
1805 u'\u202C', # POP DIRECTIONAL FORMATTING
1806 u'\u202D', # LEFT-TO-RIGHT OVERRIDE
1807 u'\u202E', # RIGHT-TO-LEFT OVERRIDE
1808 u'\u206A', # INHIBIT SYMMETRIC SWAPPING
1809 u'\u206B', # ACTIVATE SYMMETRIC SWAPPING
1810 u'\u206C', # INHIBIT ARABIC FORM SHAPING
1811 u'\u206D', # ACTIVATE ARABIC FORM SHAPING
1812 u'\u206E', # NATIONAL DIGIT SHAPES
1813 u'\u206F', # NOMINAL DIGIT SHAPES
1814 u'\uFEFF', # ZERO WIDTH NO-BREAK SPACE
1815 }
1816 if BAD_CODEPOINTS & path_codepoints:
1817 # This message is more expansive than reality, but should be fine.
1818 return 'Unicode combining characters not allowed'
1819
1820 # Reject newlines as there shouldn't be any legitmate use for them, they'll
1821 # be confusing to users, and they can easily break tools that expect to be
1822 # able to iterate over newline delimited lists. This even applies to our
1823 # own code like .repo/project.list.
1824 if {'\r', '\n'} & path_codepoints:
1825 return 'Newlines not allowed'
1826
1827 # Assume paths might be used on case-insensitive filesystems.
1828 path = path.lower()
1829
1830 # Split up the path by its components. We can't use os.path.sep exclusively
1831 # as some platforms (like Windows) will convert / to \ and that bypasses all
1832 # our constructed logic here. Especially since manifest authors only use
1833 # / in their paths.
1834 resep = re.compile(r'[/%s]' % re.escape(os.path.sep))
1835 # Strip off trailing slashes as those only produce '' elements, and we use
1836 # parts to look for individual bad components.
1837 parts = resep.split(path.rstrip('/'))
1838
1839 # Some people use src="." to create stable links to projects. Lets allow
1840 # that but reject all other uses of "." to keep things simple.
1841 if not cwd_dot_ok or parts != ['.']:
1842 for part in set(parts):
1843 if part in {'.', '..', '.git'} or part.startswith('.repo'):
1844 return 'bad component: %s' % (part,)
1845
1846 if not dir_ok and resep.match(path[-1]):
1847 return 'dirs not allowed'
1848
1849 # NB: The two abspath checks here are to handle platforms with multiple
1850 # filesystem path styles (e.g. Windows).
1851 norm = os.path.normpath(path)
1852 if (norm == '..' or
1853 (len(norm) >= 3 and norm.startswith('..') and resep.match(norm[0])) or
1854 os.path.isabs(norm) or
1855 norm.startswith('/')):
1856 return 'path cannot be outside'
1857
1858 @classmethod
1859 def _ValidateFilePaths(cls, element, src, dest):
1860 """Verify |src| & |dest| are reasonable for <copyfile> & <linkfile>.
1861
1862 We verify the path independent of any filesystem state as we won't have a
1863 checkout available to compare to. i.e. This is for parsing validation
1864 purposes only.
1865
1866 We'll do full/live sanity checking before we do the actual filesystem
1867 modifications in _CopyFile/_LinkFile/etc...
1868 """
1869 # |dest| is the file we write to or symlink we create.
1870 # It is relative to the top of the repo client checkout.
1871 msg = cls._CheckLocalPath(dest)
1872 if msg:
1873 raise ManifestInvalidPathError(
1874 '<%s> invalid "dest": %s: %s' % (element, dest, msg))
1875
1876 # |src| is the file we read from or path we point to for symlinks.
1877 # It is relative to the top of the git project checkout.
1878 is_linkfile = element == 'linkfile'
1879 msg = cls._CheckLocalPath(src, dir_ok=is_linkfile, cwd_dot_ok=is_linkfile)
1880 if msg:
1881 raise ManifestInvalidPathError(
1882 '<%s> invalid "src": %s: %s' % (element, src, msg))
1883
1884 def _ParseCopyFile(self, project, node):
1885 src = self._reqatt(node, 'src')
1886 dest = self._reqatt(node, 'dest')
1887 if not self.IsMirror:
1888 # src is project relative;
1889 # dest is relative to the top of the tree.
1890 # We only validate paths if we actually plan to process them.
1891 self._ValidateFilePaths('copyfile', src, dest)
1892 project.AddCopyFile(src, dest, self.topdir)
1893
1894 def _ParseLinkFile(self, project, node):
1895 src = self._reqatt(node, 'src')
1896 dest = self._reqatt(node, 'dest')
1897 if not self.IsMirror:
1898 # src is project relative;
1899 # dest is relative to the top of the tree.
1900 # We only validate paths if we actually plan to process them.
1901 self._ValidateFilePaths('linkfile', src, dest)
1902 project.AddLinkFile(src, dest, self.topdir)
1903
1904 def _ParseAnnotation(self, element, node):
1905 name = self._reqatt(node, 'name')
1906 value = self._reqatt(node, 'value')
1907 try:
1908 keep = self._reqatt(node, 'keep').lower()
1909 except ManifestParseError:
1910 keep = "true"
1911 if keep != "true" and keep != "false":
1912 raise ManifestParseError('optional "keep" attribute must be '
1913 '"true" or "false"')
1914 element.AddAnnotation(name, value, keep)
1915
1916 def _get_remote(self, node):
1917 name = node.getAttribute('remote')
1918 if not name:
1919 return None
1920
1921 v = self._remotes.get(name)
1922 if not v:
1923 raise ManifestParseError("remote %s not defined in %s" %
1924 (name, self.manifestFile))
1925 return v
1926
1927 def _reqatt(self, node, attname):
1928 """
1929 reads a required attribute from the node.
1930 """
1931 v = node.getAttribute(attname)
1932 if not v:
1933 raise ManifestParseError("no %s in <%s> within %s" %
1934 (attname, node.nodeName, self.manifestFile))
1935 return v
1936
1937 def projectsDiff(self, manifest):
1938 """return the projects differences between two manifests.
1939
1940 The diff will be from self to given manifest.
1941
1942 """
1943 fromProjects = self.paths
1944 toProjects = manifest.paths
1945
1946 fromKeys = sorted(fromProjects.keys())
1947 toKeys = sorted(toProjects.keys())
1948
1949 diff = {'added': [], 'removed': [], 'missing': [], 'changed': [], 'unreachable': []}
1950
1951 for proj in fromKeys:
1952 if proj not in toKeys:
1953 diff['removed'].append(fromProjects[proj])
1954 elif not fromProjects[proj].Exists:
1955 diff['missing'].append(toProjects[proj])
1956 toKeys.remove(proj)
1957 else:
1958 fromProj = fromProjects[proj]
1959 toProj = toProjects[proj]
1960 try: 2126 try:
1961 fromRevId = fromProj.GetCommitRevisionId() 2127 keep = self._reqatt(node, "keep").lower()
1962 toRevId = toProj.GetCommitRevisionId() 2128 except ManifestParseError:
1963 except ManifestInvalidRevisionError: 2129 keep = "true"
1964 diff['unreachable'].append((fromProj, toProj)) 2130 if keep != "true" and keep != "false":
1965 else: 2131 raise ManifestParseError(
1966 if fromRevId != toRevId: 2132 'optional "keep" attribute must be ' '"true" or "false"'
1967 diff['changed'].append((fromProj, toProj)) 2133 )
1968 toKeys.remove(proj) 2134 element.AddAnnotation(name, value, keep)
1969 2135
1970 for proj in toKeys: 2136 def _get_remote(self, node):
1971 diff['added'].append(toProjects[proj]) 2137 name = node.getAttribute("remote")
1972 2138 if not name:
1973 return diff 2139 return None
2140
2141 v = self._remotes.get(name)
2142 if not v:
2143 raise ManifestParseError(
2144 "remote %s not defined in %s" % (name, self.manifestFile)
2145 )
2146 return v
2147
2148 def _reqatt(self, node, attname):
2149 """
2150 reads a required attribute from the node.
2151 """
2152 v = node.getAttribute(attname)
2153 if not v:
2154 raise ManifestParseError(
2155 "no %s in <%s> within %s"
2156 % (attname, node.nodeName, self.manifestFile)
2157 )
2158 return v
2159
2160 def projectsDiff(self, manifest):
2161 """return the projects differences between two manifests.
2162
2163 The diff will be from self to given manifest.
2164
2165 """
2166 fromProjects = self.paths
2167 toProjects = manifest.paths
2168
2169 fromKeys = sorted(fromProjects.keys())
2170 toKeys = sorted(toProjects.keys())
2171
2172 diff = {
2173 "added": [],
2174 "removed": [],
2175 "missing": [],
2176 "changed": [],
2177 "unreachable": [],
2178 }
2179
2180 for proj in fromKeys:
2181 if proj not in toKeys:
2182 diff["removed"].append(fromProjects[proj])
2183 elif not fromProjects[proj].Exists:
2184 diff["missing"].append(toProjects[proj])
2185 toKeys.remove(proj)
2186 else:
2187 fromProj = fromProjects[proj]
2188 toProj = toProjects[proj]
2189 try:
2190 fromRevId = fromProj.GetCommitRevisionId()
2191 toRevId = toProj.GetCommitRevisionId()
2192 except ManifestInvalidRevisionError:
2193 diff["unreachable"].append((fromProj, toProj))
2194 else:
2195 if fromRevId != toRevId:
2196 diff["changed"].append((fromProj, toProj))
2197 toKeys.remove(proj)
2198
2199 for proj in toKeys:
2200 diff["added"].append(toProjects[proj])
2201
2202 return diff
1974 2203
1975 2204
1976class GitcManifest(XmlManifest): 2205class GitcManifest(XmlManifest):
1977 """Parser for GitC (git-in-the-cloud) manifests.""" 2206 """Parser for GitC (git-in-the-cloud) manifests."""
1978 2207
1979 def _ParseProject(self, node, parent=None): 2208 def _ParseProject(self, node, parent=None):
1980 """Override _ParseProject and add support for GITC specific attributes.""" 2209 """Override _ParseProject and add support for GITC specific attributes.""" # noqa: E501
1981 return super()._ParseProject( 2210 return super()._ParseProject(
1982 node, parent=parent, old_revision=node.getAttribute('old-revision')) 2211 node, parent=parent, old_revision=node.getAttribute("old-revision")
2212 )
1983 2213
1984 def _output_manifest_project_extras(self, p, e): 2214 def _output_manifest_project_extras(self, p, e):
1985 """Output GITC Specific Project attributes""" 2215 """Output GITC Specific Project attributes"""
1986 if p.old_revision: 2216 if p.old_revision:
1987 e.setAttribute('old-revision', str(p.old_revision)) 2217 e.setAttribute("old-revision", str(p.old_revision))
1988 2218
1989 2219
1990class RepoClient(XmlManifest): 2220class RepoClient(XmlManifest):
1991 """Manages a repo client checkout.""" 2221 """Manages a repo client checkout."""
1992 2222
1993 def __init__(self, repodir, manifest_file=None, submanifest_path='', **kwargs): 2223 def __init__(
1994 """Initialize. 2224 self, repodir, manifest_file=None, submanifest_path="", **kwargs
1995 2225 ):
1996 Args: 2226 """Initialize.
1997 repodir: Path to the .repo/ dir for holding all internal checkout state. 2227
1998 It must be in the top directory of the repo client checkout. 2228 Args:
1999 manifest_file: Full path to the manifest file to parse. This will usually 2229 repodir: Path to the .repo/ dir for holding all internal checkout
2000 be |repodir|/|MANIFEST_FILE_NAME|. 2230 state. It must be in the top directory of the repo client
2001 submanifest_path: The submanifest root relative to the repo root. 2231 checkout.
2002 **kwargs: Additional keyword arguments, passed to XmlManifest. 2232 manifest_file: Full path to the manifest file to parse. This will
2003 """ 2233 usually be |repodir|/|MANIFEST_FILE_NAME|.
2004 self.isGitcClient = False 2234 submanifest_path: The submanifest root relative to the repo root.
2005 submanifest_path = submanifest_path or '' 2235 **kwargs: Additional keyword arguments, passed to XmlManifest.
2006 if submanifest_path: 2236 """
2007 self._CheckLocalPath(submanifest_path) 2237 self.isGitcClient = False
2008 prefix = os.path.join(repodir, SUBMANIFEST_DIR, submanifest_path) 2238 submanifest_path = submanifest_path or ""
2009 else: 2239 if submanifest_path:
2010 prefix = repodir 2240 self._CheckLocalPath(submanifest_path)
2011 2241 prefix = os.path.join(repodir, SUBMANIFEST_DIR, submanifest_path)
2012 if os.path.exists(os.path.join(prefix, LOCAL_MANIFEST_NAME)): 2242 else:
2013 print('error: %s is not supported; put local manifests in `%s` instead' % 2243 prefix = repodir
2014 (LOCAL_MANIFEST_NAME, os.path.join(prefix, LOCAL_MANIFESTS_DIR_NAME)), 2244
2015 file=sys.stderr) 2245 if os.path.exists(os.path.join(prefix, LOCAL_MANIFEST_NAME)):
2016 sys.exit(1) 2246 print(
2017 2247 "error: %s is not supported; put local manifests in `%s` "
2018 if manifest_file is None: 2248 "instead"
2019 manifest_file = os.path.join(prefix, MANIFEST_FILE_NAME) 2249 % (
2020 local_manifests = os.path.abspath(os.path.join(prefix, LOCAL_MANIFESTS_DIR_NAME)) 2250 LOCAL_MANIFEST_NAME,
2021 super().__init__(repodir, manifest_file, local_manifests, 2251 os.path.join(prefix, LOCAL_MANIFESTS_DIR_NAME),
2022 submanifest_path=submanifest_path, **kwargs) 2252 ),
2023 2253 file=sys.stderr,
2024 # TODO: Completely separate manifest logic out of the client. 2254 )
2025 self.manifest = self 2255 sys.exit(1)
2256
2257 if manifest_file is None:
2258 manifest_file = os.path.join(prefix, MANIFEST_FILE_NAME)
2259 local_manifests = os.path.abspath(
2260 os.path.join(prefix, LOCAL_MANIFESTS_DIR_NAME)
2261 )
2262 super().__init__(
2263 repodir,
2264 manifest_file,
2265 local_manifests,
2266 submanifest_path=submanifest_path,
2267 **kwargs,
2268 )
2269
2270 # TODO: Completely separate manifest logic out of the client.
2271 self.manifest = self
2026 2272
2027 2273
2028class GitcClient(RepoClient, GitcManifest): 2274class GitcClient(RepoClient, GitcManifest):
2029 """Manages a GitC client checkout.""" 2275 """Manages a GitC client checkout."""
2030 2276
2031 def __init__(self, repodir, gitc_client_name): 2277 def __init__(self, repodir, gitc_client_name):
2032 """Initialize the GitcManifest object.""" 2278 """Initialize the GitcManifest object."""
2033 self.gitc_client_name = gitc_client_name 2279 self.gitc_client_name = gitc_client_name
2034 self.gitc_client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(), 2280 self.gitc_client_dir = os.path.join(
2035 gitc_client_name) 2281 gitc_utils.get_gitc_manifest_dir(), gitc_client_name
2036 2282 )
2037 super().__init__(repodir, os.path.join(self.gitc_client_dir, '.manifest')) 2283
2038 self.isGitcClient = True 2284 super().__init__(
2285 repodir, os.path.join(self.gitc_client_dir, ".manifest")
2286 )
2287 self.isGitcClient = True
diff --git a/pager.py b/pager.py
index 438597ef..dbd5ae85 100644
--- a/pager.py
+++ b/pager.py
@@ -26,102 +26,101 @@ old_stderr = None
26 26
27 27
28def RunPager(globalConfig): 28def RunPager(globalConfig):
29 if not os.isatty(0) or not os.isatty(1): 29 if not os.isatty(0) or not os.isatty(1):
30 return 30 return
31 pager = _SelectPager(globalConfig) 31 pager = _SelectPager(globalConfig)
32 if pager == '' or pager == 'cat': 32 if pager == "" or pager == "cat":
33 return 33 return
34 34
35 if platform_utils.isWindows(): 35 if platform_utils.isWindows():
36 _PipePager(pager) 36 _PipePager(pager)
37 else: 37 else:
38 _ForkPager(pager) 38 _ForkPager(pager)
39 39
40 40
41def TerminatePager(): 41def TerminatePager():
42 global pager_process, old_stdout, old_stderr 42 global pager_process, old_stdout, old_stderr
43 if pager_process: 43 if pager_process:
44 sys.stdout.flush() 44 sys.stdout.flush()
45 sys.stderr.flush() 45 sys.stderr.flush()
46 pager_process.stdin.close() 46 pager_process.stdin.close()
47 pager_process.wait() 47 pager_process.wait()
48 pager_process = None 48 pager_process = None
49 # Restore initial stdout/err in case there is more output in this process 49 # Restore initial stdout/err in case there is more output in this
50 # after shutting down the pager process 50 # process after shutting down the pager process.
51 sys.stdout = old_stdout 51 sys.stdout = old_stdout
52 sys.stderr = old_stderr 52 sys.stderr = old_stderr
53 53
54 54
55def _PipePager(pager): 55def _PipePager(pager):
56 global pager_process, old_stdout, old_stderr 56 global pager_process, old_stdout, old_stderr
57 assert pager_process is None, "Only one active pager process at a time" 57 assert pager_process is None, "Only one active pager process at a time"
58 # Create pager process, piping stdout/err into its stdin 58 # Create pager process, piping stdout/err into its stdin.
59 try: 59 try:
60 pager_process = subprocess.Popen([pager], stdin=subprocess.PIPE, stdout=sys.stdout, 60 pager_process = subprocess.Popen(
61 stderr=sys.stderr) 61 [pager], stdin=subprocess.PIPE, stdout=sys.stdout, stderr=sys.stderr
62 except FileNotFoundError: 62 )
63 sys.exit(f'fatal: cannot start pager "{pager}"') 63 except FileNotFoundError:
64 old_stdout = sys.stdout 64 sys.exit(f'fatal: cannot start pager "{pager}"')
65 old_stderr = sys.stderr 65 old_stdout = sys.stdout
66 sys.stdout = pager_process.stdin 66 old_stderr = sys.stderr
67 sys.stderr = pager_process.stdin 67 sys.stdout = pager_process.stdin
68 sys.stderr = pager_process.stdin
68 69
69 70
70def _ForkPager(pager): 71def _ForkPager(pager):
71 global active 72 global active
72 # This process turns into the pager; a child it forks will 73 # This process turns into the pager; a child it forks will
73 # do the real processing and output back to the pager. This 74 # do the real processing and output back to the pager. This
74 # is necessary to keep the pager in control of the tty. 75 # is necessary to keep the pager in control of the tty.
75 # 76 try:
76 try: 77 r, w = os.pipe()
77 r, w = os.pipe() 78 pid = os.fork()
78 pid = os.fork() 79 if not pid:
79 if not pid: 80 os.dup2(w, 1)
80 os.dup2(w, 1) 81 os.dup2(w, 2)
81 os.dup2(w, 2) 82 os.close(r)
82 os.close(r) 83 os.close(w)
83 os.close(w) 84 active = True
84 active = True 85 return
85 return 86
86 87 os.dup2(r, 0)
87 os.dup2(r, 0) 88 os.close(r)
88 os.close(r) 89 os.close(w)
89 os.close(w) 90
90 91 _BecomePager(pager)
91 _BecomePager(pager) 92 except Exception:
92 except Exception: 93 print("fatal: cannot start pager '%s'" % pager, file=sys.stderr)
93 print("fatal: cannot start pager '%s'" % pager, file=sys.stderr) 94 sys.exit(255)
94 sys.exit(255)
95 95
96 96
97def _SelectPager(globalConfig): 97def _SelectPager(globalConfig):
98 try: 98 try:
99 return os.environ['GIT_PAGER'] 99 return os.environ["GIT_PAGER"]
100 except KeyError: 100 except KeyError:
101 pass 101 pass
102 102
103 pager = globalConfig.GetString('core.pager') 103 pager = globalConfig.GetString("core.pager")
104 if pager: 104 if pager:
105 return pager 105 return pager
106 106
107 try: 107 try:
108 return os.environ['PAGER'] 108 return os.environ["PAGER"]
109 except KeyError: 109 except KeyError:
110 pass 110 pass
111 111
112 return 'less' 112 return "less"
113 113
114 114
115def _BecomePager(pager): 115def _BecomePager(pager):
116 # Delaying execution of the pager until we have output 116 # Delaying execution of the pager until we have output
117 # ready works around a long-standing bug in popularly 117 # ready works around a long-standing bug in popularly
118 # available versions of 'less', a better 'more'. 118 # available versions of 'less', a better 'more'.
119 # 119 _a, _b, _c = select.select([0], [], [0])
120 _a, _b, _c = select.select([0], [], [0]) 120
121 121 os.environ["LESS"] = "FRSX"
122 os.environ['LESS'] = 'FRSX' 122
123 123 try:
124 try: 124 os.execvp(pager, [pager])
125 os.execvp(pager, [pager]) 125 except OSError:
126 except OSError: 126 os.execv("/bin/sh", ["sh", "-c", pager])
127 os.execv('/bin/sh', ['sh', '-c', pager])
diff --git a/platform_utils.py b/platform_utils.py
index 0203249a..2c48e622 100644
--- a/platform_utils.py
+++ b/platform_utils.py
@@ -20,246 +20,264 @@ import stat
20 20
21 21
22def isWindows(): 22def isWindows():
23 """ Returns True when running with the native port of Python for Windows, 23 """Returns True when running with the native port of Python for Windows,
24 False when running on any other platform (including the Cygwin port of 24 False when running on any other platform (including the Cygwin port of
25 Python). 25 Python).
26 """ 26 """
27 # Note: The cygwin port of Python returns "CYGWIN_NT_xxx" 27 # Note: The cygwin port of Python returns "CYGWIN_NT_xxx"
28 return platform.system() == "Windows" 28 return platform.system() == "Windows"
29 29
30 30
31def symlink(source, link_name): 31def symlink(source, link_name):
32 """Creates a symbolic link pointing to source named link_name. 32 """Creates a symbolic link pointing to source named link_name.
33 Note: On Windows, source must exist on disk, as the implementation needs 33
34 to know whether to create a "File" or a "Directory" symbolic link. 34 Note: On Windows, source must exist on disk, as the implementation needs
35 """ 35 to know whether to create a "File" or a "Directory" symbolic link.
36 if isWindows(): 36 """
37 import platform_utils_win32 37 if isWindows():
38 source = _validate_winpath(source) 38 import platform_utils_win32
39 link_name = _validate_winpath(link_name) 39
40 target = os.path.join(os.path.dirname(link_name), source) 40 source = _validate_winpath(source)
41 if isdir(target): 41 link_name = _validate_winpath(link_name)
42 platform_utils_win32.create_dirsymlink(_makelongpath(source), link_name) 42 target = os.path.join(os.path.dirname(link_name), source)
43 if isdir(target):
44 platform_utils_win32.create_dirsymlink(
45 _makelongpath(source), link_name
46 )
47 else:
48 platform_utils_win32.create_filesymlink(
49 _makelongpath(source), link_name
50 )
43 else: 51 else:
44 platform_utils_win32.create_filesymlink(_makelongpath(source), link_name) 52 return os.symlink(source, link_name)
45 else:
46 return os.symlink(source, link_name)
47 53
48 54
49def _validate_winpath(path): 55def _validate_winpath(path):
50 path = os.path.normpath(path) 56 path = os.path.normpath(path)
51 if _winpath_is_valid(path): 57 if _winpath_is_valid(path):
52 return path 58 return path
53 raise ValueError("Path \"%s\" must be a relative path or an absolute " 59 raise ValueError(
54 "path starting with a drive letter".format(path)) 60 'Path "{}" must be a relative path or an absolute '
61 "path starting with a drive letter".format(path)
62 )
55 63
56 64
57def _winpath_is_valid(path): 65def _winpath_is_valid(path):
58 """Windows only: returns True if path is relative (e.g. ".\\foo") or is 66 """Windows only: returns True if path is relative (e.g. ".\\foo") or is
59 absolute including a drive letter (e.g. "c:\\foo"). Returns False if path 67 absolute including a drive letter (e.g. "c:\\foo"). Returns False if path
60 is ambiguous (e.g. "x:foo" or "\\foo"). 68 is ambiguous (e.g. "x:foo" or "\\foo").
61 """ 69 """
62 assert isWindows() 70 assert isWindows()
63 path = os.path.normpath(path) 71 path = os.path.normpath(path)
64 drive, tail = os.path.splitdrive(path) 72 drive, tail = os.path.splitdrive(path)
65 if tail: 73 if tail:
66 if not drive: 74 if not drive:
67 return tail[0] != os.sep # "\\foo" is invalid 75 return tail[0] != os.sep # "\\foo" is invalid
76 else:
77 return tail[0] == os.sep # "x:foo" is invalid
68 else: 78 else:
69 return tail[0] == os.sep # "x:foo" is invalid 79 return not drive # "x:" is invalid
70 else:
71 return not drive # "x:" is invalid
72 80
73 81
74def _makelongpath(path): 82def _makelongpath(path):
75 """Return the input path normalized to support the Windows long path syntax 83 """Return the input path normalized to support the Windows long path syntax
76 ("\\\\?\\" prefix) if needed, i.e. if the input path is longer than the 84 ("\\\\?\\" prefix) if needed, i.e. if the input path is longer than the
77 MAX_PATH limit. 85 MAX_PATH limit.
78 """ 86 """
79 if isWindows(): 87 if isWindows():
80 # Note: MAX_PATH is 260, but, for directories, the maximum value is actually 246. 88 # Note: MAX_PATH is 260, but, for directories, the maximum value is
81 if len(path) < 246: 89 # actually 246.
82 return path 90 if len(path) < 246:
83 if path.startswith(u"\\\\?\\"): 91 return path
84 return path 92 if path.startswith("\\\\?\\"):
85 if not os.path.isabs(path): 93 return path
86 return path 94 if not os.path.isabs(path):
87 # Append prefix and ensure unicode so that the special longpath syntax 95 return path
88 # is supported by underlying Win32 API calls 96 # Append prefix and ensure unicode so that the special longpath syntax
89 return u"\\\\?\\" + os.path.normpath(path) 97 # is supported by underlying Win32 API calls
90 else: 98 return "\\\\?\\" + os.path.normpath(path)
91 return path 99 else:
100 return path
92 101
93 102
94def rmtree(path, ignore_errors=False): 103def rmtree(path, ignore_errors=False):
95 """shutil.rmtree(path) wrapper with support for long paths on Windows. 104 """shutil.rmtree(path) wrapper with support for long paths on Windows.
96 105
97 Availability: Unix, Windows.""" 106 Availability: Unix, Windows.
98 onerror = None 107 """
99 if isWindows(): 108 onerror = None
100 path = _makelongpath(path) 109 if isWindows():
101 onerror = handle_rmtree_error 110 path = _makelongpath(path)
102 shutil.rmtree(path, ignore_errors=ignore_errors, onerror=onerror) 111 onerror = handle_rmtree_error
112 shutil.rmtree(path, ignore_errors=ignore_errors, onerror=onerror)
103 113
104 114
105def handle_rmtree_error(function, path, excinfo): 115def handle_rmtree_error(function, path, excinfo):
106 # Allow deleting read-only files 116 # Allow deleting read-only files.
107 os.chmod(path, stat.S_IWRITE) 117 os.chmod(path, stat.S_IWRITE)
108 function(path) 118 function(path)
109 119
110 120
111def rename(src, dst): 121def rename(src, dst):
112 """os.rename(src, dst) wrapper with support for long paths on Windows. 122 """os.rename(src, dst) wrapper with support for long paths on Windows.
113 123
114 Availability: Unix, Windows.""" 124 Availability: Unix, Windows.
115 if isWindows(): 125 """
116 # On Windows, rename fails if destination exists, see 126 if isWindows():
117 # https://docs.python.org/2/library/os.html#os.rename 127 # On Windows, rename fails if destination exists, see
118 try: 128 # https://docs.python.org/2/library/os.html#os.rename
119 os.rename(_makelongpath(src), _makelongpath(dst)) 129 try:
120 except OSError as e: 130 os.rename(_makelongpath(src), _makelongpath(dst))
121 if e.errno == errno.EEXIST: 131 except OSError as e:
122 os.remove(_makelongpath(dst)) 132 if e.errno == errno.EEXIST:
123 os.rename(_makelongpath(src), _makelongpath(dst)) 133 os.remove(_makelongpath(dst))
124 else: 134 os.rename(_makelongpath(src), _makelongpath(dst))
125 raise 135 else:
126 else: 136 raise
127 shutil.move(src, dst) 137 else:
138 shutil.move(src, dst)
128 139
129 140
130def remove(path, missing_ok=False): 141def remove(path, missing_ok=False):
131 """Remove (delete) the file path. This is a replacement for os.remove that 142 """Remove (delete) the file path. This is a replacement for os.remove that
132 allows deleting read-only files on Windows, with support for long paths and 143 allows deleting read-only files on Windows, with support for long paths and
133 for deleting directory symbolic links. 144 for deleting directory symbolic links.
134 145
135 Availability: Unix, Windows.""" 146 Availability: Unix, Windows.
136 longpath = _makelongpath(path) if isWindows() else path 147 """
137 try: 148 longpath = _makelongpath(path) if isWindows() else path
138 os.remove(longpath) 149 try:
139 except OSError as e:
140 if e.errno == errno.EACCES:
141 os.chmod(longpath, stat.S_IWRITE)
142 # Directory symbolic links must be deleted with 'rmdir'.
143 if islink(longpath) and isdir(longpath):
144 os.rmdir(longpath)
145 else:
146 os.remove(longpath) 150 os.remove(longpath)
147 elif missing_ok and e.errno == errno.ENOENT: 151 except OSError as e:
148 pass 152 if e.errno == errno.EACCES:
149 else: 153 os.chmod(longpath, stat.S_IWRITE)
150 raise 154 # Directory symbolic links must be deleted with 'rmdir'.
155 if islink(longpath) and isdir(longpath):
156 os.rmdir(longpath)
157 else:
158 os.remove(longpath)
159 elif missing_ok and e.errno == errno.ENOENT:
160 pass
161 else:
162 raise
151 163
152 164
153def walk(top, topdown=True, onerror=None, followlinks=False): 165def walk(top, topdown=True, onerror=None, followlinks=False):
154 """os.walk(path) wrapper with support for long paths on Windows. 166 """os.walk(path) wrapper with support for long paths on Windows.
155 167
156 Availability: Windows, Unix. 168 Availability: Windows, Unix.
157 """ 169 """
158 if isWindows(): 170 if isWindows():
159 return _walk_windows_impl(top, topdown, onerror, followlinks) 171 return _walk_windows_impl(top, topdown, onerror, followlinks)
160 else: 172 else:
161 return os.walk(top, topdown, onerror, followlinks) 173 return os.walk(top, topdown, onerror, followlinks)
162 174
163 175
164def _walk_windows_impl(top, topdown, onerror, followlinks): 176def _walk_windows_impl(top, topdown, onerror, followlinks):
165 try: 177 try:
166 names = listdir(top) 178 names = listdir(top)
167 except Exception as err: 179 except Exception as err:
168 if onerror is not None: 180 if onerror is not None:
169 onerror(err) 181 onerror(err)
170 return 182 return
171 183
172 dirs, nondirs = [], [] 184 dirs, nondirs = [], []
173 for name in names: 185 for name in names:
174 if isdir(os.path.join(top, name)): 186 if isdir(os.path.join(top, name)):
175 dirs.append(name) 187 dirs.append(name)
176 else: 188 else:
177 nondirs.append(name) 189 nondirs.append(name)
178 190
179 if topdown: 191 if topdown:
180 yield top, dirs, nondirs 192 yield top, dirs, nondirs
181 for name in dirs: 193 for name in dirs:
182 new_path = os.path.join(top, name) 194 new_path = os.path.join(top, name)
183 if followlinks or not islink(new_path): 195 if followlinks or not islink(new_path):
184 for x in _walk_windows_impl(new_path, topdown, onerror, followlinks): 196 for x in _walk_windows_impl(
185 yield x 197 new_path, topdown, onerror, followlinks
186 if not topdown: 198 ):
187 yield top, dirs, nondirs 199 yield x
200 if not topdown:
201 yield top, dirs, nondirs
188 202
189 203
190def listdir(path): 204def listdir(path):
191 """os.listdir(path) wrapper with support for long paths on Windows. 205 """os.listdir(path) wrapper with support for long paths on Windows.
192 206
193 Availability: Windows, Unix. 207 Availability: Windows, Unix.
194 """ 208 """
195 return os.listdir(_makelongpath(path)) 209 return os.listdir(_makelongpath(path))
196 210
197 211
198def rmdir(path): 212def rmdir(path):
199 """os.rmdir(path) wrapper with support for long paths on Windows. 213 """os.rmdir(path) wrapper with support for long paths on Windows.
200 214
201 Availability: Windows, Unix. 215 Availability: Windows, Unix.
202 """ 216 """
203 os.rmdir(_makelongpath(path)) 217 os.rmdir(_makelongpath(path))
204 218
205 219
206def isdir(path): 220def isdir(path):
207 """os.path.isdir(path) wrapper with support for long paths on Windows. 221 """os.path.isdir(path) wrapper with support for long paths on Windows.
208 222
209 Availability: Windows, Unix. 223 Availability: Windows, Unix.
210 """ 224 """
211 return os.path.isdir(_makelongpath(path)) 225 return os.path.isdir(_makelongpath(path))
212 226
213 227
214def islink(path): 228def islink(path):
215 """os.path.islink(path) wrapper with support for long paths on Windows. 229 """os.path.islink(path) wrapper with support for long paths on Windows.
216 230
217 Availability: Windows, Unix. 231 Availability: Windows, Unix.
218 """ 232 """
219 if isWindows(): 233 if isWindows():
220 import platform_utils_win32 234 import platform_utils_win32
221 return platform_utils_win32.islink(_makelongpath(path)) 235
222 else: 236 return platform_utils_win32.islink(_makelongpath(path))
223 return os.path.islink(path) 237 else:
238 return os.path.islink(path)
224 239
225 240
226def readlink(path): 241def readlink(path):
227 """Return a string representing the path to which the symbolic link 242 """Return a string representing the path to which the symbolic link
228 points. The result may be either an absolute or relative pathname; 243 points. The result may be either an absolute or relative pathname;
229 if it is relative, it may be converted to an absolute pathname using 244 if it is relative, it may be converted to an absolute pathname using
230 os.path.join(os.path.dirname(path), result). 245 os.path.join(os.path.dirname(path), result).
246
247 Availability: Windows, Unix.
248 """
249 if isWindows():
250 import platform_utils_win32
231 251
232 Availability: Windows, Unix. 252 return platform_utils_win32.readlink(_makelongpath(path))
233 """ 253 else:
234 if isWindows(): 254 return os.readlink(path)
235 import platform_utils_win32
236 return platform_utils_win32.readlink(_makelongpath(path))
237 else:
238 return os.readlink(path)
239 255
240 256
241def realpath(path): 257def realpath(path):
242 """Return the canonical path of the specified filename, eliminating 258 """Return the canonical path of the specified filename, eliminating
243 any symbolic links encountered in the path. 259 any symbolic links encountered in the path.
244 260
245 Availability: Windows, Unix. 261 Availability: Windows, Unix.
246 """ 262 """
247 if isWindows(): 263 if isWindows():
248 current_path = os.path.abspath(path) 264 current_path = os.path.abspath(path)
249 path_tail = [] 265 path_tail = []
250 for c in range(0, 100): # Avoid cycles 266 for c in range(0, 100): # Avoid cycles
251 if islink(current_path): 267 if islink(current_path):
252 target = readlink(current_path) 268 target = readlink(current_path)
253 current_path = os.path.join(os.path.dirname(current_path), target) 269 current_path = os.path.join(
254 else: 270 os.path.dirname(current_path), target
255 basename = os.path.basename(current_path) 271 )
256 if basename == '': 272 else:
257 path_tail.append(current_path) 273 basename = os.path.basename(current_path)
258 break 274 if basename == "":
259 path_tail.append(basename) 275 path_tail.append(current_path)
260 current_path = os.path.dirname(current_path) 276 break
261 path_tail.reverse() 277 path_tail.append(basename)
262 result = os.path.normpath(os.path.join(*path_tail)) 278 current_path = os.path.dirname(current_path)
263 return result 279 path_tail.reverse()
264 else: 280 result = os.path.normpath(os.path.join(*path_tail))
265 return os.path.realpath(path) 281 return result
282 else:
283 return os.path.realpath(path)
diff --git a/platform_utils_win32.py b/platform_utils_win32.py
index bf916d47..e9b15f46 100644
--- a/platform_utils_win32.py
+++ b/platform_utils_win32.py
@@ -19,7 +19,7 @@ from ctypes import c_buffer, c_ubyte, Structure, Union, byref
19from ctypes.wintypes import BOOL, BOOLEAN, LPCWSTR, DWORD, HANDLE 19from ctypes.wintypes import BOOL, BOOLEAN, LPCWSTR, DWORD, HANDLE
20from ctypes.wintypes import WCHAR, USHORT, LPVOID, ULONG, LPDWORD 20from ctypes.wintypes import WCHAR, USHORT, LPVOID, ULONG, LPDWORD
21 21
22kernel32 = WinDLL('kernel32', use_last_error=True) 22kernel32 = WinDLL("kernel32", use_last_error=True)
23 23
24UCHAR = c_ubyte 24UCHAR = c_ubyte
25 25
@@ -31,14 +31,17 @@ ERROR_PRIVILEGE_NOT_HELD = 1314
31# Win32 API entry points 31# Win32 API entry points
32CreateSymbolicLinkW = kernel32.CreateSymbolicLinkW 32CreateSymbolicLinkW = kernel32.CreateSymbolicLinkW
33CreateSymbolicLinkW.restype = BOOLEAN 33CreateSymbolicLinkW.restype = BOOLEAN
34CreateSymbolicLinkW.argtypes = (LPCWSTR, # lpSymlinkFileName In 34CreateSymbolicLinkW.argtypes = (
35 LPCWSTR, # lpTargetFileName In 35 LPCWSTR, # lpSymlinkFileName In
36 DWORD) # dwFlags In 36 LPCWSTR, # lpTargetFileName In
37 DWORD, # dwFlags In
38)
37 39
38# Symbolic link creation flags 40# Symbolic link creation flags
39SYMBOLIC_LINK_FLAG_FILE = 0x00 41SYMBOLIC_LINK_FLAG_FILE = 0x00
40SYMBOLIC_LINK_FLAG_DIRECTORY = 0x01 42SYMBOLIC_LINK_FLAG_DIRECTORY = 0x01
41# symlink support for CreateSymbolicLink() starting with Windows 10 (1703, v10.0.14972) 43# symlink support for CreateSymbolicLink() starting with Windows 10 (1703,
44# v10.0.14972)
42SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE = 0x02 45SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE = 0x02
43 46
44GetFileAttributesW = kernel32.GetFileAttributesW 47GetFileAttributesW = kernel32.GetFileAttributesW
@@ -50,13 +53,15 @@ FILE_ATTRIBUTE_REPARSE_POINT = 0x00400
50 53
51CreateFileW = kernel32.CreateFileW 54CreateFileW = kernel32.CreateFileW
52CreateFileW.restype = HANDLE 55CreateFileW.restype = HANDLE
53CreateFileW.argtypes = (LPCWSTR, # lpFileName In 56CreateFileW.argtypes = (
54 DWORD, # dwDesiredAccess In 57 LPCWSTR, # lpFileName In
55 DWORD, # dwShareMode In 58 DWORD, # dwDesiredAccess In
56 LPVOID, # lpSecurityAttributes In_opt 59 DWORD, # dwShareMode In
57 DWORD, # dwCreationDisposition In 60 LPVOID, # lpSecurityAttributes In_opt
58 DWORD, # dwFlagsAndAttributes In 61 DWORD, # dwCreationDisposition In
59 HANDLE) # hTemplateFile In_opt 62 DWORD, # dwFlagsAndAttributes In
63 HANDLE, # hTemplateFile In_opt
64)
60 65
61CloseHandle = kernel32.CloseHandle 66CloseHandle = kernel32.CloseHandle
62CloseHandle.restype = BOOL 67CloseHandle.restype = BOOL
@@ -69,14 +74,16 @@ FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000
69 74
70DeviceIoControl = kernel32.DeviceIoControl 75DeviceIoControl = kernel32.DeviceIoControl
71DeviceIoControl.restype = BOOL 76DeviceIoControl.restype = BOOL
72DeviceIoControl.argtypes = (HANDLE, # hDevice In 77DeviceIoControl.argtypes = (
73 DWORD, # dwIoControlCode In 78 HANDLE, # hDevice In
74 LPVOID, # lpInBuffer In_opt 79 DWORD, # dwIoControlCode In
75 DWORD, # nInBufferSize In 80 LPVOID, # lpInBuffer In_opt
76 LPVOID, # lpOutBuffer Out_opt 81 DWORD, # nInBufferSize In
77 DWORD, # nOutBufferSize In 82 LPVOID, # lpOutBuffer Out_opt
78 LPDWORD, # lpBytesReturned Out_opt 83 DWORD, # nOutBufferSize In
79 LPVOID) # lpOverlapped Inout_opt 84 LPDWORD, # lpBytesReturned Out_opt
85 LPVOID, # lpOverlapped Inout_opt
86)
80 87
81# Device I/O control flags and options 88# Device I/O control flags and options
82FSCTL_GET_REPARSE_POINT = 0x000900A8 89FSCTL_GET_REPARSE_POINT = 0x000900A8
@@ -86,124 +93,138 @@ MAXIMUM_REPARSE_DATA_BUFFER_SIZE = 0x4000
86 93
87 94
88class GENERIC_REPARSE_BUFFER(Structure): 95class GENERIC_REPARSE_BUFFER(Structure):
89 _fields_ = (('DataBuffer', UCHAR * 1),) 96 _fields_ = (("DataBuffer", UCHAR * 1),)
90 97
91 98
92class SYMBOLIC_LINK_REPARSE_BUFFER(Structure): 99class SYMBOLIC_LINK_REPARSE_BUFFER(Structure):
93 _fields_ = (('SubstituteNameOffset', USHORT), 100 _fields_ = (
94 ('SubstituteNameLength', USHORT), 101 ("SubstituteNameOffset", USHORT),
95 ('PrintNameOffset', USHORT), 102 ("SubstituteNameLength", USHORT),
96 ('PrintNameLength', USHORT), 103 ("PrintNameOffset", USHORT),
97 ('Flags', ULONG), 104 ("PrintNameLength", USHORT),
98 ('PathBuffer', WCHAR * 1)) 105 ("Flags", ULONG),
99 106 ("PathBuffer", WCHAR * 1),
100 @property 107 )
101 def PrintName(self): 108
102 arrayt = WCHAR * (self.PrintNameLength // 2) 109 @property
103 offset = type(self).PathBuffer.offset + self.PrintNameOffset 110 def PrintName(self):
104 return arrayt.from_address(addressof(self) + offset).value 111 arrayt = WCHAR * (self.PrintNameLength // 2)
112 offset = type(self).PathBuffer.offset + self.PrintNameOffset
113 return arrayt.from_address(addressof(self) + offset).value
105 114
106 115
107class MOUNT_POINT_REPARSE_BUFFER(Structure): 116class MOUNT_POINT_REPARSE_BUFFER(Structure):
108 _fields_ = (('SubstituteNameOffset', USHORT), 117 _fields_ = (
109 ('SubstituteNameLength', USHORT), 118 ("SubstituteNameOffset", USHORT),
110 ('PrintNameOffset', USHORT), 119 ("SubstituteNameLength", USHORT),
111 ('PrintNameLength', USHORT), 120 ("PrintNameOffset", USHORT),
112 ('PathBuffer', WCHAR * 1)) 121 ("PrintNameLength", USHORT),
122 ("PathBuffer", WCHAR * 1),
123 )
113 124
114 @property 125 @property
115 def PrintName(self): 126 def PrintName(self):
116 arrayt = WCHAR * (self.PrintNameLength // 2) 127 arrayt = WCHAR * (self.PrintNameLength // 2)
117 offset = type(self).PathBuffer.offset + self.PrintNameOffset 128 offset = type(self).PathBuffer.offset + self.PrintNameOffset
118 return arrayt.from_address(addressof(self) + offset).value 129 return arrayt.from_address(addressof(self) + offset).value
119 130
120 131
121class REPARSE_DATA_BUFFER(Structure): 132class REPARSE_DATA_BUFFER(Structure):
122 class REPARSE_BUFFER(Union): 133 class REPARSE_BUFFER(Union):
123 _fields_ = (('SymbolicLinkReparseBuffer', SYMBOLIC_LINK_REPARSE_BUFFER), 134 _fields_ = (
124 ('MountPointReparseBuffer', MOUNT_POINT_REPARSE_BUFFER), 135 ("SymbolicLinkReparseBuffer", SYMBOLIC_LINK_REPARSE_BUFFER),
125 ('GenericReparseBuffer', GENERIC_REPARSE_BUFFER)) 136 ("MountPointReparseBuffer", MOUNT_POINT_REPARSE_BUFFER),
126 _fields_ = (('ReparseTag', ULONG), 137 ("GenericReparseBuffer", GENERIC_REPARSE_BUFFER),
127 ('ReparseDataLength', USHORT), 138 )
128 ('Reserved', USHORT), 139
129 ('ReparseBuffer', REPARSE_BUFFER)) 140 _fields_ = (
130 _anonymous_ = ('ReparseBuffer',) 141 ("ReparseTag", ULONG),
142 ("ReparseDataLength", USHORT),
143 ("Reserved", USHORT),
144 ("ReparseBuffer", REPARSE_BUFFER),
145 )
146 _anonymous_ = ("ReparseBuffer",)
131 147
132 148
133def create_filesymlink(source, link_name): 149def create_filesymlink(source, link_name):
134 """Creates a Windows file symbolic link source pointing to link_name.""" 150 """Creates a Windows file symbolic link source pointing to link_name."""
135 _create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_FILE) 151 _create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_FILE)
136 152
137 153
138def create_dirsymlink(source, link_name): 154def create_dirsymlink(source, link_name):
139 """Creates a Windows directory symbolic link source pointing to link_name. 155 """Creates a Windows directory symbolic link source pointing to link_name.""" # noqa: E501
140 """ 156 _create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_DIRECTORY)
141 _create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_DIRECTORY)
142 157
143 158
144def _create_symlink(source, link_name, dwFlags): 159def _create_symlink(source, link_name, dwFlags):
145 if not CreateSymbolicLinkW(link_name, source, 160 if not CreateSymbolicLinkW(
146 dwFlags | SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE): 161 link_name,
147 # See https://github.com/golang/go/pull/24307/files#diff-b87bc12e4da2497308f9ef746086e4f0 162 source,
148 # "the unprivileged create flag is unsupported below Windows 10 (1703, v10.0.14972). 163 dwFlags | SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE,
149 # retry without it." 164 ):
150 if not CreateSymbolicLinkW(link_name, source, dwFlags): 165 # See https://github.com/golang/go/pull/24307/files#diff-b87bc12e4da2497308f9ef746086e4f0 # noqa: E501
151 code = get_last_error() 166 # "the unprivileged create flag is unsupported below Windows 10 (1703,
152 error_desc = FormatError(code).strip() 167 # v10.0.14972). retry without it."
153 if code == ERROR_PRIVILEGE_NOT_HELD: 168 if not CreateSymbolicLinkW(link_name, source, dwFlags):
154 raise OSError(errno.EPERM, error_desc, link_name) 169 code = get_last_error()
155 _raise_winerror( 170 error_desc = FormatError(code).strip()
156 code, 171 if code == ERROR_PRIVILEGE_NOT_HELD:
157 'Error creating symbolic link \"%s\"'.format(link_name)) 172 raise OSError(errno.EPERM, error_desc, link_name)
173 _raise_winerror(
174 code, 'Error creating symbolic link "{}"'.format(link_name)
175 )
158 176
159 177
160def islink(path): 178def islink(path):
161 result = GetFileAttributesW(path) 179 result = GetFileAttributesW(path)
162 if result == INVALID_FILE_ATTRIBUTES: 180 if result == INVALID_FILE_ATTRIBUTES:
163 return False 181 return False
164 return bool(result & FILE_ATTRIBUTE_REPARSE_POINT) 182 return bool(result & FILE_ATTRIBUTE_REPARSE_POINT)
165 183
166 184
167def readlink(path): 185def readlink(path):
168 reparse_point_handle = CreateFileW(path, 186 reparse_point_handle = CreateFileW(
169 0, 187 path,
170 0, 188 0,
171 None, 189 0,
172 OPEN_EXISTING, 190 None,
173 FILE_FLAG_OPEN_REPARSE_POINT | 191 OPEN_EXISTING,
174 FILE_FLAG_BACKUP_SEMANTICS, 192 FILE_FLAG_OPEN_REPARSE_POINT | FILE_FLAG_BACKUP_SEMANTICS,
175 None) 193 None,
176 if reparse_point_handle == INVALID_HANDLE_VALUE: 194 )
177 _raise_winerror( 195 if reparse_point_handle == INVALID_HANDLE_VALUE:
178 get_last_error(), 196 _raise_winerror(
179 'Error opening symbolic link \"%s\"'.format(path)) 197 get_last_error(), 'Error opening symbolic link "{}"'.format(path)
180 target_buffer = c_buffer(MAXIMUM_REPARSE_DATA_BUFFER_SIZE) 198 )
181 n_bytes_returned = DWORD() 199 target_buffer = c_buffer(MAXIMUM_REPARSE_DATA_BUFFER_SIZE)
182 io_result = DeviceIoControl(reparse_point_handle, 200 n_bytes_returned = DWORD()
183 FSCTL_GET_REPARSE_POINT, 201 io_result = DeviceIoControl(
184 None, 202 reparse_point_handle,
185 0, 203 FSCTL_GET_REPARSE_POINT,
186 target_buffer, 204 None,
187 len(target_buffer), 205 0,
188 byref(n_bytes_returned), 206 target_buffer,
189 None) 207 len(target_buffer),
190 CloseHandle(reparse_point_handle) 208 byref(n_bytes_returned),
191 if not io_result: 209 None,
210 )
211 CloseHandle(reparse_point_handle)
212 if not io_result:
213 _raise_winerror(
214 get_last_error(), 'Error reading symbolic link "{}"'.format(path)
215 )
216 rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer)
217 if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK:
218 return rdb.SymbolicLinkReparseBuffer.PrintName
219 elif rdb.ReparseTag == IO_REPARSE_TAG_MOUNT_POINT:
220 return rdb.MountPointReparseBuffer.PrintName
221 # Unsupported reparse point type.
192 _raise_winerror( 222 _raise_winerror(
193 get_last_error(), 223 ERROR_NOT_SUPPORTED, 'Error reading symbolic link "{}"'.format(path)
194 'Error reading symbolic link \"%s\"'.format(path)) 224 )
195 rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer)
196 if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK:
197 return rdb.SymbolicLinkReparseBuffer.PrintName
198 elif rdb.ReparseTag == IO_REPARSE_TAG_MOUNT_POINT:
199 return rdb.MountPointReparseBuffer.PrintName
200 # Unsupported reparse point type
201 _raise_winerror(
202 ERROR_NOT_SUPPORTED,
203 'Error reading symbolic link \"%s\"'.format(path))
204 225
205 226
206def _raise_winerror(code, error_desc): 227def _raise_winerror(code, error_desc):
207 win_error_desc = FormatError(code).strip() 228 win_error_desc = FormatError(code).strip()
208 error_desc = "%s: %s".format(error_desc, win_error_desc) 229 error_desc = "{0}: {1}".format(error_desc, win_error_desc)
209 raise WinError(code, error_desc) 230 raise WinError(code, error_desc)
diff --git a/progress.py b/progress.py
index 526ce6c1..d1a7c543 100644
--- a/progress.py
+++ b/progress.py
@@ -22,115 +22,136 @@ _NOT_TTY = not os.isatty(2)
22# This will erase all content in the current line (wherever the cursor is). 22# This will erase all content in the current line (wherever the cursor is).
23# It does not move the cursor, so this is usually followed by \r to move to 23# It does not move the cursor, so this is usually followed by \r to move to
24# column 0. 24# column 0.
25CSI_ERASE_LINE = '\x1b[2K' 25CSI_ERASE_LINE = "\x1b[2K"
26 26
27# This will erase all content in the current line after the cursor. This is 27# This will erase all content in the current line after the cursor. This is
28# useful for partial updates & progress messages as the terminal can display 28# useful for partial updates & progress messages as the terminal can display
29# it better. 29# it better.
30CSI_ERASE_LINE_AFTER = '\x1b[K' 30CSI_ERASE_LINE_AFTER = "\x1b[K"
31 31
32 32
33def duration_str(total): 33def duration_str(total):
34 """A less noisy timedelta.__str__. 34 """A less noisy timedelta.__str__.
35 35
36 The default timedelta stringification contains a lot of leading zeros and 36 The default timedelta stringification contains a lot of leading zeros and
37 uses microsecond resolution. This makes for noisy output. 37 uses microsecond resolution. This makes for noisy output.
38 """ 38 """
39 hours, rem = divmod(total, 3600) 39 hours, rem = divmod(total, 3600)
40 mins, secs = divmod(rem, 60) 40 mins, secs = divmod(rem, 60)
41 ret = '%.3fs' % (secs,) 41 ret = "%.3fs" % (secs,)
42 if mins: 42 if mins:
43 ret = '%im%s' % (mins, ret) 43 ret = "%im%s" % (mins, ret)
44 if hours: 44 if hours:
45 ret = '%ih%s' % (hours, ret) 45 ret = "%ih%s" % (hours, ret)
46 return ret 46 return ret
47 47
48 48
49class Progress(object): 49class Progress(object):
50 def __init__(self, title, total=0, units='', print_newline=False, delay=True, 50 def __init__(
51 quiet=False): 51 self,
52 self._title = title 52 title,
53 self._total = total 53 total=0,
54 self._done = 0 54 units="",
55 self._start = time() 55 print_newline=False,
56 self._show = not delay 56 delay=True,
57 self._units = units 57 quiet=False,
58 self._print_newline = print_newline 58 ):
59 # Only show the active jobs section if we run more than one in parallel. 59 self._title = title
60 self._show_jobs = False 60 self._total = total
61 self._active = 0 61 self._done = 0
62 62 self._start = time()
63 # When quiet, never show any output. It's a bit hacky, but reusing the 63 self._show = not delay
64 # existing logic that delays initial output keeps the rest of the class 64 self._units = units
65 # clean. Basically we set the start time to years in the future. 65 self._print_newline = print_newline
66 if quiet: 66 # Only show the active jobs section if we run more than one in parallel.
67 self._show = False 67 self._show_jobs = False
68 self._start += 2**32 68 self._active = 0
69 69
70 def start(self, name): 70 # When quiet, never show any output. It's a bit hacky, but reusing the
71 self._active += 1 71 # existing logic that delays initial output keeps the rest of the class
72 if not self._show_jobs: 72 # clean. Basically we set the start time to years in the future.
73 self._show_jobs = self._active > 1 73 if quiet:
74 self.update(inc=0, msg='started ' + name) 74 self._show = False
75 75 self._start += 2**32
76 def finish(self, name): 76
77 self.update(msg='finished ' + name) 77 def start(self, name):
78 self._active -= 1 78 self._active += 1
79 79 if not self._show_jobs:
80 def update(self, inc=1, msg=''): 80 self._show_jobs = self._active > 1
81 self._done += inc 81 self.update(inc=0, msg="started " + name)
82 82
83 if _NOT_TTY or IsTraceToStderr(): 83 def finish(self, name):
84 return 84 self.update(msg="finished " + name)
85 85 self._active -= 1
86 if not self._show: 86
87 if 0.5 <= time() - self._start: 87 def update(self, inc=1, msg=""):
88 self._show = True 88 self._done += inc
89 else: 89
90 return 90 if _NOT_TTY or IsTraceToStderr():
91 91 return
92 if self._total <= 0: 92
93 sys.stderr.write('\r%s: %d,%s' % ( 93 if not self._show:
94 self._title, 94 if 0.5 <= time() - self._start:
95 self._done, 95 self._show = True
96 CSI_ERASE_LINE_AFTER)) 96 else:
97 sys.stderr.flush() 97 return
98 else: 98
99 p = (100 * self._done) / self._total 99 if self._total <= 0:
100 if self._show_jobs: 100 sys.stderr.write(
101 jobs = '[%d job%s] ' % (self._active, 's' if self._active > 1 else '') 101 "\r%s: %d,%s" % (self._title, self._done, CSI_ERASE_LINE_AFTER)
102 else: 102 )
103 jobs = '' 103 sys.stderr.flush()
104 sys.stderr.write('\r%s: %2d%% %s(%d%s/%d%s)%s%s%s%s' % ( 104 else:
105 self._title, 105 p = (100 * self._done) / self._total
106 p, 106 if self._show_jobs:
107 jobs, 107 jobs = "[%d job%s] " % (
108 self._done, self._units, 108 self._active,
109 self._total, self._units, 109 "s" if self._active > 1 else "",
110 ' ' if msg else '', msg, 110 )
111 CSI_ERASE_LINE_AFTER, 111 else:
112 '\n' if self._print_newline else '')) 112 jobs = ""
113 sys.stderr.flush() 113 sys.stderr.write(
114 114 "\r%s: %2d%% %s(%d%s/%d%s)%s%s%s%s"
115 def end(self): 115 % (
116 if _NOT_TTY or IsTraceToStderr() or not self._show: 116 self._title,
117 return 117 p,
118 118 jobs,
119 duration = duration_str(time() - self._start) 119 self._done,
120 if self._total <= 0: 120 self._units,
121 sys.stderr.write('\r%s: %d, done in %s%s\n' % ( 121 self._total,
122 self._title, 122 self._units,
123 self._done, 123 " " if msg else "",
124 duration, 124 msg,
125 CSI_ERASE_LINE_AFTER)) 125 CSI_ERASE_LINE_AFTER,
126 sys.stderr.flush() 126 "\n" if self._print_newline else "",
127 else: 127 )
128 p = (100 * self._done) / self._total 128 )
129 sys.stderr.write('\r%s: %3d%% (%d%s/%d%s), done in %s%s\n' % ( 129 sys.stderr.flush()
130 self._title, 130
131 p, 131 def end(self):
132 self._done, self._units, 132 if _NOT_TTY or IsTraceToStderr() or not self._show:
133 self._total, self._units, 133 return
134 duration, 134
135 CSI_ERASE_LINE_AFTER)) 135 duration = duration_str(time() - self._start)
136 sys.stderr.flush() 136 if self._total <= 0:
137 sys.stderr.write(
138 "\r%s: %d, done in %s%s\n"
139 % (self._title, self._done, duration, CSI_ERASE_LINE_AFTER)
140 )
141 sys.stderr.flush()
142 else:
143 p = (100 * self._done) / self._total
144 sys.stderr.write(
145 "\r%s: %3d%% (%d%s/%d%s), done in %s%s\n"
146 % (
147 self._title,
148 p,
149 self._done,
150 self._units,
151 self._total,
152 self._units,
153 duration,
154 CSI_ERASE_LINE_AFTER,
155 )
156 )
157 sys.stderr.flush()
diff --git a/project.py b/project.py
index 3ccfd140..887fe83a 100644
--- a/project.py
+++ b/project.py
@@ -32,8 +32,13 @@ import urllib.parse
32from color import Coloring 32from color import Coloring
33import fetch 33import fetch
34from git_command import GitCommand, git_require 34from git_command import GitCommand, git_require
35from git_config import GitConfig, IsId, GetSchemeFromUrl, GetUrlCookieFile, \ 35from git_config import (
36 ID_RE 36 GitConfig,
37 IsId,
38 GetSchemeFromUrl,
39 GetUrlCookieFile,
40 ID_RE,
41)
37import git_superproject 42import git_superproject
38from git_trace2_event_log import EventLog 43from git_trace2_event_log import EventLog
39from error import GitError, UploadError, DownloadError 44from error import GitError, UploadError, DownloadError
@@ -47,12 +52,13 @@ from git_refs import GitRefs, HEAD, R_HEADS, R_TAGS, R_PUB, R_M, R_WORKTREE_M
47 52
48 53
49class SyncNetworkHalfResult(NamedTuple): 54class SyncNetworkHalfResult(NamedTuple):
50 """Sync_NetworkHalf return value.""" 55 """Sync_NetworkHalf return value."""
51 # True if successful. 56
52 success: bool 57 # True if successful.
53 # Did we query the remote? False when optimized_fetch is True and we have the 58 success: bool
54 # commit already present. 59 # Did we query the remote? False when optimized_fetch is True and we have
55 remote_fetched: bool 60 # the commit already present.
61 remote_fetched: bool
56 62
57 63
58# Maximum sleep time allowed during retries. 64# Maximum sleep time allowed during retries.
@@ -62,3904 +68,4362 @@ RETRY_JITTER_PERCENT = 0.1
62 68
63# Whether to use alternates. Switching back and forth is *NOT* supported. 69# Whether to use alternates. Switching back and forth is *NOT* supported.
64# TODO(vapier): Remove knob once behavior is verified. 70# TODO(vapier): Remove knob once behavior is verified.
65_ALTERNATES = os.environ.get('REPO_USE_ALTERNATES') == '1' 71_ALTERNATES = os.environ.get("REPO_USE_ALTERNATES") == "1"
66 72
67 73
68def _lwrite(path, content): 74def _lwrite(path, content):
69 lock = '%s.lock' % path 75 lock = "%s.lock" % path
70 76
71 # Maintain Unix line endings on all OS's to match git behavior. 77 # Maintain Unix line endings on all OS's to match git behavior.
72 with open(lock, 'w', newline='\n') as fd: 78 with open(lock, "w", newline="\n") as fd:
73 fd.write(content) 79 fd.write(content)
74 80
75 try: 81 try:
76 platform_utils.rename(lock, path) 82 platform_utils.rename(lock, path)
77 except OSError: 83 except OSError:
78 platform_utils.remove(lock) 84 platform_utils.remove(lock)
79 raise 85 raise
80 86
81 87
82def _error(fmt, *args): 88def _error(fmt, *args):
83 msg = fmt % args 89 msg = fmt % args
84 print('error: %s' % msg, file=sys.stderr) 90 print("error: %s" % msg, file=sys.stderr)
85 91
86 92
87def _warn(fmt, *args): 93def _warn(fmt, *args):
88 msg = fmt % args 94 msg = fmt % args
89 print('warn: %s' % msg, file=sys.stderr) 95 print("warn: %s" % msg, file=sys.stderr)
90 96
91 97
92def not_rev(r): 98def not_rev(r):
93 return '^' + r 99 return "^" + r
94 100
95 101
96def sq(r): 102def sq(r):
97 return "'" + r.replace("'", "'\''") + "'" 103 return "'" + r.replace("'", "'''") + "'"
98 104
99 105
100_project_hook_list = None 106_project_hook_list = None
101 107
102 108
103def _ProjectHooks(): 109def _ProjectHooks():
104 """List the hooks present in the 'hooks' directory. 110 """List the hooks present in the 'hooks' directory.
105 111
106 These hooks are project hooks and are copied to the '.git/hooks' directory 112 These hooks are project hooks and are copied to the '.git/hooks' directory
107 of all subprojects. 113 of all subprojects.
108 114
109 This function caches the list of hooks (based on the contents of the 115 This function caches the list of hooks (based on the contents of the
110 'repo/hooks' directory) on the first call. 116 'repo/hooks' directory) on the first call.
111 117
112 Returns: 118 Returns:
113 A list of absolute paths to all of the files in the hooks directory. 119 A list of absolute paths to all of the files in the hooks directory.
114 """ 120 """
115 global _project_hook_list 121 global _project_hook_list
116 if _project_hook_list is None: 122 if _project_hook_list is None:
117 d = platform_utils.realpath(os.path.abspath(os.path.dirname(__file__))) 123 d = platform_utils.realpath(os.path.abspath(os.path.dirname(__file__)))
118 d = os.path.join(d, 'hooks') 124 d = os.path.join(d, "hooks")
119 _project_hook_list = [os.path.join(d, x) for x in platform_utils.listdir(d)] 125 _project_hook_list = [
120 return _project_hook_list 126 os.path.join(d, x) for x in platform_utils.listdir(d)
127 ]
128 return _project_hook_list
121 129
122 130
123class DownloadedChange(object): 131class DownloadedChange(object):
124 _commit_cache = None 132 _commit_cache = None
125 133
126 def __init__(self, project, base, change_id, ps_id, commit): 134 def __init__(self, project, base, change_id, ps_id, commit):
127 self.project = project 135 self.project = project
128 self.base = base 136 self.base = base
129 self.change_id = change_id 137 self.change_id = change_id
130 self.ps_id = ps_id 138 self.ps_id = ps_id
131 self.commit = commit 139 self.commit = commit
132 140
133 @property 141 @property
134 def commits(self): 142 def commits(self):
135 if self._commit_cache is None: 143 if self._commit_cache is None:
136 self._commit_cache = self.project.bare_git.rev_list('--abbrev=8', 144 self._commit_cache = self.project.bare_git.rev_list(
137 '--abbrev-commit', 145 "--abbrev=8",
138 '--pretty=oneline', 146 "--abbrev-commit",
139 '--reverse', 147 "--pretty=oneline",
140 '--date-order', 148 "--reverse",
141 not_rev(self.base), 149 "--date-order",
142 self.commit, 150 not_rev(self.base),
143 '--') 151 self.commit,
144 return self._commit_cache 152 "--",
153 )
154 return self._commit_cache
145 155
146 156
147class ReviewableBranch(object): 157class ReviewableBranch(object):
148 _commit_cache = None 158 _commit_cache = None
149 _base_exists = None 159 _base_exists = None
150 160
151 def __init__(self, project, branch, base): 161 def __init__(self, project, branch, base):
152 self.project = project 162 self.project = project
153 self.branch = branch 163 self.branch = branch
154 self.base = base 164 self.base = base
155 165
156 @property 166 @property
157 def name(self): 167 def name(self):
158 return self.branch.name 168 return self.branch.name
159 169
160 @property 170 @property
161 def commits(self): 171 def commits(self):
162 if self._commit_cache is None: 172 if self._commit_cache is None:
163 args = ('--abbrev=8', '--abbrev-commit', '--pretty=oneline', '--reverse', 173 args = (
164 '--date-order', not_rev(self.base), R_HEADS + self.name, '--') 174 "--abbrev=8",
165 try: 175 "--abbrev-commit",
166 self._commit_cache = self.project.bare_git.rev_list(*args) 176 "--pretty=oneline",
167 except GitError: 177 "--reverse",
168 # We weren't able to probe the commits for this branch. Was it tracking 178 "--date-order",
169 # a branch that no longer exists? If so, return no commits. Otherwise, 179 not_rev(self.base),
170 # rethrow the error as we don't know what's going on. 180 R_HEADS + self.name,
171 if self.base_exists: 181 "--",
172 raise 182 )
173 183 try:
174 self._commit_cache = [] 184 self._commit_cache = self.project.bare_git.rev_list(*args)
175 185 except GitError:
176 return self._commit_cache 186 # We weren't able to probe the commits for this branch. Was it
177 187 # tracking a branch that no longer exists? If so, return no
178 @property 188 # commits. Otherwise, rethrow the error as we don't know what's
179 def unabbrev_commits(self): 189 # going on.
180 r = dict() 190 if self.base_exists:
181 for commit in self.project.bare_git.rev_list(not_rev(self.base), 191 raise
182 R_HEADS + self.name, 192
183 '--'): 193 self._commit_cache = []
184 r[commit[0:8]] = commit 194
185 return r 195 return self._commit_cache
186 196
187 @property 197 @property
188 def date(self): 198 def unabbrev_commits(self):
189 return self.project.bare_git.log('--pretty=format:%cd', 199 r = dict()
190 '-n', '1', 200 for commit in self.project.bare_git.rev_list(
191 R_HEADS + self.name, 201 not_rev(self.base), R_HEADS + self.name, "--"
192 '--') 202 ):
193 203 r[commit[0:8]] = commit
194 @property 204 return r
195 def base_exists(self): 205
196 """Whether the branch we're tracking exists. 206 @property
197 207 def date(self):
198 Normally it should, but sometimes branches we track can get deleted. 208 return self.project.bare_git.log(
199 """ 209 "--pretty=format:%cd", "-n", "1", R_HEADS + self.name, "--"
200 if self._base_exists is None: 210 )
201 try:
202 self.project.bare_git.rev_parse('--verify', not_rev(self.base))
203 # If we're still here, the base branch exists.
204 self._base_exists = True
205 except GitError:
206 # If we failed to verify, the base branch doesn't exist.
207 self._base_exists = False
208
209 return self._base_exists
210
211 def UploadForReview(self, people,
212 dryrun=False,
213 auto_topic=False,
214 hashtags=(),
215 labels=(),
216 private=False,
217 notify=None,
218 wip=False,
219 ready=False,
220 dest_branch=None,
221 validate_certs=True,
222 push_options=None):
223 self.project.UploadForReview(branch=self.name,
224 people=people,
225 dryrun=dryrun,
226 auto_topic=auto_topic,
227 hashtags=hashtags,
228 labels=labels,
229 private=private,
230 notify=notify,
231 wip=wip,
232 ready=ready,
233 dest_branch=dest_branch,
234 validate_certs=validate_certs,
235 push_options=push_options)
236
237 def GetPublishedRefs(self):
238 refs = {}
239 output = self.project.bare_git.ls_remote(
240 self.branch.remote.SshReviewUrl(self.project.UserEmail),
241 'refs/changes/*')
242 for line in output.split('\n'):
243 try:
244 (sha, ref) = line.split()
245 refs[sha] = ref
246 except ValueError:
247 pass
248
249 return refs
250 211
212 @property
213 def base_exists(self):
214 """Whether the branch we're tracking exists.
251 215
252class StatusColoring(Coloring): 216 Normally it should, but sometimes branches we track can get deleted.
217 """
218 if self._base_exists is None:
219 try:
220 self.project.bare_git.rev_parse("--verify", not_rev(self.base))
221 # If we're still here, the base branch exists.
222 self._base_exists = True
223 except GitError:
224 # If we failed to verify, the base branch doesn't exist.
225 self._base_exists = False
226
227 return self._base_exists
228
229 def UploadForReview(
230 self,
231 people,
232 dryrun=False,
233 auto_topic=False,
234 hashtags=(),
235 labels=(),
236 private=False,
237 notify=None,
238 wip=False,
239 ready=False,
240 dest_branch=None,
241 validate_certs=True,
242 push_options=None,
243 ):
244 self.project.UploadForReview(
245 branch=self.name,
246 people=people,
247 dryrun=dryrun,
248 auto_topic=auto_topic,
249 hashtags=hashtags,
250 labels=labels,
251 private=private,
252 notify=notify,
253 wip=wip,
254 ready=ready,
255 dest_branch=dest_branch,
256 validate_certs=validate_certs,
257 push_options=push_options,
258 )
253 259
254 def __init__(self, config): 260 def GetPublishedRefs(self):
255 super().__init__(config, 'status') 261 refs = {}
256 self.project = self.printer('header', attr='bold') 262 output = self.project.bare_git.ls_remote(
257 self.branch = self.printer('header', attr='bold') 263 self.branch.remote.SshReviewUrl(self.project.UserEmail),
258 self.nobranch = self.printer('nobranch', fg='red') 264 "refs/changes/*",
259 self.important = self.printer('important', fg='red') 265 )
266 for line in output.split("\n"):
267 try:
268 (sha, ref) = line.split()
269 refs[sha] = ref
270 except ValueError:
271 pass
260 272
261 self.added = self.printer('added', fg='green') 273 return refs
262 self.changed = self.printer('changed', fg='red')
263 self.untracked = self.printer('untracked', fg='red')
264 274
265 275
266class DiffColoring(Coloring): 276class StatusColoring(Coloring):
277 def __init__(self, config):
278 super().__init__(config, "status")
279 self.project = self.printer("header", attr="bold")
280 self.branch = self.printer("header", attr="bold")
281 self.nobranch = self.printer("nobranch", fg="red")
282 self.important = self.printer("important", fg="red")
267 283
268 def __init__(self, config): 284 self.added = self.printer("added", fg="green")
269 super().__init__(config, 'diff') 285 self.changed = self.printer("changed", fg="red")
270 self.project = self.printer('header', attr='bold') 286 self.untracked = self.printer("untracked", fg="red")
271 self.fail = self.printer('fail', fg='red')
272 287
273 288
274class Annotation(object): 289class DiffColoring(Coloring):
290 def __init__(self, config):
291 super().__init__(config, "diff")
292 self.project = self.printer("header", attr="bold")
293 self.fail = self.printer("fail", fg="red")
294
275 295
276 def __init__(self, name, value, keep): 296class Annotation(object):
277 self.name = name 297 def __init__(self, name, value, keep):
278 self.value = value 298 self.name = name
279 self.keep = keep 299 self.value = value
300 self.keep = keep
280 301
281 def __eq__(self, other): 302 def __eq__(self, other):
282 if not isinstance(other, Annotation): 303 if not isinstance(other, Annotation):
283 return False 304 return False
284 return self.__dict__ == other.__dict__ 305 return self.__dict__ == other.__dict__
285 306
286 def __lt__(self, other): 307 def __lt__(self, other):
287 # This exists just so that lists of Annotation objects can be sorted, for 308 # This exists just so that lists of Annotation objects can be sorted,
288 # use in comparisons. 309 # for use in comparisons.
289 if not isinstance(other, Annotation): 310 if not isinstance(other, Annotation):
290 raise ValueError('comparison is not between two Annotation objects') 311 raise ValueError("comparison is not between two Annotation objects")
291 if self.name == other.name: 312 if self.name == other.name:
292 if self.value == other.value: 313 if self.value == other.value:
293 return self.keep < other.keep 314 return self.keep < other.keep
294 return self.value < other.value 315 return self.value < other.value
295 return self.name < other.name 316 return self.name < other.name
296 317
297 318
298def _SafeExpandPath(base, subpath, skipfinal=False): 319def _SafeExpandPath(base, subpath, skipfinal=False):
299 """Make sure |subpath| is completely safe under |base|. 320 """Make sure |subpath| is completely safe under |base|.
300 321
301 We make sure no intermediate symlinks are traversed, and that the final path 322 We make sure no intermediate symlinks are traversed, and that the final path
302 is not a special file (e.g. not a socket or fifo). 323 is not a special file (e.g. not a socket or fifo).
303 324
304 NB: We rely on a number of paths already being filtered out while parsing the 325 NB: We rely on a number of paths already being filtered out while parsing
305 manifest. See the validation logic in manifest_xml.py for more details. 326 the manifest. See the validation logic in manifest_xml.py for more details.
306 """ 327 """
307 # Split up the path by its components. We can't use os.path.sep exclusively 328 # Split up the path by its components. We can't use os.path.sep exclusively
308 # as some platforms (like Windows) will convert / to \ and that bypasses all 329 # as some platforms (like Windows) will convert / to \ and that bypasses all
309 # our constructed logic here. Especially since manifest authors only use 330 # our constructed logic here. Especially since manifest authors only use
310 # / in their paths. 331 # / in their paths.
311 resep = re.compile(r'[/%s]' % re.escape(os.path.sep)) 332 resep = re.compile(r"[/%s]" % re.escape(os.path.sep))
312 components = resep.split(subpath) 333 components = resep.split(subpath)
313 if skipfinal: 334 if skipfinal:
314 # Whether the caller handles the final component itself. 335 # Whether the caller handles the final component itself.
315 finalpart = components.pop() 336 finalpart = components.pop()
316 337
317 path = base 338 path = base
318 for part in components: 339 for part in components:
319 if part in {'.', '..'}: 340 if part in {".", ".."}:
320 raise ManifestInvalidPathError( 341 raise ManifestInvalidPathError(
321 '%s: "%s" not allowed in paths' % (subpath, part)) 342 '%s: "%s" not allowed in paths' % (subpath, part)
322 343 )
323 path = os.path.join(path, part) 344
324 if platform_utils.islink(path): 345 path = os.path.join(path, part)
325 raise ManifestInvalidPathError( 346 if platform_utils.islink(path):
326 '%s: traversing symlinks not allow' % (path,)) 347 raise ManifestInvalidPathError(
327 348 "%s: traversing symlinks not allow" % (path,)
328 if os.path.exists(path): 349 )
329 if not os.path.isfile(path) and not platform_utils.isdir(path): 350
330 raise ManifestInvalidPathError( 351 if os.path.exists(path):
331 '%s: only regular files & directories allowed' % (path,)) 352 if not os.path.isfile(path) and not platform_utils.isdir(path):
332 353 raise ManifestInvalidPathError(
333 if skipfinal: 354 "%s: only regular files & directories allowed" % (path,)
334 path = os.path.join(path, finalpart) 355 )
335 356
336 return path 357 if skipfinal:
358 path = os.path.join(path, finalpart)
359
360 return path
337 361
338 362
339class _CopyFile(object): 363class _CopyFile(object):
340 """Container for <copyfile> manifest element.""" 364 """Container for <copyfile> manifest element."""
341 365
342 def __init__(self, git_worktree, src, topdir, dest): 366 def __init__(self, git_worktree, src, topdir, dest):
343 """Register a <copyfile> request. 367 """Register a <copyfile> request.
344 368
345 Args: 369 Args:
346 git_worktree: Absolute path to the git project checkout. 370 git_worktree: Absolute path to the git project checkout.
347 src: Relative path under |git_worktree| of file to read. 371 src: Relative path under |git_worktree| of file to read.
348 topdir: Absolute path to the top of the repo client checkout. 372 topdir: Absolute path to the top of the repo client checkout.
349 dest: Relative path under |topdir| of file to write. 373 dest: Relative path under |topdir| of file to write.
350 """ 374 """
351 self.git_worktree = git_worktree 375 self.git_worktree = git_worktree
352 self.topdir = topdir 376 self.topdir = topdir
353 self.src = src 377 self.src = src
354 self.dest = dest 378 self.dest = dest
355 379
356 def _Copy(self): 380 def _Copy(self):
357 src = _SafeExpandPath(self.git_worktree, self.src) 381 src = _SafeExpandPath(self.git_worktree, self.src)
358 dest = _SafeExpandPath(self.topdir, self.dest) 382 dest = _SafeExpandPath(self.topdir, self.dest)
359 383
360 if platform_utils.isdir(src): 384 if platform_utils.isdir(src):
361 raise ManifestInvalidPathError( 385 raise ManifestInvalidPathError(
362 '%s: copying from directory not supported' % (self.src,)) 386 "%s: copying from directory not supported" % (self.src,)
363 if platform_utils.isdir(dest): 387 )
364 raise ManifestInvalidPathError( 388 if platform_utils.isdir(dest):
365 '%s: copying to directory not allowed' % (self.dest,)) 389 raise ManifestInvalidPathError(
366 390 "%s: copying to directory not allowed" % (self.dest,)
367 # copy file if it does not exist or is out of date 391 )
368 if not os.path.exists(dest) or not filecmp.cmp(src, dest): 392
369 try: 393 # Copy file if it does not exist or is out of date.
370 # remove existing file first, since it might be read-only 394 if not os.path.exists(dest) or not filecmp.cmp(src, dest):
371 if os.path.exists(dest): 395 try:
372 platform_utils.remove(dest) 396 # Remove existing file first, since it might be read-only.
373 else: 397 if os.path.exists(dest):
374 dest_dir = os.path.dirname(dest) 398 platform_utils.remove(dest)
375 if not platform_utils.isdir(dest_dir): 399 else:
376 os.makedirs(dest_dir) 400 dest_dir = os.path.dirname(dest)
377 shutil.copy(src, dest) 401 if not platform_utils.isdir(dest_dir):
378 # make the file read-only 402 os.makedirs(dest_dir)
379 mode = os.stat(dest)[stat.ST_MODE] 403 shutil.copy(src, dest)
380 mode = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH) 404 # Make the file read-only.
381 os.chmod(dest, mode) 405 mode = os.stat(dest)[stat.ST_MODE]
382 except IOError: 406 mode = mode & ~(stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH)
383 _error('Cannot copy file %s to %s', src, dest) 407 os.chmod(dest, mode)
408 except IOError:
409 _error("Cannot copy file %s to %s", src, dest)
384 410
385 411
386class _LinkFile(object): 412class _LinkFile(object):
387 """Container for <linkfile> manifest element.""" 413 """Container for <linkfile> manifest element."""
388 414
389 def __init__(self, git_worktree, src, topdir, dest): 415 def __init__(self, git_worktree, src, topdir, dest):
390 """Register a <linkfile> request. 416 """Register a <linkfile> request.
391 417
392 Args: 418 Args:
393 git_worktree: Absolute path to the git project checkout. 419 git_worktree: Absolute path to the git project checkout.
394 src: Target of symlink relative to path under |git_worktree|. 420 src: Target of symlink relative to path under |git_worktree|.
395 topdir: Absolute path to the top of the repo client checkout. 421 topdir: Absolute path to the top of the repo client checkout.
396 dest: Relative path under |topdir| of symlink to create. 422 dest: Relative path under |topdir| of symlink to create.
397 """ 423 """
398 self.git_worktree = git_worktree 424 self.git_worktree = git_worktree
399 self.topdir = topdir 425 self.topdir = topdir
400 self.src = src 426 self.src = src
401 self.dest = dest 427 self.dest = dest
402 428
403 def __linkIt(self, relSrc, absDest): 429 def __linkIt(self, relSrc, absDest):
404 # link file if it does not exist or is out of date 430 # Link file if it does not exist or is out of date.
405 if not platform_utils.islink(absDest) or (platform_utils.readlink(absDest) != relSrc): 431 if not platform_utils.islink(absDest) or (
406 try: 432 platform_utils.readlink(absDest) != relSrc
407 # remove existing file first, since it might be read-only 433 ):
408 if os.path.lexists(absDest): 434 try:
409 platform_utils.remove(absDest) 435 # Remove existing file first, since it might be read-only.
436 if os.path.lexists(absDest):
437 platform_utils.remove(absDest)
438 else:
439 dest_dir = os.path.dirname(absDest)
440 if not platform_utils.isdir(dest_dir):
441 os.makedirs(dest_dir)
442 platform_utils.symlink(relSrc, absDest)
443 except IOError:
444 _error("Cannot link file %s to %s", relSrc, absDest)
445
446 def _Link(self):
447 """Link the self.src & self.dest paths.
448
449 Handles wild cards on the src linking all of the files in the source in
450 to the destination directory.
451 """
452 # Some people use src="." to create stable links to projects. Let's
453 # allow that but reject all other uses of "." to keep things simple.
454 if self.src == ".":
455 src = self.git_worktree
410 else: 456 else:
411 dest_dir = os.path.dirname(absDest) 457 src = _SafeExpandPath(self.git_worktree, self.src)
412 if not platform_utils.isdir(dest_dir): 458
413 os.makedirs(dest_dir) 459 if not glob.has_magic(src):
414 platform_utils.symlink(relSrc, absDest) 460 # Entity does not contain a wild card so just a simple one to one
415 except IOError: 461 # link operation.
416 _error('Cannot link file %s to %s', relSrc, absDest) 462 dest = _SafeExpandPath(self.topdir, self.dest, skipfinal=True)
417 463 # dest & src are absolute paths at this point. Make sure the target
418 def _Link(self): 464 # of the symlink is relative in the context of the repo client
419 """Link the self.src & self.dest paths. 465 # checkout.
420 466 relpath = os.path.relpath(src, os.path.dirname(dest))
421 Handles wild cards on the src linking all of the files in the source in to 467 self.__linkIt(relpath, dest)
422 the destination directory. 468 else:
423 """ 469 dest = _SafeExpandPath(self.topdir, self.dest)
424 # Some people use src="." to create stable links to projects. Lets allow 470 # Entity contains a wild card.
425 # that but reject all other uses of "." to keep things simple. 471 if os.path.exists(dest) and not platform_utils.isdir(dest):
426 if self.src == '.': 472 _error(
427 src = self.git_worktree 473 "Link error: src with wildcard, %s must be a directory",
428 else: 474 dest,
429 src = _SafeExpandPath(self.git_worktree, self.src) 475 )
430 476 else:
431 if not glob.has_magic(src): 477 for absSrcFile in glob.glob(src):
432 # Entity does not contain a wild card so just a simple one to one link operation. 478 # Create a releative path from source dir to destination
433 dest = _SafeExpandPath(self.topdir, self.dest, skipfinal=True) 479 # dir.
434 # dest & src are absolute paths at this point. Make sure the target of 480 absSrcDir = os.path.dirname(absSrcFile)
435 # the symlink is relative in the context of the repo client checkout. 481 relSrcDir = os.path.relpath(absSrcDir, dest)
436 relpath = os.path.relpath(src, os.path.dirname(dest)) 482
437 self.__linkIt(relpath, dest) 483 # Get the source file name.
438 else: 484 srcFile = os.path.basename(absSrcFile)
439 dest = _SafeExpandPath(self.topdir, self.dest) 485
440 # Entity contains a wild card. 486 # Now form the final full paths to srcFile. They will be
441 if os.path.exists(dest) and not platform_utils.isdir(dest): 487 # absolute for the desintaiton and relative for the source.
442 _error('Link error: src with wildcard, %s must be a directory', dest) 488 absDest = os.path.join(dest, srcFile)
443 else: 489 relSrc = os.path.join(relSrcDir, srcFile)
444 for absSrcFile in glob.glob(src): 490 self.__linkIt(relSrc, absDest)
445 # Create a releative path from source dir to destination dir
446 absSrcDir = os.path.dirname(absSrcFile)
447 relSrcDir = os.path.relpath(absSrcDir, dest)
448
449 # Get the source file name
450 srcFile = os.path.basename(absSrcFile)
451
452 # Now form the final full paths to srcFile. They will be
453 # absolute for the desintaiton and relative for the srouce.
454 absDest = os.path.join(dest, srcFile)
455 relSrc = os.path.join(relSrcDir, srcFile)
456 self.__linkIt(relSrc, absDest)
457 491
458 492
459class RemoteSpec(object): 493class RemoteSpec(object):
460 494 def __init__(
461 def __init__(self, 495 self,
462 name, 496 name,
463 url=None, 497 url=None,
464 pushUrl=None, 498 pushUrl=None,
465 review=None, 499 review=None,
466 revision=None, 500 revision=None,
467 orig_name=None, 501 orig_name=None,
468 fetchUrl=None): 502 fetchUrl=None,
469 self.name = name 503 ):
470 self.url = url 504 self.name = name
471 self.pushUrl = pushUrl 505 self.url = url
472 self.review = review 506 self.pushUrl = pushUrl
473 self.revision = revision 507 self.review = review
474 self.orig_name = orig_name 508 self.revision = revision
475 self.fetchUrl = fetchUrl 509 self.orig_name = orig_name
510 self.fetchUrl = fetchUrl
476 511
477 512
478class Project(object): 513class Project(object):
479 # These objects can be shared between several working trees. 514 # These objects can be shared between several working trees.
480 @property 515 @property
481 def shareable_dirs(self): 516 def shareable_dirs(self):
482 """Return the shareable directories""" 517 """Return the shareable directories"""
483 if self.UseAlternates: 518 if self.UseAlternates:
484 return ['hooks', 'rr-cache'] 519 return ["hooks", "rr-cache"]
485 else: 520 else:
486 return ['hooks', 'objects', 'rr-cache'] 521 return ["hooks", "objects", "rr-cache"]
487 522
488 def __init__(self, 523 def __init__(
489 manifest, 524 self,
490 name, 525 manifest,
491 remote, 526 name,
492 gitdir, 527 remote,
493 objdir, 528 gitdir,
494 worktree, 529 objdir,
495 relpath, 530 worktree,
496 revisionExpr, 531 relpath,
497 revisionId, 532 revisionExpr,
498 rebase=True, 533 revisionId,
499 groups=None, 534 rebase=True,
500 sync_c=False, 535 groups=None,
501 sync_s=False, 536 sync_c=False,
502 sync_tags=True, 537 sync_s=False,
503 clone_depth=None, 538 sync_tags=True,
504 upstream=None, 539 clone_depth=None,
505 parent=None, 540 upstream=None,
506 use_git_worktrees=False, 541 parent=None,
507 is_derived=False, 542 use_git_worktrees=False,
508 dest_branch=None, 543 is_derived=False,
509 optimized_fetch=False, 544 dest_branch=None,
510 retry_fetches=0, 545 optimized_fetch=False,
511 old_revision=None): 546 retry_fetches=0,
512 """Init a Project object. 547 old_revision=None,
513 548 ):
514 Args: 549 """Init a Project object.
515 manifest: The XmlManifest object.
516 name: The `name` attribute of manifest.xml's project element.
517 remote: RemoteSpec object specifying its remote's properties.
518 gitdir: Absolute path of git directory.
519 objdir: Absolute path of directory to store git objects.
520 worktree: Absolute path of git working tree.
521 relpath: Relative path of git working tree to repo's top directory.
522 revisionExpr: The `revision` attribute of manifest.xml's project element.
523 revisionId: git commit id for checking out.
524 rebase: The `rebase` attribute of manifest.xml's project element.
525 groups: The `groups` attribute of manifest.xml's project element.
526 sync_c: The `sync-c` attribute of manifest.xml's project element.
527 sync_s: The `sync-s` attribute of manifest.xml's project element.
528 sync_tags: The `sync-tags` attribute of manifest.xml's project element.
529 upstream: The `upstream` attribute of manifest.xml's project element.
530 parent: The parent Project object.
531 use_git_worktrees: Whether to use `git worktree` for this project.
532 is_derived: False if the project was explicitly defined in the manifest;
533 True if the project is a discovered submodule.
534 dest_branch: The branch to which to push changes for review by default.
535 optimized_fetch: If True, when a project is set to a sha1 revision, only
536 fetch from the remote if the sha1 is not present locally.
537 retry_fetches: Retry remote fetches n times upon receiving transient error
538 with exponential backoff and jitter.
539 old_revision: saved git commit id for open GITC projects.
540 """
541 self.client = self.manifest = manifest
542 self.name = name
543 self.remote = remote
544 self.UpdatePaths(relpath, worktree, gitdir, objdir)
545 self.SetRevision(revisionExpr, revisionId=revisionId)
546
547 self.rebase = rebase
548 self.groups = groups
549 self.sync_c = sync_c
550 self.sync_s = sync_s
551 self.sync_tags = sync_tags
552 self.clone_depth = clone_depth
553 self.upstream = upstream
554 self.parent = parent
555 # NB: Do not use this setting in __init__ to change behavior so that the
556 # manifest.git checkout can inspect & change it after instantiating. See
557 # the XmlManifest init code for more info.
558 self.use_git_worktrees = use_git_worktrees
559 self.is_derived = is_derived
560 self.optimized_fetch = optimized_fetch
561 self.retry_fetches = max(0, retry_fetches)
562 self.subprojects = []
563
564 self.snapshots = {}
565 self.copyfiles = []
566 self.linkfiles = []
567 self.annotations = []
568 self.dest_branch = dest_branch
569 self.old_revision = old_revision
570
571 # This will be filled in if a project is later identified to be the
572 # project containing repo hooks.
573 self.enabled_repo_hooks = []
574
575 def RelPath(self, local=True):
576 """Return the path for the project relative to a manifest.
577
578 Args:
579 local: a boolean, if True, the path is relative to the local
580 (sub)manifest. If false, the path is relative to the
581 outermost manifest.
582 """
583 if local:
584 return self.relpath
585 return os.path.join(self.manifest.path_prefix, self.relpath)
586
587 def SetRevision(self, revisionExpr, revisionId=None):
588 """Set revisionId based on revision expression and id"""
589 self.revisionExpr = revisionExpr
590 if revisionId is None and revisionExpr and IsId(revisionExpr):
591 self.revisionId = self.revisionExpr
592 else:
593 self.revisionId = revisionId
594
    def UpdatePaths(self, relpath, worktree, gitdir, objdir):
        """Update paths used by this project"""
        # Normalize to forward slashes so paths compare consistently on
        # Windows as well as POSIX hosts.
        self.gitdir = gitdir.replace('\\', '/')
        self.objdir = objdir.replace('\\', '/')
        if worktree:
            self.worktree = os.path.normpath(worktree).replace('\\', '/')
        else:
            self.worktree = None
        self.relpath = relpath

        # Per-repository git config layered over the manifest's global
        # defaults.
        self.config = GitConfig.ForRepository(gitdir=self.gitdir,
                                              defaults=self.manifest.globalConfig)

        # work_git operates on the working tree; bare_git/bare_objdir operate
        # on the git dir / shared object dir without a working tree.
        if self.worktree:
            self.work_git = self._GitGetByExec(self, bare=False, gitdir=self.gitdir)
        else:
            self.work_git = None
        self.bare_git = self._GitGetByExec(self, bare=True, gitdir=self.gitdir)
        self.bare_ref = GitRefs(self.gitdir)
        self.bare_objdir = self._GitGetByExec(self, bare=True, gitdir=self.objdir)
615
    @property
    def UseAlternates(self):
        """Whether git alternates are in use.

        This will be removed once migration to alternates is complete.
        """
        # Multi-manifest checkouts always share objects via alternates;
        # otherwise the module-level _ALTERNATES flag decides.
        return _ALTERNATES or self.manifest.is_multimanifest
623 550
624 @property 551 Args:
625 def Derived(self): 552 manifest: The XmlManifest object.
626 return self.is_derived 553 name: The `name` attribute of manifest.xml's project element.
554 remote: RemoteSpec object specifying its remote's properties.
555 gitdir: Absolute path of git directory.
556 objdir: Absolute path of directory to store git objects.
557 worktree: Absolute path of git working tree.
558 relpath: Relative path of git working tree to repo's top directory.
559 revisionExpr: The `revision` attribute of manifest.xml's project
560 element.
561 revisionId: git commit id for checking out.
562 rebase: The `rebase` attribute of manifest.xml's project element.
563 groups: The `groups` attribute of manifest.xml's project element.
564 sync_c: The `sync-c` attribute of manifest.xml's project element.
565 sync_s: The `sync-s` attribute of manifest.xml's project element.
566 sync_tags: The `sync-tags` attribute of manifest.xml's project
567 element.
568 upstream: The `upstream` attribute of manifest.xml's project
569 element.
570 parent: The parent Project object.
571 use_git_worktrees: Whether to use `git worktree` for this project.
572 is_derived: False if the project was explicitly defined in the
573 manifest; True if the project is a discovered submodule.
574 dest_branch: The branch to which to push changes for review by
575 default.
576 optimized_fetch: If True, when a project is set to a sha1 revision,
577 only fetch from the remote if the sha1 is not present locally.
578 retry_fetches: Retry remote fetches n times upon receiving transient
579 error with exponential backoff and jitter.
580 old_revision: saved git commit id for open GITC projects.
581 """
582 self.client = self.manifest = manifest
583 self.name = name
584 self.remote = remote
585 self.UpdatePaths(relpath, worktree, gitdir, objdir)
586 self.SetRevision(revisionExpr, revisionId=revisionId)
587
588 self.rebase = rebase
589 self.groups = groups
590 self.sync_c = sync_c
591 self.sync_s = sync_s
592 self.sync_tags = sync_tags
593 self.clone_depth = clone_depth
594 self.upstream = upstream
595 self.parent = parent
596 # NB: Do not use this setting in __init__ to change behavior so that the
597 # manifest.git checkout can inspect & change it after instantiating.
598 # See the XmlManifest init code for more info.
599 self.use_git_worktrees = use_git_worktrees
600 self.is_derived = is_derived
601 self.optimized_fetch = optimized_fetch
602 self.retry_fetches = max(0, retry_fetches)
603 self.subprojects = []
604
605 self.snapshots = {}
606 self.copyfiles = []
607 self.linkfiles = []
608 self.annotations = []
609 self.dest_branch = dest_branch
610 self.old_revision = old_revision
611
612 # This will be filled in if a project is later identified to be the
613 # project containing repo hooks.
614 self.enabled_repo_hooks = []
615
616 def RelPath(self, local=True):
617 """Return the path for the project relative to a manifest.
627 618
628 @property 619 Args:
629 def Exists(self): 620 local: a boolean, if True, the path is relative to the local
630 return platform_utils.isdir(self.gitdir) and platform_utils.isdir(self.objdir) 621 (sub)manifest. If false, the path is relative to the outermost
622 manifest.
623 """
624 if local:
625 return self.relpath
626 return os.path.join(self.manifest.path_prefix, self.relpath)
627
628 def SetRevision(self, revisionExpr, revisionId=None):
629 """Set revisionId based on revision expression and id"""
630 self.revisionExpr = revisionExpr
631 if revisionId is None and revisionExpr and IsId(revisionExpr):
632 self.revisionId = self.revisionExpr
633 else:
634 self.revisionId = revisionId
635
636 def UpdatePaths(self, relpath, worktree, gitdir, objdir):
637 """Update paths used by this project"""
638 self.gitdir = gitdir.replace("\\", "/")
639 self.objdir = objdir.replace("\\", "/")
640 if worktree:
641 self.worktree = os.path.normpath(worktree).replace("\\", "/")
642 else:
643 self.worktree = None
644 self.relpath = relpath
631 645
632 @property 646 self.config = GitConfig.ForRepository(
633 def CurrentBranch(self): 647 gitdir=self.gitdir, defaults=self.manifest.globalConfig
634 """Obtain the name of the currently checked out branch. 648 )
635 649
636 The branch name omits the 'refs/heads/' prefix. 650 if self.worktree:
637 None is returned if the project is on a detached HEAD, or if the work_git is 651 self.work_git = self._GitGetByExec(
638 otheriwse inaccessible (e.g. an incomplete sync). 652 self, bare=False, gitdir=self.gitdir
639 """ 653 )
640 try: 654 else:
641 b = self.work_git.GetHead() 655 self.work_git = None
642 except NoManifestException: 656 self.bare_git = self._GitGetByExec(self, bare=True, gitdir=self.gitdir)
643 # If the local checkout is in a bad state, don't barf. Let the callers 657 self.bare_ref = GitRefs(self.gitdir)
644 # process this like the head is unreadable. 658 self.bare_objdir = self._GitGetByExec(
645 return None 659 self, bare=True, gitdir=self.objdir
646 if b.startswith(R_HEADS): 660 )
647 return b[len(R_HEADS):]
648 return None
649
650 def IsRebaseInProgress(self):
651 return (os.path.exists(self.work_git.GetDotgitPath('rebase-apply')) or
652 os.path.exists(self.work_git.GetDotgitPath('rebase-merge')) or
653 os.path.exists(os.path.join(self.worktree, '.dotest')))
654
655 def IsDirty(self, consider_untracked=True):
656 """Is the working directory modified in some way?
657 """
658 self.work_git.update_index('-q',
659 '--unmerged',
660 '--ignore-missing',
661 '--refresh')
662 if self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD):
663 return True
664 if self.work_git.DiffZ('diff-files'):
665 return True
666 if consider_untracked and self.UntrackedFiles():
667 return True
668 return False
669
670 _userident_name = None
671 _userident_email = None
672
    @property
    def UserName(self):
        """Obtain the user's personal name.
        """
        # Lazily parsed from `git var GIT_COMMITTER_IDENT` on first access.
        if self._userident_name is None:
            self._LoadUserIdentity()
        return self._userident_name
680
    @property
    def UserEmail(self):
        """Obtain the user's email address. This is very likely
        to be their Gerrit login.
        """
        # Lazily parsed from `git var GIT_COMMITTER_IDENT` on first access.
        if self._userident_email is None:
            self._LoadUserIdentity()
        return self._userident_email
689
690 def _LoadUserIdentity(self):
691 u = self.bare_git.var('GIT_COMMITTER_IDENT')
692 m = re.compile("^(.*) <([^>]*)> ").match(u)
693 if m:
694 self._userident_name = m.group(1)
695 self._userident_email = m.group(2)
696 else:
697 self._userident_name = ''
698 self._userident_email = ''
699
700 def GetRemote(self, name=None):
701 """Get the configuration for a single remote.
702
703 Defaults to the current project's remote.
704 """
705 if name is None:
706 name = self.remote.name
707 return self.config.GetRemote(name)
708 661
709 def GetBranch(self, name): 662 @property
710 """Get the configuration for a single branch. 663 def UseAlternates(self):
711 """ 664 """Whether git alternates are in use.
712 return self.config.GetBranch(name) 665
666 This will be removed once migration to alternates is complete.
667 """
668 return _ALTERNATES or self.manifest.is_multimanifest
669
    @property
    def Derived(self):
        # True when this project was discovered (e.g. as a submodule) rather
        # than declared explicitly in the manifest.
        return self.is_derived
673
    @property
    def Exists(self):
        # The project exists locally once both its git dir and its shared
        # object dir are present on disk.
        return platform_utils.isdir(self.gitdir) and platform_utils.isdir(
            self.objdir
        )
679
680 @property
681 def CurrentBranch(self):
682 """Obtain the name of the currently checked out branch.
683
684 The branch name omits the 'refs/heads/' prefix.
685 None is returned if the project is on a detached HEAD, or if the
686 work_git is otheriwse inaccessible (e.g. an incomplete sync).
687 """
688 try:
689 b = self.work_git.GetHead()
690 except NoManifestException:
691 # If the local checkout is in a bad state, don't barf. Let the
692 # callers process this like the head is unreadable.
693 return None
694 if b.startswith(R_HEADS):
695 return b[len(R_HEADS) :]
696 return None
697
698 def IsRebaseInProgress(self):
699 return (
700 os.path.exists(self.work_git.GetDotgitPath("rebase-apply"))
701 or os.path.exists(self.work_git.GetDotgitPath("rebase-merge"))
702 or os.path.exists(os.path.join(self.worktree, ".dotest"))
703 )
704
705 def IsDirty(self, consider_untracked=True):
706 """Is the working directory modified in some way?"""
707 self.work_git.update_index(
708 "-q", "--unmerged", "--ignore-missing", "--refresh"
709 )
710 if self.work_git.DiffZ("diff-index", "-M", "--cached", HEAD):
711 return True
712 if self.work_git.DiffZ("diff-files"):
713 return True
714 if consider_untracked and self.UntrackedFiles():
715 return True
716 return False
717
718 _userident_name = None
719 _userident_email = None
720
721 @property
722 def UserName(self):
723 """Obtain the user's personal name."""
724 if self._userident_name is None:
725 self._LoadUserIdentity()
726 return self._userident_name
727
728 @property
729 def UserEmail(self):
730 """Obtain the user's email address. This is very likely
731 to be their Gerrit login.
732 """
733 if self._userident_email is None:
734 self._LoadUserIdentity()
735 return self._userident_email
736
737 def _LoadUserIdentity(self):
738 u = self.bare_git.var("GIT_COMMITTER_IDENT")
739 m = re.compile("^(.*) <([^>]*)> ").match(u)
740 if m:
741 self._userident_name = m.group(1)
742 self._userident_email = m.group(2)
743 else:
744 self._userident_name = ""
745 self._userident_email = ""
746
747 def GetRemote(self, name=None):
748 """Get the configuration for a single remote.
749
750 Defaults to the current project's remote.
751 """
752 if name is None:
753 name = self.remote.name
754 return self.config.GetRemote(name)
755
756 def GetBranch(self, name):
757 """Get the configuration for a single branch."""
758 return self.config.GetBranch(name)
759
760 def GetBranches(self):
761 """Get all existing local branches."""
762 current = self.CurrentBranch
763 all_refs = self._allrefs
764 heads = {}
765
766 for name, ref_id in all_refs.items():
767 if name.startswith(R_HEADS):
768 name = name[len(R_HEADS) :]
769 b = self.GetBranch(name)
770 b.current = name == current
771 b.published = None
772 b.revision = ref_id
773 heads[name] = b
774
775 for name, ref_id in all_refs.items():
776 if name.startswith(R_PUB):
777 name = name[len(R_PUB) :]
778 b = heads.get(name)
779 if b:
780 b.published = ref_id
781
782 return heads
783
784 def MatchesGroups(self, manifest_groups):
785 """Returns true if the manifest groups specified at init should cause
786 this project to be synced.
787 Prefixing a manifest group with "-" inverts the meaning of a group.
788 All projects are implicitly labelled with "all".
789
790 labels are resolved in order. In the example case of
791 project_groups: "all,group1,group2"
792 manifest_groups: "-group1,group2"
793 the project will be matched.
794
795 The special manifest group "default" will match any project that
796 does not have the special project group "notdefault"
797 """
798 default_groups = self.manifest.default_groups or ["default"]
799 expanded_manifest_groups = manifest_groups or default_groups
800 expanded_project_groups = ["all"] + (self.groups or [])
801 if "notdefault" not in expanded_project_groups:
802 expanded_project_groups += ["default"]
713 803
714 def GetBranches(self):
715 """Get all existing local branches.
716 """
717 current = self.CurrentBranch
718 all_refs = self._allrefs
719 heads = {}
720
721 for name, ref_id in all_refs.items():
722 if name.startswith(R_HEADS):
723 name = name[len(R_HEADS):]
724 b = self.GetBranch(name)
725 b.current = name == current
726 b.published = None
727 b.revision = ref_id
728 heads[name] = b
729
730 for name, ref_id in all_refs.items():
731 if name.startswith(R_PUB):
732 name = name[len(R_PUB):]
733 b = heads.get(name)
734 if b:
735 b.published = ref_id
736
737 return heads
738
739 def MatchesGroups(self, manifest_groups):
740 """Returns true if the manifest groups specified at init should cause
741 this project to be synced.
742 Prefixing a manifest group with "-" inverts the meaning of a group.
743 All projects are implicitly labelled with "all".
744
745 labels are resolved in order. In the example case of
746 project_groups: "all,group1,group2"
747 manifest_groups: "-group1,group2"
748 the project will be matched.
749
750 The special manifest group "default" will match any project that
751 does not have the special project group "notdefault"
752 """
753 default_groups = self.manifest.default_groups or ['default']
754 expanded_manifest_groups = manifest_groups or default_groups
755 expanded_project_groups = ['all'] + (self.groups or [])
756 if 'notdefault' not in expanded_project_groups:
757 expanded_project_groups += ['default']
758
759 matched = False
760 for group in expanded_manifest_groups:
761 if group.startswith('-') and group[1:] in expanded_project_groups:
762 matched = False 804 matched = False
763 elif group in expanded_project_groups: 805 for group in expanded_manifest_groups:
764 matched = True 806 if group.startswith("-") and group[1:] in expanded_project_groups:
807 matched = False
808 elif group in expanded_project_groups:
809 matched = True
765 810
766 return matched 811 return matched
767 812
768# Status Display ## 813 def UncommitedFiles(self, get_all=True):
769 def UncommitedFiles(self, get_all=True): 814 """Returns a list of strings, uncommitted files in the git tree.
770 """Returns a list of strings, uncommitted files in the git tree.
771 815
772 Args: 816 Args:
773 get_all: a boolean, if True - get information about all different 817 get_all: a boolean, if True - get information about all different
774 uncommitted files. If False - return as soon as any kind of 818 uncommitted files. If False - return as soon as any kind of
775 uncommitted files is detected. 819 uncommitted files is detected.
776 """ 820 """
777 details = [] 821 details = []
778 self.work_git.update_index('-q', 822 self.work_git.update_index(
779 '--unmerged', 823 "-q", "--unmerged", "--ignore-missing", "--refresh"
780 '--ignore-missing', 824 )
781 '--refresh') 825 if self.IsRebaseInProgress():
782 if self.IsRebaseInProgress(): 826 details.append("rebase in progress")
783 details.append("rebase in progress") 827 if not get_all:
784 if not get_all: 828 return details
785 return details 829
830 changes = self.work_git.DiffZ("diff-index", "--cached", HEAD).keys()
831 if changes:
832 details.extend(changes)
833 if not get_all:
834 return details
835
836 changes = self.work_git.DiffZ("diff-files").keys()
837 if changes:
838 details.extend(changes)
839 if not get_all:
840 return details
841
842 changes = self.UntrackedFiles()
843 if changes:
844 details.extend(changes)
786 845
787 changes = self.work_git.DiffZ('diff-index', '--cached', HEAD).keys()
788 if changes:
789 details.extend(changes)
790 if not get_all:
791 return details 846 return details
792 847
793 changes = self.work_git.DiffZ('diff-files').keys() 848 def UntrackedFiles(self):
794 if changes: 849 """Returns a list of strings, untracked files in the git tree."""
795 details.extend(changes) 850 return self.work_git.LsOthers()
796 if not get_all:
797 return details
798 851
799 changes = self.UntrackedFiles() 852 def HasChanges(self):
800 if changes: 853 """Returns true if there are uncommitted changes."""
801 details.extend(changes) 854 return bool(self.UncommitedFiles(get_all=False))
802 855
803 return details 856 def PrintWorkTreeStatus(self, output_redir=None, quiet=False, local=False):
857 """Prints the status of the repository to stdout.
804 858
805 def UntrackedFiles(self): 859 Args:
806 """Returns a list of strings, untracked files in the git tree.""" 860 output_redir: If specified, redirect the output to this object.
807 return self.work_git.LsOthers() 861 quiet: If True then only print the project name. Do not print
862 the modified files, branch name, etc.
863 local: a boolean, if True, the path is relative to the local
864 (sub)manifest. If false, the path is relative to the outermost
865 manifest.
866 """
867 if not platform_utils.isdir(self.worktree):
868 if output_redir is None:
869 output_redir = sys.stdout
870 print(file=output_redir)
871 print("project %s/" % self.RelPath(local), file=output_redir)
872 print(' missing (run "repo sync")', file=output_redir)
873 return
874
875 self.work_git.update_index(
876 "-q", "--unmerged", "--ignore-missing", "--refresh"
877 )
878 rb = self.IsRebaseInProgress()
879 di = self.work_git.DiffZ("diff-index", "-M", "--cached", HEAD)
880 df = self.work_git.DiffZ("diff-files")
881 do = self.work_git.LsOthers()
882 if not rb and not di and not df and not do and not self.CurrentBranch:
883 return "CLEAN"
884
885 out = StatusColoring(self.config)
886 if output_redir is not None:
887 out.redirect(output_redir)
888 out.project("project %-40s", self.RelPath(local) + "/ ")
889
890 if quiet:
891 out.nl()
892 return "DIRTY"
893
894 branch = self.CurrentBranch
895 if branch is None:
896 out.nobranch("(*** NO BRANCH ***)")
897 else:
898 out.branch("branch %s", branch)
899 out.nl()
808 900
809 def HasChanges(self): 901 if rb:
810 """Returns true if there are uncommitted changes. 902 out.important("prior sync failed; rebase still in progress")
811 """ 903 out.nl()
812 return bool(self.UncommitedFiles(get_all=False))
813
814 def PrintWorkTreeStatus(self, output_redir=None, quiet=False, local=False):
815 """Prints the status of the repository to stdout.
816
817 Args:
818 output_redir: If specified, redirect the output to this object.
819 quiet: If True then only print the project name. Do not print
820 the modified files, branch name, etc.
821 local: a boolean, if True, the path is relative to the local
822 (sub)manifest. If false, the path is relative to the
823 outermost manifest.
824 """
825 if not platform_utils.isdir(self.worktree):
826 if output_redir is None:
827 output_redir = sys.stdout
828 print(file=output_redir)
829 print('project %s/' % self.RelPath(local), file=output_redir)
830 print(' missing (run "repo sync")', file=output_redir)
831 return
832
833 self.work_git.update_index('-q',
834 '--unmerged',
835 '--ignore-missing',
836 '--refresh')
837 rb = self.IsRebaseInProgress()
838 di = self.work_git.DiffZ('diff-index', '-M', '--cached', HEAD)
839 df = self.work_git.DiffZ('diff-files')
840 do = self.work_git.LsOthers()
841 if not rb and not di and not df and not do and not self.CurrentBranch:
842 return 'CLEAN'
843
844 out = StatusColoring(self.config)
845 if output_redir is not None:
846 out.redirect(output_redir)
847 out.project('project %-40s', self.RelPath(local) + '/ ')
848
849 if quiet:
850 out.nl()
851 return 'DIRTY'
852
853 branch = self.CurrentBranch
854 if branch is None:
855 out.nobranch('(*** NO BRANCH ***)')
856 else:
857 out.branch('branch %s', branch)
858 out.nl()
859
860 if rb:
861 out.important('prior sync failed; rebase still in progress')
862 out.nl()
863
864 paths = list()
865 paths.extend(di.keys())
866 paths.extend(df.keys())
867 paths.extend(do)
868
869 for p in sorted(set(paths)):
870 try:
871 i = di[p]
872 except KeyError:
873 i = None
874
875 try:
876 f = df[p]
877 except KeyError:
878 f = None
879
880 if i:
881 i_status = i.status.upper()
882 else:
883 i_status = '-'
884
885 if f:
886 f_status = f.status.lower()
887 else:
888 f_status = '-'
889
890 if i and i.src_path:
891 line = ' %s%s\t%s => %s (%s%%)' % (i_status, f_status,
892 i.src_path, p, i.level)
893 else:
894 line = ' %s%s\t%s' % (i_status, f_status, p)
895
896 if i and not f:
897 out.added('%s', line)
898 elif (i and f) or (not i and f):
899 out.changed('%s', line)
900 elif not i and not f:
901 out.untracked('%s', line)
902 else:
903 out.write('%s', line)
904 out.nl()
905
906 return 'DIRTY'
907
908 def PrintWorkTreeDiff(self, absolute_paths=False, output_redir=None,
909 local=False):
910 """Prints the status of the repository to stdout.
911 """
912 out = DiffColoring(self.config)
913 if output_redir:
914 out.redirect(output_redir)
915 cmd = ['diff']
916 if out.is_on:
917 cmd.append('--color')
918 cmd.append(HEAD)
919 if absolute_paths:
920 cmd.append('--src-prefix=a/%s/' % self.RelPath(local))
921 cmd.append('--dst-prefix=b/%s/' % self.RelPath(local))
922 cmd.append('--')
923 try:
924 p = GitCommand(self,
925 cmd,
926 capture_stdout=True,
927 capture_stderr=True)
928 p.Wait()
929 except GitError as e:
930 out.nl()
931 out.project('project %s/' % self.RelPath(local))
932 out.nl()
933 out.fail('%s', str(e))
934 out.nl()
935 return False
936 if p.stdout:
937 out.nl()
938 out.project('project %s/' % self.RelPath(local))
939 out.nl()
940 out.write('%s', p.stdout)
941 return p.Wait() == 0
942
943# Publish / Upload ##
944 def WasPublished(self, branch, all_refs=None):
945 """Was the branch published (uploaded) for code review?
946 If so, returns the SHA-1 hash of the last published
947 state for the branch.
948 """
949 key = R_PUB + branch
950 if all_refs is None:
951 try:
952 return self.bare_git.rev_parse(key)
953 except GitError:
954 return None
955 else:
956 try:
957 return all_refs[key]
958 except KeyError:
959 return None
960 904
961 def CleanPublishedCache(self, all_refs=None): 905 paths = list()
962 """Prunes any stale published refs. 906 paths.extend(di.keys())
963 """ 907 paths.extend(df.keys())
964 if all_refs is None: 908 paths.extend(do)
965 all_refs = self._allrefs
966 heads = set()
967 canrm = {}
968 for name, ref_id in all_refs.items():
969 if name.startswith(R_HEADS):
970 heads.add(name)
971 elif name.startswith(R_PUB):
972 canrm[name] = ref_id
973
974 for name, ref_id in canrm.items():
975 n = name[len(R_PUB):]
976 if R_HEADS + n not in heads:
977 self.bare_git.DeleteRef(name, ref_id)
978
979 def GetUploadableBranches(self, selected_branch=None):
980 """List any branches which can be uploaded for review.
981 """
982 heads = {}
983 pubed = {}
984
985 for name, ref_id in self._allrefs.items():
986 if name.startswith(R_HEADS):
987 heads[name[len(R_HEADS):]] = ref_id
988 elif name.startswith(R_PUB):
989 pubed[name[len(R_PUB):]] = ref_id
990
991 ready = []
992 for branch, ref_id in heads.items():
993 if branch in pubed and pubed[branch] == ref_id:
994 continue
995 if selected_branch and branch != selected_branch:
996 continue
997
998 rb = self.GetUploadableBranch(branch)
999 if rb:
1000 ready.append(rb)
1001 return ready
1002
1003 def GetUploadableBranch(self, branch_name):
1004 """Get a single uploadable branch, or None.
1005 """
1006 branch = self.GetBranch(branch_name)
1007 base = branch.LocalMerge
1008 if branch.LocalMerge:
1009 rb = ReviewableBranch(self, branch, base)
1010 if rb.commits:
1011 return rb
1012 return None
1013
    def UploadForReview(self, branch=None,
                        people=([], []),
                        dryrun=False,
                        auto_topic=False,
                        hashtags=(),
                        labels=(),
                        private=False,
                        notify=None,
                        wip=False,
                        ready=False,
                        dest_branch=None,
                        validate_certs=True,
                        push_options=None):
        """Uploads the named branch for code review.

        Args:
            branch: local branch name to upload; defaults to the currently
                checked out branch.
            people: tuple of (reviewers, cc) lists.
            dryrun: pass `-n` to git push; do not actually upload.
            auto_topic: set the Gerrit topic to the branch name.
            hashtags: hashtags to apply to the change.
            labels: review labels of the form "Label+N" / "Label-N".
            private: mark the change private.
            notify: Gerrit notify handling for the push.
            wip: mark the change work-in-progress.
            ready: mark the change ready for review.
            dest_branch: destination branch; falls back to the project's
                dest_branch, then the branch's merge target.
            validate_certs: verify TLS certs when resolving the review URL.
            push_options: extra `git push -o` options.

        Raises:
            GitError: no branch, branch tracks no remote, or no review url.
            UploadError: bad label syntax, unconfigured review, or a failed
                push.
        """
        if branch is None:
            branch = self.CurrentBranch
        if branch is None:
            raise GitError('not currently on a branch')

        branch = self.GetBranch(branch)
        if not branch.LocalMerge:
            raise GitError('branch %s does not track a remote' % branch.name)
        if not branch.remote.review:
            raise GitError('remote %s has no review url' % branch.remote.name)

        # Basic validity check on label syntax.
        for label in labels:
            if not re.match(r'^.+[+-][0-9]+$', label):
                raise UploadError(
                    f'invalid label syntax "{label}": labels use forms like '
                    'CodeReview+1 or Verified-1')

        if dest_branch is None:
            dest_branch = self.dest_branch
        if dest_branch is None:
            dest_branch = branch.merge
        if not dest_branch.startswith(R_HEADS):
            dest_branch = R_HEADS + dest_branch

        # Gerrit matches changes to projects by name; make sure the remote
        # knows which project this is before pushing.
        if not branch.remote.projectname:
            branch.remote.projectname = self.name
            branch.remote.Save()

        url = branch.remote.ReviewUrl(self.UserEmail, validate_certs)
        if url is None:
            raise UploadError('review not configured')
        cmd = ['push']
        if dryrun:
            cmd.append('-n')

        if url.startswith('ssh://'):
            cmd.append('--receive-pack=gerrit receive-pack')

        for push_option in (push_options or []):
            cmd.append('-o')
            cmd.append(push_option)

        cmd.append(url)

        # Normalize to the short branch name for the refs/for/ refspec.
        if dest_branch.startswith(R_HEADS):
            dest_branch = dest_branch[len(R_HEADS):]

        ref_spec = '%s:refs/for/%s' % (R_HEADS + branch.name, dest_branch)
        # Gerrit push options are appended to the refspec after a '%'.
        opts = []
        if auto_topic:
            opts += ['topic=' + branch.name]
        opts += ['t=%s' % p for p in hashtags]
        # NB: No need to encode labels as they've been validated above.
        opts += ['l=%s' % p for p in labels]

        opts += ['r=%s' % p for p in people[0]]
        opts += ['cc=%s' % p for p in people[1]]
        if notify:
            opts += ['notify=' + notify]
        if private:
            opts += ['private']
        if wip:
            opts += ['wip']
        if ready:
            opts += ['ready']
        if opts:
            ref_spec = ref_spec + '%' + ','.join(opts)
        cmd.append(ref_spec)

        if GitCommand(self, cmd, bare=True).Wait() != 0:
            raise UploadError('Upload failed')

        # Record the published state so repeat uploads can be skipped.
        if not dryrun:
            msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
            self.bare_git.UpdateRef(R_PUB + branch.name,
                                    R_HEADS + branch.name,
                                    message=msg)
1107
1108# Sync ##
1109 def _ExtractArchive(self, tarpath, path=None):
1110 """Extract the given tar on its current location
1111
1112 Args:
1113 - tarpath: The path to the actual tar file
1114 909
1115 """ 910 for p in sorted(set(paths)):
1116 try: 911 try:
1117 with tarfile.open(tarpath, 'r') as tar: 912 i = di[p]
1118 tar.extractall(path=path) 913 except KeyError:
1119 return True 914 i = None
1120 except (IOError, tarfile.TarError) as e:
1121 _error("Cannot extract archive %s: %s", tarpath, str(e))
1122 return False
1123
1124 def Sync_NetworkHalf(self,
1125 quiet=False,
1126 verbose=False,
1127 output_redir=None,
1128 is_new=None,
1129 current_branch_only=None,
1130 force_sync=False,
1131 clone_bundle=True,
1132 tags=None,
1133 archive=False,
1134 optimized_fetch=False,
1135 retry_fetches=0,
1136 prune=False,
1137 submodules=False,
1138 ssh_proxy=None,
1139 clone_filter=None,
1140 partial_clone_exclude=set()):
1141 """Perform only the network IO portion of the sync process.
1142 Local working directory/branch state is not affected.
1143 """
1144 if archive and not isinstance(self, MetaProject):
1145 if self.remote.url.startswith(('http://', 'https://')):
1146 _error("%s: Cannot fetch archives from http/https remotes.", self.name)
1147 return SyncNetworkHalfResult(False, False)
1148
1149 name = self.relpath.replace('\\', '/')
1150 name = name.replace('/', '_')
1151 tarpath = '%s.tar' % name
1152 topdir = self.manifest.topdir
1153
1154 try:
1155 self._FetchArchive(tarpath, cwd=topdir)
1156 except GitError as e:
1157 _error('%s', e)
1158 return SyncNetworkHalfResult(False, False)
1159
1160 # From now on, we only need absolute tarpath
1161 tarpath = os.path.join(topdir, tarpath)
1162
1163 if not self._ExtractArchive(tarpath, path=topdir):
1164 return SyncNetworkHalfResult(False, True)
1165 try:
1166 platform_utils.remove(tarpath)
1167 except OSError as e:
1168 _warn("Cannot remove archive %s: %s", tarpath, str(e))
1169 self._CopyAndLinkFiles()
1170 return SyncNetworkHalfResult(True, True)
1171
1172 # If the shared object dir already exists, don't try to rebootstrap with a
1173 # clone bundle download. We should have the majority of objects already.
1174 if clone_bundle and os.path.exists(self.objdir):
1175 clone_bundle = False
1176
1177 if self.name in partial_clone_exclude:
1178 clone_bundle = True
1179 clone_filter = None
1180
1181 if is_new is None:
1182 is_new = not self.Exists
1183 if is_new:
1184 self._InitGitDir(force_sync=force_sync, quiet=quiet)
1185 else:
1186 self._UpdateHooks(quiet=quiet)
1187 self._InitRemote()
1188
1189 if self.UseAlternates:
1190 # If gitdir/objects is a symlink, migrate it from the old layout.
1191 gitdir_objects = os.path.join(self.gitdir, 'objects')
1192 if platform_utils.islink(gitdir_objects):
1193 platform_utils.remove(gitdir_objects, missing_ok=True)
1194 gitdir_alt = os.path.join(self.gitdir, 'objects/info/alternates')
1195 if not os.path.exists(gitdir_alt):
1196 os.makedirs(os.path.dirname(gitdir_alt), exist_ok=True)
1197 _lwrite(gitdir_alt, os.path.join(
1198 os.path.relpath(self.objdir, gitdir_objects), 'objects') + '\n')
1199
1200 if is_new:
1201 alt = os.path.join(self.objdir, 'objects/info/alternates')
1202 try:
1203 with open(alt) as fd:
1204 # This works for both absolute and relative alternate directories.
1205 alt_dir = os.path.join(self.objdir, 'objects', fd.readline().rstrip())
1206 except IOError:
1207 alt_dir = None
1208 else:
1209 alt_dir = None
1210
1211 if (clone_bundle
1212 and alt_dir is None
1213 and self._ApplyCloneBundle(initial=is_new, quiet=quiet, verbose=verbose)):
1214 is_new = False
1215
1216 if current_branch_only is None:
1217 if self.sync_c:
1218 current_branch_only = True
1219 elif not self.manifest._loaded:
1220 # Manifest cannot check defaults until it syncs.
1221 current_branch_only = False
1222 elif self.manifest.default.sync_c:
1223 current_branch_only = True
1224
1225 if tags is None:
1226 tags = self.sync_tags
1227
1228 if self.clone_depth:
1229 depth = self.clone_depth
1230 else:
1231 depth = self.manifest.manifestProject.depth
1232
1233 # See if we can skip the network fetch entirely.
1234 remote_fetched = False
1235 if not (optimized_fetch and
1236 (ID_RE.match(self.revisionExpr) and
1237 self._CheckForImmutableRevision())):
1238 remote_fetched = True
1239 if not self._RemoteFetch(
1240 initial=is_new,
1241 quiet=quiet, verbose=verbose, output_redir=output_redir,
1242 alt_dir=alt_dir, current_branch_only=current_branch_only,
1243 tags=tags, prune=prune, depth=depth,
1244 submodules=submodules, force_sync=force_sync,
1245 ssh_proxy=ssh_proxy,
1246 clone_filter=clone_filter, retry_fetches=retry_fetches):
1247 return SyncNetworkHalfResult(False, remote_fetched)
1248
1249 mp = self.manifest.manifestProject
1250 dissociate = mp.dissociate
1251 if dissociate:
1252 alternates_file = os.path.join(self.objdir, 'objects/info/alternates')
1253 if os.path.exists(alternates_file):
1254 cmd = ['repack', '-a', '-d']
1255 p = GitCommand(self, cmd, bare=True, capture_stdout=bool(output_redir),
1256 merge_output=bool(output_redir))
1257 if p.stdout and output_redir:
1258 output_redir.write(p.stdout)
1259 if p.Wait() != 0:
1260 return SyncNetworkHalfResult(False, remote_fetched)
1261 platform_utils.remove(alternates_file)
1262
1263 if self.worktree:
1264 self._InitMRef()
1265 else:
1266 self._InitMirrorHead()
1267 platform_utils.remove(os.path.join(self.gitdir, 'FETCH_HEAD'),
1268 missing_ok=True)
1269 return SyncNetworkHalfResult(True, remote_fetched)
1270
    def PostRepoUpgrade(self):
        # After repo itself is upgraded, reinstall hooks so projects pick up
        # any new or updated hook scripts.
        self._InitHooks()
1273
1274 def _CopyAndLinkFiles(self):
1275 if self.client.isGitcClient:
1276 return
1277 for copyfile in self.copyfiles:
1278 copyfile._Copy()
1279 for linkfile in self.linkfiles:
1280 linkfile._Link()
1281
1282 def GetCommitRevisionId(self):
1283 """Get revisionId of a commit.
1284
1285 Use this method instead of GetRevisionId to get the id of the commit rather
1286 than the id of the current git object (for example, a tag)
1287 915
1288 """ 916 try:
1289 if not self.revisionExpr.startswith(R_TAGS): 917 f = df[p]
1290 return self.GetRevisionId(self._allrefs) 918 except KeyError:
919 f = None
920
921 if i:
922 i_status = i.status.upper()
923 else:
924 i_status = "-"
925
926 if f:
927 f_status = f.status.lower()
928 else:
929 f_status = "-"
930
931 if i and i.src_path:
932 line = " %s%s\t%s => %s (%s%%)" % (
933 i_status,
934 f_status,
935 i.src_path,
936 p,
937 i.level,
938 )
939 else:
940 line = " %s%s\t%s" % (i_status, f_status, p)
941
942 if i and not f:
943 out.added("%s", line)
944 elif (i and f) or (not i and f):
945 out.changed("%s", line)
946 elif not i and not f:
947 out.untracked("%s", line)
948 else:
949 out.write("%s", line)
950 out.nl()
951
952 return "DIRTY"
953
def PrintWorkTreeDiff(
    self, absolute_paths=False, output_redir=None, local=False
):
    """Print the diff between HEAD and the work tree.

    NB: The original docstring claimed this printed "status"; it runs
    `git diff` against HEAD.

    Args:
        absolute_paths: Prefix diff paths with the project's relative
            path so they resolve from the client root.
        output_redir: Optional stream to receive output instead of
            stdout.
        local: Forwarded to RelPath() when labeling the project.

    Returns:
        True if `git diff` exited successfully; False on failure or
        GitError.
    """
    out = DiffColoring(self.config)
    if output_redir:
        out.redirect(output_redir)
    cmd = ["diff"]
    if out.is_on:
        cmd.append("--color")
    cmd.append(HEAD)
    if absolute_paths:
        cmd.append("--src-prefix=a/%s/" % self.RelPath(local))
        cmd.append("--dst-prefix=b/%s/" % self.RelPath(local))
    cmd.append("--")
    try:
        p = GitCommand(self, cmd, capture_stdout=True, capture_stderr=True)
        # Wait exactly once and reuse the exit status below, instead of
        # waiting a second time on the same process at the return site.
        ret = p.Wait()
    except GitError as e:
        out.nl()
        out.project("project %s/" % self.RelPath(local))
        out.nl()
        out.fail("%s", str(e))
        out.nl()
        return False
    if p.stdout:
        out.nl()
        out.project("project %s/" % self.RelPath(local))
        out.nl()
        out.write("%s", p.stdout)
    return ret == 0
985
def WasPublished(self, branch, all_refs=None):
    """Return the last published SHA-1 for |branch|, or None.

    Publication state is tracked under the R_PUB ref namespace; a
    missing ref means the branch was never uploaded for review.
    """
    key = R_PUB + branch
    if all_refs is not None:
        # dict.get returns None for a missing key, matching the
        # original KeyError fallback.
        return all_refs.get(key)
    try:
        return self.bare_git.rev_parse(key)
    except GitError:
        return None
1002
def CleanPublishedCache(self, all_refs=None):
    """Delete R_PUB refs whose matching local head no longer exists."""
    if all_refs is None:
        all_refs = self._allrefs
    heads = {name for name in all_refs if name.startswith(R_HEADS)}
    # Mirror the original if/elif: a name matching R_HEADS is never
    # treated as published, even if it also matched R_PUB.
    published = {
        name: ref_id
        for name, ref_id in all_refs.items()
        if name.startswith(R_PUB) and not name.startswith(R_HEADS)
    }

    for name, ref_id in published.items():
        short = name[len(R_PUB):]
        if R_HEADS + short not in heads:
            self.bare_git.DeleteRef(name, ref_id)
1019
def GetUploadableBranches(self, selected_branch=None):
    """Return ReviewableBranch objects with commits left to upload.

    A head is skipped when its tip already equals its published ref,
    or when |selected_branch| is given and does not match.
    """
    heads = {}
    published = {}
    for name, ref_id in self._allrefs.items():
        if name.startswith(R_HEADS):
            heads[name[len(R_HEADS):]] = ref_id
        elif name.startswith(R_PUB):
            published[name[len(R_PUB):]] = ref_id

    ready = []
    for branch_name, ref_id in heads.items():
        if branch_name in published and published[branch_name] == ref_id:
            # Tip already uploaded; nothing new for review.
            continue
        if selected_branch and branch_name != selected_branch:
            continue
        reviewable = self.GetUploadableBranch(branch_name)
        if reviewable:
            ready.append(reviewable)
    return ready
1042
def GetUploadableBranch(self, branch_name):
    """Return a ReviewableBranch for |branch_name|, or None.

    None is returned when the branch has no LocalMerge (tracking)
    configuration, or when it has no commits to upload.
    """
    branch = self.GetBranch(branch_name)
    base = branch.LocalMerge
    if not base:
        return None
    reviewable = ReviewableBranch(self, branch, base)
    return reviewable if reviewable.commits else None
1291 1052
1292 try: 1053 def UploadForReview(
1293 return self.bare_git.rev_list(self.revisionExpr, '-1')[0] 1054 self,
1294 except GitError: 1055 branch=None,
1295 raise ManifestInvalidRevisionError('revision %s in %s not found' % 1056 people=([], []),
1296 (self.revisionExpr, self.name)) 1057 dryrun=False,
1058 auto_topic=False,
1059 hashtags=(),
1060 labels=(),
1061 private=False,
1062 notify=None,
1063 wip=False,
1064 ready=False,
1065 dest_branch=None,
1066 validate_certs=True,
1067 push_options=None,
1068 ):
1069 """Uploads the named branch for code review."""
1070 if branch is None:
1071 branch = self.CurrentBranch
1072 if branch is None:
1073 raise GitError("not currently on a branch")
1297 1074
1298 def GetRevisionId(self, all_refs=None): 1075 branch = self.GetBranch(branch)
1299 if self.revisionId: 1076 if not branch.LocalMerge:
1300 return self.revisionId 1077 raise GitError("branch %s does not track a remote" % branch.name)
1078 if not branch.remote.review:
1079 raise GitError("remote %s has no review url" % branch.remote.name)
1080
1081 # Basic validity check on label syntax.
1082 for label in labels:
1083 if not re.match(r"^.+[+-][0-9]+$", label):
1084 raise UploadError(
1085 f'invalid label syntax "{label}": labels use forms like '
1086 "CodeReview+1 or Verified-1"
1087 )
1088
1089 if dest_branch is None:
1090 dest_branch = self.dest_branch
1091 if dest_branch is None:
1092 dest_branch = branch.merge
1093 if not dest_branch.startswith(R_HEADS):
1094 dest_branch = R_HEADS + dest_branch
1095
1096 if not branch.remote.projectname:
1097 branch.remote.projectname = self.name
1098 branch.remote.Save()
1099
1100 url = branch.remote.ReviewUrl(self.UserEmail, validate_certs)
1101 if url is None:
1102 raise UploadError("review not configured")
1103 cmd = ["push"]
1104 if dryrun:
1105 cmd.append("-n")
1106
1107 if url.startswith("ssh://"):
1108 cmd.append("--receive-pack=gerrit receive-pack")
1109
1110 for push_option in push_options or []:
1111 cmd.append("-o")
1112 cmd.append(push_option)
1113
1114 cmd.append(url)
1115
1116 if dest_branch.startswith(R_HEADS):
1117 dest_branch = dest_branch[len(R_HEADS) :]
1118
1119 ref_spec = "%s:refs/for/%s" % (R_HEADS + branch.name, dest_branch)
1120 opts = []
1121 if auto_topic:
1122 opts += ["topic=" + branch.name]
1123 opts += ["t=%s" % p for p in hashtags]
1124 # NB: No need to encode labels as they've been validated above.
1125 opts += ["l=%s" % p for p in labels]
1126
1127 opts += ["r=%s" % p for p in people[0]]
1128 opts += ["cc=%s" % p for p in people[1]]
1129 if notify:
1130 opts += ["notify=" + notify]
1131 if private:
1132 opts += ["private"]
1133 if wip:
1134 opts += ["wip"]
1135 if ready:
1136 opts += ["ready"]
1137 if opts:
1138 ref_spec = ref_spec + "%" + ",".join(opts)
1139 cmd.append(ref_spec)
1140
1141 if GitCommand(self, cmd, bare=True).Wait() != 0:
1142 raise UploadError("Upload failed")
1143
1144 if not dryrun:
1145 msg = "posted to %s for %s" % (branch.remote.review, dest_branch)
1146 self.bare_git.UpdateRef(
1147 R_PUB + branch.name, R_HEADS + branch.name, message=msg
1148 )
1149
1150 def _ExtractArchive(self, tarpath, path=None):
1151 """Extract the given tar on its current location
1301 1152
1302 rem = self.GetRemote() 1153 Args:
1303 rev = rem.ToLocal(self.revisionExpr) 1154 tarpath: The path to the actual tar file
1304 1155
1305 if all_refs is not None and rev in all_refs: 1156 """
1306 return all_refs[rev] 1157 try:
1158 with tarfile.open(tarpath, "r") as tar:
1159 tar.extractall(path=path)
1160 return True
1161 except (IOError, tarfile.TarError) as e:
1162 _error("Cannot extract archive %s: %s", tarpath, str(e))
1163 return False
1307 1164
1308 try: 1165 def Sync_NetworkHalf(
1309 return self.bare_git.rev_parse('--verify', '%s^0' % rev) 1166 self,
1310 except GitError: 1167 quiet=False,
1311 raise ManifestInvalidRevisionError('revision %s in %s not found' % 1168 verbose=False,
1312 (self.revisionExpr, self.name)) 1169 output_redir=None,
1170 is_new=None,
1171 current_branch_only=None,
1172 force_sync=False,
1173 clone_bundle=True,
1174 tags=None,
1175 archive=False,
1176 optimized_fetch=False,
1177 retry_fetches=0,
1178 prune=False,
1179 submodules=False,
1180 ssh_proxy=None,
1181 clone_filter=None,
1182 partial_clone_exclude=set(),
1183 ):
1184 """Perform only the network IO portion of the sync process.
1185 Local working directory/branch state is not affected.
1186 """
1187 if archive and not isinstance(self, MetaProject):
1188 if self.remote.url.startswith(("http://", "https://")):
1189 _error(
1190 "%s: Cannot fetch archives from http/https remotes.",
1191 self.name,
1192 )
1193 return SyncNetworkHalfResult(False, False)
1194
1195 name = self.relpath.replace("\\", "/")
1196 name = name.replace("/", "_")
1197 tarpath = "%s.tar" % name
1198 topdir = self.manifest.topdir
1313 1199
1314 def SetRevisionId(self, revisionId): 1200 try:
1315 if self.revisionExpr: 1201 self._FetchArchive(tarpath, cwd=topdir)
1316 self.upstream = self.revisionExpr 1202 except GitError as e:
1203 _error("%s", e)
1204 return SyncNetworkHalfResult(False, False)
1317 1205
1318 self.revisionId = revisionId 1206 # From now on, we only need absolute tarpath.
1207 tarpath = os.path.join(topdir, tarpath)
1319 1208
1320 def Sync_LocalHalf(self, syncbuf, force_sync=False, submodules=False): 1209 if not self._ExtractArchive(tarpath, path=topdir):
1321 """Perform only the local IO portion of the sync process. 1210 return SyncNetworkHalfResult(False, True)
1322 Network access is not required. 1211 try:
1323 """ 1212 platform_utils.remove(tarpath)
1324 if not os.path.exists(self.gitdir): 1213 except OSError as e:
1325 syncbuf.fail(self, 1214 _warn("Cannot remove archive %s: %s", tarpath, str(e))
1326 'Cannot checkout %s due to missing network sync; Run ' 1215 self._CopyAndLinkFiles()
1327 '`repo sync -n %s` first.' % 1216 return SyncNetworkHalfResult(True, True)
1328 (self.name, self.name)) 1217
1329 return 1218 # If the shared object dir already exists, don't try to rebootstrap with
1330 1219 # a clone bundle download. We should have the majority of objects
1331 self._InitWorkTree(force_sync=force_sync, submodules=submodules) 1220 # already.
1332 all_refs = self.bare_ref.all 1221 if clone_bundle and os.path.exists(self.objdir):
1333 self.CleanPublishedCache(all_refs) 1222 clone_bundle = False
1334 revid = self.GetRevisionId(all_refs) 1223
1335 1224 if self.name in partial_clone_exclude:
1336 # Special case the root of the repo client checkout. Make sure it doesn't 1225 clone_bundle = True
1337 # contain files being checked out to dirs we don't allow. 1226 clone_filter = None
1338 if self.relpath == '.': 1227
1339 PROTECTED_PATHS = {'.repo'} 1228 if is_new is None:
1340 paths = set(self.work_git.ls_tree('-z', '--name-only', '--', revid).split('\0')) 1229 is_new = not self.Exists
1341 bad_paths = paths & PROTECTED_PATHS 1230 if is_new:
1342 if bad_paths: 1231 self._InitGitDir(force_sync=force_sync, quiet=quiet)
1343 syncbuf.fail(self, 1232 else:
1344 'Refusing to checkout project that writes to protected ' 1233 self._UpdateHooks(quiet=quiet)
1345 'paths: %s' % (', '.join(bad_paths),)) 1234 self._InitRemote()
1346 return 1235
1347 1236 if self.UseAlternates:
1348 def _doff(): 1237 # If gitdir/objects is a symlink, migrate it from the old layout.
1349 self._FastForward(revid) 1238 gitdir_objects = os.path.join(self.gitdir, "objects")
1350 self._CopyAndLinkFiles() 1239 if platform_utils.islink(gitdir_objects):
1351 1240 platform_utils.remove(gitdir_objects, missing_ok=True)
1352 def _dosubmodules(): 1241 gitdir_alt = os.path.join(self.gitdir, "objects/info/alternates")
1353 self._SyncSubmodules(quiet=True) 1242 if not os.path.exists(gitdir_alt):
1354 1243 os.makedirs(os.path.dirname(gitdir_alt), exist_ok=True)
1355 head = self.work_git.GetHead() 1244 _lwrite(
1356 if head.startswith(R_HEADS): 1245 gitdir_alt,
1357 branch = head[len(R_HEADS):] 1246 os.path.join(
1358 try: 1247 os.path.relpath(self.objdir, gitdir_objects), "objects"
1359 head = all_refs[head] 1248 )
1360 except KeyError: 1249 + "\n",
1361 head = None 1250 )
1362 else:
1363 branch = None
1364
1365 if branch is None or syncbuf.detach_head:
1366 # Currently on a detached HEAD. The user is assumed to
1367 # not have any local modifications worth worrying about.
1368 #
1369 if self.IsRebaseInProgress():
1370 syncbuf.fail(self, _PriorSyncFailedError())
1371 return
1372
1373 if head == revid:
1374 # No changes; don't do anything further.
1375 # Except if the head needs to be detached
1376 #
1377 if not syncbuf.detach_head:
1378 # The copy/linkfile config may have changed.
1379 self._CopyAndLinkFiles()
1380 return
1381 else:
1382 lost = self._revlist(not_rev(revid), HEAD)
1383 if lost:
1384 syncbuf.info(self, "discarding %d commits", len(lost))
1385
1386 try:
1387 self._Checkout(revid, quiet=True)
1388 if submodules:
1389 self._SyncSubmodules(quiet=True)
1390 except GitError as e:
1391 syncbuf.fail(self, e)
1392 return
1393 self._CopyAndLinkFiles()
1394 return
1395
1396 if head == revid:
1397 # No changes; don't do anything further.
1398 #
1399 # The copy/linkfile config may have changed.
1400 self._CopyAndLinkFiles()
1401 return
1402
1403 branch = self.GetBranch(branch)
1404
1405 if not branch.LocalMerge:
1406 # The current branch has no tracking configuration.
1407 # Jump off it to a detached HEAD.
1408 #
1409 syncbuf.info(self,
1410 "leaving %s; does not track upstream",
1411 branch.name)
1412 try:
1413 self._Checkout(revid, quiet=True)
1414 if submodules:
1415 self._SyncSubmodules(quiet=True)
1416 except GitError as e:
1417 syncbuf.fail(self, e)
1418 return
1419 self._CopyAndLinkFiles()
1420 return
1421 1251
1422 upstream_gain = self._revlist(not_rev(HEAD), revid) 1252 if is_new:
1253 alt = os.path.join(self.objdir, "objects/info/alternates")
1254 try:
1255 with open(alt) as fd:
1256 # This works for both absolute and relative alternate
1257 # directories.
1258 alt_dir = os.path.join(
1259 self.objdir, "objects", fd.readline().rstrip()
1260 )
1261 except IOError:
1262 alt_dir = None
1263 else:
1264 alt_dir = None
1423 1265
1424 # See if we can perform a fast forward merge. This can happen if our 1266 if (
1425 # branch isn't in the exact same state as we last published. 1267 clone_bundle
1426 try: 1268 and alt_dir is None
1427 self.work_git.merge_base('--is-ancestor', HEAD, revid) 1269 and self._ApplyCloneBundle(
1428 # Skip the published logic. 1270 initial=is_new, quiet=quiet, verbose=verbose
1429 pub = False 1271 )
1430 except GitError: 1272 ):
1431 pub = self.WasPublished(branch.name, all_refs) 1273 is_new = False
1432 1274
1433 if pub: 1275 if current_branch_only is None:
1434 not_merged = self._revlist(not_rev(revid), pub) 1276 if self.sync_c:
1435 if not_merged: 1277 current_branch_only = True
1436 if upstream_gain: 1278 elif not self.manifest._loaded:
1437 # The user has published this branch and some of those 1279 # Manifest cannot check defaults until it syncs.
1438 # commits are not yet merged upstream. We do not want 1280 current_branch_only = False
1439 # to rewrite the published commits so we punt. 1281 elif self.manifest.default.sync_c:
1440 # 1282 current_branch_only = True
1441 syncbuf.fail(self, 1283
1442 "branch %s is published (but not merged) and is now " 1284 if tags is None:
1443 "%d commits behind" % (branch.name, len(upstream_gain))) 1285 tags = self.sync_tags
1444 return 1286
1445 elif pub == head: 1287 if self.clone_depth:
1446 # All published commits are merged, and thus we are a 1288 depth = self.clone_depth
1447 # strict subset. We can fast-forward safely. 1289 else:
1448 # 1290 depth = self.manifest.manifestProject.depth
1449 syncbuf.later1(self, _doff) 1291
1450 if submodules: 1292 # See if we can skip the network fetch entirely.
1451 syncbuf.later1(self, _dosubmodules) 1293 remote_fetched = False
1452 return 1294 if not (
1453 1295 optimized_fetch
1454 # Examine the local commits not in the remote. Find the 1296 and (
1455 # last one attributed to this user, if any. 1297 ID_RE.match(self.revisionExpr)
1456 # 1298 and self._CheckForImmutableRevision()
1457 local_changes = self._revlist(not_rev(revid), HEAD, format='%H %ce') 1299 )
1458 last_mine = None 1300 ):
1459 cnt_mine = 0 1301 remote_fetched = True
1460 for commit in local_changes: 1302 if not self._RemoteFetch(
1461 commit_id, committer_email = commit.split(' ', 1) 1303 initial=is_new,
1462 if committer_email == self.UserEmail: 1304 quiet=quiet,
1463 last_mine = commit_id 1305 verbose=verbose,
1464 cnt_mine += 1 1306 output_redir=output_redir,
1465 1307 alt_dir=alt_dir,
1466 if not upstream_gain and cnt_mine == len(local_changes): 1308 current_branch_only=current_branch_only,
1467 # The copy/linkfile config may have changed. 1309 tags=tags,
1468 self._CopyAndLinkFiles() 1310 prune=prune,
1469 return 1311 depth=depth,
1470 1312 submodules=submodules,
1471 if self.IsDirty(consider_untracked=False): 1313 force_sync=force_sync,
1472 syncbuf.fail(self, _DirtyError()) 1314 ssh_proxy=ssh_proxy,
1473 return 1315 clone_filter=clone_filter,
1474 1316 retry_fetches=retry_fetches,
1475 # If the upstream switched on us, warn the user. 1317 ):
1476 # 1318 return SyncNetworkHalfResult(False, remote_fetched)
1477 if branch.merge != self.revisionExpr:
1478 if branch.merge and self.revisionExpr:
1479 syncbuf.info(self,
1480 'manifest switched %s...%s',
1481 branch.merge,
1482 self.revisionExpr)
1483 elif branch.merge:
1484 syncbuf.info(self,
1485 'manifest no longer tracks %s',
1486 branch.merge)
1487
1488 if cnt_mine < len(local_changes):
1489 # Upstream rebased. Not everything in HEAD
1490 # was created by this user.
1491 #
1492 syncbuf.info(self,
1493 "discarding %d commits removed from upstream",
1494 len(local_changes) - cnt_mine)
1495
1496 branch.remote = self.GetRemote()
1497 if not ID_RE.match(self.revisionExpr):
1498 # in case of manifest sync the revisionExpr might be a SHA1
1499 branch.merge = self.revisionExpr
1500 if not branch.merge.startswith('refs/'):
1501 branch.merge = R_HEADS + branch.merge
1502 branch.Save()
1503
1504 if cnt_mine > 0 and self.rebase:
1505 def _docopyandlink():
1506 self._CopyAndLinkFiles()
1507
1508 def _dorebase():
1509 self._Rebase(upstream='%s^1' % last_mine, onto=revid)
1510 syncbuf.later2(self, _dorebase)
1511 if submodules:
1512 syncbuf.later2(self, _dosubmodules)
1513 syncbuf.later2(self, _docopyandlink)
1514 elif local_changes:
1515 try:
1516 self._ResetHard(revid)
1517 if submodules:
1518 self._SyncSubmodules(quiet=True)
1519 self._CopyAndLinkFiles()
1520 except GitError as e:
1521 syncbuf.fail(self, e)
1522 return
1523 else:
1524 syncbuf.later1(self, _doff)
1525 if submodules:
1526 syncbuf.later1(self, _dosubmodules)
1527
def AddCopyFile(self, src, dest, topdir):
    """Queue |src| to be copied to |dest| (relative to |topdir|).

    No filesystem changes occur here; the copy runs later, and fuller
    path validation is deferred to that point as well. Paths should be
    basically validated before being queued.
    """
    op = _CopyFile(self.worktree, src, topdir, dest)
    self.copyfiles.append(op)
1537 1319
1538 def AddLinkFile(self, src, dest, topdir): 1320 mp = self.manifest.manifestProject
1539 """Mark |dest| to create a symlink (relative to |topdir|) pointing to |src|. 1321 dissociate = mp.dissociate
1322 if dissociate:
1323 alternates_file = os.path.join(
1324 self.objdir, "objects/info/alternates"
1325 )
1326 if os.path.exists(alternates_file):
1327 cmd = ["repack", "-a", "-d"]
1328 p = GitCommand(
1329 self,
1330 cmd,
1331 bare=True,
1332 capture_stdout=bool(output_redir),
1333 merge_output=bool(output_redir),
1334 )
1335 if p.stdout and output_redir:
1336 output_redir.write(p.stdout)
1337 if p.Wait() != 0:
1338 return SyncNetworkHalfResult(False, remote_fetched)
1339 platform_utils.remove(alternates_file)
1340
1341 if self.worktree:
1342 self._InitMRef()
1343 else:
1344 self._InitMirrorHead()
1345 platform_utils.remove(
1346 os.path.join(self.gitdir, "FETCH_HEAD"), missing_ok=True
1347 )
1348 return SyncNetworkHalfResult(True, remote_fetched)
1540 1349
1541 No filesystem changes occur here. Actual linking happens later on. 1350 def PostRepoUpgrade(self):
1351 self._InitHooks()
1542 1352
1543 Paths should have basic validation run on them before being queued. 1353 def _CopyAndLinkFiles(self):
1544 Further checking will be handled when the actual link happens. 1354 if self.client.isGitcClient:
1545 """ 1355 return
1546 self.linkfiles.append(_LinkFile(self.worktree, src, topdir, dest)) 1356 for copyfile in self.copyfiles:
1357 copyfile._Copy()
1358 for linkfile in self.linkfiles:
1359 linkfile._Link()
1547 1360
1548 def AddAnnotation(self, name, value, keep): 1361 def GetCommitRevisionId(self):
1549 self.annotations.append(Annotation(name, value, keep)) 1362 """Get revisionId of a commit.
1550 1363
1551 def DownloadPatchSet(self, change_id, patch_id): 1364 Use this method instead of GetRevisionId to get the id of the commit
1552 """Download a single patch set of a single change to FETCH_HEAD. 1365 rather than the id of the current git object (for example, a tag)
1553 """
1554 remote = self.GetRemote()
1555
1556 cmd = ['fetch', remote.name]
1557 cmd.append('refs/changes/%2.2d/%d/%d'
1558 % (change_id % 100, change_id, patch_id))
1559 if GitCommand(self, cmd, bare=True).Wait() != 0:
1560 return None
1561 return DownloadedChange(self,
1562 self.GetRevisionId(),
1563 change_id,
1564 patch_id,
1565 self.bare_git.rev_parse('FETCH_HEAD'))
1566
1567 def DeleteWorktree(self, quiet=False, force=False):
1568 """Delete the source checkout and any other housekeeping tasks.
1569
1570 This currently leaves behind the internal .repo/ cache state. This helps
1571 when switching branches or manifest changes get reverted as we don't have
1572 to redownload all the git objects. But we should do some GC at some point.
1573
1574 Args:
1575 quiet: Whether to hide normal messages.
1576 force: Always delete tree even if dirty.
1577 1366
1578 Returns: 1367 """
1579 True if the worktree was completely cleaned out. 1368 if not self.revisionExpr.startswith(R_TAGS):
1580 """ 1369 return self.GetRevisionId(self._allrefs)
1581 if self.IsDirty():
1582 if force:
1583 print('warning: %s: Removing dirty project: uncommitted changes lost.' %
1584 (self.RelPath(local=False),), file=sys.stderr)
1585 else:
1586 print('error: %s: Cannot remove project: uncommitted changes are '
1587 'present.\n' % (self.RelPath(local=False),), file=sys.stderr)
1588 return False
1589 1370
1590 if not quiet:
1591 print('%s: Deleting obsolete checkout.' % (self.RelPath(local=False),))
1592
1593 # Unlock and delink from the main worktree. We don't use git's worktree
1594 # remove because it will recursively delete projects -- we handle that
1595 # ourselves below. https://crbug.com/git/48
1596 if self.use_git_worktrees:
1597 needle = platform_utils.realpath(self.gitdir)
1598 # Find the git worktree commondir under .repo/worktrees/.
1599 output = self.bare_git.worktree('list', '--porcelain').splitlines()[0]
1600 assert output.startswith('worktree '), output
1601 commondir = output[9:]
1602 # Walk each of the git worktrees to see where they point.
1603 configs = os.path.join(commondir, 'worktrees')
1604 for name in os.listdir(configs):
1605 gitdir = os.path.join(configs, name, 'gitdir')
1606 with open(gitdir) as fp:
1607 relpath = fp.read().strip()
1608 # Resolve the checkout path and see if it matches this project.
1609 fullpath = platform_utils.realpath(os.path.join(configs, name, relpath))
1610 if fullpath == needle:
1611 platform_utils.rmtree(os.path.join(configs, name))
1612
1613 # Delete the .git directory first, so we're less likely to have a partially
1614 # working git repository around. There shouldn't be any git projects here,
1615 # so rmtree works.
1616
1617 # Try to remove plain files first in case of git worktrees. If this fails
1618 # for any reason, we'll fall back to rmtree, and that'll display errors if
1619 # it can't remove things either.
1620 try:
1621 platform_utils.remove(self.gitdir)
1622 except OSError:
1623 pass
1624 try:
1625 platform_utils.rmtree(self.gitdir)
1626 except OSError as e:
1627 if e.errno != errno.ENOENT:
1628 print('error: %s: %s' % (self.gitdir, e), file=sys.stderr)
1629 print('error: %s: Failed to delete obsolete checkout; remove manually, '
1630 'then run `repo sync -l`.' % (self.RelPath(local=False),),
1631 file=sys.stderr)
1632 return False
1633
1634 # Delete everything under the worktree, except for directories that contain
1635 # another git project.
1636 dirs_to_remove = []
1637 failed = False
1638 for root, dirs, files in platform_utils.walk(self.worktree):
1639 for f in files:
1640 path = os.path.join(root, f)
1641 try:
1642 platform_utils.remove(path)
1643 except OSError as e:
1644 if e.errno != errno.ENOENT:
1645 print('error: %s: Failed to remove: %s' % (path, e), file=sys.stderr)
1646 failed = True
1647 dirs[:] = [d for d in dirs
1648 if not os.path.lexists(os.path.join(root, d, '.git'))]
1649 dirs_to_remove += [os.path.join(root, d) for d in dirs
1650 if os.path.join(root, d) not in dirs_to_remove]
1651 for d in reversed(dirs_to_remove):
1652 if platform_utils.islink(d):
1653 try: 1371 try:
1654 platform_utils.remove(d) 1372 return self.bare_git.rev_list(self.revisionExpr, "-1")[0]
1655 except OSError as e: 1373 except GitError:
1656 if e.errno != errno.ENOENT: 1374 raise ManifestInvalidRevisionError(
1657 print('error: %s: Failed to remove: %s' % (d, e), file=sys.stderr) 1375 "revision %s in %s not found" % (self.revisionExpr, self.name)
1658 failed = True 1376 )
1659 elif not platform_utils.listdir(d):
1660 try:
1661 platform_utils.rmdir(d)
1662 except OSError as e:
1663 if e.errno != errno.ENOENT:
1664 print('error: %s: Failed to remove: %s' % (d, e), file=sys.stderr)
1665 failed = True
1666 if failed:
1667 print('error: %s: Failed to delete obsolete checkout.' % (self.RelPath(local=False),),
1668 file=sys.stderr)
1669 print(' Remove manually, then run `repo sync -l`.', file=sys.stderr)
1670 return False
1671
1672 # Try deleting parent dirs if they are empty.
1673 path = self.worktree
1674 while path != self.manifest.topdir:
1675 try:
1676 platform_utils.rmdir(path)
1677 except OSError as e:
1678 if e.errno != errno.ENOENT:
1679 break
1680 path = os.path.dirname(path)
1681
1682 return True
1683
# Branch Management ##
def StartBranch(self, name, branch_merge='', revision=None):
    """Create (and switch to) a new branch off the manifest's revision.

    Args:
        name: Name of the local branch to create.
        branch_merge: Upstream to track; defaults to revisionExpr.
        revision: Explicit revision to branch from, overriding the
            manifest revision.

    Returns:
        True on success (including when already on the branch).
    """
    if not branch_merge:
        branch_merge = self.revisionExpr
    head = self.work_git.GetHead()
    if head == (R_HEADS + name):
        # Already sitting on the requested branch.
        return True

    all_refs = self.bare_ref.all
    if R_HEADS + name in all_refs:
        # Branch exists locally; a plain checkout suffices.
        return GitCommand(self, ['checkout', '-q', name, '--']).Wait() == 0

    branch = self.GetBranch(name)
    branch.remote = self.GetRemote()
    branch.merge = branch_merge
    if not branch.merge.startswith('refs/') and not ID_RE.match(
            branch_merge):
        branch.merge = R_HEADS + branch_merge

    if revision is None:
        revid = self.GetRevisionId(all_refs)
    else:
        revid = self.work_git.rev_parse(revision)

    if head.startswith(R_HEADS):
        # Resolve the symbolic head; None when the ref is unborn
        # (matches the original KeyError fallback).
        head = all_refs.get(head)

    if revid and head and revid == head:
        # HEAD already points at the target commit: update refs directly
        # rather than spawning a git checkout.
        ref = R_HEADS + name
        self.work_git.update_ref(ref, revid)
        self.work_git.symbolic_ref(HEAD, ref)
        branch.Save()
        return True

    if GitCommand(
            self, ['checkout', '-q', '-b', branch.name, revid]).Wait() == 0:
        branch.Save()
        return True
    return False
1725
1726 def CheckoutBranch(self, name):
1727 """Checkout a local topic branch.
1728 1377
1729 Args: 1378 def GetRevisionId(self, all_refs=None):
1730 name: The name of the branch to checkout. 1379 if self.revisionId:
1380 return self.revisionId
1731 1381
1732 Returns: 1382 rem = self.GetRemote()
1733 True if the checkout succeeded; False if it didn't; None if the branch 1383 rev = rem.ToLocal(self.revisionExpr)
1734 didn't exist.
1735 """
1736 rev = R_HEADS + name
1737 head = self.work_git.GetHead()
1738 if head == rev:
1739 # Already on the branch
1740 #
1741 return True
1742
1743 all_refs = self.bare_ref.all
1744 try:
1745 revid = all_refs[rev]
1746 except KeyError:
1747 # Branch does not exist in this project
1748 #
1749 return None
1750
1751 if head.startswith(R_HEADS):
1752 try:
1753 head = all_refs[head]
1754 except KeyError:
1755 head = None
1756
1757 if head == revid:
1758 # Same revision; just update HEAD to point to the new
1759 # target branch, but otherwise take no other action.
1760 #
1761 _lwrite(self.work_git.GetDotgitPath(subpath=HEAD),
1762 'ref: %s%s\n' % (R_HEADS, name))
1763 return True
1764
1765 return GitCommand(self,
1766 ['checkout', name, '--'],
1767 capture_stdout=True,
1768 capture_stderr=True).Wait() == 0
1769
1770 def AbandonBranch(self, name):
1771 """Destroy a local topic branch.
1772
1773 Args:
1774 name: The name of the branch to abandon.
1775 1384
1776 Returns: 1385 if all_refs is not None and rev in all_refs:
1777 True if the abandon succeeded; False if it didn't; None if the branch 1386 return all_refs[rev]
1778 didn't exist.
1779 """
1780 rev = R_HEADS + name
1781 all_refs = self.bare_ref.all
1782 if rev not in all_refs:
1783 # Doesn't exist
1784 return None
1785
1786 head = self.work_git.GetHead()
1787 if head == rev:
1788 # We can't destroy the branch while we are sitting
1789 # on it. Switch to a detached HEAD.
1790 #
1791 head = all_refs[head]
1792
1793 revid = self.GetRevisionId(all_refs)
1794 if head == revid:
1795 _lwrite(self.work_git.GetDotgitPath(subpath=HEAD), '%s\n' % revid)
1796 else:
1797 self._Checkout(revid, quiet=True)
1798
1799 return GitCommand(self,
1800 ['branch', '-D', name],
1801 capture_stdout=True,
1802 capture_stderr=True).Wait() == 0
1803
1804 def PruneHeads(self):
1805 """Prune any topic branches already merged into upstream.
1806 """
1807 cb = self.CurrentBranch
1808 kill = []
1809 left = self._allrefs
1810 for name in left.keys():
1811 if name.startswith(R_HEADS):
1812 name = name[len(R_HEADS):]
1813 if cb is None or name != cb:
1814 kill.append(name)
1815
1816 # Minor optimization: If there's nothing to prune, then don't try to read
1817 # any project state.
1818 if not kill and not cb:
1819 return []
1820
1821 rev = self.GetRevisionId(left)
1822 if cb is not None \
1823 and not self._revlist(HEAD + '...' + rev) \
1824 and not self.IsDirty(consider_untracked=False):
1825 self.work_git.DetachHead(HEAD)
1826 kill.append(cb)
1827
1828 if kill:
1829 old = self.bare_git.GetHead()
1830
1831 try:
1832 self.bare_git.DetachHead(rev)
1833
1834 b = ['branch', '-d']
1835 b.extend(kill)
1836 b = GitCommand(self, b, bare=True,
1837 capture_stdout=True,
1838 capture_stderr=True)
1839 b.Wait()
1840 finally:
1841 if ID_RE.match(old):
1842 self.bare_git.DetachHead(old)
1843 else:
1844 self.bare_git.SetHead(old)
1845 left = self._allrefs
1846 1387
1847 for branch in kill: 1388 try:
1848 if (R_HEADS + branch) not in left: 1389 return self.bare_git.rev_parse("--verify", "%s^0" % rev)
1849 self.CleanPublishedCache() 1390 except GitError:
1850 break 1391 raise ManifestInvalidRevisionError(
1392 "revision %s in %s not found" % (self.revisionExpr, self.name)
1393 )
1394
1395 def SetRevisionId(self, revisionId):
1396 if self.revisionExpr:
1397 self.upstream = self.revisionExpr
1398
1399 self.revisionId = revisionId
1400
    def Sync_LocalHalf(self, syncbuf, force_sync=False, submodules=False):
        """Perform only the local IO portion of the sync process.

        Network access is not required.

        Args:
            syncbuf: Buffer collecting failures/info and deferred actions
                (via fail/info/later1/later2 — semantics defined elsewhere).
            force_sync: Passed through to worktree initialization.
            submodules: Also sync git submodules after checkout.
        """
        if not os.path.exists(self.gitdir):
            syncbuf.fail(
                self,
                "Cannot checkout %s due to missing network sync; Run "
                "`repo sync -n %s` first." % (self.name, self.name),
            )
            return

        self._InitWorkTree(force_sync=force_sync, submodules=submodules)
        all_refs = self.bare_ref.all
        self.CleanPublishedCache(all_refs)
        revid = self.GetRevisionId(all_refs)

        # Special case the root of the repo client checkout.  Make sure it
        # doesn't contain files being checked out to dirs we don't allow.
        if self.relpath == ".":
            PROTECTED_PATHS = {".repo"}
            paths = set(
                self.work_git.ls_tree("-z", "--name-only", "--", revid).split(
                    "\0"
                )
            )
            bad_paths = paths & PROTECTED_PATHS
            if bad_paths:
                syncbuf.fail(
                    self,
                    "Refusing to checkout project that writes to protected "
                    "paths: %s" % (", ".join(bad_paths),),
                )
                return

        # Deferred fast-forward action (queued via syncbuf below).
        def _doff():
            self._FastForward(revid)
            self._CopyAndLinkFiles()

        def _dosubmodules():
            self._SyncSubmodules(quiet=True)

        head = self.work_git.GetHead()
        if head.startswith(R_HEADS):
            branch = head[len(R_HEADS) :]
            try:
                head = all_refs[head]
            except KeyError:
                head = None
        else:
            branch = None

        if branch is None or syncbuf.detach_head:
            # Currently on a detached HEAD.  The user is assumed to
            # not have any local modifications worth worrying about.
            if self.IsRebaseInProgress():
                syncbuf.fail(self, _PriorSyncFailedError())
                return

            if head == revid:
                # No changes; don't do anything further.
                # Except if the head needs to be detached.
                if not syncbuf.detach_head:
                    # The copy/linkfile config may have changed.
                    self._CopyAndLinkFiles()
                return
            else:
                lost = self._revlist(not_rev(revid), HEAD)
                if lost:
                    syncbuf.info(self, "discarding %d commits", len(lost))

            try:
                self._Checkout(revid, quiet=True)
                if submodules:
                    self._SyncSubmodules(quiet=True)
            except GitError as e:
                syncbuf.fail(self, e)
                return
            self._CopyAndLinkFiles()
            return

        if head == revid:
            # No changes; don't do anything further.
            #
            # The copy/linkfile config may have changed.
            self._CopyAndLinkFiles()
            return

        branch = self.GetBranch(branch)

        if not branch.LocalMerge:
            # The current branch has no tracking configuration.
            # Jump off it to a detached HEAD.
            syncbuf.info(
                self, "leaving %s; does not track upstream", branch.name
            )
            try:
                self._Checkout(revid, quiet=True)
                if submodules:
                    self._SyncSubmodules(quiet=True)
            except GitError as e:
                syncbuf.fail(self, e)
                return
            self._CopyAndLinkFiles()
            return

        # Commits present in the target revision but not in HEAD.
        upstream_gain = self._revlist(not_rev(HEAD), revid)

        # See if we can perform a fast forward merge.  This can happen if our
        # branch isn't in the exact same state as we last published.
        try:
            self.work_git.merge_base("--is-ancestor", HEAD, revid)
            # Skip the published logic.
            pub = False
        except GitError:
            pub = self.WasPublished(branch.name, all_refs)

        if pub:
            not_merged = self._revlist(not_rev(revid), pub)
            if not_merged:
                if upstream_gain:
                    # The user has published this branch and some of those
                    # commits are not yet merged upstream.  We do not want
                    # to rewrite the published commits so we punt.
                    syncbuf.fail(
                        self,
                        "branch %s is published (but not merged) and is now "
                        "%d commits behind" % (branch.name, len(upstream_gain)),
                    )
                    return
            elif pub == head:
                # All published commits are merged, and thus we are a
                # strict subset.  We can fast-forward safely.
                syncbuf.later1(self, _doff)
                if submodules:
                    syncbuf.later1(self, _dosubmodules)
                return

        # Examine the local commits not in the remote.  Find the
        # last one attributed to this user, if any.
        local_changes = self._revlist(not_rev(revid), HEAD, format="%H %ce")
        last_mine = None
        cnt_mine = 0
        for commit in local_changes:
            commit_id, committer_email = commit.split(" ", 1)
            if committer_email == self.UserEmail:
                last_mine = commit_id
                cnt_mine += 1

        if not upstream_gain and cnt_mine == len(local_changes):
            # Nothing new upstream and every local commit is ours;
            # the copy/linkfile config may have changed.
            self._CopyAndLinkFiles()
            return

        if self.IsDirty(consider_untracked=False):
            syncbuf.fail(self, _DirtyError())
            return

        # If the upstream switched on us, warn the user.
        if branch.merge != self.revisionExpr:
            if branch.merge and self.revisionExpr:
                syncbuf.info(
                    self,
                    "manifest switched %s...%s",
                    branch.merge,
                    self.revisionExpr,
                )
            elif branch.merge:
                syncbuf.info(self, "manifest no longer tracks %s", branch.merge)

        if cnt_mine < len(local_changes):
            # Upstream rebased.  Not everything in HEAD was created by this user.
            syncbuf.info(
                self,
                "discarding %d commits removed from upstream",
                len(local_changes) - cnt_mine,
            )

        branch.remote = self.GetRemote()
        if not ID_RE.match(self.revisionExpr):
            # In case of manifest sync the revisionExpr might be a SHA1.
            branch.merge = self.revisionExpr
            if not branch.merge.startswith("refs/"):
                branch.merge = R_HEADS + branch.merge
        branch.Save()

        if cnt_mine > 0 and self.rebase:
            # Replay the user's own commits on top of the new revision.
            def _docopyandlink():
                self._CopyAndLinkFiles()

            def _dorebase():
                self._Rebase(upstream="%s^1" % last_mine, onto=revid)

            syncbuf.later2(self, _dorebase)
            if submodules:
                syncbuf.later2(self, _dosubmodules)
            syncbuf.later2(self, _docopyandlink)
        elif local_changes:
            # Local commits exist but either none are ours or rebase is
            # disabled: discard them and hard-reset to the target revision.
            try:
                self._ResetHard(revid)
                if submodules:
                    self._SyncSubmodules(quiet=True)
                self._CopyAndLinkFiles()
            except GitError as e:
                syncbuf.fail(self, e)
                return
        else:
            syncbuf.later1(self, _doff)
            if submodules:
                syncbuf.later1(self, _dosubmodules)
1968 1613
1969 try: 1614 def AddCopyFile(self, src, dest, topdir):
1970 rev = self.GetRevisionId() 1615 """Mark |src| for copying to |dest| (relative to |topdir|).
1971 except GitError:
1972 return []
1973 return get_submodules(self.gitdir, rev)
1974
  def GetDerivedSubprojects(self):
    """Return Project objects derived from this project's git submodules.

    Recursively walks the submodules recorded at the current revision and
    builds a Project for each one not already tracked by the manifest.

    Returns:
      List of derived Project instances (possibly empty).
    """
    result = []
    if not self.Exists:
      # If git repo does not exist yet, querying its submodules will
      # mess up its states; so return here.
      return result
    for rev, path, url in self._GetSubmodules():
      name = self.manifest.GetSubprojectName(self, path)
      relpath, worktree, gitdir, objdir = \
          self.manifest.GetSubprojectPaths(self, name, path)
      project = self.manifest.paths.get(relpath)
      if project:
        # The manifest already has a project at this path; recurse into it
        # rather than creating a duplicate derived project.
        result.extend(project.GetDerivedSubprojects())
        continue

      if url.startswith('..'):
        # Relative submodule URL: resolve it against this project's remote.
        url = urllib.parse.urljoin("%s/" % self.remote.url, url)
      remote = RemoteSpec(self.remote.name,
                          url=url,
                          pushUrl=self.remote.pushUrl,
                          review=self.remote.review,
                          revision=self.remote.revision)
      # The subproject is pinned to the submodule's recorded commit (rev)
      # and inherits sync settings from this (parent) project.
      subproject = Project(manifest=self.manifest,
                           name=name,
                           remote=remote,
                           gitdir=gitdir,
                           objdir=objdir,
                           worktree=worktree,
                           relpath=relpath,
                           revisionExpr=rev,
                           revisionId=rev,
                           rebase=self.rebase,
                           groups=self.groups,
                           sync_c=self.sync_c,
                           sync_s=self.sync_s,
                           sync_tags=self.sync_tags,
                           parent=self,
                           is_derived=True)
      result.append(subproject)
      result.extend(subproject.GetDerivedSubprojects())
    return result
2016
2017# Direct Git Commands ##
2018 def EnableRepositoryExtension(self, key, value='true', version=1):
2019 """Enable git repository extension |key| with |value|.
2020
2021 Args:
2022 key: The extension to enabled. Omit the "extensions." prefix.
2023 value: The value to use for the extension.
2024 version: The minimum git repository version needed.
2025 """
2026 # Make sure the git repo version is new enough already.
2027 found_version = self.config.GetInt('core.repositoryFormatVersion')
2028 if found_version is None:
2029 found_version = 0
2030 if found_version < version:
2031 self.config.SetString('core.repositoryFormatVersion', str(version))
2032 1616
2033 # Enable the extension! 1617 No filesystem changes occur here. Actual copying happens later on.
2034 self.config.SetString('extensions.%s' % (key,), value)
2035 1618
2036 def ResolveRemoteHead(self, name=None): 1619 Paths should have basic validation run on them before being queued.
2037 """Find out what the default branch (HEAD) points to. 1620 Further checking will be handled when the actual copy happens.
1621 """
1622 self.copyfiles.append(_CopyFile(self.worktree, src, topdir, dest))
2038 1623
2039 Normally this points to refs/heads/master, but projects are moving to main. 1624 def AddLinkFile(self, src, dest, topdir):
2040 Support whatever the server uses rather than hardcoding "master" ourselves. 1625 """Mark |dest| to create a symlink (relative to |topdir|) pointing to
2041 """ 1626 |src|.
2042 if name is None:
2043 name = self.remote.name
2044 1627
2045 # The output will look like (NB: tabs are separators): 1628 No filesystem changes occur here. Actual linking happens later on.
2046 # ref: refs/heads/master HEAD
2047 # 5f6803b100bb3cd0f534e96e88c91373e8ed1c44 HEAD
2048 output = self.bare_git.ls_remote('-q', '--symref', '--exit-code', name, 'HEAD')
2049 1629
2050 for line in output.splitlines(): 1630 Paths should have basic validation run on them before being queued.
2051 lhs, rhs = line.split('\t', 1) 1631 Further checking will be handled when the actual link happens.
2052 if rhs == 'HEAD' and lhs.startswith('ref:'): 1632 """
2053 return lhs[4:].strip() 1633 self.linkfiles.append(_LinkFile(self.worktree, src, topdir, dest))
2054 1634
2055 return None 1635 def AddAnnotation(self, name, value, keep):
1636 self.annotations.append(Annotation(name, value, keep))
2056 1637
  def _CheckForImmutableRevision(self):
    """Check whether the pinned revision is already present locally.

    Returns:
      True if revisionExpr (and, when set, the upstream ref) already resolve
      to commits in the local object store; False if a fetch is required.
    """
    try:
      # if revision (sha or tag) is not present then following function
      # throws an error.
      self.bare_git.rev_list('-1', '--missing=allow-any',
                             '%s^0' % self.revisionExpr, '--')
      if self.upstream:
        rev = self.GetRemote().ToLocal(self.upstream)
        self.bare_git.rev_list('-1', '--missing=allow-any',
                               '%s^0' % rev, '--')
        # Also require the revision to be an ancestor of upstream.
        self.bare_git.merge_base('--is-ancestor', self.revisionExpr, rev)
      return True
    except GitError:
      # There is no such persistent revision. We have to fetch it.
      return False
2072
2073 def _FetchArchive(self, tarpath, cwd=None):
2074 cmd = ['archive', '-v', '-o', tarpath]
2075 cmd.append('--remote=%s' % self.remote.url)
2076 cmd.append('--prefix=%s/' % self.RelPath(local=False))
2077 cmd.append(self.revisionExpr)
2078
2079 command = GitCommand(self, cmd, cwd=cwd,
2080 capture_stdout=True,
2081 capture_stderr=True)
2082
2083 if command.Wait() != 0:
2084 raise GitError('git archive %s: %s' % (self.name, command.stderr))
2085
  def _RemoteFetch(self, name=None,
                   current_branch_only=False,
                   initial=False,
                   quiet=False,
                   verbose=False,
                   output_redir=None,
                   alt_dir=None,
                   tags=True,
                   prune=False,
                   depth=None,
                   submodules=False,
                   ssh_proxy=None,
                   force_sync=False,
                   clone_filter=None,
                   retry_fetches=2,
                   retry_sleep_initial_sec=4.0,
                   retry_exp_factor=2.0):
    """Fetch this project's refs from its remote into the bare repository.

    Handles mirror/depth restrictions, immutable-revision short-circuiting,
    alternate-ref seeding on initial clone, retry with exponential backoff
    and jitter, and a fallback full fetch when a sha1-only fetch fails.

    Returns:
      True on success (or when fetching was safely skipped); False otherwise.
    """
    is_sha1 = False
    tag_name = None
    # The depth should not be used when fetching to a mirror because
    # it will result in a shallow repository that cannot be cloned or
    # fetched from.
    # The repo project should also never be synced with partial depth.
    if self.manifest.IsMirror or self.relpath == '.repo/repo':
      depth = None

    if depth:
      current_branch_only = True

    if ID_RE.match(self.revisionExpr) is not None:
      is_sha1 = True

    if current_branch_only:
      if self.revisionExpr.startswith(R_TAGS):
        # This is a tag and its commit id should never change.
        tag_name = self.revisionExpr[len(R_TAGS):]
      elif self.upstream and self.upstream.startswith(R_TAGS):
        # This is a tag and its commit id should never change.
        tag_name = self.upstream[len(R_TAGS):]

      if is_sha1 or tag_name is not None:
        if self._CheckForImmutableRevision():
          if verbose:
            print('Skipped fetching project %s (already have persistent ref)'
                  % self.name)
          return True
      if is_sha1 and not depth:
        # When syncing a specific commit and --depth is not set:
        # * if upstream is explicitly specified and is not a sha1, fetch only
        #   upstream as users expect only upstream to be fetch.
        #   Note: The commit might not be in upstream in which case the sync
        #   will fail.
        # * otherwise, fetch all branches to make sure we end up with the
        #   specific commit.
        if self.upstream:
          current_branch_only = not ID_RE.match(self.upstream)
        else:
          current_branch_only = False

    if not name:
      name = self.remote.name

    remote = self.GetRemote(name)
    if not remote.PreConnectFetch(ssh_proxy):
      ssh_proxy = None

    if initial:
      # Seed packed-refs from an alternate object dir (reference clone) so
      # the fetch can reuse existing objects; restored/undone further below.
      if alt_dir and 'objects' == os.path.basename(alt_dir):
        ref_dir = os.path.dirname(alt_dir)
        packed_refs = os.path.join(self.gitdir, 'packed-refs')

        all_refs = self.bare_ref.all
        ids = set(all_refs.values())
        tmp = set()

        for r, ref_id in GitRefs(ref_dir).all.items():
          if r not in all_refs:
            if r.startswith(R_TAGS) or remote.WritesTo(r):
              all_refs[r] = ref_id
              ids.add(ref_id)
              continue

          if ref_id in ids:
            continue

          # Hide reference-clone-only ids under refs/_alt/ so they are
          # visible to the fetch but removed again afterwards.
          r = 'refs/_alt/%s' % ref_id
          all_refs[r] = ref_id
          ids.add(ref_id)
          tmp.add(r)

        tmp_packed_lines = []
        old_packed_lines = []

        for r in sorted(all_refs):
          line = '%s %s\n' % (all_refs[r], r)
          tmp_packed_lines.append(line)
          if r not in tmp:
            old_packed_lines.append(line)

        tmp_packed = ''.join(tmp_packed_lines)
        old_packed = ''.join(old_packed_lines)
        _lwrite(packed_refs, tmp_packed)
      else:
        alt_dir = None

    cmd = ['fetch']

    if clone_filter:
      git_require((2, 19, 0), fail=True, msg='partial clones')
      cmd.append('--filter=%s' % clone_filter)
      self.EnableRepositoryExtension('partialclone', self.remote.name)

    if depth:
      cmd.append('--depth=%s' % depth)
    else:
      # If this repo has shallow objects, then we don't know which refs have
      # shallow objects or not. Tell git to unshallow all fetched refs. Don't
      # do this with projects that don't have shallow objects, since it is less
      # efficient.
      if os.path.exists(os.path.join(self.gitdir, 'shallow')):
        cmd.append('--depth=2147483647')

    if not verbose:
      cmd.append('--quiet')
    if not quiet and sys.stdout.isatty():
      cmd.append('--progress')
    if not self.worktree:
      cmd.append('--update-head-ok')
    cmd.append(name)

    if force_sync:
      cmd.append('--force')

    if prune:
      cmd.append('--prune')

    # Always pass something for --recurse-submodules, git with GIT_DIR behaves
    # incorrectly when not given `--recurse-submodules=no`. (b/218891912)
    cmd.append(f'--recurse-submodules={"on-demand" if submodules else "no"}')

    spec = []
    if not current_branch_only:
      # Fetch whole repo
      spec.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))
    elif tag_name is not None:
      spec.append('tag')
      spec.append(tag_name)

    if self.manifest.IsMirror and not current_branch_only:
      branch = None
    else:
      branch = self.revisionExpr
    if (not self.manifest.IsMirror and is_sha1 and depth
            and git_require((1, 8, 3))):
      # Shallow checkout of a specific commit, fetch from that commit and not
      # the heads only as the commit might be deeper in the history.
      spec.append(branch)
      if self.upstream:
        spec.append(self.upstream)
    else:
      if is_sha1:
        branch = self.upstream
      if branch is not None and branch.strip():
        if not branch.startswith('refs/'):
          branch = R_HEADS + branch
        spec.append(str((u'+%s:' % branch) + remote.ToLocal(branch)))

    # If mirroring repo and we cannot deduce the tag or branch to fetch, fetch
    # whole repo.
    if self.manifest.IsMirror and not spec:
      spec.append(str((u'+refs/heads/*:') + remote.ToLocal('refs/heads/*')))

    # If using depth then we should not get all the tags since they may
    # be outside of the depth.
    if not tags or depth:
      cmd.append('--no-tags')
    else:
      cmd.append('--tags')
      spec.append(str((u'+refs/tags/*:') + remote.ToLocal('refs/tags/*')))

    cmd.extend(spec)

    # At least one retry minimum due to git remote prune.
    retry_fetches = max(retry_fetches, 2)
    retry_cur_sleep = retry_sleep_initial_sec
    ok = prune_tried = False
    for try_n in range(retry_fetches):
      gitcmd = GitCommand(
          self, cmd, bare=True, objdir=os.path.join(self.objdir, 'objects'),
          ssh_proxy=ssh_proxy,
          merge_output=True, capture_stdout=quiet or bool(output_redir))
      if gitcmd.stdout and not quiet and output_redir:
        output_redir.write(gitcmd.stdout)
      ret = gitcmd.Wait()
      if ret == 0:
        ok = True
        break

      # Retry later due to HTTP 429 Too Many Requests.
      elif (gitcmd.stdout and
            'error:' in gitcmd.stdout and
            'HTTP 429' in gitcmd.stdout):
        # Fallthru to sleep+retry logic at the bottom.
        pass

      # Try to prune remote branches once in case there are conflicts.
      # For example, if the remote had refs/heads/upstream, but deleted that and
      # now has refs/heads/upstream/foo.
      elif (gitcmd.stdout and
            'error:' in gitcmd.stdout and
            'git remote prune' in gitcmd.stdout and
            not prune_tried):
        prune_tried = True
        prunecmd = GitCommand(self, ['remote', 'prune', name], bare=True,
                              ssh_proxy=ssh_proxy)
        ret = prunecmd.Wait()
        if ret:
          break
        print('retrying fetch after pruning remote branches', file=output_redir)
        # Continue right away so we don't sleep as we shouldn't need to.
        continue
      elif current_branch_only and is_sha1 and ret == 128:
        # Exit code 128 means "couldn't find the ref you asked for"; if we're
        # in sha1 mode, we just tried sync'ing from the upstream field; it
        # doesn't exist, thus abort the optimization attempt and do a full sync.
        break
      elif ret < 0:
        # Git died with a signal, exit immediately
        break

      # Figure out how long to sleep before the next attempt, if there is one.
      if not verbose and gitcmd.stdout:
        print('\n%s:\n%s' % (self.name, gitcmd.stdout), end='', file=output_redir)
      if try_n < retry_fetches - 1:
        print('%s: sleeping %s seconds before retrying' % (self.name, retry_cur_sleep),
              file=output_redir)
        time.sleep(retry_cur_sleep)
        # Exponential backoff capped at MAXIMUM_RETRY_SLEEP_SEC, with jitter.
        retry_cur_sleep = min(retry_exp_factor * retry_cur_sleep,
                              MAXIMUM_RETRY_SLEEP_SEC)
        retry_cur_sleep *= (1 - random.uniform(-RETRY_JITTER_PERCENT,
                                               RETRY_JITTER_PERCENT))

    if initial:
      if alt_dir:
        # Restore the original packed-refs (drop the temporary refs/_alt/*).
        if old_packed != '':
          _lwrite(packed_refs, old_packed)
        else:
          platform_utils.remove(packed_refs)
      self.bare_git.pack_refs('--all', '--prune')

    if is_sha1 and current_branch_only:
      # We just synced the upstream given branch; verify we
      # got what we wanted, else trigger a second run of all
      # refs.
      if not self._CheckForImmutableRevision():
        # Sync the current branch only with depth set to None.
        # We always pass depth=None down to avoid infinite recursion.
        return self._RemoteFetch(
            name=name, quiet=quiet, verbose=verbose, output_redir=output_redir,
            current_branch_only=current_branch_only and depth,
            initial=False, alt_dir=alt_dir,
            depth=None, ssh_proxy=ssh_proxy, clone_filter=clone_filter)

    return ok
2350
  def _ApplyCloneBundle(self, initial=False, quiet=False, verbose=False):
    """Try to bootstrap the object store from a /clone.bundle on the remote.

    Returns:
      True if a bundle was fetched and applied successfully; False if no
      bundle was used (not an error — normal fetch should proceed).
    """
    # Bundles don't work with shallow (depth-limited) clones.
    if initial and (self.manifest.manifestProject.depth or self.clone_depth):
      return False

    remote = self.GetRemote()
    bundle_url = remote.url + '/clone.bundle'
    bundle_url = GitConfig.ForUser().UrlInsteadOf(bundle_url)
    # Only http(s)-style transports can serve a clone.bundle.
    if GetSchemeFromUrl(bundle_url) not in ('http', 'https',
                                            'persistent-http',
                                            'persistent-https'):
      return False

    bundle_dst = os.path.join(self.gitdir, 'clone.bundle')
    bundle_tmp = os.path.join(self.gitdir, 'clone.bundle.tmp')

    exist_dst = os.path.exists(bundle_dst)
    exist_tmp = os.path.exists(bundle_tmp)

    # Only download on the initial clone, or to finish a prior partial one.
    if not initial and not exist_dst and not exist_tmp:
      return False

    if not exist_dst:
      exist_dst = self._FetchBundle(bundle_url, bundle_tmp, bundle_dst, quiet,
                                    verbose)
      if not exist_dst:
        return False

    # Fetch from the downloaded bundle file as if it were a remote.
    cmd = ['fetch']
    if not verbose:
      cmd.append('--quiet')
    if not quiet and sys.stdout.isatty():
      cmd.append('--progress')
    if not self.worktree:
      cmd.append('--update-head-ok')
    cmd.append(bundle_dst)
    for f in remote.fetch:
      cmd.append(str(f))
    cmd.append('+refs/tags/*:refs/tags/*')

    ok = GitCommand(
        self, cmd, bare=True, objdir=os.path.join(self.objdir, 'objects')).Wait() == 0
    # Clean up the bundle files regardless of the fetch result.
    platform_utils.remove(bundle_dst, missing_ok=True)
    platform_utils.remove(bundle_tmp, missing_ok=True)
    return ok
2395
  def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet, verbose):
    """Download a clone.bundle via curl to |tmpPath|, then move to |dstPath|.

    Resumes a previously interrupted download when a large-enough |tmpPath|
    already exists.  Returns True only when a complete, valid bundle ends up
    at |dstPath|.

    NOTE(review): reconstructed from a garbled diff view — the with-block
    nesting below mirrors the visible code; confirm against upstream.
    """
    platform_utils.remove(dstPath, missing_ok=True)

    cmd = ['curl', '--fail', '--output', tmpPath, '--netrc', '--location']
    if quiet:
      cmd += ['--silent', '--show-error']
    if os.path.exists(tmpPath):
      size = os.stat(tmpPath).st_size
      if size >= 1024:
        # Resume a partial download; anything smaller is not worth keeping.
        cmd += ['--continue-at', '%d' % (size,)]
      else:
        platform_utils.remove(tmpPath)
    with GetUrlCookieFile(srcUrl, quiet) as (cookiefile, proxy):
      if cookiefile:
        cmd += ['--cookie', cookiefile]
      if proxy:
        cmd += ['--proxy', proxy]
      elif 'http_proxy' in os.environ and 'darwin' == sys.platform:
        cmd += ['--proxy', os.environ['http_proxy']]
      # persistent-http(s) URLs are plain http(s) as far as curl is concerned.
      if srcUrl.startswith('persistent-https'):
        srcUrl = 'http' + srcUrl[len('persistent-https'):]
      elif srcUrl.startswith('persistent-http'):
        srcUrl = 'http' + srcUrl[len('persistent-http'):]
      cmd += [srcUrl]

      proc = None
      with Trace('Fetching bundle: %s', ' '.join(cmd)):
        if verbose:
          print('%s: Downloading bundle: %s' % (self.name, srcUrl))
        stdout = None if verbose else subprocess.PIPE
        stderr = None if verbose else subprocess.STDOUT
        try:
          proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr)
        except OSError:
          # curl not installed (or not runnable): silently skip bundles.
          return False

      (output, _) = proc.communicate()
      curlret = proc.returncode

      if curlret == 22:
        # From curl man page:
        # 22: HTTP page not retrieved. The requested url was not found or
        # returned another error with the HTTP error code being 400 or above.
        # This return code only appears if -f, --fail is used.
        if verbose:
          print('%s: Unable to retrieve clone.bundle; ignoring.' % self.name)
          if output:
            print('Curl output:\n%s' % output)
        return False
      elif curlret and not verbose and output:
        print('%s' % output, file=sys.stderr)

    if os.path.exists(tmpPath):
      if curlret == 0 and self._IsValidBundle(tmpPath, quiet):
        platform_utils.rename(tmpPath, dstPath)
        return True
      else:
        platform_utils.remove(tmpPath)
        return False
    else:
      return False
2457 1809
2458 def _IsValidBundle(self, path, quiet): 1810 def StartBranch(self, name, branch_merge="", revision=None):
2459 try: 1811 """Create a new branch off the manifest's revision."""
2460 with open(path, 'rb') as f: 1812 if not branch_merge:
2461 if f.read(16) == b'# v2 git bundle\n': 1813 branch_merge = self.revisionExpr
2462 return True 1814 head = self.work_git.GetHead()
1815 if head == (R_HEADS + name):
1816 return True
1817
1818 all_refs = self.bare_ref.all
1819 if R_HEADS + name in all_refs:
1820 return GitCommand(self, ["checkout", "-q", name, "--"]).Wait() == 0
1821
1822 branch = self.GetBranch(name)
1823 branch.remote = self.GetRemote()
1824 branch.merge = branch_merge
1825 if not branch.merge.startswith("refs/") and not ID_RE.match(
1826 branch_merge
1827 ):
1828 branch.merge = R_HEADS + branch_merge
1829
1830 if revision is None:
1831 revid = self.GetRevisionId(all_refs)
2463 else: 1832 else:
2464 if not quiet: 1833 revid = self.work_git.rev_parse(revision)
2465 print("Invalid clone.bundle file; ignoring.", file=sys.stderr) 1834
2466 return False 1835 if head.startswith(R_HEADS):
2467 except OSError: 1836 try:
2468 return False 1837 head = all_refs[head]
2469 1838 except KeyError:
2470 def _Checkout(self, rev, quiet=False): 1839 head = None
2471 cmd = ['checkout'] 1840 if revid and head and revid == head:
2472 if quiet: 1841 ref = R_HEADS + name
2473 cmd.append('-q') 1842 self.work_git.update_ref(ref, revid)
2474 cmd.append(rev) 1843 self.work_git.symbolic_ref(HEAD, ref)
2475 cmd.append('--') 1844 branch.Save()
2476 if GitCommand(self, cmd).Wait() != 0: 1845 return True
2477 if self._allrefs: 1846
2478 raise GitError('%s checkout %s ' % (self.name, rev)) 1847 if (
2479 1848 GitCommand(
2480 def _CherryPick(self, rev, ffonly=False, record_origin=False): 1849 self, ["checkout", "-q", "-b", branch.name, revid]
2481 cmd = ['cherry-pick'] 1850 ).Wait()
2482 if ffonly: 1851 == 0
2483 cmd.append('--ff') 1852 ):
2484 if record_origin: 1853 branch.Save()
2485 cmd.append('-x') 1854 return True
2486 cmd.append(rev) 1855 return False
2487 cmd.append('--')
2488 if GitCommand(self, cmd).Wait() != 0:
2489 if self._allrefs:
2490 raise GitError('%s cherry-pick %s ' % (self.name, rev))
2491
2492 def _LsRemote(self, refs):
2493 cmd = ['ls-remote', self.remote.name, refs]
2494 p = GitCommand(self, cmd, capture_stdout=True)
2495 if p.Wait() == 0:
2496 return p.stdout
2497 return None
2498
2499 def _Revert(self, rev):
2500 cmd = ['revert']
2501 cmd.append('--no-edit')
2502 cmd.append(rev)
2503 cmd.append('--')
2504 if GitCommand(self, cmd).Wait() != 0:
2505 if self._allrefs:
2506 raise GitError('%s revert %s ' % (self.name, rev))
2507
2508 def _ResetHard(self, rev, quiet=True):
2509 cmd = ['reset', '--hard']
2510 if quiet:
2511 cmd.append('-q')
2512 cmd.append(rev)
2513 if GitCommand(self, cmd).Wait() != 0:
2514 raise GitError('%s reset --hard %s ' % (self.name, rev))
2515
2516 def _SyncSubmodules(self, quiet=True):
2517 cmd = ['submodule', 'update', '--init', '--recursive']
2518 if quiet:
2519 cmd.append('-q')
2520 if GitCommand(self, cmd).Wait() != 0:
2521 raise GitError('%s submodule update --init --recursive ' % self.name)
2522
2523 def _Rebase(self, upstream, onto=None):
2524 cmd = ['rebase']
2525 if onto is not None:
2526 cmd.extend(['--onto', onto])
2527 cmd.append(upstream)
2528 if GitCommand(self, cmd).Wait() != 0:
2529 raise GitError('%s rebase %s ' % (self.name, upstream))
2530
2531 def _FastForward(self, head, ffonly=False):
2532 cmd = ['merge', '--no-stat', head]
2533 if ffonly:
2534 cmd.append("--ff-only")
2535 if GitCommand(self, cmd).Wait() != 0:
2536 raise GitError('%s merge %s ' % (self.name, head))
2537
  def _InitGitDir(self, mirror_git=None, force_sync=False, quiet=False):
    """Create and configure the bare git dir(s) backing this project.

    Args:
      mirror_git: Optional explicit path to a local mirror to use as an
          alternate object store.
      force_sync: If the shared-dir layout check fails, delete the gitdir
          (and worktree) and retry once from scratch.
      quiet: Suppress non-essential output.

    On any failure, directories this call created are removed again so a
    later sync can start clean.
    """
    init_git_dir = not os.path.exists(self.gitdir)
    init_obj_dir = not os.path.exists(self.objdir)
    try:
      # Initialize the bare repository, which contains all of the objects.
      if init_obj_dir:
        os.makedirs(self.objdir)
        self.bare_objdir.init()

        self._UpdateHooks(quiet=quiet)

        if self.use_git_worktrees:
          # Enable per-worktree config file support if possible. This is more a
          # nice-to-have feature for users rather than a hard requirement.
          if git_require((2, 20, 0)):
            self.EnableRepositoryExtension('worktreeConfig')

      # If we have a separate directory to hold refs, initialize it as well.
      if self.objdir != self.gitdir:
        if init_git_dir:
          os.makedirs(self.gitdir)

        if init_obj_dir or init_git_dir:
          self._ReferenceGitDir(self.objdir, self.gitdir, copy_all=True)
        try:
          self._CheckDirReference(self.objdir, self.gitdir)
        except GitError as e:
          if force_sync:
            print("Retrying clone after deleting %s" %
                  self.gitdir, file=sys.stderr)
            try:
              platform_utils.rmtree(platform_utils.realpath(self.gitdir))
              if self.worktree and os.path.exists(platform_utils.realpath
                                                  (self.worktree)):
                platform_utils.rmtree(platform_utils.realpath(self.worktree))
              # Retry exactly once: force_sync=False on the recursive call.
              return self._InitGitDir(mirror_git=mirror_git, force_sync=False,
                                      quiet=quiet)
            except Exception:
              raise e
          raise e

      if init_git_dir:
        mp = self.manifest.manifestProject
        ref_dir = mp.reference or ''

        def _expanded_ref_dirs():
          """Iterate through the possible git reference directory paths."""
          name = self.name + '.git'
          yield mirror_git or os.path.join(ref_dir, name)
          for prefix in '', self.remote.name:
            yield os.path.join(ref_dir, '.repo', 'project-objects', prefix, name)
            yield os.path.join(ref_dir, '.repo', 'worktrees', prefix, name)

        if ref_dir or mirror_git:
          found_ref_dir = None
          for path in _expanded_ref_dirs():
            if os.path.exists(path):
              found_ref_dir = path
              break
          ref_dir = found_ref_dir

          if ref_dir:
            if not os.path.isabs(ref_dir):
              # The alternate directory is relative to the object database.
              ref_dir = os.path.relpath(ref_dir,
                                        os.path.join(self.objdir, 'objects'))
            _lwrite(os.path.join(self.objdir, 'objects/info/alternates'),
                    os.path.join(ref_dir, 'objects') + '\n')

        # Seed identity settings from the manifest project's config.
        m = self.manifest.manifestProject.config
        for key in ['user.name', 'user.email']:
          if m.Has(key, include_defaults=False):
            self.config.SetString(key, m.GetString(key))
        if not self.manifest.EnableGitLfs:
          self.config.SetString('filter.lfs.smudge', 'git-lfs smudge --skip -- %f')
          self.config.SetString('filter.lfs.process', 'git-lfs filter-process --skip')
        self.config.SetBoolean('core.bare', True if self.manifest.IsMirror else None)
    except Exception:
      # Roll back anything we created so the next attempt starts fresh.
      if init_obj_dir and os.path.exists(self.objdir):
        platform_utils.rmtree(self.objdir)
      if init_git_dir and os.path.exists(self.gitdir):
        platform_utils.rmtree(self.gitdir)
      raise
2621
2622 def _UpdateHooks(self, quiet=False):
2623 if os.path.exists(self.objdir):
2624 self._InitHooks(quiet=quiet)
2625
  def _InitHooks(self, quiet=False):
    """Install repo's stock hooks into this project's hooks directory.

    Stock hooks are symlinked in when possible, hardlinked as a fallback,
    and locally modified hooks are left untouched.

    Args:
      quiet: If True, don't warn about locally modified hooks.
    """
    hooks = platform_utils.realpath(os.path.join(self.objdir, 'hooks'))
    if not os.path.exists(hooks):
      os.makedirs(hooks)

    # Delete sample hooks. They're noise.
    for hook in glob.glob(os.path.join(hooks, '*.sample')):
      try:
        platform_utils.remove(hook, missing_ok=True)
      except PermissionError:
        pass

    for stock_hook in _ProjectHooks():
      name = os.path.basename(stock_hook)

      if name in ('commit-msg',) and not self.remote.review \
              and self is not self.manifest.manifestProject:
        # Don't install a Gerrit Code Review hook if this
        # project does not appear to use it for reviews.
        #
        # Since the manifest project is one of those, but also
        # managed through gerrit, it's excluded
        continue

      dst = os.path.join(hooks, name)
      if platform_utils.islink(dst):
        continue
      if os.path.exists(dst):
        # If the files are the same, we'll leave it alone. We create symlinks
        # below by default but fallback to hardlinks if the OS blocks them.
        # So if we're here, it's probably because we made a hardlink below.
        if not filecmp.cmp(stock_hook, dst, shallow=False):
          if not quiet:
            _warn("%s: Not replacing locally modified %s hook",
                  self.RelPath(local=False), name)
          continue
      try:
        platform_utils.symlink(
            os.path.relpath(stock_hook, os.path.dirname(dst)), dst)
      except OSError as e:
        if e.errno == errno.EPERM:
          # Symlinks blocked (e.g. Windows without privilege): hardlink.
          try:
            os.link(stock_hook, dst)
          except OSError:
            raise GitError(self._get_symlink_error_message())
        else:
          raise
2673
2674 def _InitRemote(self):
2675 if self.remote.url:
2676 remote = self.GetRemote()
2677 remote.url = self.remote.url
2678 remote.pushUrl = self.remote.pushUrl
2679 remote.review = self.remote.review
2680 remote.projectname = self.name
2681
2682 if self.worktree:
2683 remote.ResetFetch(mirror=False)
2684 else:
2685 remote.ResetFetch(mirror=True)
2686 remote.Save()
2687
  def _InitMRef(self):
    """Initialize the pseudo m/<manifest branch> ref."""
    if self.manifest.branch:
      if self.use_git_worktrees:
        # Set up the m/ space to point to the worktree-specific ref space.
        # We'll update the worktree-specific ref space on each checkout.
        ref = R_M + self.manifest.branch
        if not self.bare_ref.symref(ref):
          self.bare_git.symbolic_ref(
              '-m', 'redirecting to worktree scope',
              ref, R_WORKTREE_M + self.manifest.branch)

        # We can't update this ref with git worktrees until it exists.
        # We'll wait until the initial checkout to set it.
        if not os.path.exists(self.worktree):
          return

        base = R_WORKTREE_M
        active_git = self.work_git

        self._InitAnyMRef(HEAD, self.bare_git, detach=True)
      else:
        # Without worktrees, the m/ ref lives directly in the bare repo.
        base = R_M
        active_git = self.bare_git

      self._InitAnyMRef(base + self.manifest.branch, active_git)
2714
  def _InitMirrorHead(self):
    """Point a mirror's HEAD at the manifest revision via _InitAnyMRef."""
    self._InitAnyMRef(HEAD, self.bare_git)
2717
  def _InitAnyMRef(self, ref, active_git, detach=False):
    """Initialize |ref| in |active_git| to the value in the manifest.

    This points |ref| to the <project> setting in the manifest.

    Args:
      ref: The branch to update.
      active_git: The git repository to make updates in.
      detach: Whether to update target of symbolic refs, or overwrite the ref
        directly (and thus make it non-symbolic).
    """
    cur = self.bare_ref.symref(ref)

    if self.revisionId:
      # Manifest pins an exact commit: force a detached update if stale.
      if cur != '' or self.bare_ref.get(ref) != self.revisionId:
        msg = 'manifest set to %s' % self.revisionId
        dst = self.revisionId + '^0'
        active_git.UpdateRef(ref, dst, message=msg, detach=True)
    else:
      # Manifest names a revision expression; resolve via the remote.
      remote = self.GetRemote()
      dst = remote.ToLocal(self.revisionExpr)
      if cur != dst:
        msg = 'manifest set to %s' % self.revisionExpr
        if detach:
          active_git.UpdateRef(ref, dst, message=msg, detach=True)
        else:
          active_git.symbolic_ref('-m', msg, ref, dst)
2745
2746 def _CheckDirReference(self, srcdir, destdir):
2747 # Git worktrees don't use symlinks to share at all.
2748 if self.use_git_worktrees:
2749 return
2750
2751 for name in self.shareable_dirs:
2752 # Try to self-heal a bit in simple cases.
2753 dst_path = os.path.join(destdir, name)
2754 src_path = os.path.join(srcdir, name)
2755
2756 dst = platform_utils.realpath(dst_path)
2757 if os.path.lexists(dst):
2758 src = platform_utils.realpath(src_path)
2759 # Fail if the links are pointing to the wrong place
2760 if src != dst:
2761 _error('%s is different in %s vs %s', name, destdir, srcdir)
2762 raise GitError('--force-sync not enabled; cannot overwrite a local '
2763 'work tree. If you\'re comfortable with the '
2764 'possibility of losing the work tree\'s git metadata,'
2765 ' use `repo sync --force-sync {0}` to '
2766 'proceed.'.format(self.RelPath(local=False)))
2767
  def _ReferenceGitDir(self, gitdir, dotgit, copy_all):
    """Update |dotgit| to reference |gitdir|, using symlinks where possible.

    Args:
      gitdir: The bare git repository. Must already be initialized.
      dotgit: The repository you would like to initialize.
      copy_all: If true, copy all remaining files from |gitdir| -> |dotgit|.
        This saves you the effort of initializing |dotgit| yourself.

    Raises:
      DownloadError: The filesystem refused to create a symlink.
    """
    symlink_dirs = self.shareable_dirs[:]
    to_symlink = symlink_dirs

    to_copy = []
    if copy_all:
      to_copy = platform_utils.listdir(gitdir)

    dotgit = platform_utils.realpath(dotgit)
    for name in set(to_copy).union(to_symlink):
      try:
        src = platform_utils.realpath(os.path.join(gitdir, name))
        dst = os.path.join(dotgit, name)

        # Never clobber anything already present in |dotgit|.
        if os.path.lexists(dst):
          continue

        # If the source dir doesn't exist, create an empty dir.
        if name in symlink_dirs and not os.path.lexists(src):
          os.makedirs(src)

        if name in to_symlink:
          platform_utils.symlink(
              os.path.relpath(src, os.path.dirname(dst)), dst)
        elif copy_all and not platform_utils.islink(dst):
          if platform_utils.isdir(src):
            shutil.copytree(src, dst)
          elif os.path.isfile(src):
            shutil.copy(src, dst)

      except OSError as e:
        if e.errno == errno.EPERM:
          raise DownloadError(self._get_symlink_error_message())
        else:
          raise
2811
  def _InitGitWorktree(self):
    """Init the project using git worktrees."""
    self.bare_git.worktree('prune')
    self.bare_git.worktree('add', '-ff', '--checkout', '--detach', '--lock',
                           self.worktree, self.GetRevisionId())

    # Rewrite the internal state files to use relative paths between the
    # checkouts & worktrees.
    dotgit = os.path.join(self.worktree, '.git')
    with open(dotgit, 'r') as fp:
      # Figure out the checkout->worktree path.
      setting = fp.read()
      assert setting.startswith('gitdir:')
      git_worktree_path = setting.split(':', 1)[1].strip()
    # Some platforms (e.g. Windows) won't let us update dotgit in situ because
    # of file permissions. Delete it and recreate it from scratch to avoid.
    platform_utils.remove(dotgit)
    # Use relative path from checkout->worktree & maintain Unix line endings
    # on all OS's to match git behavior.
    with open(dotgit, 'w', newline='\n') as fp:
      print('gitdir:', os.path.relpath(git_worktree_path, self.worktree),
            file=fp)
    # Use relative path from worktree->checkout & maintain Unix line endings
    # on all OS's to match git behavior.
    with open(os.path.join(git_worktree_path, 'gitdir'), 'w', newline='\n') as fp:
      print(os.path.relpath(dotgit, git_worktree_path), file=fp)

    self._InitMRef()
2840
  def _InitWorkTree(self, force_sync=False, submodules=False):
    """Setup the worktree .git path.

    This is the user-visible path like src/foo/.git/.

    With non-git-worktrees, this will be a symlink to the .repo/projects/ path.
    With git-worktrees, this will be a .git file using "gitdir: ..." syntax.

    Older checkouts had .git/ directories. If we see that, migrate it.

    This also handles changes in the manifest. Maybe this project was backed
    by "foo/bar" on the server, but now it's "new/foo/bar". We have to update
    the path we point to under .repo/projects/ to match.
    """
    dotgit = os.path.join(self.worktree, '.git')

    # If using an old layout style (a directory), migrate it.
    if not platform_utils.islink(dotgit) and platform_utils.isdir(dotgit):
      self._MigrateOldWorkTreeGitDir(dotgit)

    init_dotgit = not os.path.exists(dotgit)
    if self.use_git_worktrees:
      if init_dotgit:
        self._InitGitWorktree()
        self._CopyAndLinkFiles()
    else:
      if not init_dotgit:
        # See if the project has changed.
        if platform_utils.realpath(self.gitdir) != platform_utils.realpath(dotgit):
          platform_utils.remove(dotgit)

      if init_dotgit or not os.path.exists(dotgit):
        os.makedirs(self.worktree, exist_ok=True)
        platform_utils.symlink(os.path.relpath(self.gitdir, self.worktree), dotgit)

      if init_dotgit:
        _lwrite(os.path.join(dotgit, HEAD), '%s\n' % self.GetRevisionId())

        # Finish checking out the worktree.
        cmd = ['read-tree', '--reset', '-u', '-v', HEAD]
        if GitCommand(self, cmd).Wait() != 0:
          raise GitError('Cannot initialize work tree for ' + self.name)

        if submodules:
          self._SyncSubmodules(quiet=True)
        self._CopyAndLinkFiles()
2887 1907
  @classmethod
  def _MigrateOldWorkTreeGitDir(cls, dotgit):
    """Migrate the old worktree .git/ dir style to a symlink.

    This logic specifically only uses state from |dotgit| to figure out where to
    move content and not |self|. This way if the backing project also changed
    places, we only do the .git/ dir to .git symlink migration here. The path
    updates will happen independently.
    """
    # Figure out where in .repo/projects/ it's pointing to.
    if not os.path.islink(os.path.join(dotgit, 'refs')):
      raise GitError(f'{dotgit}: unsupported checkout state')
    gitdir = os.path.dirname(os.path.realpath(os.path.join(dotgit, 'refs')))

    # Remove known symlink paths that exist in .repo/projects/.
    KNOWN_LINKS = {
        'config', 'description', 'hooks', 'info', 'logs', 'objects',
        'packed-refs', 'refs', 'rr-cache', 'shallow', 'svn',
    }
    # Paths that we know will be in both, but are safe to clobber in .repo/projects/.
    SAFE_TO_CLOBBER = {
        'COMMIT_EDITMSG', 'FETCH_HEAD', 'HEAD', 'gc.log', 'gitk.cache', 'index',
        'ORIG_HEAD',
    }

    # First see if we'd succeed before starting the migration.
    unknown_paths = []
    for name in platform_utils.listdir(dotgit):
      # Ignore all temporary/backup names. These are common with vim & emacs.
      if name.endswith('~') or (name[0] == '#' and name[-1] == '#'):
        continue

      dotgit_path = os.path.join(dotgit, name)
      if name in KNOWN_LINKS:
        if not platform_utils.islink(dotgit_path):
          unknown_paths.append(f'{dotgit_path}: should be a symlink')
      else:
        gitdir_path = os.path.join(gitdir, name)
        if name not in SAFE_TO_CLOBBER and os.path.exists(gitdir_path):
          unknown_paths.append(f'{dotgit_path}: unknown file; please file a bug')
    if unknown_paths:
      raise GitError('Aborting migration: ' + '\n'.join(unknown_paths))

    # Now walk the paths and sync the .git/ to .repo/projects/.
    for name in platform_utils.listdir(dotgit):
      dotgit_path = os.path.join(dotgit, name)

      # Ignore all temporary/backup names. These are common with vim & emacs.
      if name.endswith('~') or (name[0] == '#' and name[-1] == '#'):
        platform_utils.remove(dotgit_path)
      elif name in KNOWN_LINKS:
        platform_utils.remove(dotgit_path)
      else:
        gitdir_path = os.path.join(gitdir, name)
        platform_utils.remove(gitdir_path, missing_ok=True)
        platform_utils.rename(dotgit_path, gitdir_path)

    # Now that the dir should be empty, clear it out, and symlink it over.
    platform_utils.rmdir(dotgit)
    platform_utils.symlink(os.path.relpath(gitdir, os.path.dirname(dotgit)), dotgit)
2948
2949 def _get_symlink_error_message(self):
2950 if platform_utils.isWindows():
2951 return ('Unable to create symbolic link. Please re-run the command as '
2952 'Administrator, or see '
2953 'https://github.com/git-for-windows/git/wiki/Symbolic-Links '
2954 'for other options.')
2955 return 'filesystem must support symlinks'
2956
2957 def _revlist(self, *args, **kw):
2958 a = []
2959 a.extend(args)
2960 a.append('--')
2961 return self.work_git.rev_list(*a, **kw)
2962
  @property
  def _allrefs(self):
    """All refs known to the bare repository (delegates to bare_ref.all)."""
    return self.bare_ref.all
2966
2967 def _getLogs(self, rev1, rev2, oneline=False, color=True, pretty_format=None):
2968 """Get logs between two revisions of this project."""
2969 comp = '..'
2970 if rev1:
2971 revs = [rev1]
2972 if rev2:
2973 revs.extend([comp, rev2])
2974 cmd = ['log', ''.join(revs)]
2975 out = DiffColoring(self.config)
2976 if out.is_on and color:
2977 cmd.append('--color')
2978 if pretty_format is not None:
2979 cmd.append('--pretty=format:%s' % pretty_format)
2980 if oneline:
2981 cmd.append('--oneline')
2982
2983 try:
2984 log = GitCommand(self, cmd, capture_stdout=True, capture_stderr=True)
2985 if log.Wait() == 0:
2986 return log.stdout
2987 except GitError:
2988 # worktree may not exist if groups changed for example. In that case,
2989 # try in gitdir instead.
2990 if not os.path.exists(self.worktree):
2991 return self.bare_git.log(*cmd[1:])
2992 else:
2993 raise
2994 return None
2995
2996 def getAddedAndRemovedLogs(self, toProject, oneline=False, color=True,
2997 pretty_format=None):
2998 """Get the list of logs from this revision to given revisionId"""
2999 logs = {}
3000 selfId = self.GetRevisionId(self._allrefs)
3001 toId = toProject.GetRevisionId(toProject._allrefs)
3002
3003 logs['added'] = self._getLogs(selfId, toId, oneline=oneline, color=color,
3004 pretty_format=pretty_format)
3005 logs['removed'] = self._getLogs(toId, selfId, oneline=oneline, color=color,
3006 pretty_format=pretty_format)
3007 return logs
3008
  class _GitGetByExec(object):
    """Run git commands against a project's repository via GitCommand.

    Besides the explicit helpers below, arbitrary git subcommands can be
    invoked pythonically (e.g. obj.rev_parse('HEAD')); see __getattr__.
    """

    def __init__(self, project, bare, gitdir):
      self._project = project
      self._bare = bare
      self._gitdir = gitdir

    # __getstate__ and __setstate__ are required for pickling because __getattr__ exists.
    def __getstate__(self):
      return (self._project, self._bare, self._gitdir)

    def __setstate__(self, state):
      self._project, self._bare, self._gitdir = state

    def LsOthers(self):
      """Return untracked, non-ignored paths in the work tree."""
      p = GitCommand(self._project,
                     ['ls-files',
                      '-z',
                      '--others',
                      '--exclude-standard'],
                     bare=False,
                     gitdir=self._gitdir,
                     capture_stdout=True,
                     capture_stderr=True)
      if p.Wait() == 0:
        out = p.stdout
        if out:
          # Backslash is not anomalous
          return out[:-1].split('\0')
      return []

    def DiffZ(self, name, *args):
      """Run a raw diff subcommand with -z and parse it into {path: _Info}."""
      cmd = [name]
      cmd.append('-z')
      cmd.append('--ignore-submodules')
      cmd.extend(args)
      p = GitCommand(self._project,
                     cmd,
                     gitdir=self._gitdir,
                     bare=False,
                     capture_stdout=True,
                     capture_stderr=True)
      p.Wait()
      r = {}
      out = p.stdout
      if out:
        out = iter(out[:-1].split('\0'))
        # NOTE: an iterator object is always truthy; this loop terminates
        # via the StopIteration break below, not the condition.
        while out:
          try:
            info = next(out)
            path = next(out)
          except StopIteration:
            break

          class _Info(object):
            """One parsed record of `git diff --raw -z` style output."""

            def __init__(self, path, omode, nmode, oid, nid, state):
              self.path = path
              self.src_path = None
              self.old_mode = omode
              self.new_mode = nmode
              self.old_id = oid
              self.new_id = nid

              if len(state) == 1:
                self.status = state
                self.level = None
              else:
                # e.g. 'R086' -> status 'R', similarity level '86'.
                self.status = state[:1]
                self.level = state[1:]
                while self.level.startswith('0'):
                  self.level = self.level[1:]

          info = info[1:].split(' ')
          info = _Info(path, *info)
          if info.status in ('R', 'C'):
            # Renames/copies carry the destination path as an extra record.
            info.src_path = info.path
            info.path = next(out)
          r[info.path] = info
      return r

    def GetDotgitPath(self, subpath=None):
      """Return the full path to the .git dir.

      As a convenience, append |subpath| if provided.
      """
      if self._bare:
        dotgit = self._gitdir
      else:
        dotgit = os.path.join(self._project.worktree, '.git')
        if os.path.isfile(dotgit):
          # Git worktrees use a "gitdir:" syntax to point to the scratch space.
          with open(dotgit) as fp:
            setting = fp.read()
          assert setting.startswith('gitdir:')
          gitdir = setting.split(':', 1)[1].strip()
          dotgit = os.path.normpath(os.path.join(self._project.worktree, gitdir))

      return dotgit if subpath is None else os.path.join(dotgit, subpath)

    def GetHead(self):
      """Return the ref that HEAD points to."""
      path = self.GetDotgitPath(subpath=HEAD)
      try:
        with open(path) as fd:
          line = fd.readline()
      except IOError as e:
        raise NoManifestException(path, str(e))
      try:
        # Tolerate bytes content (legacy compatibility).
        line = line.decode()
      except AttributeError:
        pass
      if line.startswith('ref: '):
        return line[5:-1]
      return line[:-1]

    def SetHead(self, ref, message=None):
      """Point HEAD at |ref| using symbolic-ref."""
      cmdv = []
      if message is not None:
        cmdv.extend(['-m', message])
      cmdv.append(HEAD)
      cmdv.append(ref)
      self.symbolic_ref(*cmdv)

    def DetachHead(self, new, message=None):
      """Detach HEAD directly at |new| (update-ref --no-deref)."""
      cmdv = ['--no-deref']
      if message is not None:
        cmdv.extend(['-m', message])
      cmdv.append(HEAD)
      cmdv.append(new)
      self.update_ref(*cmdv)

    def UpdateRef(self, name, new, old=None,
                  message=None,
                  detach=False):
      """Update ref |name| to |new|, optionally verifying |old| first."""
      cmdv = []
      if message is not None:
        cmdv.extend(['-m', message])
      if detach:
        cmdv.append('--no-deref')
      cmdv.append(name)
      cmdv.append(new)
      if old is not None:
        cmdv.append(old)
      self.update_ref(*cmdv)

    def DeleteRef(self, name, old=None):
      """Delete ref |name| and drop it from the cached ref state."""
      if not old:
        old = self.rev_parse(name)
      self.update_ref('-d', name, old)
      self._project.bare_ref.deleted(name)

    def rev_list(self, *args, **kw):
      """Run rev-list (or `log --pretty` when format= given); return lines.

      Raises:
        GitError: The underlying git command failed.
      """
      if 'format' in kw:
        cmdv = ['log', '--pretty=format:%s' % kw['format']]
      else:
        cmdv = ['rev-list']
      cmdv.extend(args)
      p = GitCommand(self._project,
                     cmdv,
                     bare=self._bare,
                     gitdir=self._gitdir,
                     capture_stdout=True,
                     capture_stderr=True)
      if p.Wait() != 0:
        raise GitError('%s rev-list %s: %s' %
                       (self._project.name, str(args), p.stderr))
      return p.stdout.splitlines()

    def __getattr__(self, name):
      """Allow arbitrary git commands using pythonic syntax.

      This allows you to do things like:
        git_obj.rev_parse('HEAD')

      Since we don't have a 'rev_parse' method defined, the __getattr__ will
      run. We'll replace the '_' with a '-' and try to run a git command.
      Any other positional arguments will be passed to the git command, and the
      following keyword arguments are supported:
        config: An optional dict of git config options to be passed with '-c'.

      Args:
        name: The name of the git command to call. Any '_' characters will
          be replaced with '-'.

      Returns:
        A callable object that will try to call git with the named command.
      """
      name = name.replace('_', '-')

      def runner(*args, **kwargs):
        cmdv = []
        config = kwargs.pop('config', None)
        for k in kwargs:
          raise TypeError('%s() got an unexpected keyword argument %r'
                          % (name, k))
        if config is not None:
          for k, v in config.items():
            cmdv.append('-c')
            cmdv.append('%s=%s' % (k, v))
        cmdv.append(name)
        cmdv.extend(args)
        p = GitCommand(self._project,
                       cmdv,
                       bare=self._bare,
                       gitdir=self._gitdir,
                       capture_stdout=True,
                       capture_stderr=True)
        if p.Wait() != 0:
          raise GitError('%s %s: %s' %
                         (self._project.name, name, p.stderr))
        r = p.stdout
        # Strip a single trailing newline but keep multi-line output intact.
        if r.endswith('\n') and r.index('\n') == len(r) - 1:
          return r[:-1]
        return r
      return runner
3225 1944
    def PruneHeads(self):
        """Prune any topic branches already merged into upstream.

        Returns:
            List of ReviewableBranch objects for the local branches that
            could NOT be deleted (they still carry unmerged work).
        """
        cb = self.CurrentBranch
        kill = []
        left = self._allrefs
        for name in left.keys():
            if name.startswith(R_HEADS):
                name = name[len(R_HEADS) :]
                if cb is None or name != cb:
                    kill.append(name)

        # Minor optimization: If there's nothing to prune, then don't try to
        # read any project state.
        if not kill and not cb:
            return []

        rev = self.GetRevisionId(left)
        if (
            cb is not None
            and not self._revlist(HEAD + "..." + rev)
            and not self.IsDirty(consider_untracked=False)
        ):
            # Current branch is fully merged and clean; detach HEAD so it
            # can be deleted too.
            self.work_git.DetachHead(HEAD)
            kill.append(cb)

        if kill:
            old = self.bare_git.GetHead()

            try:
                # Detach so `branch -d` can delete the branch HEAD is on.
                self.bare_git.DetachHead(rev)

                b = ["branch", "-d"]
                b.extend(kill)
                b = GitCommand(
                    self, b, bare=True, capture_stdout=True, capture_stderr=True
                )
                b.Wait()
            finally:
                # Always restore the original HEAD (ref name or detached id).
                if ID_RE.match(old):
                    self.bare_git.DetachHead(old)
                else:
                    self.bare_git.SetHead(old)
                left = self._allrefs

            for branch in kill:
                if (R_HEADS + branch) not in left:
                    self.CleanPublishedCache()
                    break

        if cb and cb not in kill:
            kill.append(cb)
        kill.sort()

        # Report whatever survived deletion back to the caller.
        kept = []
        for branch in kill:
            if R_HEADS + branch in left:
                branch = self.GetBranch(branch)
                base = branch.LocalMerge
                if not base:
                    base = rev
                kept.append(ReviewableBranch(self, branch, base))
        return kept
2007
2008 def GetRegisteredSubprojects(self):
2009 result = []
2010
2011 def rec(subprojects):
2012 if not subprojects:
2013 return
2014 result.extend(subprojects)
2015 for p in subprojects:
2016 rec(p.subprojects)
2017
2018 rec(self.subprojects)
2019 return result
2020
    def _GetSubmodules(self):
        """Return [(rev, path, url), ...] for submodules at this revision.

        Reads .gitmodules straight out of the object database so it works
        even before any working tree has been checked out.
        """
        # Unfortunately we cannot call `git submodule status --recursive` here
        # because the working tree might not exist yet, and it cannot be used
        # without a working tree in its current implementation.

        def get_submodules(gitdir, rev):
            # Parse .gitmodules for submodule sub_paths and sub_urls.
            sub_paths, sub_urls = parse_gitmodules(gitdir, rev)
            if not sub_paths:
                return []
            # Run `git ls-tree` to read SHAs of submodule object, which happen
            # to be revision of submodule repository.
            sub_revs = git_ls_tree(gitdir, rev, sub_paths)
            submodules = []
            for sub_path, sub_url in zip(sub_paths, sub_urls):
                try:
                    sub_rev = sub_revs[sub_path]
                except KeyError:
                    # Ignore non-exist submodules.
                    continue
                submodules.append((sub_rev, sub_path, sub_url))
            return submodules

        re_path = re.compile(r"^submodule\.(.+)\.path=(.*)$")
        re_url = re.compile(r"^submodule\.(.+)\.url=(.*)$")

        def parse_gitmodules(gitdir, rev):
            # Returns parallel lists of submodule paths and urls; both empty
            # on any failure (missing .gitmodules, bad config, etc.).
            cmd = ["cat-file", "blob", "%s:.gitmodules" % rev]
            try:
                p = GitCommand(
                    None,
                    cmd,
                    capture_stdout=True,
                    capture_stderr=True,
                    bare=True,
                    gitdir=gitdir,
                )
            except GitError:
                return [], []
            if p.Wait() != 0:
                return [], []

            gitmodules_lines = []
            fd, temp_gitmodules_path = tempfile.mkstemp()
            try:
                os.write(fd, p.stdout.encode("utf-8"))
                os.close(fd)
                cmd = ["config", "--file", temp_gitmodules_path, "--list"]
                p = GitCommand(
                    None,
                    cmd,
                    capture_stdout=True,
                    capture_stderr=True,
                    bare=True,
                    gitdir=gitdir,
                )
                if p.Wait() != 0:
                    return [], []
                gitmodules_lines = p.stdout.split("\n")
            except GitError:
                return [], []
            finally:
                platform_utils.remove(temp_gitmodules_path)

            names = set()
            paths = {}
            urls = {}
            for line in gitmodules_lines:
                if not line:
                    continue
                m = re_path.match(line)
                if m:
                    names.add(m.group(1))
                    paths[m.group(1)] = m.group(2)
                    continue
                m = re_url.match(line)
                if m:
                    names.add(m.group(1))
                    urls[m.group(1)] = m.group(2)
                    continue
            names = sorted(names)
            return (
                [paths.get(name, "") for name in names],
                [urls.get(name, "") for name in names],
            )

        def git_ls_tree(gitdir, rev, paths):
            # Map each submodule path to the SHA recorded in the tree.
            cmd = ["ls-tree", rev, "--"]
            cmd.extend(paths)
            try:
                p = GitCommand(
                    None,
                    cmd,
                    capture_stdout=True,
                    capture_stderr=True,
                    bare=True,
                    gitdir=gitdir,
                )
            except GitError:
                return []
            if p.Wait() != 0:
                return []
            objects = {}
            for line in p.stdout.split("\n"):
                if not line.strip():
                    continue
                object_rev, object_path = line.split()[2:4]
                objects[object_path] = object_rev
            return objects

        try:
            rev = self.GetRevisionId()
        except GitError:
            return []
        return get_submodules(self.gitdir, rev)
2136
2137 def GetDerivedSubprojects(self):
2138 result = []
2139 if not self.Exists:
2140 # If git repo does not exist yet, querying its submodules will
2141 # mess up its states; so return here.
2142 return result
2143 for rev, path, url in self._GetSubmodules():
2144 name = self.manifest.GetSubprojectName(self, path)
2145 (
2146 relpath,
2147 worktree,
2148 gitdir,
2149 objdir,
2150 ) = self.manifest.GetSubprojectPaths(self, name, path)
2151 project = self.manifest.paths.get(relpath)
2152 if project:
2153 result.extend(project.GetDerivedSubprojects())
2154 continue
2155
2156 if url.startswith(".."):
2157 url = urllib.parse.urljoin("%s/" % self.remote.url, url)
2158 remote = RemoteSpec(
2159 self.remote.name,
2160 url=url,
2161 pushUrl=self.remote.pushUrl,
2162 review=self.remote.review,
2163 revision=self.remote.revision,
2164 )
2165 subproject = Project(
2166 manifest=self.manifest,
2167 name=name,
2168 remote=remote,
2169 gitdir=gitdir,
2170 objdir=objdir,
2171 worktree=worktree,
2172 relpath=relpath,
2173 revisionExpr=rev,
2174 revisionId=rev,
2175 rebase=self.rebase,
2176 groups=self.groups,
2177 sync_c=self.sync_c,
2178 sync_s=self.sync_s,
2179 sync_tags=self.sync_tags,
2180 parent=self,
2181 is_derived=True,
2182 )
2183 result.append(subproject)
2184 result.extend(subproject.GetDerivedSubprojects())
2185 return result
2186
2187 def EnableRepositoryExtension(self, key, value="true", version=1):
2188 """Enable git repository extension |key| with |value|.
3231 2189
2190 Args:
2191 key: The extension to enabled. Omit the "extensions." prefix.
2192 value: The value to use for the extension.
2193 version: The minimum git repository version needed.
2194 """
2195 # Make sure the git repo version is new enough already.
2196 found_version = self.config.GetInt("core.repositoryFormatVersion")
2197 if found_version is None:
2198 found_version = 0
2199 if found_version < version:
2200 self.config.SetString("core.repositoryFormatVersion", str(version))
2201
2202 # Enable the extension!
2203 self.config.SetString("extensions.%s" % (key,), value)
2204
2205 def ResolveRemoteHead(self, name=None):
2206 """Find out what the default branch (HEAD) points to.
2207
2208 Normally this points to refs/heads/master, but projects are moving to
2209 main. Support whatever the server uses rather than hardcoding "master"
2210 ourselves.
2211 """
2212 if name is None:
2213 name = self.remote.name
2214
2215 # The output will look like (NB: tabs are separators):
2216 # ref: refs/heads/master HEAD
2217 # 5f6803b100bb3cd0f534e96e88c91373e8ed1c44 HEAD
2218 output = self.bare_git.ls_remote(
2219 "-q", "--symref", "--exit-code", name, "HEAD"
2220 )
3232 2221
3233class _DirtyError(Exception): 2222 for line in output.splitlines():
2223 lhs, rhs = line.split("\t", 1)
2224 if rhs == "HEAD" and lhs.startswith("ref:"):
2225 return lhs[4:].strip()
3234 2226
3235 def __str__(self): 2227 return None
3236 return 'contains uncommitted changes'
3237 2228
2229 def _CheckForImmutableRevision(self):
2230 try:
2231 # if revision (sha or tag) is not present then following function
2232 # throws an error.
2233 self.bare_git.rev_list(
2234 "-1", "--missing=allow-any", "%s^0" % self.revisionExpr, "--"
2235 )
2236 if self.upstream:
2237 rev = self.GetRemote().ToLocal(self.upstream)
2238 self.bare_git.rev_list(
2239 "-1", "--missing=allow-any", "%s^0" % rev, "--"
2240 )
2241 self.bare_git.merge_base(
2242 "--is-ancestor", self.revisionExpr, rev
2243 )
2244 return True
2245 except GitError:
2246 # There is no such persistent revision. We have to fetch it.
2247 return False
3238 2248
3239class _InfoMessage(object): 2249 def _FetchArchive(self, tarpath, cwd=None):
2250 cmd = ["archive", "-v", "-o", tarpath]
2251 cmd.append("--remote=%s" % self.remote.url)
2252 cmd.append("--prefix=%s/" % self.RelPath(local=False))
2253 cmd.append(self.revisionExpr)
3240 2254
3241 def __init__(self, project, text): 2255 command = GitCommand(
3242 self.project = project 2256 self, cmd, cwd=cwd, capture_stdout=True, capture_stderr=True
3243 self.text = text 2257 )
3244 2258
3245 def Print(self, syncbuf): 2259 if command.Wait() != 0:
3246 syncbuf.out.info('%s/: %s', self.project.RelPath(local=False), self.text) 2260 raise GitError("git archive %s: %s" % (self.name, command.stderr))
3247 syncbuf.out.nl() 2261
    def _RemoteFetch(
        self,
        name=None,
        current_branch_only=False,
        initial=False,
        quiet=False,
        verbose=False,
        output_redir=None,
        alt_dir=None,
        tags=True,
        prune=False,
        depth=None,
        submodules=False,
        ssh_proxy=None,
        force_sync=False,
        clone_filter=None,
        retry_fetches=2,
        retry_sleep_initial_sec=4.0,
        retry_exp_factor=2.0,
    ):
        """Fetch refs from the remote into our bare object database.

        Handles the fetch modes repo supports: shallow (depth) fetches,
        single-branch vs. whole-repo fetches, tag- and sha1-pinned revisions
        (skipped entirely when already present locally), alternate ref dirs
        on initial clone, partial clones (--filter), and retry with
        exponential backoff (plus a one-shot `git remote prune`) on
        transient failures.

        Args:
            name: Remote name to fetch from; defaults to self.remote.name.
            current_branch_only: Fetch only the manifest branch/tag.
            initial: True on the very first fetch of this project.
            quiet/verbose: Output verbosity controls.
            output_redir: Stream to redirect git output to.
            alt_dir: Alternate object dir to borrow refs from (initial only).
            tags: Whether to fetch tags.
            prune: Pass --prune to git fetch.
            depth: Shallow fetch depth (disabled for mirrors and repo itself).
            submodules: Fetch submodules on demand.
            ssh_proxy: ssh proxy helper, if connectivity check passes.
            force_sync: Pass --force to git fetch.
            clone_filter: Partial-clone filter spec.
            retry_fetches: Max fetch attempts (min 2).
            retry_sleep_initial_sec / retry_exp_factor: Backoff tuning.

        Returns:
            True if the fetch succeeded (or was safely skipped).
        """
        is_sha1 = False
        tag_name = None
        # The depth should not be used when fetching to a mirror because
        # it will result in a shallow repository that cannot be cloned or
        # fetched from.
        # The repo project should also never be synced with partial depth.
        if self.manifest.IsMirror or self.relpath == ".repo/repo":
            depth = None

        if depth:
            current_branch_only = True

        if ID_RE.match(self.revisionExpr) is not None:
            is_sha1 = True

        if current_branch_only:
            if self.revisionExpr.startswith(R_TAGS):
                # This is a tag and its commit id should never change.
                tag_name = self.revisionExpr[len(R_TAGS) :]
            elif self.upstream and self.upstream.startswith(R_TAGS):
                # This is a tag and its commit id should never change.
                tag_name = self.upstream[len(R_TAGS) :]

        if is_sha1 or tag_name is not None:
            if self._CheckForImmutableRevision():
                if verbose:
                    print(
                        "Skipped fetching project %s (already have "
                        "persistent ref)" % self.name
                    )
                return True
        if is_sha1 and not depth:
            # When syncing a specific commit and --depth is not set:
            # * if upstream is explicitly specified and is not a sha1, fetch
            #   only upstream as users expect only upstream to be fetch.
            #   Note: The commit might not be in upstream in which case the
            #   sync will fail.
            # * otherwise, fetch all branches to make sure we end up with
            #   the specific commit.
            if self.upstream:
                current_branch_only = not ID_RE.match(self.upstream)
            else:
                current_branch_only = False

        if not name:
            name = self.remote.name

        remote = self.GetRemote(name)
        if not remote.PreConnectFetch(ssh_proxy):
            ssh_proxy = None

        if initial:
            # Borrow refs from the alternate dir (reference/mirror repo) by
            # temporarily splicing them into our packed-refs; restored below.
            if alt_dir and "objects" == os.path.basename(alt_dir):
                ref_dir = os.path.dirname(alt_dir)
                packed_refs = os.path.join(self.gitdir, "packed-refs")

                all_refs = self.bare_ref.all
                ids = set(all_refs.values())
                tmp = set()

                for r, ref_id in GitRefs(ref_dir).all.items():
                    if r not in all_refs:
                        if r.startswith(R_TAGS) or remote.WritesTo(r):
                            all_refs[r] = ref_id
                            ids.add(ref_id)
                            continue

                    if ref_id in ids:
                        continue

                    r = "refs/_alt/%s" % ref_id
                    all_refs[r] = ref_id
                    ids.add(ref_id)
                    tmp.add(r)

                tmp_packed_lines = []
                old_packed_lines = []

                for r in sorted(all_refs):
                    line = "%s %s\n" % (all_refs[r], r)
                    tmp_packed_lines.append(line)
                    if r not in tmp:
                        old_packed_lines.append(line)

                tmp_packed = "".join(tmp_packed_lines)
                old_packed = "".join(old_packed_lines)
                _lwrite(packed_refs, tmp_packed)
            else:
                alt_dir = None

        cmd = ["fetch"]

        if clone_filter:
            git_require((2, 19, 0), fail=True, msg="partial clones")
            cmd.append("--filter=%s" % clone_filter)
            self.EnableRepositoryExtension("partialclone", self.remote.name)

        if depth:
            cmd.append("--depth=%s" % depth)
        else:
            # If this repo has shallow objects, then we don't know which refs
            # have shallow objects or not. Tell git to unshallow all fetched
            # refs. Don't do this with projects that don't have shallow
            # objects, since it is less efficient.
            if os.path.exists(os.path.join(self.gitdir, "shallow")):
                cmd.append("--depth=2147483647")

        if not verbose:
            cmd.append("--quiet")
        if not quiet and sys.stdout.isatty():
            cmd.append("--progress")
        if not self.worktree:
            cmd.append("--update-head-ok")
        cmd.append(name)

        if force_sync:
            cmd.append("--force")

        if prune:
            cmd.append("--prune")

        # Always pass something for --recurse-submodules, git with GIT_DIR
        # behaves incorrectly when not given `--recurse-submodules=no`.
        # (b/218891912)
        cmd.append(
            f'--recurse-submodules={"on-demand" if submodules else "no"}'
        )

        spec = []
        if not current_branch_only:
            # Fetch whole repo.
            spec.append(
                str(("+refs/heads/*:") + remote.ToLocal("refs/heads/*"))
            )
        elif tag_name is not None:
            spec.append("tag")
            spec.append(tag_name)

        if self.manifest.IsMirror and not current_branch_only:
            branch = None
        else:
            branch = self.revisionExpr
        if (
            not self.manifest.IsMirror
            and is_sha1
            and depth
            and git_require((1, 8, 3))
        ):
            # Shallow checkout of a specific commit, fetch from that commit
            # and not the heads only as the commit might be deeper in the
            # history.
            spec.append(branch)
            if self.upstream:
                spec.append(self.upstream)
        else:
            if is_sha1:
                branch = self.upstream
            if branch is not None and branch.strip():
                if not branch.startswith("refs/"):
                    branch = R_HEADS + branch
                spec.append(str(("+%s:" % branch) + remote.ToLocal(branch)))

        # If mirroring repo and we cannot deduce the tag or branch to fetch,
        # fetch whole repo.
        if self.manifest.IsMirror and not spec:
            spec.append(
                str(("+refs/heads/*:") + remote.ToLocal("refs/heads/*"))
            )

        # If using depth then we should not get all the tags since they may
        # be outside of the depth.
        if not tags or depth:
            cmd.append("--no-tags")
        else:
            cmd.append("--tags")
            spec.append(str(("+refs/tags/*:") + remote.ToLocal("refs/tags/*")))

        cmd.extend(spec)

        # At least one retry minimum due to git remote prune.
        retry_fetches = max(retry_fetches, 2)
        retry_cur_sleep = retry_sleep_initial_sec
        ok = prune_tried = False
        for try_n in range(retry_fetches):
            gitcmd = GitCommand(
                self,
                cmd,
                bare=True,
                objdir=os.path.join(self.objdir, "objects"),
                ssh_proxy=ssh_proxy,
                merge_output=True,
                capture_stdout=quiet or bool(output_redir),
            )
            if gitcmd.stdout and not quiet and output_redir:
                output_redir.write(gitcmd.stdout)
            ret = gitcmd.Wait()
            if ret == 0:
                ok = True
                break

            # Retry later due to HTTP 429 Too Many Requests.
            elif (
                gitcmd.stdout
                and "error:" in gitcmd.stdout
                and "HTTP 429" in gitcmd.stdout
            ):
                # Fallthru to sleep+retry logic at the bottom.
                pass

            # Try to prune remote branches once in case there are conflicts.
            # For example, if the remote had refs/heads/upstream, but deleted
            # that and now has refs/heads/upstream/foo.
            elif (
                gitcmd.stdout
                and "error:" in gitcmd.stdout
                and "git remote prune" in gitcmd.stdout
                and not prune_tried
            ):
                prune_tried = True
                prunecmd = GitCommand(
                    self,
                    ["remote", "prune", name],
                    bare=True,
                    ssh_proxy=ssh_proxy,
                )
                ret = prunecmd.Wait()
                if ret:
                    break
                print(
                    "retrying fetch after pruning remote branches",
                    file=output_redir,
                )
                # Continue right away so we don't sleep as we shouldn't need
                # to.
                continue
            elif current_branch_only and is_sha1 and ret == 128:
                # Exit code 128 means "couldn't find the ref you asked for";
                # if we're in sha1 mode, we just tried sync'ing from the
                # upstream field; it doesn't exist, thus abort the
                # optimization attempt and do a full sync.
                break
            elif ret < 0:
                # Git died with a signal, exit immediately.
                break

            # Figure out how long to sleep before the next attempt, if there
            # is one.
            if not verbose and gitcmd.stdout:
                print(
                    "\n%s:\n%s" % (self.name, gitcmd.stdout),
                    end="",
                    file=output_redir,
                )
            if try_n < retry_fetches - 1:
                print(
                    "%s: sleeping %s seconds before retrying"
                    % (self.name, retry_cur_sleep),
                    file=output_redir,
                )
                time.sleep(retry_cur_sleep)
                retry_cur_sleep = min(
                    retry_exp_factor * retry_cur_sleep, MAXIMUM_RETRY_SLEEP_SEC
                )
                # Jitter the sleep to avoid thundering-herd retries.
                retry_cur_sleep *= 1 - random.uniform(
                    -RETRY_JITTER_PERCENT, RETRY_JITTER_PERCENT
                )

        if initial:
            # Restore the pre-fetch packed-refs (drop the borrowed
            # refs/_alt/* entries spliced in above).
            if alt_dir:
                if old_packed != "":
                    _lwrite(packed_refs, old_packed)
                else:
                    platform_utils.remove(packed_refs)
            self.bare_git.pack_refs("--all", "--prune")

        if is_sha1 and current_branch_only:
            # We just synced the upstream given branch; verify we
            # got what we wanted, else trigger a second run of all
            # refs.
            if not self._CheckForImmutableRevision():
                # Sync the current branch only with depth set to None.
                # We always pass depth=None down to avoid infinite recursion.
                return self._RemoteFetch(
                    name=name,
                    quiet=quiet,
                    verbose=verbose,
                    output_redir=output_redir,
                    current_branch_only=current_branch_only and depth,
                    initial=False,
                    alt_dir=alt_dir,
                    depth=None,
                    ssh_proxy=ssh_proxy,
                    clone_filter=clone_filter,
                )

        return ok
2576
2577 def _ApplyCloneBundle(self, initial=False, quiet=False, verbose=False):
2578 if initial and (
2579 self.manifest.manifestProject.depth or self.clone_depth
2580 ):
2581 return False
3249 2582
3250class _Failure(object): 2583 remote = self.GetRemote()
2584 bundle_url = remote.url + "/clone.bundle"
2585 bundle_url = GitConfig.ForUser().UrlInsteadOf(bundle_url)
2586 if GetSchemeFromUrl(bundle_url) not in (
2587 "http",
2588 "https",
2589 "persistent-http",
2590 "persistent-https",
2591 ):
2592 return False
3251 2593
3252 def __init__(self, project, why): 2594 bundle_dst = os.path.join(self.gitdir, "clone.bundle")
3253 self.project = project 2595 bundle_tmp = os.path.join(self.gitdir, "clone.bundle.tmp")
3254 self.why = why
3255 2596
3256 def Print(self, syncbuf): 2597 exist_dst = os.path.exists(bundle_dst)
3257 syncbuf.out.fail('error: %s/: %s', 2598 exist_tmp = os.path.exists(bundle_tmp)
3258 self.project.RelPath(local=False),
3259 str(self.why))
3260 syncbuf.out.nl()
3261 2599
2600 if not initial and not exist_dst and not exist_tmp:
2601 return False
3262 2602
3263class _Later(object): 2603 if not exist_dst:
2604 exist_dst = self._FetchBundle(
2605 bundle_url, bundle_tmp, bundle_dst, quiet, verbose
2606 )
2607 if not exist_dst:
2608 return False
3264 2609
3265 def __init__(self, project, action): 2610 cmd = ["fetch"]
3266 self.project = project 2611 if not verbose:
3267 self.action = action 2612 cmd.append("--quiet")
2613 if not quiet and sys.stdout.isatty():
2614 cmd.append("--progress")
2615 if not self.worktree:
2616 cmd.append("--update-head-ok")
2617 cmd.append(bundle_dst)
2618 for f in remote.fetch:
2619 cmd.append(str(f))
2620 cmd.append("+refs/tags/*:refs/tags/*")
2621
2622 ok = (
2623 GitCommand(
2624 self,
2625 cmd,
2626 bare=True,
2627 objdir=os.path.join(self.objdir, "objects"),
2628 ).Wait()
2629 == 0
2630 )
2631 platform_utils.remove(bundle_dst, missing_ok=True)
2632 platform_utils.remove(bundle_tmp, missing_ok=True)
2633 return ok
2634
    def _FetchBundle(self, srcUrl, tmpPath, dstPath, quiet, verbose):
        """Download |srcUrl| with curl, validating and renaming to |dstPath|.

        Resumes a partial download left at |tmpPath| when it is large enough
        to be worth continuing.

        Args:
            srcUrl: The clone.bundle URL to download.
            tmpPath: In-progress download path.
            dstPath: Final path for a validated bundle.
            quiet: Silence curl progress output.
            verbose: Print status messages.

        Returns:
            True if a valid bundle now exists at |dstPath|.
        """
        platform_utils.remove(dstPath, missing_ok=True)

        cmd = ["curl", "--fail", "--output", tmpPath, "--netrc", "--location"]
        if quiet:
            cmd += ["--silent", "--show-error"]
        if os.path.exists(tmpPath):
            size = os.stat(tmpPath).st_size
            if size >= 1024:
                # Resume a big enough partial download; tiny remnants are
                # cheaper to re-download from scratch.
                cmd += ["--continue-at", "%d" % (size,)]
            else:
                platform_utils.remove(tmpPath)
        with GetUrlCookieFile(srcUrl, quiet) as (cookiefile, proxy):
            if cookiefile:
                cmd += ["--cookie", cookiefile]
            if proxy:
                cmd += ["--proxy", proxy]
            elif "http_proxy" in os.environ and "darwin" == sys.platform:
                cmd += ["--proxy", os.environ["http_proxy"]]
            # persistent-http(s) is a Google transport; curl only speaks
            # plain http(s), so strip the prefix.
            if srcUrl.startswith("persistent-https"):
                srcUrl = "http" + srcUrl[len("persistent-https") :]
            elif srcUrl.startswith("persistent-http"):
                srcUrl = "http" + srcUrl[len("persistent-http") :]
            cmd += [srcUrl]

            proc = None
            with Trace("Fetching bundle: %s", " ".join(cmd)):
                if verbose:
                    print("%s: Downloading bundle: %s" % (self.name, srcUrl))
                stdout = None if verbose else subprocess.PIPE
                stderr = None if verbose else subprocess.STDOUT
                try:
                    proc = subprocess.Popen(cmd, stdout=stdout, stderr=stderr)
                except OSError:
                    return False

            (output, _) = proc.communicate()
            curlret = proc.returncode

            if curlret == 22:
                # From curl man page:
                # 22: HTTP page not retrieved. The requested url was not found
                # or returned another error with the HTTP error code being 400
                # or above. This return code only appears if -f, --fail is used.
                if verbose:
                    print(
                        "%s: Unable to retrieve clone.bundle; ignoring."
                        % self.name
                    )
                    if output:
                        print("Curl output:\n%s" % output)
                return False
            elif curlret and not verbose and output:
                print("%s" % output, file=sys.stderr)

        if os.path.exists(tmpPath):
            if curlret == 0 and self._IsValidBundle(tmpPath, quiet):
                platform_utils.rename(tmpPath, dstPath)
                return True
            else:
                platform_utils.remove(tmpPath)
                return False
        else:
            return False
3268 2699
3269 def Run(self, syncbuf): 2700 def _IsValidBundle(self, path, quiet):
3270 out = syncbuf.out 2701 try:
3271 out.project('project %s/', self.project.RelPath(local=False)) 2702 with open(path, "rb") as f:
3272 out.nl() 2703 if f.read(16) == b"# v2 git bundle\n":
3273 try: 2704 return True
3274 self.action() 2705 else:
3275 out.nl() 2706 if not quiet:
3276 return True 2707 print(
3277 except GitError: 2708 "Invalid clone.bundle file; ignoring.",
3278 out.nl() 2709 file=sys.stderr,
3279 return False 2710 )
2711 return False
2712 except OSError:
2713 return False
3280 2714
2715 def _Checkout(self, rev, quiet=False):
2716 cmd = ["checkout"]
2717 if quiet:
2718 cmd.append("-q")
2719 cmd.append(rev)
2720 cmd.append("--")
2721 if GitCommand(self, cmd).Wait() != 0:
2722 if self._allrefs:
2723 raise GitError("%s checkout %s " % (self.name, rev))
2724
2725 def _CherryPick(self, rev, ffonly=False, record_origin=False):
2726 cmd = ["cherry-pick"]
2727 if ffonly:
2728 cmd.append("--ff")
2729 if record_origin:
2730 cmd.append("-x")
2731 cmd.append(rev)
2732 cmd.append("--")
2733 if GitCommand(self, cmd).Wait() != 0:
2734 if self._allrefs:
2735 raise GitError("%s cherry-pick %s " % (self.name, rev))
2736
2737 def _LsRemote(self, refs):
2738 cmd = ["ls-remote", self.remote.name, refs]
2739 p = GitCommand(self, cmd, capture_stdout=True)
2740 if p.Wait() == 0:
2741 return p.stdout
2742 return None
3281 2743
3282class _SyncColoring(Coloring): 2744 def _Revert(self, rev):
2745 cmd = ["revert"]
2746 cmd.append("--no-edit")
2747 cmd.append(rev)
2748 cmd.append("--")
2749 if GitCommand(self, cmd).Wait() != 0:
2750 if self._allrefs:
2751 raise GitError("%s revert %s " % (self.name, rev))
2752
2753 def _ResetHard(self, rev, quiet=True):
2754 cmd = ["reset", "--hard"]
2755 if quiet:
2756 cmd.append("-q")
2757 cmd.append(rev)
2758 if GitCommand(self, cmd).Wait() != 0:
2759 raise GitError("%s reset --hard %s " % (self.name, rev))
3283 2760
3284 def __init__(self, config): 2761 def _SyncSubmodules(self, quiet=True):
3285 super().__init__(config, 'reposync') 2762 cmd = ["submodule", "update", "--init", "--recursive"]
3286 self.project = self.printer('header', attr='bold') 2763 if quiet:
3287 self.info = self.printer('info') 2764 cmd.append("-q")
3288 self.fail = self.printer('fail', fg='red') 2765 if GitCommand(self, cmd).Wait() != 0:
2766 raise GitError(
2767 "%s submodule update --init --recursive " % self.name
2768 )
2769
2770 def _Rebase(self, upstream, onto=None):
2771 cmd = ["rebase"]
2772 if onto is not None:
2773 cmd.extend(["--onto", onto])
2774 cmd.append(upstream)
2775 if GitCommand(self, cmd).Wait() != 0:
2776 raise GitError("%s rebase %s " % (self.name, upstream))
3289 2777
2778 def _FastForward(self, head, ffonly=False):
2779 cmd = ["merge", "--no-stat", head]
2780 if ffonly:
2781 cmd.append("--ff-only")
2782 if GitCommand(self, cmd).Wait() != 0:
2783 raise GitError("%s merge %s " % (self.name, head))
3290 2784
    def _InitGitDir(self, mirror_git=None, force_sync=False, quiet=False):
        """Create and configure this project's git dir and object dir.

        Initializes the bare object database, installs hooks, links gitdir
        to objdir when they are separate, wires up reference/mirror
        alternates, and seeds user/lfs/bare config. On any failure,
        directories this call created are removed again.

        Args:
            mirror_git: Optional path to a local mirror to use as an
                objects alternate.
            force_sync: If the gitdir/objdir link check fails, delete and
                retry once instead of raising.
            quiet: Suppress hook-installation warnings.
        """
        init_git_dir = not os.path.exists(self.gitdir)
        init_obj_dir = not os.path.exists(self.objdir)
        try:
            # Initialize the bare repository, which contains all of the objects.
            if init_obj_dir:
                os.makedirs(self.objdir)
                self.bare_objdir.init()

            self._UpdateHooks(quiet=quiet)

            if self.use_git_worktrees:
                # Enable per-worktree config file support if possible. This
                # is more a nice-to-have feature for users rather than a
                # hard requirement.
                if git_require((2, 20, 0)):
                    self.EnableRepositoryExtension("worktreeConfig")

            # If we have a separate directory to hold refs, initialize it as
            # well.
            if self.objdir != self.gitdir:
                if init_git_dir:
                    os.makedirs(self.gitdir)

                if init_obj_dir or init_git_dir:
                    self._ReferenceGitDir(
                        self.objdir, self.gitdir, copy_all=True
                    )
                try:
                    self._CheckDirReference(self.objdir, self.gitdir)
                except GitError as e:
                    if force_sync:
                        print(
                            "Retrying clone after deleting %s" % self.gitdir,
                            file=sys.stderr,
                        )
                        try:
                            platform_utils.rmtree(
                                platform_utils.realpath(self.gitdir)
                            )
                            if self.worktree and os.path.exists(
                                platform_utils.realpath(self.worktree)
                            ):
                                platform_utils.rmtree(
                                    platform_utils.realpath(self.worktree)
                                )
                            # Retry exactly once: force_sync=False below
                            # prevents an endless delete/retry loop.
                            return self._InitGitDir(
                                mirror_git=mirror_git,
                                force_sync=False,
                                quiet=quiet,
                            )
                        except Exception:
                            raise e
                    raise e

            if init_git_dir:
                mp = self.manifest.manifestProject
                ref_dir = mp.reference or ""

                def _expanded_ref_dirs():
                    """Iterate through possible git reference dir paths."""
                    name = self.name + ".git"
                    yield mirror_git or os.path.join(ref_dir, name)
                    for prefix in "", self.remote.name:
                        yield os.path.join(
                            ref_dir, ".repo", "project-objects", prefix, name
                        )
                        yield os.path.join(
                            ref_dir, ".repo", "worktrees", prefix, name
                        )

                if ref_dir or mirror_git:
                    found_ref_dir = None
                    for path in _expanded_ref_dirs():
                        if os.path.exists(path):
                            found_ref_dir = path
                            break
                    ref_dir = found_ref_dir

                    if ref_dir:
                        if not os.path.isabs(ref_dir):
                            # The alternate directory is relative to the
                            # object database.
                            ref_dir = os.path.relpath(
                                ref_dir, os.path.join(self.objdir, "objects")
                            )
                        _lwrite(
                            os.path.join(
                                self.objdir, "objects/info/alternates"
                            ),
                            os.path.join(ref_dir, "objects") + "\n",
                        )

                m = self.manifest.manifestProject.config
                for key in ["user.name", "user.email"]:
                    if m.Has(key, include_defaults=False):
                        self.config.SetString(key, m.GetString(key))
                if not self.manifest.EnableGitLfs:
                    # Neutralize git-lfs filters when LFS is disabled.
                    self.config.SetString(
                        "filter.lfs.smudge", "git-lfs smudge --skip -- %f"
                    )
                    self.config.SetString(
                        "filter.lfs.process", "git-lfs filter-process --skip"
                    )
                self.config.SetBoolean(
                    "core.bare", True if self.manifest.IsMirror else None
                )
        except Exception:
            # Roll back any directories we created before re-raising.
            if init_obj_dir and os.path.exists(self.objdir):
                platform_utils.rmtree(self.objdir)
            if init_git_dir and os.path.exists(self.gitdir):
                platform_utils.rmtree(self.gitdir)
            raise
2898
2899 def _UpdateHooks(self, quiet=False):
2900 if os.path.exists(self.objdir):
2901 self._InitHooks(quiet=quiet)
2902
    def _InitHooks(self, quiet=False):
        """Install repo's stock hook scripts into this repo's hooks dir.

        Stock hooks are symlinked in when possible, with a hardlink
        fallback on filesystems that forbid symlinks. Locally modified
        hooks are left untouched (with a warning unless |quiet|).
        """
        hooks = platform_utils.realpath(os.path.join(self.objdir, "hooks"))
        if not os.path.exists(hooks):
            os.makedirs(hooks)

        # Delete sample hooks. They're noise.
        for hook in glob.glob(os.path.join(hooks, "*.sample")):
            try:
                platform_utils.remove(hook, missing_ok=True)
            except PermissionError:
                pass

        for stock_hook in _ProjectHooks():
            name = os.path.basename(stock_hook)

            if (
                name in ("commit-msg",)
                and not self.remote.review
                and self is not self.manifest.manifestProject
            ):
                # Don't install a Gerrit Code Review hook if this
                # project does not appear to use it for reviews.
                #
                # Since the manifest project is one of those, but also
                # managed through gerrit, it's excluded.
                continue

            dst = os.path.join(hooks, name)
            if platform_utils.islink(dst):
                # Already linked to a stock hook; nothing to do.
                continue
            if os.path.exists(dst):
                # If the files are the same, we'll leave it alone. We create
                # symlinks below by default but fallback to hardlinks if the OS
                # blocks them. So if we're here, it's probably because we made a
                # hardlink below.
                if not filecmp.cmp(stock_hook, dst, shallow=False):
                    if not quiet:
                        _warn(
                            "%s: Not replacing locally modified %s hook",
                            self.RelPath(local=False),
                            name,
                        )
                    continue
            try:
                platform_utils.symlink(
                    os.path.relpath(stock_hook, os.path.dirname(dst)), dst
                )
            except OSError as e:
                if e.errno == errno.EPERM:
                    # Symlinks not permitted here; fall back to a hardlink.
                    try:
                        os.link(stock_hook, dst)
                    except OSError:
                        raise GitError(self._get_symlink_error_message())
                else:
                    raise
2958
2959 def _InitRemote(self):
2960 if self.remote.url:
2961 remote = self.GetRemote()
2962 remote.url = self.remote.url
2963 remote.pushUrl = self.remote.pushUrl
2964 remote.review = self.remote.review
2965 remote.projectname = self.name
2966
2967 if self.worktree:
2968 remote.ResetFetch(mirror=False)
2969 else:
2970 remote.ResetFetch(mirror=True)
2971 remote.Save()
2972
2973 def _InitMRef(self):
2974 """Initialize the pseudo m/<manifest branch> ref."""
2975 if self.manifest.branch:
2976 if self.use_git_worktrees:
2977 # Set up the m/ space to point to the worktree-specific ref
2978 # space. We'll update the worktree-specific ref space on each
2979 # checkout.
2980 ref = R_M + self.manifest.branch
2981 if not self.bare_ref.symref(ref):
2982 self.bare_git.symbolic_ref(
2983 "-m",
2984 "redirecting to worktree scope",
2985 ref,
2986 R_WORKTREE_M + self.manifest.branch,
2987 )
2988
2989 # We can't update this ref with git worktrees until it exists.
2990 # We'll wait until the initial checkout to set it.
2991 if not os.path.exists(self.worktree):
2992 return
2993
2994 base = R_WORKTREE_M
2995 active_git = self.work_git
2996
2997 self._InitAnyMRef(HEAD, self.bare_git, detach=True)
2998 else:
2999 base = R_M
3000 active_git = self.bare_git
3001
3002 self._InitAnyMRef(base + self.manifest.branch, active_git)
3003
    def _InitMirrorHead(self):
        """Point a mirror's pseudo m/ ref at the bare repo's HEAD."""
        self._InitAnyMRef(HEAD, self.bare_git)
3006
3007 def _InitAnyMRef(self, ref, active_git, detach=False):
3008 """Initialize |ref| in |active_git| to the value in the manifest.
3009
3010 This points |ref| to the <project> setting in the manifest.
3011
3012 Args:
3013 ref: The branch to update.
3014 active_git: The git repository to make updates in.
3015 detach: Whether to update target of symbolic refs, or overwrite the
3016 ref directly (and thus make it non-symbolic).
3017 """
3018 cur = self.bare_ref.symref(ref)
3019
3020 if self.revisionId:
3021 if cur != "" or self.bare_ref.get(ref) != self.revisionId:
3022 msg = "manifest set to %s" % self.revisionId
3023 dst = self.revisionId + "^0"
3024 active_git.UpdateRef(ref, dst, message=msg, detach=True)
3025 else:
3026 remote = self.GetRemote()
3027 dst = remote.ToLocal(self.revisionExpr)
3028 if cur != dst:
3029 msg = "manifest set to %s" % self.revisionExpr
3030 if detach:
3031 active_git.UpdateRef(ref, dst, message=msg, detach=True)
3032 else:
3033 active_git.symbolic_ref("-m", msg, ref, dst)
3034
3035 def _CheckDirReference(self, srcdir, destdir):
3036 # Git worktrees don't use symlinks to share at all.
3037 if self.use_git_worktrees:
3038 return
3039
3040 for name in self.shareable_dirs:
3041 # Try to self-heal a bit in simple cases.
3042 dst_path = os.path.join(destdir, name)
3043 src_path = os.path.join(srcdir, name)
3044
3045 dst = platform_utils.realpath(dst_path)
3046 if os.path.lexists(dst):
3047 src = platform_utils.realpath(src_path)
3048 # Fail if the links are pointing to the wrong place.
3049 if src != dst:
3050 _error("%s is different in %s vs %s", name, destdir, srcdir)
3051 raise GitError(
3052 "--force-sync not enabled; cannot overwrite a local "
3053 "work tree. If you're comfortable with the "
3054 "possibility of losing the work tree's git metadata,"
3055 " use `repo sync --force-sync {0}` to "
3056 "proceed.".format(self.RelPath(local=False))
3057 )
3058
    def _ReferenceGitDir(self, gitdir, dotgit, copy_all):
        """Update |dotgit| to reference |gitdir|, using symlinks where possible.

        Args:
            gitdir: The bare git repository. Must already be initialized.
            dotgit: The repository you would like to initialize.
            copy_all: If true, copy all remaining files from |gitdir| ->
                |dotgit|. This saves you the effort of initializing |dotgit|
                yourself.

        Raises:
            DownloadError: Symlinks are not permitted on this filesystem.
        """
        symlink_dirs = self.shareable_dirs[:]
        to_symlink = symlink_dirs

        to_copy = []
        if copy_all:
            to_copy = platform_utils.listdir(gitdir)

        dotgit = platform_utils.realpath(dotgit)
        for name in set(to_copy).union(to_symlink):
            try:
                src = platform_utils.realpath(os.path.join(gitdir, name))
                dst = os.path.join(dotgit, name)

                # Never clobber anything already present in |dotgit|.
                if os.path.lexists(dst):
                    continue

                # If the source dir doesn't exist, create an empty dir.
                if name in symlink_dirs and not os.path.lexists(src):
                    os.makedirs(src)

                if name in to_symlink:
                    platform_utils.symlink(
                        os.path.relpath(src, os.path.dirname(dst)), dst
                    )
                elif copy_all and not platform_utils.islink(dst):
                    if platform_utils.isdir(src):
                        shutil.copytree(src, dst)
                    elif os.path.isfile(src):
                        shutil.copy(src, dst)

            except OSError as e:
                if e.errno == errno.EPERM:
                    raise DownloadError(self._get_symlink_error_message())
                else:
                    raise
3104
3105 def _InitGitWorktree(self):
3106 """Init the project using git worktrees."""
3107 self.bare_git.worktree("prune")
3108 self.bare_git.worktree(
3109 "add",
3110 "-ff",
3111 "--checkout",
3112 "--detach",
3113 "--lock",
3114 self.worktree,
3115 self.GetRevisionId(),
3116 )
3292 3117
3293 def __init__(self, config, detach_head=False): 3118 # Rewrite the internal state files to use relative paths between the
3294 self._messages = [] 3119 # checkouts & worktrees.
3295 self._failures = [] 3120 dotgit = os.path.join(self.worktree, ".git")
3296 self._later_queue1 = [] 3121 with open(dotgit, "r") as fp:
3297 self._later_queue2 = [] 3122 # Figure out the checkout->worktree path.
3123 setting = fp.read()
3124 assert setting.startswith("gitdir:")
3125 git_worktree_path = setting.split(":", 1)[1].strip()
3126 # Some platforms (e.g. Windows) won't let us update dotgit in situ
3127 # because of file permissions. Delete it and recreate it from scratch
3128 # to avoid.
3129 platform_utils.remove(dotgit)
3130 # Use relative path from checkout->worktree & maintain Unix line endings
3131 # on all OS's to match git behavior.
3132 with open(dotgit, "w", newline="\n") as fp:
3133 print(
3134 "gitdir:",
3135 os.path.relpath(git_worktree_path, self.worktree),
3136 file=fp,
3137 )
3138 # Use relative path from worktree->checkout & maintain Unix line endings
3139 # on all OS's to match git behavior.
3140 with open(
3141 os.path.join(git_worktree_path, "gitdir"), "w", newline="\n"
3142 ) as fp:
3143 print(os.path.relpath(dotgit, git_worktree_path), file=fp)
3144
3145 self._InitMRef()
3146
    def _InitWorkTree(self, force_sync=False, submodules=False):
        """Setup the worktree .git path.

        This is the user-visible path like src/foo/.git/.

        With non-git-worktrees, this will be a symlink to the .repo/projects/
        path. With git-worktrees, this will be a .git file using "gitdir: ..."
        syntax.

        Older checkouts had .git/ directories. If we see that, migrate it.

        This also handles changes in the manifest. Maybe this project was
        backed by "foo/bar" on the server, but now it's "new/foo/bar". We have
        to update the path we point to under .repo/projects/ to match.

        Args:
            force_sync: Accepted but not consulted in this body.
                NOTE(review): confirm whether callers rely on it here.
            submodules: If True, also sync submodules after the checkout.
        """
        dotgit = os.path.join(self.worktree, ".git")

        # If using an old layout style (a directory), migrate it.
        if not platform_utils.islink(dotgit) and platform_utils.isdir(dotgit):
            self._MigrateOldWorkTreeGitDir(dotgit)

        # Whether a .git entry must be created from scratch.
        init_dotgit = not os.path.exists(dotgit)
        if self.use_git_worktrees:
            if init_dotgit:
                self._InitGitWorktree()
                self._CopyAndLinkFiles()
        else:
            if not init_dotgit:
                # See if the project has changed.
                # The existing .git no longer resolves to our gitdir: drop the
                # stale link so it gets recreated below.
                if platform_utils.realpath(
                    self.gitdir
                ) != platform_utils.realpath(dotgit):
                    platform_utils.remove(dotgit)

            if init_dotgit or not os.path.exists(dotgit):
                os.makedirs(self.worktree, exist_ok=True)
                platform_utils.symlink(
                    os.path.relpath(self.gitdir, self.worktree), dotgit
                )

            if init_dotgit:
                _lwrite(
                    os.path.join(dotgit, HEAD), "%s\n" % self.GetRevisionId()
                )

                # Finish checking out the worktree.
                cmd = ["read-tree", "--reset", "-u", "-v", HEAD]
                if GitCommand(self, cmd).Wait() != 0:
                    raise GitError(
                        "Cannot initialize work tree for " + self.name
                    )

                if submodules:
                    self._SyncSubmodules(quiet=True)
                self._CopyAndLinkFiles()
3202
    @classmethod
    def _MigrateOldWorkTreeGitDir(cls, dotgit):
        """Migrate the old worktree .git/ dir style to a symlink.

        This logic specifically only uses state from |dotgit| to figure out
        where to move content and not |self|. This way if the backing project
        also changed places, we only do the .git/ dir to .git symlink migration
        here. The path updates will happen independently.

        Args:
            dotgit: Path to the old-style .git/ directory to migrate.

        Raises:
            GitError: if the checkout layout is unrecognized, or if unknown
                files are present that migration could clobber.
        """
        # Figure out where in .repo/projects/ it's pointing to.
        if not os.path.islink(os.path.join(dotgit, "refs")):
            raise GitError(f"{dotgit}: unsupported checkout state")
        gitdir = os.path.dirname(os.path.realpath(os.path.join(dotgit, "refs")))

        # Remove known symlink paths that exist in .repo/projects/.
        KNOWN_LINKS = {
            "config",
            "description",
            "hooks",
            "info",
            "logs",
            "objects",
            "packed-refs",
            "refs",
            "rr-cache",
            "shallow",
            "svn",
        }
        # Paths that we know will be in both, but are safe to clobber in
        # .repo/projects/.
        SAFE_TO_CLOBBER = {
            "COMMIT_EDITMSG",
            "FETCH_HEAD",
            "HEAD",
            "gc.log",
            "gitk.cache",
            "index",
            "ORIG_HEAD",
        }

        # First see if we'd succeed before starting the migration.
        # (Two-pass design: validate everything up front so a failure cannot
        # leave the checkout half-migrated.)
        unknown_paths = []
        for name in platform_utils.listdir(dotgit):
            # Ignore all temporary/backup names. These are common with vim &
            # emacs.
            if name.endswith("~") or (name[0] == "#" and name[-1] == "#"):
                continue

            dotgit_path = os.path.join(dotgit, name)
            if name in KNOWN_LINKS:
                if not platform_utils.islink(dotgit_path):
                    unknown_paths.append(f"{dotgit_path}: should be a symlink")
            else:
                gitdir_path = os.path.join(gitdir, name)
                if name not in SAFE_TO_CLOBBER and os.path.exists(gitdir_path):
                    unknown_paths.append(
                        f"{dotgit_path}: unknown file; please file a bug"
                    )
        if unknown_paths:
            raise GitError("Aborting migration: " + "\n".join(unknown_paths))

        # Now walk the paths and sync the .git/ to .repo/projects/.
        for name in platform_utils.listdir(dotgit):
            dotgit_path = os.path.join(dotgit, name)

            # Ignore all temporary/backup names. These are common with vim &
            # emacs.
            if name.endswith("~") or (name[0] == "#" and name[-1] == "#"):
                platform_utils.remove(dotgit_path)
            elif name in KNOWN_LINKS:
                platform_utils.remove(dotgit_path)
            else:
                gitdir_path = os.path.join(gitdir, name)
                platform_utils.remove(gitdir_path, missing_ok=True)
                platform_utils.rename(dotgit_path, gitdir_path)

        # Now that the dir should be empty, clear it out, and symlink it over.
        platform_utils.rmdir(dotgit)
        platform_utils.symlink(
            os.path.relpath(gitdir, os.path.dirname(dotgit)), dotgit
        )
3298 3284
3299 self.out = _SyncColoring(config) 3285 def _get_symlink_error_message(self):
3300 self.out.redirect(sys.stderr) 3286 if platform_utils.isWindows():
3287 return (
3288 "Unable to create symbolic link. Please re-run the command as "
3289 "Administrator, or see "
3290 "https://github.com/git-for-windows/git/wiki/Symbolic-Links "
3291 "for other options."
3292 )
3293 return "filesystem must support symlinks"
3294
3295 def _revlist(self, *args, **kw):
3296 a = []
3297 a.extend(args)
3298 a.append("--")
3299 return self.work_git.rev_list(*a, **kw)
3300
    @property
    def _allrefs(self):
        # All refs known to the bare repo's ref cache (presumably a
        # refname -> id mapping; confirm against bare_ref.all).
        return self.bare_ref.all
3304
3305 def _getLogs(
3306 self, rev1, rev2, oneline=False, color=True, pretty_format=None
3307 ):
3308 """Get logs between two revisions of this project."""
3309 comp = ".."
3310 if rev1:
3311 revs = [rev1]
3312 if rev2:
3313 revs.extend([comp, rev2])
3314 cmd = ["log", "".join(revs)]
3315 out = DiffColoring(self.config)
3316 if out.is_on and color:
3317 cmd.append("--color")
3318 if pretty_format is not None:
3319 cmd.append("--pretty=format:%s" % pretty_format)
3320 if oneline:
3321 cmd.append("--oneline")
3301 3322
3302 self.detach_head = detach_head 3323 try:
3303 self.clean = True 3324 log = GitCommand(
3304 self.recent_clean = True 3325 self, cmd, capture_stdout=True, capture_stderr=True
3326 )
3327 if log.Wait() == 0:
3328 return log.stdout
3329 except GitError:
3330 # worktree may not exist if groups changed for example. In that
3331 # case, try in gitdir instead.
3332 if not os.path.exists(self.worktree):
3333 return self.bare_git.log(*cmd[1:])
3334 else:
3335 raise
3336 return None
3305 3337
3306 def info(self, project, fmt, *args): 3338 def getAddedAndRemovedLogs(
3307 self._messages.append(_InfoMessage(project, fmt % args)) 3339 self, toProject, oneline=False, color=True, pretty_format=None
3340 ):
3341 """Get the list of logs from this revision to given revisionId"""
3342 logs = {}
3343 selfId = self.GetRevisionId(self._allrefs)
3344 toId = toProject.GetRevisionId(toProject._allrefs)
3345
3346 logs["added"] = self._getLogs(
3347 selfId,
3348 toId,
3349 oneline=oneline,
3350 color=color,
3351 pretty_format=pretty_format,
3352 )
3353 logs["removed"] = self._getLogs(
3354 toId,
3355 selfId,
3356 oneline=oneline,
3357 color=color,
3358 pretty_format=pretty_format,
3359 )
3360 return logs
3361
3362 class _GitGetByExec(object):
3363 def __init__(self, project, bare, gitdir):
3364 self._project = project
3365 self._bare = bare
3366 self._gitdir = gitdir
3367
3368 # __getstate__ and __setstate__ are required for pickling because
3369 # __getattr__ exists.
3370 def __getstate__(self):
3371 return (self._project, self._bare, self._gitdir)
3372
3373 def __setstate__(self, state):
3374 self._project, self._bare, self._gitdir = state
3375
3376 def LsOthers(self):
3377 p = GitCommand(
3378 self._project,
3379 ["ls-files", "-z", "--others", "--exclude-standard"],
3380 bare=False,
3381 gitdir=self._gitdir,
3382 capture_stdout=True,
3383 capture_stderr=True,
3384 )
3385 if p.Wait() == 0:
3386 out = p.stdout
3387 if out:
3388 # Backslash is not anomalous.
3389 return out[:-1].split("\0")
3390 return []
3391
3392 def DiffZ(self, name, *args):
3393 cmd = [name]
3394 cmd.append("-z")
3395 cmd.append("--ignore-submodules")
3396 cmd.extend(args)
3397 p = GitCommand(
3398 self._project,
3399 cmd,
3400 gitdir=self._gitdir,
3401 bare=False,
3402 capture_stdout=True,
3403 capture_stderr=True,
3404 )
3405 p.Wait()
3406 r = {}
3407 out = p.stdout
3408 if out:
3409 out = iter(out[:-1].split("\0"))
3410 while out:
3411 try:
3412 info = next(out)
3413 path = next(out)
3414 except StopIteration:
3415 break
3416
3417 class _Info(object):
3418 def __init__(self, path, omode, nmode, oid, nid, state):
3419 self.path = path
3420 self.src_path = None
3421 self.old_mode = omode
3422 self.new_mode = nmode
3423 self.old_id = oid
3424 self.new_id = nid
3425
3426 if len(state) == 1:
3427 self.status = state
3428 self.level = None
3429 else:
3430 self.status = state[:1]
3431 self.level = state[1:]
3432 while self.level.startswith("0"):
3433 self.level = self.level[1:]
3434
3435 info = info[1:].split(" ")
3436 info = _Info(path, *info)
3437 if info.status in ("R", "C"):
3438 info.src_path = info.path
3439 info.path = next(out)
3440 r[info.path] = info
3441 return r
3442
3443 def GetDotgitPath(self, subpath=None):
3444 """Return the full path to the .git dir.
3445
3446 As a convenience, append |subpath| if provided.
3447 """
3448 if self._bare:
3449 dotgit = self._gitdir
3450 else:
3451 dotgit = os.path.join(self._project.worktree, ".git")
3452 if os.path.isfile(dotgit):
3453 # Git worktrees use a "gitdir:" syntax to point to the
3454 # scratch space.
3455 with open(dotgit) as fp:
3456 setting = fp.read()
3457 assert setting.startswith("gitdir:")
3458 gitdir = setting.split(":", 1)[1].strip()
3459 dotgit = os.path.normpath(
3460 os.path.join(self._project.worktree, gitdir)
3461 )
3462
3463 return dotgit if subpath is None else os.path.join(dotgit, subpath)
3464
3465 def GetHead(self):
3466 """Return the ref that HEAD points to."""
3467 path = self.GetDotgitPath(subpath=HEAD)
3468 try:
3469 with open(path) as fd:
3470 line = fd.readline()
3471 except IOError as e:
3472 raise NoManifestException(path, str(e))
3473 try:
3474 line = line.decode()
3475 except AttributeError:
3476 pass
3477 if line.startswith("ref: "):
3478 return line[5:-1]
3479 return line[:-1]
3480
3481 def SetHead(self, ref, message=None):
3482 cmdv = []
3483 if message is not None:
3484 cmdv.extend(["-m", message])
3485 cmdv.append(HEAD)
3486 cmdv.append(ref)
3487 self.symbolic_ref(*cmdv)
3488
3489 def DetachHead(self, new, message=None):
3490 cmdv = ["--no-deref"]
3491 if message is not None:
3492 cmdv.extend(["-m", message])
3493 cmdv.append(HEAD)
3494 cmdv.append(new)
3495 self.update_ref(*cmdv)
3496
3497 def UpdateRef(self, name, new, old=None, message=None, detach=False):
3498 cmdv = []
3499 if message is not None:
3500 cmdv.extend(["-m", message])
3501 if detach:
3502 cmdv.append("--no-deref")
3503 cmdv.append(name)
3504 cmdv.append(new)
3505 if old is not None:
3506 cmdv.append(old)
3507 self.update_ref(*cmdv)
3508
3509 def DeleteRef(self, name, old=None):
3510 if not old:
3511 old = self.rev_parse(name)
3512 self.update_ref("-d", name, old)
3513 self._project.bare_ref.deleted(name)
3514
3515 def rev_list(self, *args, **kw):
3516 if "format" in kw:
3517 cmdv = ["log", "--pretty=format:%s" % kw["format"]]
3518 else:
3519 cmdv = ["rev-list"]
3520 cmdv.extend(args)
3521 p = GitCommand(
3522 self._project,
3523 cmdv,
3524 bare=self._bare,
3525 gitdir=self._gitdir,
3526 capture_stdout=True,
3527 capture_stderr=True,
3528 )
3529 if p.Wait() != 0:
3530 raise GitError(
3531 "%s rev-list %s: %s"
3532 % (self._project.name, str(args), p.stderr)
3533 )
3534 return p.stdout.splitlines()
3535
3536 def __getattr__(self, name):
3537 """Allow arbitrary git commands using pythonic syntax.
3538
3539 This allows you to do things like:
3540 git_obj.rev_parse('HEAD')
3541
3542 Since we don't have a 'rev_parse' method defined, the __getattr__
3543 will run. We'll replace the '_' with a '-' and try to run a git
3544 command. Any other positional arguments will be passed to the git
3545 command, and the following keyword arguments are supported:
3546 config: An optional dict of git config options to be passed with
3547 '-c'.
3548
3549 Args:
3550 name: The name of the git command to call. Any '_' characters
3551 will be replaced with '-'.
3552
3553 Returns:
3554 A callable object that will try to call git with the named
3555 command.
3556 """
3557 name = name.replace("_", "-")
3558
3559 def runner(*args, **kwargs):
3560 cmdv = []
3561 config = kwargs.pop("config", None)
3562 for k in kwargs:
3563 raise TypeError(
3564 "%s() got an unexpected keyword argument %r" % (name, k)
3565 )
3566 if config is not None:
3567 for k, v in config.items():
3568 cmdv.append("-c")
3569 cmdv.append("%s=%s" % (k, v))
3570 cmdv.append(name)
3571 cmdv.extend(args)
3572 p = GitCommand(
3573 self._project,
3574 cmdv,
3575 bare=self._bare,
3576 gitdir=self._gitdir,
3577 capture_stdout=True,
3578 capture_stderr=True,
3579 )
3580 if p.Wait() != 0:
3581 raise GitError(
3582 "%s %s: %s" % (self._project.name, name, p.stderr)
3583 )
3584 r = p.stdout
3585 if r.endswith("\n") and r.index("\n") == len(r) - 1:
3586 return r[:-1]
3587 return r
3588
3589 return runner
3308 3590
class _PriorSyncFailedError(Exception):
    """A previous sync left a rebase in progress in this work tree."""

    def __str__(self):
        return "prior sync failed; rebase still in progress"


class _DirtyError(Exception):
    """The work tree has uncommitted changes."""

    def __str__(self):
        return "contains uncommitted changes"


class _InfoMessage(object):
    """An informational message for a project, queued in a SyncBuffer."""

    def __init__(self, project, text):
        self.project = project
        self.text = text

    def Print(self, syncbuf):
        syncbuf.out.info(
            "%s/: %s", self.project.RelPath(local=False), self.text
        )
        syncbuf.out.nl()


class _Failure(object):
    """A failure message for a project, queued in a SyncBuffer."""

    def __init__(self, project, why):
        self.project = project
        self.why = why

    def Print(self, syncbuf):
        syncbuf.out.fail(
            "error: %s/: %s", self.project.RelPath(local=False), str(self.why)
        )
        syncbuf.out.nl()


class _Later(object):
    """A deferred action for a project, executed by SyncBuffer._RunLater."""

    def __init__(self, project, action):
        self.project = project
        self.action = action

    def Run(self, syncbuf):
        out = syncbuf.out
        out.project("project %s/", self.project.RelPath(local=False))
        out.nl()
        try:
            self.action()
            out.nl()
            return True
        except GitError:
            out.nl()
            return False
class MetaProject(Project):
  """A special project housed under .repo."""

  def __init__(self, manifest, name, gitdir, worktree):
    # Meta projects share one directory for gitdir and objdir, track the
    # 'origin' remote, and default to the refs/heads/master branch.
    Project.__init__(self,
                     manifest=manifest,
                     name=name,
                     gitdir=gitdir,
                     objdir=gitdir,
                     worktree=worktree,
                     remote=RemoteSpec('origin'),
                     relpath='.repo/%s' % name,
                     revisionExpr='refs/heads/master',
                     revisionId=None,
                     groups=None)

  def PreSync(self):
    """Refresh the revision to the current branch's merge ref, if any."""
    if self.Exists:
      cb = self.CurrentBranch
      if cb:
        base = self.GetBranch(cb).merge
        if base:
          self.revisionExpr = base
          self.revisionId = None

  @property
  def HasChanges(self):
    """Has the remote received new commits not yet checked out?"""
    if not self.remote or not self.revisionExpr:
      return False

    all_refs = self.bare_ref.all
    revid = self.GetRevisionId(all_refs)
    head = self.work_git.GetHead()
    if head.startswith(R_HEADS):
      # Resolve a symbolic head to its commit; None if the branch is unborn.
      try:
        head = all_refs[head]
      except KeyError:
        head = None

    if revid == head:
      return False
    elif self._revlist(not_rev(HEAD), revid):
      # The target revision has commits not reachable from HEAD.
      return True
    return False
3407 3643
class _SyncColoring(Coloring):
    """Colored output helpers for `repo sync` status messages."""

    def __init__(self, config):
        super().__init__(config, "reposync")
        # Printers: bold header for project names, plain info, red failures.
        self.project = self.printer("header", attr="bold")
        self.info = self.printer("info")
        self.fail = self.printer("fail", fg="red")
3408 3650
class RepoProject(MetaProject):
  """The MetaProject for repo itself."""

  @property
  def LastFetch(self):
    """Return the mtime of FETCH_HEAD, or 0 if unreadable/never fetched."""
    try:
      fh = os.path.join(self.gitdir, 'FETCH_HEAD')
      return os.path.getmtime(fh)
    except OSError:
      return 0
3420 3661
3421class ManifestProject(MetaProject): 3662 self.detach_head = detach_head
3422 """The MetaProject for manifests.""" 3663 self.clean = True
3423 3664 self.recent_clean = True
3424 def MetaBranchSwitch(self, submodules=False):
3425 """Prepare for manifest branch switch."""
3426
3427 # detach and delete manifest branch, allowing a new
3428 # branch to take over
3429 syncbuf = SyncBuffer(self.config, detach_head=True)
3430 self.Sync_LocalHalf(syncbuf, submodules=submodules)
3431 syncbuf.Finish()
3432
3433 return GitCommand(self,
3434 ['update-ref', '-d', 'refs/heads/default'],
3435 capture_stdout=True,
3436 capture_stderr=True).Wait() == 0
3437
  @property
  def standalone_manifest_url(self):
    """The URL of the standalone manifest, or None (manifest.standalone)."""
    return self.config.GetString('manifest.standalone')

  @property
  def manifest_groups(self):
    """The manifest groups string (manifest.groups)."""
    return self.config.GetString('manifest.groups')

  @property
  def reference(self):
    """The --reference for this manifest (repo.reference)."""
    return self.config.GetString('repo.reference')

  @property
  def dissociate(self):
    """Whether to dissociate from the reference mirror (repo.dissociate)."""
    return self.config.GetBoolean('repo.dissociate')

  @property
  def archive(self):
    """Whether we use archive checkouts (repo.archive)."""
    return self.config.GetBoolean('repo.archive')

  @property
  def mirror(self):
    """Whether we use mirror mode (repo.mirror)."""
    return self.config.GetBoolean('repo.mirror')

  @property
  def use_worktree(self):
    """Whether we use git worktrees (repo.worktree)."""
    return self.config.GetBoolean('repo.worktree')

  @property
  def clone_bundle(self):
    """Whether we use /clone.bundle downloads (repo.clonebundle)."""
    return self.config.GetBoolean('repo.clonebundle')

  @property
  def submodules(self):
    """Whether we sync manifest submodules (repo.submodules)."""
    return self.config.GetBoolean('repo.submodules')

  @property
  def git_lfs(self):
    """Whether git LFS support is enabled (repo.git-lfs)."""
    return self.config.GetBoolean('repo.git-lfs')

  @property
  def use_superproject(self):
    """Whether we use the superproject (repo.superproject)."""
    return self.config.GetBoolean('repo.superproject')

  @property
  def partial_clone(self):
    """Whether this is a partial clone (repo.partialclone)."""
    return self.config.GetBoolean('repo.partialclone')

  @property
  def depth(self):
    """Shallow clone depth as a string, or None (repo.depth)."""
    return self.config.GetString('repo.depth')

  @property
  def clone_filter(self):
    """The partial clone filter (repo.clonefilter)."""
    return self.config.GetString('repo.clonefilter')

  @property
  def partial_clone_exclude(self):
    """Partial clone exclude string (repo.partialcloneexclude)."""
    return self.config.GetString('repo.partialcloneexclude')

  @property
  def manifest_platform(self):
    """The --platform argument from `repo init` (manifest.platform)."""
    return self.config.GetString('manifest.platform')

  @property
  def _platform_name(self):
    """Return the lowercased name of the host platform."""
    return platform.system().lower()
3522
  def SyncWithPossibleInit(self, submanifest, verbose=False,
                           current_branch_only=False, tags='', git_event_log=None):
    """Sync a manifestProject, possibly for the first time.

    Call Sync() with arguments from the most recent `repo init`. If this is a
    new sub manifest, then inherit options from the parent's manifestProject.

    This is used by subcmds.Sync() to do an initial download of new sub
    manifests.

    Args:
      submanifest: an XmlSubmanifest, the submanifest to re-sync.
      verbose: a boolean, whether to show all output, rather than only errors.
      current_branch_only: a boolean, whether to only fetch the current manifest
        branch from the server.
      tags: a boolean, whether to fetch tags.
        NOTE(review): the default '' suggests a tri-state string rather than a
        boolean; confirm against Sync()'s handling.
      git_event_log: an EventLog, for git tracing.
    """
    # TODO(lamontjones): when refactoring sync (and init?) consider how to
    # better get the init options that we should use for new submanifests that
    # are added when syncing an existing workspace.
    git_event_log = git_event_log or EventLog()
    spec = submanifest.ToSubmanifestSpec()
    # Use the init options from the existing manifestProject, or the parent if
    # it doesn't exist.
    #
    # Today, we only support changing manifest_groups on the sub-manifest, with
    # no supported-for-the-user way to change the other arguments from those
    # specified by the outermost manifest.
    #
    # TODO(lamontjones): determine which of these should come from the outermost
    # manifest and which should come from the parent manifest.
    mp = self if self.Exists else submanifest.parent.manifestProject
    return self.Sync(
        manifest_url=spec.manifestUrl,
        manifest_branch=spec.revision,
        standalone_manifest=mp.standalone_manifest_url,
        groups=mp.manifest_groups,
        platform=mp.manifest_platform,
        mirror=mp.mirror,
        dissociate=mp.dissociate,
        reference=mp.reference,
        worktree=mp.use_worktree,
        submodules=mp.submodules,
        archive=mp.archive,
        partial_clone=mp.partial_clone,
        clone_filter=mp.clone_filter,
        partial_clone_exclude=mp.partial_clone_exclude,
        clone_bundle=mp.clone_bundle,
        git_lfs=mp.git_lfs,
        use_superproject=mp.use_superproject,
        verbose=verbose,
        current_branch_only=current_branch_only,
        tags=tags,
        depth=mp.depth,
        git_event_log=git_event_log,
        manifest_name=spec.manifestName,
        this_manifest_only=True,
        outer_manifest=False,
    )
3583
3584 def Sync(self, _kwargs_only=(), manifest_url='', manifest_branch=None,
3585 standalone_manifest=False, groups='', mirror=False, reference='',
3586 dissociate=False, worktree=False, submodules=False, archive=False,
3587 partial_clone=None, depth=None, clone_filter='blob:none',
3588 partial_clone_exclude=None, clone_bundle=None, git_lfs=None,
3589 use_superproject=None, verbose=False, current_branch_only=False,
3590 git_event_log=None, platform='', manifest_name='default.xml',
3591 tags='', this_manifest_only=False, outer_manifest=True):
3592 """Sync the manifest and all submanifests.
3593
3594 Args:
3595 manifest_url: a string, the URL of the manifest project.
3596 manifest_branch: a string, the manifest branch to use.
3597 standalone_manifest: a boolean, whether to store the manifest as a static
3598 file.
3599 groups: a string, restricts the checkout to projects with the specified
3600 groups.
3601 mirror: a boolean, whether to create a mirror of the remote repository.
3602 reference: a string, location of a repo instance to use as a reference.
3603 dissociate: a boolean, whether to dissociate from reference mirrors after
3604 clone.
3605 worktree: a boolean, whether to use git-worktree to manage projects.
3606 submodules: a boolean, whether sync submodules associated with the
3607 manifest project.
3608 archive: a boolean, whether to checkout each project as an archive. See
3609 git-archive.
3610 partial_clone: a boolean, whether to perform a partial clone.
3611 depth: an int, how deep of a shallow clone to create.
3612 clone_filter: a string, filter to use with partial_clone.
3613 partial_clone_exclude : a string, comma-delimeted list of project namess
3614 to exclude from partial clone.
3615 clone_bundle: a boolean, whether to enable /clone.bundle on HTTP/HTTPS.
3616 git_lfs: a boolean, whether to enable git LFS support.
3617 use_superproject: a boolean, whether to use the manifest superproject to
3618 sync projects.
3619 verbose: a boolean, whether to show all output, rather than only errors.
3620 current_branch_only: a boolean, whether to only fetch the current manifest
3621 branch from the server.
3622 platform: a string, restrict the checkout to projects with the specified
3623 platform group.
3624 git_event_log: an EventLog, for git tracing.
3625 tags: a boolean, whether to fetch tags.
3626 manifest_name: a string, the name of the manifest file to use.
3627 this_manifest_only: a boolean, whether to only operate on the current sub
3628 manifest.
3629 outer_manifest: a boolean, whether to start at the outermost manifest.
3630 3665
3631 Returns: 3666 def info(self, project, fmt, *args):
3632 a boolean, whether the sync was successful. 3667 self._messages.append(_InfoMessage(project, fmt % args))
3633 """
3634 assert _kwargs_only == (), 'Sync only accepts keyword arguments.'
3635
3636 groups = groups or self.manifest.GetDefaultGroupsStr(with_platform=False)
3637 platform = platform or 'auto'
3638 git_event_log = git_event_log or EventLog()
3639 if outer_manifest and self.manifest.is_submanifest:
3640 # In a multi-manifest checkout, use the outer manifest unless we are told
3641 # not to.
3642 return self.client.outer_manifest.manifestProject.Sync(
3643 manifest_url=manifest_url,
3644 manifest_branch=manifest_branch,
3645 standalone_manifest=standalone_manifest,
3646 groups=groups,
3647 platform=platform,
3648 mirror=mirror,
3649 dissociate=dissociate,
3650 reference=reference,
3651 worktree=worktree,
3652 submodules=submodules,
3653 archive=archive,
3654 partial_clone=partial_clone,
3655 clone_filter=clone_filter,
3656 partial_clone_exclude=partial_clone_exclude,
3657 clone_bundle=clone_bundle,
3658 git_lfs=git_lfs,
3659 use_superproject=use_superproject,
3660 verbose=verbose,
3661 current_branch_only=current_branch_only,
3662 tags=tags,
3663 depth=depth,
3664 git_event_log=git_event_log,
3665 manifest_name=manifest_name,
3666 this_manifest_only=this_manifest_only,
3667 outer_manifest=False)
3668
3669 # If repo has already been initialized, we take -u with the absence of
3670 # --standalone-manifest to mean "transition to a standard repo set up",
3671 # which necessitates starting fresh.
3672 # If --standalone-manifest is set, we always tear everything down and start
3673 # anew.
3674 if self.Exists:
3675 was_standalone_manifest = self.config.GetString('manifest.standalone')
3676 if was_standalone_manifest and not manifest_url:
3677 print('fatal: repo was initialized with a standlone manifest, '
3678 'cannot be re-initialized without --manifest-url/-u')
3679 return False
3680 3668
3681 if standalone_manifest or (was_standalone_manifest and manifest_url): 3669 def fail(self, project, err=None):
3682 self.config.ClearCache() 3670 self._failures.append(_Failure(project, err))
3683 if self.gitdir and os.path.exists(self.gitdir): 3671 self._MarkUnclean()
3684 platform_utils.rmtree(self.gitdir)
3685 if self.worktree and os.path.exists(self.worktree):
3686 platform_utils.rmtree(self.worktree)
3687
3688 is_new = not self.Exists
3689 if is_new:
3690 if not manifest_url:
3691 print('fatal: manifest url is required.', file=sys.stderr)
3692 return False
3693 3672
3694 if verbose: 3673 def later1(self, project, what):
3695 print('Downloading manifest from %s' % 3674 self._later_queue1.append(_Later(project, what))
3696 (GitConfig.ForUser().UrlInsteadOf(manifest_url),), 3675
3697 file=sys.stderr) 3676 def later2(self, project, what):
3698 3677 self._later_queue2.append(_Later(project, what))
3699 # The manifest project object doesn't keep track of the path on the 3678
3700 # server where this git is located, so let's save that here. 3679 def Finish(self):
3701 mirrored_manifest_git = None 3680 self._PrintMessages()
3702 if reference: 3681 self._RunLater()
3703 manifest_git_path = urllib.parse.urlparse(manifest_url).path[1:] 3682 self._PrintMessages()
3704 mirrored_manifest_git = os.path.join(reference, manifest_git_path) 3683 return self.clean
3705 if not mirrored_manifest_git.endswith(".git"): 3684
3706 mirrored_manifest_git += ".git" 3685 def Recently(self):
3707 if not os.path.exists(mirrored_manifest_git): 3686 recent_clean = self.recent_clean
3708 mirrored_manifest_git = os.path.join(reference, 3687 self.recent_clean = True
3709 '.repo/manifests.git') 3688 return recent_clean
3710 3689
3711 self._InitGitDir(mirror_git=mirrored_manifest_git) 3690 def _MarkUnclean(self):
3712 3691 self.clean = False
3713 # If standalone_manifest is set, mark the project as "standalone" -- we'll 3692 self.recent_clean = False
3714 # still do much of the manifests.git set up, but will avoid actual syncs to 3693
3715 # a remote. 3694 def _RunLater(self):
3716 if standalone_manifest: 3695 for q in ["_later_queue1", "_later_queue2"]:
3717 self.config.SetString('manifest.standalone', manifest_url) 3696 if not self._RunQueue(q):
3718 elif not manifest_url and not manifest_branch: 3697 return
3719 # If -u is set and --standalone-manifest is not, then we're not in 3698
3720 # standalone mode. Otherwise, use config to infer what we were in the last 3699 def _RunQueue(self, queue):
3721 # init. 3700 for m in getattr(self, queue):
3722 standalone_manifest = bool(self.config.GetString('manifest.standalone')) 3701 if not m.Run(self):
3723 if not standalone_manifest: 3702 self._MarkUnclean()
3724 self.config.SetString('manifest.standalone', None) 3703 return False
3725 3704 setattr(self, queue, [])
3726 self._ConfigureDepth(depth) 3705 return True
3727 3706
3728 # Set the remote URL before the remote branch as we might need it below. 3707 def _PrintMessages(self):
3729 if manifest_url: 3708 if self._messages or self._failures:
3730 r = self.GetRemote() 3709 if os.isatty(2):
3731 r.url = manifest_url 3710 self.out.write(progress.CSI_ERASE_LINE)
3732 r.ResetFetch() 3711 self.out.write("\r")
3733 r.Save() 3712
3734 3713 for m in self._messages:
3735 if not standalone_manifest: 3714 m.Print(self)
3736 if manifest_branch: 3715 for m in self._failures:
3737 if manifest_branch == 'HEAD': 3716 m.Print(self)
3738 manifest_branch = self.ResolveRemoteHead() 3717
3739 if manifest_branch is None: 3718 self._messages = []
3740 print('fatal: unable to resolve HEAD', file=sys.stderr) 3719 self._failures = []
3720
3721
3722class MetaProject(Project):
3723 """A special project housed under .repo."""
3724
3725 def __init__(self, manifest, name, gitdir, worktree):
3726 Project.__init__(
3727 self,
3728 manifest=manifest,
3729 name=name,
3730 gitdir=gitdir,
3731 objdir=gitdir,
3732 worktree=worktree,
3733 remote=RemoteSpec("origin"),
3734 relpath=".repo/%s" % name,
3735 revisionExpr="refs/heads/master",
3736 revisionId=None,
3737 groups=None,
3738 )
3739
3740 def PreSync(self):
3741 if self.Exists:
3742 cb = self.CurrentBranch
3743 if cb:
3744 base = self.GetBranch(cb).merge
3745 if base:
3746 self.revisionExpr = base
3747 self.revisionId = None
3748
3749 @property
3750 def HasChanges(self):
3751 """Has the remote received new commits not yet checked out?"""
3752 if not self.remote or not self.revisionExpr:
3741 return False 3753 return False
3742 self.revisionExpr = manifest_branch
3743 else:
3744 if is_new:
3745 default_branch = self.ResolveRemoteHead()
3746 if default_branch is None:
3747 # If the remote doesn't have HEAD configured, default to master.
3748 default_branch = 'refs/heads/master'
3749 self.revisionExpr = default_branch
3750 else:
3751 self.PreSync()
3752
3753 groups = re.split(r'[,\s]+', groups or '')
3754 all_platforms = ['linux', 'darwin', 'windows']
3755 platformize = lambda x: 'platform-' + x
3756 if platform == 'auto':
3757 if not mirror and not self.mirror:
3758 groups.append(platformize(self._platform_name))
3759 elif platform == 'all':
3760 groups.extend(map(platformize, all_platforms))
3761 elif platform in all_platforms:
3762 groups.append(platformize(platform))
3763 elif platform != 'none':
3764 print('fatal: invalid platform flag', file=sys.stderr)
3765 return False
3766 self.config.SetString('manifest.platform', platform)
3767
3768 groups = [x for x in groups if x]
3769 groupstr = ','.join(groups)
3770 if platform == 'auto' and groupstr == self.manifest.GetDefaultGroupsStr():
3771 groupstr = None
3772 self.config.SetString('manifest.groups', groupstr)
3773
3774 if reference:
3775 self.config.SetString('repo.reference', reference)
3776
3777 if dissociate:
3778 self.config.SetBoolean('repo.dissociate', dissociate)
3779
3780 if worktree:
3781 if mirror:
3782 print('fatal: --mirror and --worktree are incompatible',
3783 file=sys.stderr)
3784 return False
3785 if submodules:
3786 print('fatal: --submodules and --worktree are incompatible',
3787 file=sys.stderr)
3788 return False
3789 self.config.SetBoolean('repo.worktree', worktree)
3790 if is_new:
3791 self.use_git_worktrees = True
3792 print('warning: --worktree is experimental!', file=sys.stderr)
3793
3794 if archive:
3795 if is_new:
3796 self.config.SetBoolean('repo.archive', archive)
3797 else:
3798 print('fatal: --archive is only supported when initializing a new '
3799 'workspace.', file=sys.stderr)
3800 print('Either delete the .repo folder in this workspace, or initialize '
3801 'in another location.', file=sys.stderr)
3802 return False
3803 3754
3804 if mirror: 3755 all_refs = self.bare_ref.all
3805 if is_new: 3756 revid = self.GetRevisionId(all_refs)
3806 self.config.SetBoolean('repo.mirror', mirror) 3757 head = self.work_git.GetHead()
3807 else: 3758 if head.startswith(R_HEADS):
3808 print('fatal: --mirror is only supported when initializing a new ' 3759 try:
3809 'workspace.', file=sys.stderr) 3760 head = all_refs[head]
3810 print('Either delete the .repo folder in this workspace, or initialize ' 3761 except KeyError:
3811 'in another location.', file=sys.stderr) 3762 head = None
3812 return False
3813 3763
3814 if partial_clone is not None: 3764 if revid == head:
3815 if mirror: 3765 return False
3816 print('fatal: --mirror and --partial-clone are mutually exclusive', 3766 elif self._revlist(not_rev(HEAD), revid):
3817 file=sys.stderr) 3767 return True
3818 return False
3819 self.config.SetBoolean('repo.partialclone', partial_clone)
3820 if clone_filter:
3821 self.config.SetString('repo.clonefilter', clone_filter)
3822 elif self.partial_clone:
3823 clone_filter = self.clone_filter
3824 else:
3825 clone_filter = None
3826
3827 if partial_clone_exclude is not None:
3828 self.config.SetString('repo.partialcloneexclude', partial_clone_exclude)
3829
3830 if clone_bundle is None:
3831 clone_bundle = False if partial_clone else True
3832 else:
3833 self.config.SetBoolean('repo.clonebundle', clone_bundle)
3834
3835 if submodules:
3836 self.config.SetBoolean('repo.submodules', submodules)
3837
3838 if git_lfs is not None:
3839 if git_lfs:
3840 git_require((2, 17, 0), fail=True, msg='Git LFS support')
3841
3842 self.config.SetBoolean('repo.git-lfs', git_lfs)
3843 if not is_new:
3844 print('warning: Changing --git-lfs settings will only affect new project checkouts.\n'
3845 ' Existing projects will require manual updates.\n', file=sys.stderr)
3846
3847 if use_superproject is not None:
3848 self.config.SetBoolean('repo.superproject', use_superproject)
3849
3850 if not standalone_manifest:
3851 success = self.Sync_NetworkHalf(
3852 is_new=is_new, quiet=not verbose, verbose=verbose,
3853 clone_bundle=clone_bundle, current_branch_only=current_branch_only,
3854 tags=tags, submodules=submodules, clone_filter=clone_filter,
3855 partial_clone_exclude=self.manifest.PartialCloneExclude).success
3856 if not success:
3857 r = self.GetRemote()
3858 print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
3859
3860 # Better delete the manifest git dir if we created it; otherwise next
3861 # time (when user fixes problems) we won't go through the "is_new" logic.
3862 if is_new:
3863 platform_utils.rmtree(self.gitdir)
3864 return False 3768 return False
3865 3769
3866 if manifest_branch:
3867 self.MetaBranchSwitch(submodules=submodules)
3868 3770
3869 syncbuf = SyncBuffer(self.config) 3771class RepoProject(MetaProject):
3870 self.Sync_LocalHalf(syncbuf, submodules=submodules) 3772 """The MetaProject for repo itself."""
3871 syncbuf.Finish()
3872 3773
3873 if is_new or self.CurrentBranch is None: 3774 @property
3874 if not self.StartBranch('default'): 3775 def LastFetch(self):
3875 print('fatal: cannot create default in manifest', file=sys.stderr) 3776 try:
3876 return False 3777 fh = os.path.join(self.gitdir, "FETCH_HEAD")
3778 return os.path.getmtime(fh)
3779 except OSError:
3780 return 0
3877 3781
3878 if not manifest_name:
3879 print('fatal: manifest name (-m) is required.', file=sys.stderr)
3880 return False
3881 3782
3882 elif is_new: 3783class ManifestProject(MetaProject):
3883 # This is a new standalone manifest. 3784 """The MetaProject for manifests."""
3884 manifest_name = 'default.xml' 3785
3885 manifest_data = fetch.fetch_file(manifest_url, verbose=verbose) 3786 def MetaBranchSwitch(self, submodules=False):
3886 dest = os.path.join(self.worktree, manifest_name) 3787 """Prepare for manifest branch switch."""
3887 os.makedirs(os.path.dirname(dest), exist_ok=True) 3788
3888 with open(dest, 'wb') as f: 3789 # detach and delete manifest branch, allowing a new
3889 f.write(manifest_data) 3790 # branch to take over
3791 syncbuf = SyncBuffer(self.config, detach_head=True)
3792 self.Sync_LocalHalf(syncbuf, submodules=submodules)
3793 syncbuf.Finish()
3794
3795 return (
3796 GitCommand(
3797 self,
3798 ["update-ref", "-d", "refs/heads/default"],
3799 capture_stdout=True,
3800 capture_stderr=True,
3801 ).Wait()
3802 == 0
3803 )
3804
3805 @property
3806 def standalone_manifest_url(self):
3807 """The URL of the standalone manifest, or None."""
3808 return self.config.GetString("manifest.standalone")
3809
3810 @property
3811 def manifest_groups(self):
3812 """The manifest groups string."""
3813 return self.config.GetString("manifest.groups")
3814
3815 @property
3816 def reference(self):
3817 """The --reference for this manifest."""
3818 return self.config.GetString("repo.reference")
3819
3820 @property
3821 def dissociate(self):
3822 """Whether to dissociate."""
3823 return self.config.GetBoolean("repo.dissociate")
3824
3825 @property
3826 def archive(self):
3827 """Whether we use archive."""
3828 return self.config.GetBoolean("repo.archive")
3829
3830 @property
3831 def mirror(self):
3832 """Whether we use mirror."""
3833 return self.config.GetBoolean("repo.mirror")
3834
3835 @property
3836 def use_worktree(self):
3837 """Whether we use worktree."""
3838 return self.config.GetBoolean("repo.worktree")
3839
3840 @property
3841 def clone_bundle(self):
3842 """Whether we use clone_bundle."""
3843 return self.config.GetBoolean("repo.clonebundle")
3844
3845 @property
3846 def submodules(self):
3847 """Whether we use submodules."""
3848 return self.config.GetBoolean("repo.submodules")
3849
3850 @property
3851 def git_lfs(self):
3852 """Whether we use git_lfs."""
3853 return self.config.GetBoolean("repo.git-lfs")
3854
3855 @property
3856 def use_superproject(self):
3857 """Whether we use superproject."""
3858 return self.config.GetBoolean("repo.superproject")
3859
3860 @property
3861 def partial_clone(self):
3862 """Whether this is a partial clone."""
3863 return self.config.GetBoolean("repo.partialclone")
3864
3865 @property
3866 def depth(self):
3867 """Partial clone depth."""
3868 return self.config.GetString("repo.depth")
3869
3870 @property
3871 def clone_filter(self):
3872 """The clone filter."""
3873 return self.config.GetString("repo.clonefilter")
3874
3875 @property
3876 def partial_clone_exclude(self):
3877 """Partial clone exclude string"""
3878 return self.config.GetString("repo.partialcloneexclude")
3879
3880 @property
3881 def manifest_platform(self):
3882 """The --platform argument from `repo init`."""
3883 return self.config.GetString("manifest.platform")
3884
3885 @property
3886 def _platform_name(self):
3887 """Return the name of the platform."""
3888 return platform.system().lower()
3889
3890 def SyncWithPossibleInit(
3891 self,
3892 submanifest,
3893 verbose=False,
3894 current_branch_only=False,
3895 tags="",
3896 git_event_log=None,
3897 ):
3898 """Sync a manifestProject, possibly for the first time.
3899
3900 Call Sync() with arguments from the most recent `repo init`. If this is
3901 a new sub manifest, then inherit options from the parent's
3902 manifestProject.
3903
3904 This is used by subcmds.Sync() to do an initial download of new sub
3905 manifests.
3890 3906
3891 try: 3907 Args:
3892 self.manifest.Link(manifest_name) 3908 submanifest: an XmlSubmanifest, the submanifest to re-sync.
3893 except ManifestParseError as e: 3909 verbose: a boolean, whether to show all output, rather than only
3894 print("fatal: manifest '%s' not available" % manifest_name, 3910 errors.
3895 file=sys.stderr) 3911 current_branch_only: a boolean, whether to only fetch the current
3896 print('fatal: %s' % str(e), file=sys.stderr) 3912 manifest branch from the server.
3897 return False 3913 tags: a boolean, whether to fetch tags.
3898 3914 git_event_log: an EventLog, for git tracing.
3899 if not this_manifest_only: 3915 """
3900 for submanifest in self.manifest.submanifests.values(): 3916 # TODO(lamontjones): when refactoring sync (and init?) consider how to
3917 # better get the init options that we should use for new submanifests
3918 # that are added when syncing an existing workspace.
3919 git_event_log = git_event_log or EventLog()
3901 spec = submanifest.ToSubmanifestSpec() 3920 spec = submanifest.ToSubmanifestSpec()
3902 submanifest.repo_client.manifestProject.Sync( 3921 # Use the init options from the existing manifestProject, or the parent
3922 # if it doesn't exist.
3923 #
3924 # Today, we only support changing manifest_groups on the sub-manifest,
3925 # with no supported-for-the-user way to change the other arguments from
3926 # those specified by the outermost manifest.
3927 #
3928 # TODO(lamontjones): determine which of these should come from the
3929 # outermost manifest and which should come from the parent manifest.
3930 mp = self if self.Exists else submanifest.parent.manifestProject
3931 return self.Sync(
3903 manifest_url=spec.manifestUrl, 3932 manifest_url=spec.manifestUrl,
3904 manifest_branch=spec.revision, 3933 manifest_branch=spec.revision,
3905 standalone_manifest=standalone_manifest, 3934 standalone_manifest=mp.standalone_manifest_url,
3906 groups=self.manifest_groups, 3935 groups=mp.manifest_groups,
3907 platform=platform, 3936 platform=mp.manifest_platform,
3908 mirror=mirror, 3937 mirror=mp.mirror,
3909 dissociate=dissociate, 3938 dissociate=mp.dissociate,
3910 reference=reference, 3939 reference=mp.reference,
3911 worktree=worktree, 3940 worktree=mp.use_worktree,
3912 submodules=submodules, 3941 submodules=mp.submodules,
3913 archive=archive, 3942 archive=mp.archive,
3914 partial_clone=partial_clone, 3943 partial_clone=mp.partial_clone,
3915 clone_filter=clone_filter, 3944 clone_filter=mp.clone_filter,
3916 partial_clone_exclude=partial_clone_exclude, 3945 partial_clone_exclude=mp.partial_clone_exclude,
3917 clone_bundle=clone_bundle, 3946 clone_bundle=mp.clone_bundle,
3918 git_lfs=git_lfs, 3947 git_lfs=mp.git_lfs,
3919 use_superproject=use_superproject, 3948 use_superproject=mp.use_superproject,
3920 verbose=verbose, 3949 verbose=verbose,
3921 current_branch_only=current_branch_only, 3950 current_branch_only=current_branch_only,
3922 tags=tags, 3951 tags=tags,
3923 depth=depth, 3952 depth=mp.depth,
3924 git_event_log=git_event_log, 3953 git_event_log=git_event_log,
3925 manifest_name=spec.manifestName, 3954 manifest_name=spec.manifestName,
3926 this_manifest_only=False, 3955 this_manifest_only=True,
3927 outer_manifest=False, 3956 outer_manifest=False,
3928 ) 3957 )
3929 3958
3930 # Lastly, if the manifest has a <superproject> then have the superproject 3959 def Sync(
3931 # sync it (if it will be used). 3960 self,
3932 if git_superproject.UseSuperproject(use_superproject, self.manifest): 3961 _kwargs_only=(),
3933 sync_result = self.manifest.superproject.Sync(git_event_log) 3962 manifest_url="",
3934 if not sync_result.success: 3963 manifest_branch=None,
3935 submanifest = '' 3964 standalone_manifest=False,
3936 if self.manifest.path_prefix: 3965 groups="",
3937 submanifest = f'for {self.manifest.path_prefix} ' 3966 mirror=False,
3938 print(f'warning: git update of superproject {submanifest}failed, repo ' 3967 reference="",
3939 'sync will not use superproject to fetch source; while this ' 3968 dissociate=False,
3940 'error is not fatal, and you can continue to run repo sync, ' 3969 worktree=False,
3941 'please run repo init with the --no-use-superproject option to ' 3970 submodules=False,
3942 'stop seeing this warning', file=sys.stderr) 3971 archive=False,
3943 if sync_result.fatal and use_superproject is not None: 3972 partial_clone=None,
3944 return False 3973 depth=None,
3945 3974 clone_filter="blob:none",
3946 return True 3975 partial_clone_exclude=None,
3947 3976 clone_bundle=None,
3948 def _ConfigureDepth(self, depth): 3977 git_lfs=None,
3949 """Configure the depth we'll sync down. 3978 use_superproject=None,
3950 3979 verbose=False,
3951 Args: 3980 current_branch_only=False,
3952 depth: an int, how deep of a partial clone to create. 3981 git_event_log=None,
3953 """ 3982 platform="",
3954 # Opt.depth will be non-None if user actually passed --depth to repo init. 3983 manifest_name="default.xml",
3955 if depth is not None: 3984 tags="",
3956 if depth > 0: 3985 this_manifest_only=False,
3957 # Positive values will set the depth. 3986 outer_manifest=True,
3958 depth = str(depth) 3987 ):
3959 else: 3988 """Sync the manifest and all submanifests.
3960 # Negative numbers will clear the depth; passing None to SetString 3989
3961 # will do that. 3990 Args:
3962 depth = None 3991 manifest_url: a string, the URL of the manifest project.
3963 3992 manifest_branch: a string, the manifest branch to use.
3964 # We store the depth in the main manifest project. 3993 standalone_manifest: a boolean, whether to store the manifest as a
3965 self.config.SetString('repo.depth', depth) 3994 static file.
3995 groups: a string, restricts the checkout to projects with the
3996 specified groups.
3997 mirror: a boolean, whether to create a mirror of the remote
3998 repository.
3999 reference: a string, location of a repo instance to use as a
4000 reference.
4001 dissociate: a boolean, whether to dissociate from reference mirrors
4002 after clone.
4003 worktree: a boolean, whether to use git-worktree to manage projects.
4004 submodules: a boolean, whether sync submodules associated with the
4005 manifest project.
4006 archive: a boolean, whether to checkout each project as an archive.
4007 See git-archive.
4008 partial_clone: a boolean, whether to perform a partial clone.
4009 depth: an int, how deep of a shallow clone to create.
4010 clone_filter: a string, filter to use with partial_clone.
4011 partial_clone_exclude : a string, comma-delimeted list of project
4012 names to exclude from partial clone.
4013 clone_bundle: a boolean, whether to enable /clone.bundle on
4014 HTTP/HTTPS.
4015 git_lfs: a boolean, whether to enable git LFS support.
4016 use_superproject: a boolean, whether to use the manifest
4017 superproject to sync projects.
4018 verbose: a boolean, whether to show all output, rather than only
4019 errors.
4020 current_branch_only: a boolean, whether to only fetch the current
4021 manifest branch from the server.
4022 platform: a string, restrict the checkout to projects with the
4023 specified platform group.
4024 git_event_log: an EventLog, for git tracing.
4025 tags: a boolean, whether to fetch tags.
4026 manifest_name: a string, the name of the manifest file to use.
4027 this_manifest_only: a boolean, whether to only operate on the
4028 current sub manifest.
4029 outer_manifest: a boolean, whether to start at the outermost
4030 manifest.
4031
4032 Returns:
4033 a boolean, whether the sync was successful.
4034 """
4035 assert _kwargs_only == (), "Sync only accepts keyword arguments."
4036
4037 groups = groups or self.manifest.GetDefaultGroupsStr(
4038 with_platform=False
4039 )
4040 platform = platform or "auto"
4041 git_event_log = git_event_log or EventLog()
4042 if outer_manifest and self.manifest.is_submanifest:
4043 # In a multi-manifest checkout, use the outer manifest unless we are
4044 # told not to.
4045 return self.client.outer_manifest.manifestProject.Sync(
4046 manifest_url=manifest_url,
4047 manifest_branch=manifest_branch,
4048 standalone_manifest=standalone_manifest,
4049 groups=groups,
4050 platform=platform,
4051 mirror=mirror,
4052 dissociate=dissociate,
4053 reference=reference,
4054 worktree=worktree,
4055 submodules=submodules,
4056 archive=archive,
4057 partial_clone=partial_clone,
4058 clone_filter=clone_filter,
4059 partial_clone_exclude=partial_clone_exclude,
4060 clone_bundle=clone_bundle,
4061 git_lfs=git_lfs,
4062 use_superproject=use_superproject,
4063 verbose=verbose,
4064 current_branch_only=current_branch_only,
4065 tags=tags,
4066 depth=depth,
4067 git_event_log=git_event_log,
4068 manifest_name=manifest_name,
4069 this_manifest_only=this_manifest_only,
4070 outer_manifest=False,
4071 )
4072
4073 # If repo has already been initialized, we take -u with the absence of
4074 # --standalone-manifest to mean "transition to a standard repo set up",
4075 # which necessitates starting fresh.
4076 # If --standalone-manifest is set, we always tear everything down and
4077 # start anew.
4078 if self.Exists:
4079 was_standalone_manifest = self.config.GetString(
4080 "manifest.standalone"
4081 )
4082 if was_standalone_manifest and not manifest_url:
4083 print(
4084 "fatal: repo was initialized with a standlone manifest, "
4085 "cannot be re-initialized without --manifest-url/-u"
4086 )
4087 return False
4088
4089 if standalone_manifest or (
4090 was_standalone_manifest and manifest_url
4091 ):
4092 self.config.ClearCache()
4093 if self.gitdir and os.path.exists(self.gitdir):
4094 platform_utils.rmtree(self.gitdir)
4095 if self.worktree and os.path.exists(self.worktree):
4096 platform_utils.rmtree(self.worktree)
4097
4098 is_new = not self.Exists
4099 if is_new:
4100 if not manifest_url:
4101 print("fatal: manifest url is required.", file=sys.stderr)
4102 return False
4103
4104 if verbose:
4105 print(
4106 "Downloading manifest from %s"
4107 % (GitConfig.ForUser().UrlInsteadOf(manifest_url),),
4108 file=sys.stderr,
4109 )
4110
4111 # The manifest project object doesn't keep track of the path on the
4112 # server where this git is located, so let's save that here.
4113 mirrored_manifest_git = None
4114 if reference:
4115 manifest_git_path = urllib.parse.urlparse(manifest_url).path[1:]
4116 mirrored_manifest_git = os.path.join(
4117 reference, manifest_git_path
4118 )
4119 if not mirrored_manifest_git.endswith(".git"):
4120 mirrored_manifest_git += ".git"
4121 if not os.path.exists(mirrored_manifest_git):
4122 mirrored_manifest_git = os.path.join(
4123 reference, ".repo/manifests.git"
4124 )
4125
4126 self._InitGitDir(mirror_git=mirrored_manifest_git)
4127
4128 # If standalone_manifest is set, mark the project as "standalone" --
4129 # we'll still do much of the manifests.git set up, but will avoid actual
4130 # syncs to a remote.
4131 if standalone_manifest:
4132 self.config.SetString("manifest.standalone", manifest_url)
4133 elif not manifest_url and not manifest_branch:
4134 # If -u is set and --standalone-manifest is not, then we're not in
4135 # standalone mode. Otherwise, use config to infer what we were in
4136 # the last init.
4137 standalone_manifest = bool(
4138 self.config.GetString("manifest.standalone")
4139 )
4140 if not standalone_manifest:
4141 self.config.SetString("manifest.standalone", None)
4142
4143 self._ConfigureDepth(depth)
4144
4145 # Set the remote URL before the remote branch as we might need it below.
4146 if manifest_url:
4147 r = self.GetRemote()
4148 r.url = manifest_url
4149 r.ResetFetch()
4150 r.Save()
4151
4152 if not standalone_manifest:
4153 if manifest_branch:
4154 if manifest_branch == "HEAD":
4155 manifest_branch = self.ResolveRemoteHead()
4156 if manifest_branch is None:
4157 print("fatal: unable to resolve HEAD", file=sys.stderr)
4158 return False
4159 self.revisionExpr = manifest_branch
4160 else:
4161 if is_new:
4162 default_branch = self.ResolveRemoteHead()
4163 if default_branch is None:
4164 # If the remote doesn't have HEAD configured, default to
4165 # master.
4166 default_branch = "refs/heads/master"
4167 self.revisionExpr = default_branch
4168 else:
4169 self.PreSync()
4170
4171 groups = re.split(r"[,\s]+", groups or "")
4172 all_platforms = ["linux", "darwin", "windows"]
4173 platformize = lambda x: "platform-" + x
4174 if platform == "auto":
4175 if not mirror and not self.mirror:
4176 groups.append(platformize(self._platform_name))
4177 elif platform == "all":
4178 groups.extend(map(platformize, all_platforms))
4179 elif platform in all_platforms:
4180 groups.append(platformize(platform))
4181 elif platform != "none":
4182 print("fatal: invalid platform flag", file=sys.stderr)
4183 return False
4184 self.config.SetString("manifest.platform", platform)
4185
4186 groups = [x for x in groups if x]
4187 groupstr = ",".join(groups)
4188 if (
4189 platform == "auto"
4190 and groupstr == self.manifest.GetDefaultGroupsStr()
4191 ):
4192 groupstr = None
4193 self.config.SetString("manifest.groups", groupstr)
4194
4195 if reference:
4196 self.config.SetString("repo.reference", reference)
4197
4198 if dissociate:
4199 self.config.SetBoolean("repo.dissociate", dissociate)
4200
4201 if worktree:
4202 if mirror:
4203 print(
4204 "fatal: --mirror and --worktree are incompatible",
4205 file=sys.stderr,
4206 )
4207 return False
4208 if submodules:
4209 print(
4210 "fatal: --submodules and --worktree are incompatible",
4211 file=sys.stderr,
4212 )
4213 return False
4214 self.config.SetBoolean("repo.worktree", worktree)
4215 if is_new:
4216 self.use_git_worktrees = True
4217 print("warning: --worktree is experimental!", file=sys.stderr)
4218
4219 if archive:
4220 if is_new:
4221 self.config.SetBoolean("repo.archive", archive)
4222 else:
4223 print(
4224 "fatal: --archive is only supported when initializing a "
4225 "new workspace.",
4226 file=sys.stderr,
4227 )
4228 print(
4229 "Either delete the .repo folder in this workspace, or "
4230 "initialize in another location.",
4231 file=sys.stderr,
4232 )
4233 return False
4234
4235 if mirror:
4236 if is_new:
4237 self.config.SetBoolean("repo.mirror", mirror)
4238 else:
4239 print(
4240 "fatal: --mirror is only supported when initializing a new "
4241 "workspace.",
4242 file=sys.stderr,
4243 )
4244 print(
4245 "Either delete the .repo folder in this workspace, or "
4246 "initialize in another location.",
4247 file=sys.stderr,
4248 )
4249 return False
4250
4251 if partial_clone is not None:
4252 if mirror:
4253 print(
4254 "fatal: --mirror and --partial-clone are mutually "
4255 "exclusive",
4256 file=sys.stderr,
4257 )
4258 return False
4259 self.config.SetBoolean("repo.partialclone", partial_clone)
4260 if clone_filter:
4261 self.config.SetString("repo.clonefilter", clone_filter)
4262 elif self.partial_clone:
4263 clone_filter = self.clone_filter
4264 else:
4265 clone_filter = None
4266
4267 if partial_clone_exclude is not None:
4268 self.config.SetString(
4269 "repo.partialcloneexclude", partial_clone_exclude
4270 )
4271
4272 if clone_bundle is None:
4273 clone_bundle = False if partial_clone else True
4274 else:
4275 self.config.SetBoolean("repo.clonebundle", clone_bundle)
4276
4277 if submodules:
4278 self.config.SetBoolean("repo.submodules", submodules)
4279
4280 if git_lfs is not None:
4281 if git_lfs:
4282 git_require((2, 17, 0), fail=True, msg="Git LFS support")
4283
4284 self.config.SetBoolean("repo.git-lfs", git_lfs)
4285 if not is_new:
4286 print(
4287 "warning: Changing --git-lfs settings will only affect new "
4288 "project checkouts.\n"
4289 " Existing projects will require manual updates.\n",
4290 file=sys.stderr,
4291 )
4292
4293 if use_superproject is not None:
4294 self.config.SetBoolean("repo.superproject", use_superproject)
4295
4296 if not standalone_manifest:
4297 success = self.Sync_NetworkHalf(
4298 is_new=is_new,
4299 quiet=not verbose,
4300 verbose=verbose,
4301 clone_bundle=clone_bundle,
4302 current_branch_only=current_branch_only,
4303 tags=tags,
4304 submodules=submodules,
4305 clone_filter=clone_filter,
4306 partial_clone_exclude=self.manifest.PartialCloneExclude,
4307 ).success
4308 if not success:
4309 r = self.GetRemote()
4310 print(
4311 "fatal: cannot obtain manifest %s" % r.url, file=sys.stderr
4312 )
4313
4314 # Better delete the manifest git dir if we created it; otherwise
4315 # next time (when user fixes problems) we won't go through the
4316 # "is_new" logic.
4317 if is_new:
4318 platform_utils.rmtree(self.gitdir)
4319 return False
4320
4321 if manifest_branch:
4322 self.MetaBranchSwitch(submodules=submodules)
4323
4324 syncbuf = SyncBuffer(self.config)
4325 self.Sync_LocalHalf(syncbuf, submodules=submodules)
4326 syncbuf.Finish()
4327
4328 if is_new or self.CurrentBranch is None:
4329 if not self.StartBranch("default"):
4330 print(
4331 "fatal: cannot create default in manifest",
4332 file=sys.stderr,
4333 )
4334 return False
4335
4336 if not manifest_name:
4337 print("fatal: manifest name (-m) is required.", file=sys.stderr)
4338 return False
4339
4340 elif is_new:
4341 # This is a new standalone manifest.
4342 manifest_name = "default.xml"
4343 manifest_data = fetch.fetch_file(manifest_url, verbose=verbose)
4344 dest = os.path.join(self.worktree, manifest_name)
4345 os.makedirs(os.path.dirname(dest), exist_ok=True)
4346 with open(dest, "wb") as f:
4347 f.write(manifest_data)
4348
4349 try:
4350 self.manifest.Link(manifest_name)
4351 except ManifestParseError as e:
4352 print(
4353 "fatal: manifest '%s' not available" % manifest_name,
4354 file=sys.stderr,
4355 )
4356 print("fatal: %s" % str(e), file=sys.stderr)
4357 return False
4358
4359 if not this_manifest_only:
4360 for submanifest in self.manifest.submanifests.values():
4361 spec = submanifest.ToSubmanifestSpec()
4362 submanifest.repo_client.manifestProject.Sync(
4363 manifest_url=spec.manifestUrl,
4364 manifest_branch=spec.revision,
4365 standalone_manifest=standalone_manifest,
4366 groups=self.manifest_groups,
4367 platform=platform,
4368 mirror=mirror,
4369 dissociate=dissociate,
4370 reference=reference,
4371 worktree=worktree,
4372 submodules=submodules,
4373 archive=archive,
4374 partial_clone=partial_clone,
4375 clone_filter=clone_filter,
4376 partial_clone_exclude=partial_clone_exclude,
4377 clone_bundle=clone_bundle,
4378 git_lfs=git_lfs,
4379 use_superproject=use_superproject,
4380 verbose=verbose,
4381 current_branch_only=current_branch_only,
4382 tags=tags,
4383 depth=depth,
4384 git_event_log=git_event_log,
4385 manifest_name=spec.manifestName,
4386 this_manifest_only=False,
4387 outer_manifest=False,
4388 )
4389
4390 # Lastly, if the manifest has a <superproject> then have the
4391 # superproject sync it (if it will be used).
4392 if git_superproject.UseSuperproject(use_superproject, self.manifest):
4393 sync_result = self.manifest.superproject.Sync(git_event_log)
4394 if not sync_result.success:
4395 submanifest = ""
4396 if self.manifest.path_prefix:
4397 submanifest = f"for {self.manifest.path_prefix} "
4398 print(
4399 f"warning: git update of superproject {submanifest}failed, "
4400 "repo sync will not use superproject to fetch source; "
4401 "while this error is not fatal, and you can continue to "
4402 "run repo sync, please run repo init with the "
4403 "--no-use-superproject option to stop seeing this warning",
4404 file=sys.stderr,
4405 )
4406 if sync_result.fatal and use_superproject is not None:
4407 return False
4408
4409 return True
4410
4411 def _ConfigureDepth(self, depth):
4412 """Configure the depth we'll sync down.
4413
4414 Args:
4415 depth: an int, how deep of a partial clone to create.
4416 """
4417 # Opt.depth will be non-None if user actually passed --depth to repo
4418 # init.
4419 if depth is not None:
4420 if depth > 0:
4421 # Positive values will set the depth.
4422 depth = str(depth)
4423 else:
4424 # Negative numbers will clear the depth; passing None to
4425 # SetString will do that.
4426 depth = None
4427
4428 # We store the depth in the main manifest project.
4429 self.config.SetString("repo.depth", depth)
diff --git a/release/sign-launcher.py b/release/sign-launcher.py
index ffe23cc5..86566122 100755
--- a/release/sign-launcher.py
+++ b/release/sign-launcher.py
@@ -28,43 +28,56 @@ import util
28 28
29 29
30def sign(opts): 30def sign(opts):
31 """Sign the launcher!""" 31 """Sign the launcher!"""
32 output = '' 32 output = ""
33 for key in opts.keys: 33 for key in opts.keys:
34 # We use ! at the end of the key so that gpg uses this specific key. 34 # We use ! at the end of the key so that gpg uses this specific key.
35 # Otherwise it uses the key as a lookup into the overall key and uses the 35 # Otherwise it uses the key as a lookup into the overall key and uses
36 # default signing key. i.e. It will see that KEYID_RSA is a subkey of 36 # the default signing key. i.e. It will see that KEYID_RSA is a subkey
37 # another key, and use the primary key to sign instead of the subkey. 37 # of another key, and use the primary key to sign instead of the subkey.
38 cmd = ['gpg', '--homedir', opts.gpgdir, '-u', f'{key}!', '--batch', '--yes', 38 cmd = [
39 '--armor', '--detach-sign', '--output', '-', opts.launcher] 39 "gpg",
40 ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE) 40 "--homedir",
41 output += ret.stdout 41 opts.gpgdir,
42 42 "-u",
43 # Save the combined signatures into one file. 43 f"{key}!",
44 with open(f'{opts.launcher}.asc', 'w', encoding='utf-8') as fp: 44 "--batch",
45 fp.write(output) 45 "--yes",
46 "--armor",
47 "--detach-sign",
48 "--output",
49 "-",
50 opts.launcher,
51 ]
52 ret = util.run(opts, cmd, encoding="utf-8", stdout=subprocess.PIPE)
53 output += ret.stdout
54
55 # Save the combined signatures into one file.
56 with open(f"{opts.launcher}.asc", "w", encoding="utf-8") as fp:
57 fp.write(output)
46 58
47 59
48def check(opts): 60def check(opts):
49 """Check the signature.""" 61 """Check the signature."""
50 util.run(opts, ['gpg', '--verify', f'{opts.launcher}.asc']) 62 util.run(opts, ["gpg", "--verify", f"{opts.launcher}.asc"])
51 63
52 64
53def get_version(opts): 65def get_version(opts):
54 """Get the version from |launcher|.""" 66 """Get the version from |launcher|."""
55 # Make sure we don't search $PATH when signing the "repo" file in the cwd. 67 # Make sure we don't search $PATH when signing the "repo" file in the cwd.
56 launcher = os.path.join('.', opts.launcher) 68 launcher = os.path.join(".", opts.launcher)
57 cmd = [launcher, '--version'] 69 cmd = [launcher, "--version"]
58 ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE) 70 ret = util.run(opts, cmd, encoding="utf-8", stdout=subprocess.PIPE)
59 m = re.search(r'repo launcher version ([0-9.]+)', ret.stdout) 71 m = re.search(r"repo launcher version ([0-9.]+)", ret.stdout)
60 if not m: 72 if not m:
61 sys.exit(f'{opts.launcher}: unable to detect repo version') 73 sys.exit(f"{opts.launcher}: unable to detect repo version")
62 return m.group(1) 74 return m.group(1)
63 75
64 76
65def postmsg(opts, version): 77def postmsg(opts, version):
66 """Helpful info to show at the end for release manager.""" 78 """Helpful info to show at the end for release manager."""
67 print(f""" 79 print(
80 f"""
68Repo launcher bucket: 81Repo launcher bucket:
69 gs://git-repo-downloads/ 82 gs://git-repo-downloads/
70 83
@@ -81,55 +94,72 @@ NB: If a rollback is necessary, the GS bucket archives old versions, and may be
81 gsutil ls -la gs://git-repo-downloads/repo gs://git-repo-downloads/repo.asc 94 gsutil ls -la gs://git-repo-downloads/repo gs://git-repo-downloads/repo.asc
82 gsutil cp -a public-read gs://git-repo-downloads/repo#<unique id> gs://git-repo-downloads/repo 95 gsutil cp -a public-read gs://git-repo-downloads/repo#<unique id> gs://git-repo-downloads/repo
83 gsutil cp -a public-read gs://git-repo-downloads/repo.asc#<unique id> gs://git-repo-downloads/repo.asc 96 gsutil cp -a public-read gs://git-repo-downloads/repo.asc#<unique id> gs://git-repo-downloads/repo.asc
84""") 97""" # noqa: E501
98 )
85 99
86 100
87def get_parser(): 101def get_parser():
88 """Get a CLI parser.""" 102 """Get a CLI parser."""
89 parser = argparse.ArgumentParser(description=__doc__) 103 parser = argparse.ArgumentParser(description=__doc__)
90 parser.add_argument('-n', '--dry-run', 104 parser.add_argument(
91 dest='dryrun', action='store_true', 105 "-n",
92 help='show everything that would be done') 106 "--dry-run",
93 parser.add_argument('--gpgdir', 107 dest="dryrun",
94 default=os.path.join(util.HOMEDIR, '.gnupg', 'repo'), 108 action="store_true",
95 help='path to dedicated gpg dir with release keys ' 109 help="show everything that would be done",
96 '(default: ~/.gnupg/repo/)') 110 )
97 parser.add_argument('--keyid', dest='keys', default=[], action='append', 111 parser.add_argument(
98 help='alternative signing keys to use') 112 "--gpgdir",
99 parser.add_argument('launcher', 113 default=os.path.join(util.HOMEDIR, ".gnupg", "repo"),
100 default=os.path.join(util.TOPDIR, 'repo'), nargs='?', 114 help="path to dedicated gpg dir with release keys "
101 help='the launcher script to sign') 115 "(default: ~/.gnupg/repo/)",
102 return parser 116 )
117 parser.add_argument(
118 "--keyid",
119 dest="keys",
120 default=[],
121 action="append",
122 help="alternative signing keys to use",
123 )
124 parser.add_argument(
125 "launcher",
126 default=os.path.join(util.TOPDIR, "repo"),
127 nargs="?",
128 help="the launcher script to sign",
129 )
130 return parser
103 131
104 132
105def main(argv): 133def main(argv):
106 """The main func!""" 134 """The main func!"""
107 parser = get_parser() 135 parser = get_parser()
108 opts = parser.parse_args(argv) 136 opts = parser.parse_args(argv)
109 137
110 if not os.path.exists(opts.gpgdir): 138 if not os.path.exists(opts.gpgdir):
111 parser.error(f'--gpgdir does not exist: {opts.gpgdir}') 139 parser.error(f"--gpgdir does not exist: {opts.gpgdir}")
112 if not os.path.exists(opts.launcher): 140 if not os.path.exists(opts.launcher):
113 parser.error(f'launcher does not exist: {opts.launcher}') 141 parser.error(f"launcher does not exist: {opts.launcher}")
114 142
115 opts.launcher = os.path.relpath(opts.launcher) 143 opts.launcher = os.path.relpath(opts.launcher)
116 print(f'Signing "{opts.launcher}" launcher script and saving to ' 144 print(
117 f'"{opts.launcher}.asc"') 145 f'Signing "{opts.launcher}" launcher script and saving to '
118 146 f'"{opts.launcher}.asc"'
119 if opts.keys: 147 )
120 print(f'Using custom keys to sign: {" ".join(opts.keys)}') 148
121 else: 149 if opts.keys:
122 print('Using official Repo release keys to sign') 150 print(f'Using custom keys to sign: {" ".join(opts.keys)}')
123 opts.keys = [util.KEYID_DSA, util.KEYID_RSA, util.KEYID_ECC] 151 else:
124 util.import_release_key(opts) 152 print("Using official Repo release keys to sign")
125 153 opts.keys = [util.KEYID_DSA, util.KEYID_RSA, util.KEYID_ECC]
126 version = get_version(opts) 154 util.import_release_key(opts)
127 sign(opts) 155
128 check(opts) 156 version = get_version(opts)
129 postmsg(opts, version) 157 sign(opts)
130 158 check(opts)
131 return 0 159 postmsg(opts, version)
132 160
133 161 return 0
134if __name__ == '__main__': 162
135 sys.exit(main(sys.argv[1:])) 163
164if __name__ == "__main__":
165 sys.exit(main(sys.argv[1:]))
diff --git a/release/sign-tag.py b/release/sign-tag.py
index 605437c9..fbfe7b26 100755
--- a/release/sign-tag.py
+++ b/release/sign-tag.py
@@ -35,46 +35,61 @@ import util
35KEYID = util.KEYID_DSA 35KEYID = util.KEYID_DSA
36 36
37# Regular expression to validate tag names. 37# Regular expression to validate tag names.
38RE_VALID_TAG = r'^v([0-9]+[.])+[0-9]+$' 38RE_VALID_TAG = r"^v([0-9]+[.])+[0-9]+$"
39 39
40 40
41def sign(opts): 41def sign(opts):
42 """Tag the commit & sign it!""" 42 """Tag the commit & sign it!"""
43 # We use ! at the end of the key so that gpg uses this specific key. 43 # We use ! at the end of the key so that gpg uses this specific key.
44 # Otherwise it uses the key as a lookup into the overall key and uses the 44 # Otherwise it uses the key as a lookup into the overall key and uses the
45 # default signing key. i.e. It will see that KEYID_RSA is a subkey of 45 # default signing key. i.e. It will see that KEYID_RSA is a subkey of
46 # another key, and use the primary key to sign instead of the subkey. 46 # another key, and use the primary key to sign instead of the subkey.
47 cmd = ['git', 'tag', '-s', opts.tag, '-u', f'{opts.key}!', 47 cmd = [
48 '-m', f'repo {opts.tag}', opts.commit] 48 "git",
49 49 "tag",
50 key = 'GNUPGHOME' 50 "-s",
51 print('+', f'export {key}="{opts.gpgdir}"') 51 opts.tag,
52 oldvalue = os.getenv(key) 52 "-u",
53 os.putenv(key, opts.gpgdir) 53 f"{opts.key}!",
54 util.run(opts, cmd) 54 "-m",
55 if oldvalue is None: 55 f"repo {opts.tag}",
56 os.unsetenv(key) 56 opts.commit,
57 else: 57 ]
58 os.putenv(key, oldvalue) 58
59 key = "GNUPGHOME"
60 print("+", f'export {key}="{opts.gpgdir}"')
61 oldvalue = os.getenv(key)
62 os.putenv(key, opts.gpgdir)
63 util.run(opts, cmd)
64 if oldvalue is None:
65 os.unsetenv(key)
66 else:
67 os.putenv(key, oldvalue)
59 68
60 69
61def check(opts): 70def check(opts):
62 """Check the signature.""" 71 """Check the signature."""
63 util.run(opts, ['git', 'tag', '--verify', opts.tag]) 72 util.run(opts, ["git", "tag", "--verify", opts.tag])
64 73
65 74
66def postmsg(opts): 75def postmsg(opts):
67 """Helpful info to show at the end for release manager.""" 76 """Helpful info to show at the end for release manager."""
68 cmd = ['git', 'rev-parse', 'remotes/origin/stable'] 77 cmd = ["git", "rev-parse", "remotes/origin/stable"]
69 ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE) 78 ret = util.run(opts, cmd, encoding="utf-8", stdout=subprocess.PIPE)
70 current_release = ret.stdout.strip() 79 current_release = ret.stdout.strip()
71 80
72 cmd = ['git', 'log', '--format=%h (%aN) %s', '--no-merges', 81 cmd = [
73 f'remotes/origin/stable..{opts.tag}'] 82 "git",
74 ret = util.run(opts, cmd, encoding='utf-8', stdout=subprocess.PIPE) 83 "log",
75 shortlog = ret.stdout.strip() 84 "--format=%h (%aN) %s",
76 85 "--no-merges",
77 print(f""" 86 f"remotes/origin/stable..{opts.tag}",
87 ]
88 ret = util.run(opts, cmd, encoding="utf-8", stdout=subprocess.PIPE)
89 shortlog = ret.stdout.strip()
90
91 print(
92 f"""
78Here's the short log since the last release. 93Here's the short log since the last release.
79{shortlog} 94{shortlog}
80 95
@@ -84,57 +99,69 @@ NB: People will start upgrading to this version immediately.
84 99
85To roll back a release: 100To roll back a release:
86 git push origin --force {current_release}:stable -n 101 git push origin --force {current_release}:stable -n
87""") 102"""
103 )
88 104
89 105
90def get_parser(): 106def get_parser():
91 """Get a CLI parser.""" 107 """Get a CLI parser."""
92 parser = argparse.ArgumentParser( 108 parser = argparse.ArgumentParser(
93 description=__doc__, 109 description=__doc__,
94 formatter_class=argparse.RawDescriptionHelpFormatter) 110 formatter_class=argparse.RawDescriptionHelpFormatter,
95 parser.add_argument('-n', '--dry-run', 111 )
96 dest='dryrun', action='store_true', 112 parser.add_argument(
97 help='show everything that would be done') 113 "-n",
98 parser.add_argument('--gpgdir', 114 "--dry-run",
99 default=os.path.join(util.HOMEDIR, '.gnupg', 'repo'), 115 dest="dryrun",
100 help='path to dedicated gpg dir with release keys ' 116 action="store_true",
101 '(default: ~/.gnupg/repo/)') 117 help="show everything that would be done",
102 parser.add_argument('-f', '--force', action='store_true', 118 )
103 help='force signing of any tag') 119 parser.add_argument(
104 parser.add_argument('--keyid', dest='key', 120 "--gpgdir",
105 help='alternative signing key to use') 121 default=os.path.join(util.HOMEDIR, ".gnupg", "repo"),
106 parser.add_argument('tag', 122 help="path to dedicated gpg dir with release keys "
107 help='the tag to create (e.g. "v2.0")') 123 "(default: ~/.gnupg/repo/)",
108 parser.add_argument('commit', default='HEAD', nargs='?', 124 )
109 help='the commit to tag') 125 parser.add_argument(
110 return parser 126 "-f", "--force", action="store_true", help="force signing of any tag"
127 )
128 parser.add_argument(
129 "--keyid", dest="key", help="alternative signing key to use"
130 )
131 parser.add_argument("tag", help='the tag to create (e.g. "v2.0")')
132 parser.add_argument(
133 "commit", default="HEAD", nargs="?", help="the commit to tag"
134 )
135 return parser
111 136
112 137
113def main(argv): 138def main(argv):
114 """The main func!""" 139 """The main func!"""
115 parser = get_parser() 140 parser = get_parser()
116 opts = parser.parse_args(argv) 141 opts = parser.parse_args(argv)
117 142
118 if not os.path.exists(opts.gpgdir): 143 if not os.path.exists(opts.gpgdir):
119 parser.error(f'--gpgdir does not exist: {opts.gpgdir}') 144 parser.error(f"--gpgdir does not exist: {opts.gpgdir}")
120 145
121 if not opts.force and not re.match(RE_VALID_TAG, opts.tag): 146 if not opts.force and not re.match(RE_VALID_TAG, opts.tag):
122 parser.error(f'tag "{opts.tag}" does not match regex "{RE_VALID_TAG}"; ' 147 parser.error(
123 'use --force to sign anyways') 148 f'tag "{opts.tag}" does not match regex "{RE_VALID_TAG}"; '
149 "use --force to sign anyways"
150 )
124 151
125 if opts.key: 152 if opts.key:
126 print(f'Using custom key to sign: {opts.key}') 153 print(f"Using custom key to sign: {opts.key}")
127 else: 154 else:
128 print('Using official Repo release key to sign') 155 print("Using official Repo release key to sign")
129 opts.key = KEYID 156 opts.key = KEYID
130 util.import_release_key(opts) 157 util.import_release_key(opts)
131 158
132 sign(opts) 159 sign(opts)
133 check(opts) 160 check(opts)
134 postmsg(opts) 161 postmsg(opts)
135 162
136 return 0 163 return 0
137 164
138 165
139if __name__ == '__main__': 166if __name__ == "__main__":
140 sys.exit(main(sys.argv[1:])) 167 sys.exit(main(sys.argv[1:]))
diff --git a/release/update_manpages.py b/release/update_manpages.py
index d1bf8928..cd2acc01 100644
--- a/release/update_manpages.py
+++ b/release/update_manpages.py
@@ -29,91 +29,125 @@ import sys
29import tempfile 29import tempfile
30 30
31TOPDIR = Path(__file__).resolve().parent.parent 31TOPDIR = Path(__file__).resolve().parent.parent
32MANDIR = TOPDIR.joinpath('man') 32MANDIR = TOPDIR.joinpath("man")
33 33
34# Load repo local modules. 34# Load repo local modules.
35sys.path.insert(0, str(TOPDIR)) 35sys.path.insert(0, str(TOPDIR))
36from git_command import RepoSourceVersion 36from git_command import RepoSourceVersion
37import subcmds 37import subcmds
38 38
39
39def worker(cmd, **kwargs): 40def worker(cmd, **kwargs):
40 subprocess.run(cmd, **kwargs) 41 subprocess.run(cmd, **kwargs)
42
41 43
42def main(argv): 44def main(argv):
43 parser = argparse.ArgumentParser(description=__doc__) 45 parser = argparse.ArgumentParser(description=__doc__)
44 opts = parser.parse_args(argv) 46 parser.parse_args(argv)
45 47
46 if not shutil.which('help2man'): 48 if not shutil.which("help2man"):
47 sys.exit('Please install help2man to continue.') 49 sys.exit("Please install help2man to continue.")
48 50
49 # Let repo know we're generating man pages so it can avoid some dynamic 51 # Let repo know we're generating man pages so it can avoid some dynamic
50 # behavior (like probing active number of CPUs). We use a weird name & 52 # behavior (like probing active number of CPUs). We use a weird name &
51 # value to make it less likely for users to set this var themselves. 53 # value to make it less likely for users to set this var themselves.
52 os.environ['_REPO_GENERATE_MANPAGES_'] = ' indeed! ' 54 os.environ["_REPO_GENERATE_MANPAGES_"] = " indeed! "
53 55
54 # "repo branch" is an alias for "repo branches". 56 # "repo branch" is an alias for "repo branches".
55 del subcmds.all_commands['branch'] 57 del subcmds.all_commands["branch"]
56 (MANDIR / 'repo-branch.1').write_text('.so man1/repo-branches.1') 58 (MANDIR / "repo-branch.1").write_text(".so man1/repo-branches.1")
57 59
58 version = RepoSourceVersion() 60 version = RepoSourceVersion()
59 cmdlist = [['help2man', '-N', '-n', f'repo {cmd} - manual page for repo {cmd}', 61 cmdlist = [
60 '-S', f'repo {cmd}', '-m', 'Repo Manual', f'--version-string={version}', 62 [
61 '-o', MANDIR.joinpath(f'repo-{cmd}.1.tmp'), './repo', 63 "help2man",
62 '-h', f'help {cmd}'] for cmd in subcmds.all_commands] 64 "-N",
63 cmdlist.append(['help2man', '-N', '-n', 'repository management tool built on top of git', 65 "-n",
64 '-S', 'repo', '-m', 'Repo Manual', f'--version-string={version}', 66 f"repo {cmd} - manual page for repo {cmd}",
65 '-o', MANDIR.joinpath('repo.1.tmp'), './repo', 67 "-S",
66 '-h', '--help-all']) 68 f"repo {cmd}",
67 69 "-m",
68 with tempfile.TemporaryDirectory() as tempdir: 70 "Repo Manual",
69 tempdir = Path(tempdir) 71 f"--version-string={version}",
70 repo_dir = tempdir / '.repo' 72 "-o",
71 repo_dir.mkdir() 73 MANDIR.joinpath(f"repo-{cmd}.1.tmp"),
72 (repo_dir / 'repo').symlink_to(TOPDIR) 74 "./repo",
73 75 "-h",
74 # Create a repo wrapper using the active Python executable. We can't pass 76 f"help {cmd}",
75 # this directly to help2man as it's too simple, so insert it via shebang. 77 ]
76 data = (TOPDIR / 'repo').read_text(encoding='utf-8') 78 for cmd in subcmds.all_commands
77 tempbin = tempdir / 'repo' 79 ]
78 tempbin.write_text(f'#!{sys.executable}\n' + data, encoding='utf-8') 80 cmdlist.append(
79 tempbin.chmod(0o755) 81 [
80 82 "help2man",
81 # Run all cmd in parallel, and wait for them to finish. 83 "-N",
82 with multiprocessing.Pool() as pool: 84 "-n",
83 pool.map(partial(worker, cwd=tempdir, check=True), cmdlist) 85 "repository management tool built on top of git",
84 86 "-S",
85 for tmp_path in MANDIR.glob('*.1.tmp'): 87 "repo",
86 path = tmp_path.parent / tmp_path.stem 88 "-m",
87 old_data = path.read_text() if path.exists() else '' 89 "Repo Manual",
88 90 f"--version-string={version}",
89 data = tmp_path.read_text() 91 "-o",
90 tmp_path.unlink() 92 MANDIR.joinpath("repo.1.tmp"),
91 93 "./repo",
92 data = replace_regex(data) 94 "-h",
93 95 "--help-all",
94 # If the only thing that changed was the date, don't refresh. This avoids 96 ]
95 # a lot of noise when only one file actually updates. 97 )
96 old_data = re.sub(r'^(\.TH REPO "1" ")([^"]+)', r'\1', old_data, flags=re.M) 98
97 new_data = re.sub(r'^(\.TH REPO "1" ")([^"]+)', r'\1', data, flags=re.M) 99 with tempfile.TemporaryDirectory() as tempdir:
98 if old_data != new_data: 100 tempdir = Path(tempdir)
99 path.write_text(data) 101 repo_dir = tempdir / ".repo"
102 repo_dir.mkdir()
103 (repo_dir / "repo").symlink_to(TOPDIR)
104
105 # Create a repo wrapper using the active Python executable. We can't
106 # pass this directly to help2man as it's too simple, so insert it via
107 # shebang.
108 data = (TOPDIR / "repo").read_text(encoding="utf-8")
109 tempbin = tempdir / "repo"
110 tempbin.write_text(f"#!{sys.executable}\n" + data, encoding="utf-8")
111 tempbin.chmod(0o755)
112
113 # Run all cmd in parallel, and wait for them to finish.
114 with multiprocessing.Pool() as pool:
115 pool.map(partial(worker, cwd=tempdir, check=True), cmdlist)
116
117 for tmp_path in MANDIR.glob("*.1.tmp"):
118 path = tmp_path.parent / tmp_path.stem
119 old_data = path.read_text() if path.exists() else ""
120
121 data = tmp_path.read_text()
122 tmp_path.unlink()
123
124 data = replace_regex(data)
125
126 # If the only thing that changed was the date, don't refresh. This
127 # avoids a lot of noise when only one file actually updates.
128 old_data = re.sub(
129 r'^(\.TH REPO "1" ")([^"]+)', r"\1", old_data, flags=re.M
130 )
131 new_data = re.sub(r'^(\.TH REPO "1" ")([^"]+)', r"\1", data, flags=re.M)
132 if old_data != new_data:
133 path.write_text(data)
100 134
101 135
102def replace_regex(data): 136def replace_regex(data):
103 """Replace semantically null regexes in the data. 137 """Replace semantically null regexes in the data.
104 138
105 Args: 139 Args:
106 data: manpage text. 140 data: manpage text.
107 141
108 Returns: 142 Returns:
109 Updated manpage text. 143 Updated manpage text.
110 """ 144 """
111 regex = ( 145 regex = (
112 (r'(It was generated by help2man) [0-9.]+', r'\g<1>.'), 146 (r"(It was generated by help2man) [0-9.]+", r"\g<1>."),
113 (r'^\033\[[0-9;]*m([^\033]*)\033\[m', r'\g<1>'), 147 (r"^\033\[[0-9;]*m([^\033]*)\033\[m", r"\g<1>"),
114 (r'^\.IP\n(.*:)\n', r'.SS \g<1>\n'), 148 (r"^\.IP\n(.*:)\n", r".SS \g<1>\n"),
115 (r'^\.PP\nDescription', r'.SH DETAILS'), 149 (r"^\.PP\nDescription", r".SH DETAILS"),
116 ) 150 )
117 for pattern, replacement in regex: 151 for pattern, replacement in regex:
118 data = re.sub(pattern, replacement, data, flags=re.M) 152 data = re.sub(pattern, replacement, data, flags=re.M)
119 return data 153 return data
diff --git a/release/util.py b/release/util.py
index 9d0eb1dc..df7a5638 100644
--- a/release/util.py
+++ b/release/util.py
@@ -20,54 +20,60 @@ import subprocess
20import sys 20import sys
21 21
22 22
23assert sys.version_info >= (3, 6), 'This module requires Python 3.6+' 23assert sys.version_info >= (3, 6), "This module requires Python 3.6+"
24 24
25 25
26TOPDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 26TOPDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
27HOMEDIR = os.path.expanduser('~') 27HOMEDIR = os.path.expanduser("~")
28 28
29 29
30# These are the release keys we sign with. 30# These are the release keys we sign with.
31KEYID_DSA = '8BB9AD793E8E6153AF0F9A4416530D5E920F5C65' 31KEYID_DSA = "8BB9AD793E8E6153AF0F9A4416530D5E920F5C65"
32KEYID_RSA = 'A34A13BE8E76BFF46A0C022DA2E75A824AAB9624' 32KEYID_RSA = "A34A13BE8E76BFF46A0C022DA2E75A824AAB9624"
33KEYID_ECC = 'E1F9040D7A3F6DAFAC897CD3D3B95DA243E48A39' 33KEYID_ECC = "E1F9040D7A3F6DAFAC897CD3D3B95DA243E48A39"
34 34
35 35
36def cmdstr(cmd): 36def cmdstr(cmd):
37 """Get a nicely quoted shell command.""" 37 """Get a nicely quoted shell command."""
38 ret = [] 38 ret = []
39 for arg in cmd: 39 for arg in cmd:
40 if not re.match(r'^[a-zA-Z0-9/_.=-]+$', arg): 40 if not re.match(r"^[a-zA-Z0-9/_.=-]+$", arg):
41 arg = f'"{arg}"' 41 arg = f'"{arg}"'
42 ret.append(arg) 42 ret.append(arg)
43 return ' '.join(ret) 43 return " ".join(ret)
44 44
45 45
46def run(opts, cmd, check=True, **kwargs): 46def run(opts, cmd, check=True, **kwargs):
47 """Helper around subprocess.run to include logging.""" 47 """Helper around subprocess.run to include logging."""
48 print('+', cmdstr(cmd)) 48 print("+", cmdstr(cmd))
49 if opts.dryrun: 49 if opts.dryrun:
50 cmd = ['true', '--'] + cmd 50 cmd = ["true", "--"] + cmd
51 try: 51 try:
52 return subprocess.run(cmd, check=check, **kwargs) 52 return subprocess.run(cmd, check=check, **kwargs)
53 except subprocess.CalledProcessError as e: 53 except subprocess.CalledProcessError as e:
54 print(f'aborting: {e}', file=sys.stderr) 54 print(f"aborting: {e}", file=sys.stderr)
55 sys.exit(1) 55 sys.exit(1)
56 56
57 57
58def import_release_key(opts): 58def import_release_key(opts):
59 """Import the public key of the official release repo signing key.""" 59 """Import the public key of the official release repo signing key."""
60 # Extract the key from our repo launcher. 60 # Extract the key from our repo launcher.
61 launcher = getattr(opts, 'launcher', os.path.join(TOPDIR, 'repo')) 61 launcher = getattr(opts, "launcher", os.path.join(TOPDIR, "repo"))
62 print(f'Importing keys from "{launcher}" launcher script') 62 print(f'Importing keys from "{launcher}" launcher script')
63 with open(launcher, encoding='utf-8') as fp: 63 with open(launcher, encoding="utf-8") as fp:
64 data = fp.read() 64 data = fp.read()
65 65
66 keys = re.findall( 66 keys = re.findall(
67 r'\n-----BEGIN PGP PUBLIC KEY BLOCK-----\n[^-]*' 67 r"\n-----BEGIN PGP PUBLIC KEY BLOCK-----\n[^-]*"
68 r'\n-----END PGP PUBLIC KEY BLOCK-----\n', data, flags=re.M) 68 r"\n-----END PGP PUBLIC KEY BLOCK-----\n",
69 run(opts, ['gpg', '--import'], input='\n'.join(keys).encode('utf-8')) 69 data,
70 70 flags=re.M,
71 print('Marking keys as fully trusted') 71 )
72 run(opts, ['gpg', '--import-ownertrust'], 72 run(opts, ["gpg", "--import"], input="\n".join(keys).encode("utf-8"))
73 input=f'{KEYID_DSA}:6:\n'.encode('utf-8')) 73
74 print("Marking keys as fully trusted")
75 run(
76 opts,
77 ["gpg", "--import-ownertrust"],
78 input=f"{KEYID_DSA}:6:\n".encode("utf-8"),
79 )
diff --git a/repo b/repo
index ce3df054..c32e52da 100755
--- a/repo
+++ b/repo
@@ -506,10 +506,10 @@ def gitc_parse_clientdir(gitc_fs_path):
506 """Parse a path in the GITC FS and return its client name. 506 """Parse a path in the GITC FS and return its client name.
507 507
508 Args: 508 Args:
509 gitc_fs_path: A subdirectory path within the GITC_FS_ROOT_DIR. 509 gitc_fs_path: A subdirectory path within the GITC_FS_ROOT_DIR.
510 510
511 Returns: 511 Returns:
512 The GITC client name. 512 The GITC client name.
513 """ 513 """
514 if gitc_fs_path == GITC_FS_ROOT_DIR: 514 if gitc_fs_path == GITC_FS_ROOT_DIR:
515 return None 515 return None
@@ -942,14 +942,14 @@ def resolve_repo_rev(cwd, committish):
942 * xxx: Branch or tag or commit. 942 * xxx: Branch or tag or commit.
943 943
944 Args: 944 Args:
945 cwd: The git checkout to run in. 945 cwd: The git checkout to run in.
946 committish: The REPO_REV argument to resolve. 946 committish: The REPO_REV argument to resolve.
947 947
948 Returns: 948 Returns:
949 A tuple of (remote ref, commit) as makes sense for the committish. 949 A tuple of (remote ref, commit) as makes sense for the committish.
950 For branches, this will look like ('refs/heads/stable', <revision>). 950 For branches, this will look like ('refs/heads/stable', <revision>).
951 For tags, this will look like ('refs/tags/v1.0', <revision>). 951 For tags, this will look like ('refs/tags/v1.0', <revision>).
952 For commits, this will be (<revision>, <revision>). 952 For commits, this will be (<revision>, <revision>).
953 """ 953 """
954 def resolve(committish): 954 def resolve(committish):
955 ret = run_git('rev-parse', '--verify', '%s^{commit}' % (committish,), 955 ret = run_git('rev-parse', '--verify', '%s^{commit}' % (committish,),
@@ -1104,7 +1104,7 @@ class Requirements(object):
1104 """Initialize. 1104 """Initialize.
1105 1105
1106 Args: 1106 Args:
1107 requirements: A dictionary of settings. 1107 requirements: A dictionary of settings.
1108 """ 1108 """
1109 self.requirements = requirements 1109 self.requirements = requirements
1110 1110
diff --git a/repo_trace.py b/repo_trace.py
index 1ba86c79..49462174 100644
--- a/repo_trace.py
+++ b/repo_trace.py
@@ -29,138 +29,142 @@ from contextlib import ContextDecorator
29import platform_utils 29import platform_utils
30 30
31# Env var to implicitly turn on tracing. 31# Env var to implicitly turn on tracing.
32REPO_TRACE = 'REPO_TRACE' 32REPO_TRACE = "REPO_TRACE"
33 33
34# Temporarily set tracing to always on unless user explicitly sets to 0. 34 # Temporarily set tracing to always on unless user explicitly sets to 0.
35_TRACE = os.environ.get(REPO_TRACE) != '0' 35_TRACE = os.environ.get(REPO_TRACE) != "0"
36_TRACE_TO_STDERR = False 36_TRACE_TO_STDERR = False
37_TRACE_FILE = None 37_TRACE_FILE = None
38_TRACE_FILE_NAME = 'TRACE_FILE' 38_TRACE_FILE_NAME = "TRACE_FILE"
39_MAX_SIZE = 70 # in MiB 39_MAX_SIZE = 70 # in MiB
40_NEW_COMMAND_SEP = '+++++++++++++++NEW COMMAND+++++++++++++++++++' 40_NEW_COMMAND_SEP = "+++++++++++++++NEW COMMAND+++++++++++++++++++"
41 41
42 42
43def IsTraceToStderr(): 43def IsTraceToStderr():
44 """Whether traces are written to stderr.""" 44 """Whether traces are written to stderr."""
45 return _TRACE_TO_STDERR 45 return _TRACE_TO_STDERR
46 46
47 47
48def IsTrace(): 48def IsTrace():
49 """Whether tracing is enabled.""" 49 """Whether tracing is enabled."""
50 return _TRACE 50 return _TRACE
51 51
52 52
53def SetTraceToStderr(): 53def SetTraceToStderr():
54 """Enables tracing logging to stderr.""" 54 """Enables tracing logging to stderr."""
55 global _TRACE_TO_STDERR 55 global _TRACE_TO_STDERR
56 _TRACE_TO_STDERR = True 56 _TRACE_TO_STDERR = True
57 57
58 58
59def SetTrace(): 59def SetTrace():
60 """Enables tracing.""" 60 """Enables tracing."""
61 global _TRACE 61 global _TRACE
62 _TRACE = True 62 _TRACE = True
63 63
64 64
65def _SetTraceFile(quiet): 65def _SetTraceFile(quiet):
66 """Sets the trace file location.""" 66 """Sets the trace file location."""
67 global _TRACE_FILE 67 global _TRACE_FILE
68 _TRACE_FILE = _GetTraceFile(quiet) 68 _TRACE_FILE = _GetTraceFile(quiet)
69 69
70 70
71class Trace(ContextDecorator): 71class Trace(ContextDecorator):
72 """Used to capture and save git traces.""" 72 """Used to capture and save git traces."""
73 73
74 def _time(self): 74 def _time(self):
75 """Generate nanoseconds of time in a py3.6 safe way""" 75 """Generate nanoseconds of time in a py3.6 safe way"""
76 return int(time.time() * 1e+9) 76 return int(time.time() * 1e9)
77 77
78 def __init__(self, fmt, *args, first_trace=False, quiet=True): 78 def __init__(self, fmt, *args, first_trace=False, quiet=True):
79 """Initialize the object. 79 """Initialize the object.
80 80
81 Args: 81 Args:
82 fmt: The format string for the trace. 82 fmt: The format string for the trace.
83 *args: Arguments to pass to formatting. 83 *args: Arguments to pass to formatting.
84 first_trace: Whether this is the first trace of a `repo` invocation. 84 first_trace: Whether this is the first trace of a `repo` invocation.
85 quiet: Whether to suppress notification of trace file location. 85 quiet: Whether to suppress notification of trace file location.
86 """ 86 """
87 if not IsTrace(): 87 if not IsTrace():
88 return 88 return
89 self._trace_msg = fmt % args 89 self._trace_msg = fmt % args
90 90
91 if not _TRACE_FILE: 91 if not _TRACE_FILE:
92 _SetTraceFile(quiet) 92 _SetTraceFile(quiet)
93 93
94 if first_trace: 94 if first_trace:
95 _ClearOldTraces() 95 _ClearOldTraces()
96 self._trace_msg = f'{_NEW_COMMAND_SEP} {self._trace_msg}' 96 self._trace_msg = f"{_NEW_COMMAND_SEP} {self._trace_msg}"
97 97
98 def __enter__(self): 98 def __enter__(self):
99 if not IsTrace(): 99 if not IsTrace():
100 return self 100 return self
101 101
102 print_msg = f'PID: {os.getpid()} START: {self._time()} :{self._trace_msg}\n' 102 print_msg = (
103 f"PID: {os.getpid()} START: {self._time()} :{self._trace_msg}\n"
104 )
103 105
104 with open(_TRACE_FILE, 'a') as f: 106 with open(_TRACE_FILE, "a") as f:
105 print(print_msg, file=f) 107 print(print_msg, file=f)
106 108
107 if _TRACE_TO_STDERR: 109 if _TRACE_TO_STDERR:
108 print(print_msg, file=sys.stderr) 110 print(print_msg, file=sys.stderr)
109 111
110 return self 112 return self
111 113
112 def __exit__(self, *exc): 114 def __exit__(self, *exc):
113 if not IsTrace(): 115 if not IsTrace():
114 return False 116 return False
115 117
116 print_msg = f'PID: {os.getpid()} END: {self._time()} :{self._trace_msg}\n' 118 print_msg = (
119 f"PID: {os.getpid()} END: {self._time()} :{self._trace_msg}\n"
120 )
117 121
118 with open(_TRACE_FILE, 'a') as f: 122 with open(_TRACE_FILE, "a") as f:
119 print(print_msg, file=f) 123 print(print_msg, file=f)
120 124
121 if _TRACE_TO_STDERR: 125 if _TRACE_TO_STDERR:
122 print(print_msg, file=sys.stderr) 126 print(print_msg, file=sys.stderr)
123 127
124 return False 128 return False
125 129
126 130
127def _GetTraceFile(quiet): 131def _GetTraceFile(quiet):
128 """Get the trace file or create one.""" 132 """Get the trace file or create one."""
129 # TODO: refactor to pass repodir to Trace. 133 # TODO: refactor to pass repodir to Trace.
130 repo_dir = os.path.dirname(os.path.dirname(__file__)) 134 repo_dir = os.path.dirname(os.path.dirname(__file__))
131 trace_file = os.path.join(repo_dir, _TRACE_FILE_NAME) 135 trace_file = os.path.join(repo_dir, _TRACE_FILE_NAME)
132 if not quiet: 136 if not quiet:
133 print(f'Trace outputs in {trace_file}', file=sys.stderr) 137 print(f"Trace outputs in {trace_file}", file=sys.stderr)
134 return trace_file 138 return trace_file
135 139
136 140
137def _ClearOldTraces(): 141def _ClearOldTraces():
138 """Clear the oldest commands if trace file is too big.""" 142 """Clear the oldest commands if trace file is too big."""
139 try: 143 try:
140 with open(_TRACE_FILE, 'r', errors='ignore') as f: 144 with open(_TRACE_FILE, "r", errors="ignore") as f:
141 if os.path.getsize(f.name) / (1024 * 1024) <= _MAX_SIZE: 145 if os.path.getsize(f.name) / (1024 * 1024) <= _MAX_SIZE:
146 return
147 trace_lines = f.readlines()
148 except FileNotFoundError:
142 return 149 return
143 trace_lines = f.readlines() 150
144 except FileNotFoundError: 151 while sum(len(x) for x in trace_lines) / (1024 * 1024) > _MAX_SIZE:
145 return 152 for i, line in enumerate(trace_lines):
146 153 if "END:" in line and _NEW_COMMAND_SEP in line:
147 while sum(len(x) for x in trace_lines) / (1024 * 1024) > _MAX_SIZE: 154 trace_lines = trace_lines[i + 1 :]
148 for i, line in enumerate(trace_lines): 155 break
149 if 'END:' in line and _NEW_COMMAND_SEP in line: 156 else:
150 trace_lines = trace_lines[i + 1:] 157 # The last chunk is bigger than _MAX_SIZE, so just throw everything
151 break 158 # away.
152 else: 159 trace_lines = []
153 # The last chunk is bigger than _MAX_SIZE, so just throw everything away. 160
154 trace_lines = [] 161 while trace_lines and trace_lines[-1] == "\n":
155 162 trace_lines = trace_lines[:-1]
156 while trace_lines and trace_lines[-1] == '\n': 163 # Write to a temporary file with a unique name in the same filesystem
157 trace_lines = trace_lines[:-1] 164 # before replacing the original trace file.
158 # Write to a temporary file with a unique name in the same filesystem 165 temp_dir, temp_prefix = os.path.split(_TRACE_FILE)
159 # before replacing the original trace file. 166 with tempfile.NamedTemporaryFile(
160 temp_dir, temp_prefix = os.path.split(_TRACE_FILE) 167 "w", dir=temp_dir, prefix=temp_prefix, delete=False
161 with tempfile.NamedTemporaryFile('w', 168 ) as f:
162 dir=temp_dir, 169 f.writelines(trace_lines)
163 prefix=temp_prefix, 170 platform_utils.rename(f.name, _TRACE_FILE)
164 delete=False) as f:
165 f.writelines(trace_lines)
166 platform_utils.rename(f.name, _TRACE_FILE)
diff --git a/run_tests b/run_tests
index 0ea098a6..e76f9d8c 100755
--- a/run_tests
+++ b/run_tests
@@ -13,10 +13,28 @@
13# See the License for the specific language governing permissions and 13# See the License for the specific language governing permissions and
14# limitations under the License. 14# limitations under the License.
15 15
16"""Wrapper to run pytest with the right settings.""" 16"""Wrapper to run black and pytest with the right settings."""
17 17
18import os
19import subprocess
18import sys 20import sys
19import pytest 21import pytest
20 22
21if __name__ == '__main__': 23
22 sys.exit(pytest.main(sys.argv[1:])) 24def run_black():
25 """Returns the exit code of running `black --check`."""
26 dirpath = os.path.dirname(os.path.realpath(__file__))
27 return subprocess.run(
28 [sys.executable, "-m", "black", "--check", dirpath], check=False
29 ).returncode
30
31
32def main(argv):
33 """The main entry."""
34 black_ret = 0 if argv else run_black()
35 pytest_ret = pytest.main(argv)
36 return 0 if not black_ret and not pytest_ret else 1
37
38
39if __name__ == "__main__":
40 sys.exit(main(sys.argv[1:]))
diff --git a/run_tests.vpython3 b/run_tests.vpython3
index d0e821dd..0c790bca 100644
--- a/run_tests.vpython3
+++ b/run_tests.vpython3
@@ -26,8 +26,8 @@ wheel: <
26 26
27# Required by pytest==6.2.2 27# Required by pytest==6.2.2
28wheel: < 28wheel: <
29 name: "infra/python/wheels/packaging-py2_py3" 29 name: "infra/python/wheels/packaging-py3"
30 version: "version:16.8" 30 version: "version:23.0"
31> 31>
32 32
33# Required by pytest==6.2.2 33# Required by pytest==6.2.2
@@ -59,3 +59,44 @@ wheel: <
59 name: "infra/python/wheels/six-py2_py3" 59 name: "infra/python/wheels/six-py2_py3"
60 version: "version:1.16.0" 60 version: "version:1.16.0"
61> 61>
62
63wheel: <
64 name: "infra/python/wheels/black-py3"
65 version: "version:23.1.0"
66>
67
68# Required by black==23.1.0
69wheel: <
70 name: "infra/python/wheels/mypy-extensions-py3"
71 version: "version:0.4.3"
72>
73
74# Required by black==23.1.0
75wheel: <
76 name: "infra/python/wheels/tomli-py3"
77 version: "version:2.0.1"
78>
79
80# Required by black==23.1.0
81wheel: <
82 name: "infra/python/wheels/platformdirs-py3"
83 version: "version:2.5.2"
84>
85
86# Required by black==23.1.0
87wheel: <
88 name: "infra/python/wheels/pathspec-py3"
89 version: "version:0.9.0"
90>
91
92# Required by black==23.1.0
93wheel: <
94 name: "infra/python/wheels/typing-extensions-py3"
95 version: "version:4.3.0"
96>
97
98# Required by black==23.1.0
99wheel: <
100 name: "infra/python/wheels/click-py3"
101 version: "version:8.0.3"
102>
diff --git a/setup.py b/setup.py
index 848b3f60..f50eb473 100755
--- a/setup.py
+++ b/setup.py
@@ -23,39 +23,39 @@ TOPDIR = os.path.dirname(os.path.abspath(__file__))
23 23
24 24
25# Rip out the first intro paragraph. 25# Rip out the first intro paragraph.
26with open(os.path.join(TOPDIR, 'README.md')) as fp: 26with open(os.path.join(TOPDIR, "README.md")) as fp:
27 lines = fp.read().splitlines()[2:] 27 lines = fp.read().splitlines()[2:]
28 end = lines.index('') 28 end = lines.index("")
29 long_description = ' '.join(lines[0:end]) 29 long_description = " ".join(lines[0:end])
30 30
31 31
32# https://packaging.python.org/tutorials/packaging-projects/ 32# https://packaging.python.org/tutorials/packaging-projects/
33setuptools.setup( 33setuptools.setup(
34 name='repo', 34 name="repo",
35 version='2', 35 version="2",
36 maintainer='Various', 36 maintainer="Various",
37 maintainer_email='repo-discuss@googlegroups.com', 37 maintainer_email="repo-discuss@googlegroups.com",
38 description='Repo helps manage many Git repositories', 38 description="Repo helps manage many Git repositories",
39 long_description=long_description, 39 long_description=long_description,
40 long_description_content_type='text/plain', 40 long_description_content_type="text/plain",
41 url='https://gerrit.googlesource.com/git-repo/', 41 url="https://gerrit.googlesource.com/git-repo/",
42 project_urls={ 42 project_urls={
43 'Bug Tracker': 'https://bugs.chromium.org/p/gerrit/issues/list?q=component:Applications%3Erepo', 43 "Bug Tracker": "https://bugs.chromium.org/p/gerrit/issues/list?q=component:Applications%3Erepo", # noqa: E501
44 }, 44 },
45 # https://pypi.org/classifiers/ 45 # https://pypi.org/classifiers/
46 classifiers=[ 46 classifiers=[
47 'Development Status :: 6 - Mature', 47 "Development Status :: 6 - Mature",
48 'Environment :: Console', 48 "Environment :: Console",
49 'Intended Audience :: Developers', 49 "Intended Audience :: Developers",
50 'License :: OSI Approved :: Apache Software License', 50 "License :: OSI Approved :: Apache Software License",
51 'Natural Language :: English', 51 "Natural Language :: English",
52 'Operating System :: MacOS :: MacOS X', 52 "Operating System :: MacOS :: MacOS X",
53 'Operating System :: Microsoft :: Windows :: Windows 10', 53 "Operating System :: Microsoft :: Windows :: Windows 10",
54 'Operating System :: POSIX :: Linux', 54 "Operating System :: POSIX :: Linux",
55 'Programming Language :: Python :: 3', 55 "Programming Language :: Python :: 3",
56 'Programming Language :: Python :: 3 :: Only', 56 "Programming Language :: Python :: 3 :: Only",
57 'Topic :: Software Development :: Version Control :: Git', 57 "Topic :: Software Development :: Version Control :: Git",
58 ], 58 ],
59 python_requires='>=3.6', 59 python_requires=">=3.6",
60 packages=['subcmds'], 60 packages=["subcmds"],
61) 61)
diff --git a/ssh.py b/ssh.py
index 004fdbad..1d7ebe32 100644
--- a/ssh.py
+++ b/ssh.py
@@ -28,254 +28,264 @@ import platform_utils
28from repo_trace import Trace 28from repo_trace import Trace
29 29
30 30
31PROXY_PATH = os.path.join(os.path.dirname(__file__), 'git_ssh') 31PROXY_PATH = os.path.join(os.path.dirname(__file__), "git_ssh")
32 32
33 33
34def _run_ssh_version(): 34def _run_ssh_version():
35 """run ssh -V to display the version number""" 35 """run ssh -V to display the version number"""
36 return subprocess.check_output(['ssh', '-V'], stderr=subprocess.STDOUT).decode() 36 return subprocess.check_output(
37 ["ssh", "-V"], stderr=subprocess.STDOUT
38 ).decode()
37 39
38 40
39def _parse_ssh_version(ver_str=None): 41def _parse_ssh_version(ver_str=None):
40 """parse a ssh version string into a tuple""" 42 """parse a ssh version string into a tuple"""
41 if ver_str is None: 43 if ver_str is None:
42 ver_str = _run_ssh_version() 44 ver_str = _run_ssh_version()
43 m = re.match(r'^OpenSSH_([0-9.]+)(p[0-9]+)?\s', ver_str) 45 m = re.match(r"^OpenSSH_([0-9.]+)(p[0-9]+)?\s", ver_str)
44 if m: 46 if m:
45 return tuple(int(x) for x in m.group(1).split('.')) 47 return tuple(int(x) for x in m.group(1).split("."))
46 else: 48 else:
47 return () 49 return ()
48 50
49 51
50@functools.lru_cache(maxsize=None) 52@functools.lru_cache(maxsize=None)
51def version(): 53def version():
52 """return ssh version as a tuple""" 54 """return ssh version as a tuple"""
53 try:
54 return _parse_ssh_version()
55 except FileNotFoundError:
56 print('fatal: ssh not installed', file=sys.stderr)
57 sys.exit(1)
58 except subprocess.CalledProcessError:
59 print('fatal: unable to detect ssh version', file=sys.stderr)
60 sys.exit(1)
61
62
63URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
64URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')
65
66
67class ProxyManager:
68 """Manage various ssh clients & masters that we spawn.
69
70 This will take care of sharing state between multiprocessing children, and
71 make sure that if we crash, we don't leak any of the ssh sessions.
72
73 The code should work with a single-process scenario too, and not add too much
74 overhead due to the manager.
75 """
76
77 # Path to the ssh program to run which will pass our master settings along.
78 # Set here more as a convenience API.
79 proxy = PROXY_PATH
80
81 def __init__(self, manager):
82 # Protect access to the list of active masters.
83 self._lock = multiprocessing.Lock()
84 # List of active masters (pid). These will be spawned on demand, and we are
85 # responsible for shutting them all down at the end.
86 self._masters = manager.list()
87 # Set of active masters indexed by "host:port" information.
88 # The value isn't used, but multiprocessing doesn't provide a set class.
89 self._master_keys = manager.dict()
90 # Whether ssh masters are known to be broken, so we give up entirely.
91 self._master_broken = manager.Value('b', False)
92 # List of active ssh sesssions. Clients will be added & removed as
93 # connections finish, so this list is just for safety & cleanup if we crash.
94 self._clients = manager.list()
95 # Path to directory for holding master sockets.
96 self._sock_path = None
97
98 def __enter__(self):
99 """Enter a new context."""
100 return self
101
102 def __exit__(self, exc_type, exc_value, traceback):
103 """Exit a context & clean up all resources."""
104 self.close()
105
106 def add_client(self, proc):
107 """Track a new ssh session."""
108 self._clients.append(proc.pid)
109
110 def remove_client(self, proc):
111 """Remove a completed ssh session."""
112 try: 55 try:
113 self._clients.remove(proc.pid) 56 return _parse_ssh_version()
114 except ValueError: 57 except FileNotFoundError:
115 pass 58 print("fatal: ssh not installed", file=sys.stderr)
116 59 sys.exit(1)
117 def add_master(self, proc): 60 except subprocess.CalledProcessError:
118 """Track a new master connection.""" 61 print("fatal: unable to detect ssh version", file=sys.stderr)
119 self._masters.append(proc.pid) 62 sys.exit(1)
120
121 def _terminate(self, procs):
122 """Kill all |procs|."""
123 for pid in procs:
124 try:
125 os.kill(pid, signal.SIGTERM)
126 os.waitpid(pid, 0)
127 except OSError:
128 pass
129
130 # The multiprocessing.list() API doesn't provide many standard list()
131 # methods, so we have to manually clear the list.
132 while True:
133 try:
134 procs.pop(0)
135 except:
136 break
137
138 def close(self):
139 """Close this active ssh session.
140
141 Kill all ssh clients & masters we created, and nuke the socket dir.
142 """
143 self._terminate(self._clients)
144 self._terminate(self._masters)
145
146 d = self.sock(create=False)
147 if d:
148 try:
149 platform_utils.rmdir(os.path.dirname(d))
150 except OSError:
151 pass
152
153 def _open_unlocked(self, host, port=None):
154 """Make sure a ssh master session exists for |host| & |port|.
155 63
156 If one doesn't exist already, we'll create it.
157 64
158 We won't grab any locks, so the caller has to do that. This helps keep the 65URI_SCP = re.compile(r"^([^@:]*@?[^:/]{1,}):")
159 business logic of actually creating the master separate from grabbing locks. 66URI_ALL = re.compile(r"^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/")
160 """
161 # Check to see whether we already think that the master is running; if we
162 # think it's already running, return right away.
163 if port is not None:
164 key = '%s:%s' % (host, port)
165 else:
166 key = host
167
168 if key in self._master_keys:
169 return True
170
171 if self._master_broken.value or 'GIT_SSH' in os.environ:
172 # Failed earlier, so don't retry.
173 return False
174
175 # We will make two calls to ssh; this is the common part of both calls.
176 command_base = ['ssh', '-o', 'ControlPath %s' % self.sock(), host]
177 if port is not None:
178 command_base[1:1] = ['-p', str(port)]
179
180 # Since the key wasn't in _master_keys, we think that master isn't running.
181 # ...but before actually starting a master, we'll double-check. This can
182 # be important because we can't tell that that 'git@myhost.com' is the same
183 # as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file.
184 check_command = command_base + ['-O', 'check']
185 with Trace('Call to ssh (check call): %s', ' '.join(check_command)):
186 try:
187 check_process = subprocess.Popen(check_command,
188 stdout=subprocess.PIPE,
189 stderr=subprocess.PIPE)
190 check_process.communicate() # read output, but ignore it...
191 isnt_running = check_process.wait()
192
193 if not isnt_running:
194 # Our double-check found that the master _was_ infact running. Add to
195 # the list of keys.
196 self._master_keys[key] = True
197 return True
198 except Exception:
199 # Ignore excpetions. We we will fall back to the normal command and
200 # print to the log there.
201 pass
202
203 command = command_base[:1] + ['-M', '-N'] + command_base[1:]
204 p = None
205 try:
206 with Trace('Call to ssh: %s', ' '.join(command)):
207 p = subprocess.Popen(command)
208 except Exception as e:
209 self._master_broken.value = True
210 print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
211 % (host, port, str(e)), file=sys.stderr)
212 return False
213 67
214 time.sleep(1)
215 ssh_died = (p.poll() is not None)
216 if ssh_died:
217 return False
218 68
219 self.add_master(p) 69class ProxyManager:
220 self._master_keys[key] = True 70 """Manage various ssh clients & masters that we spawn.
221 return True
222
223 def _open(self, host, port=None):
224 """Make sure a ssh master session exists for |host| & |port|.
225 71
226 If one doesn't exist already, we'll create it. 72 This will take care of sharing state between multiprocessing children, and
73 make sure that if we crash, we don't leak any of the ssh sessions.
227 74
228 This will obtain any necessary locks to avoid inter-process races. 75 The code should work with a single-process scenario too, and not add too
76 much overhead due to the manager.
229 """ 77 """
230 # Bail before grabbing the lock if we already know that we aren't going to
231 # try creating new masters below.
232 if sys.platform in ('win32', 'cygwin'):
233 return False
234
235 # Acquire the lock. This is needed to prevent opening multiple masters for
236 # the same host when we're running "repo sync -jN" (for N > 1) _and_ the
237 # manifest <remote fetch="ssh://xyz"> specifies a different host from the
238 # one that was passed to repo init.
239 with self._lock:
240 return self._open_unlocked(host, port)
241
242 def preconnect(self, url):
243 """If |uri| will create a ssh connection, setup the ssh master for it."""
244 m = URI_ALL.match(url)
245 if m:
246 scheme = m.group(1)
247 host = m.group(2)
248 if ':' in host:
249 host, port = host.split(':')
250 else:
251 port = None
252 if scheme in ('ssh', 'git+ssh', 'ssh+git'):
253 return self._open(host, port)
254 return False
255
256 m = URI_SCP.match(url)
257 if m:
258 host = m.group(1)
259 return self._open(host)
260
261 return False
262 78
263 def sock(self, create=True): 79 # Path to the ssh program to run which will pass our master settings along.
264 """Return the path to the ssh socket dir. 80 # Set here more as a convenience API.
265 81 proxy = PROXY_PATH
266 This has all the master sockets so clients can talk to them. 82
267 """ 83 def __init__(self, manager):
268 if self._sock_path is None: 84 # Protect access to the list of active masters.
269 if not create: 85 self._lock = multiprocessing.Lock()
270 return None 86 # List of active masters (pid). These will be spawned on demand, and we
271 tmp_dir = '/tmp' 87 # are responsible for shutting them all down at the end.
272 if not os.path.exists(tmp_dir): 88 self._masters = manager.list()
273 tmp_dir = tempfile.gettempdir() 89 # Set of active masters indexed by "host:port" information.
274 if version() < (6, 7): 90 # The value isn't used, but multiprocessing doesn't provide a set class.
275 tokens = '%r@%h:%p' 91 self._master_keys = manager.dict()
276 else: 92 # Whether ssh masters are known to be broken, so we give up entirely.
277 tokens = '%C' # hash of %l%h%p%r 93 self._master_broken = manager.Value("b", False)
278 self._sock_path = os.path.join( 94 # List of active ssh sesssions. Clients will be added & removed as
279 tempfile.mkdtemp('', 'ssh-', tmp_dir), 95 # connections finish, so this list is just for safety & cleanup if we
280 'master-' + tokens) 96 # crash.
281 return self._sock_path 97 self._clients = manager.list()
98 # Path to directory for holding master sockets.
99 self._sock_path = None
100
101 def __enter__(self):
102 """Enter a new context."""
103 return self
104
105 def __exit__(self, exc_type, exc_value, traceback):
106 """Exit a context & clean up all resources."""
107 self.close()
108
109 def add_client(self, proc):
110 """Track a new ssh session."""
111 self._clients.append(proc.pid)
112
113 def remove_client(self, proc):
114 """Remove a completed ssh session."""
115 try:
116 self._clients.remove(proc.pid)
117 except ValueError:
118 pass
119
120 def add_master(self, proc):
121 """Track a new master connection."""
122 self._masters.append(proc.pid)
123
124 def _terminate(self, procs):
125 """Kill all |procs|."""
126 for pid in procs:
127 try:
128 os.kill(pid, signal.SIGTERM)
129 os.waitpid(pid, 0)
130 except OSError:
131 pass
132
133 # The multiprocessing.list() API doesn't provide many standard list()
134 # methods, so we have to manually clear the list.
135 while True:
136 try:
137 procs.pop(0)
138 except: # noqa: E722
139 break
140
141 def close(self):
142 """Close this active ssh session.
143
144 Kill all ssh clients & masters we created, and nuke the socket dir.
145 """
146 self._terminate(self._clients)
147 self._terminate(self._masters)
148
149 d = self.sock(create=False)
150 if d:
151 try:
152 platform_utils.rmdir(os.path.dirname(d))
153 except OSError:
154 pass
155
156 def _open_unlocked(self, host, port=None):
157 """Make sure a ssh master session exists for |host| & |port|.
158
159 If one doesn't exist already, we'll create it.
160
161 We won't grab any locks, so the caller has to do that. This helps keep
162 the business logic of actually creating the master separate from
163 grabbing locks.
164 """
165 # Check to see whether we already think that the master is running; if
166 # we think it's already running, return right away.
167 if port is not None:
168 key = "%s:%s" % (host, port)
169 else:
170 key = host
171
172 if key in self._master_keys:
173 return True
174
175 if self._master_broken.value or "GIT_SSH" in os.environ:
176 # Failed earlier, so don't retry.
177 return False
178
179 # We will make two calls to ssh; this is the common part of both calls.
180 command_base = ["ssh", "-o", "ControlPath %s" % self.sock(), host]
181 if port is not None:
182 command_base[1:1] = ["-p", str(port)]
183
184 # Since the key wasn't in _master_keys, we think that master isn't
185 # running... but before actually starting a master, we'll double-check.
186 # This can be important because we can't tell that that 'git@myhost.com'
187 # is the same as 'myhost.com' where "User git" is setup in the user's
188 # ~/.ssh/config file.
189 check_command = command_base + ["-O", "check"]
190 with Trace("Call to ssh (check call): %s", " ".join(check_command)):
191 try:
192 check_process = subprocess.Popen(
193 check_command,
194 stdout=subprocess.PIPE,
195 stderr=subprocess.PIPE,
196 )
197 check_process.communicate() # read output, but ignore it...
198 isnt_running = check_process.wait()
199
200 if not isnt_running:
201 # Our double-check found that the master _was_ infact
202 # running. Add to the list of keys.
203 self._master_keys[key] = True
204 return True
205 except Exception:
206 # Ignore excpetions. We we will fall back to the normal command
207 # and print to the log there.
208 pass
209
210 command = command_base[:1] + ["-M", "-N"] + command_base[1:]
211 p = None
212 try:
213 with Trace("Call to ssh: %s", " ".join(command)):
214 p = subprocess.Popen(command)
215 except Exception as e:
216 self._master_broken.value = True
217 print(
218 "\nwarn: cannot enable ssh control master for %s:%s\n%s"
219 % (host, port, str(e)),
220 file=sys.stderr,
221 )
222 return False
223
224 time.sleep(1)
225 ssh_died = p.poll() is not None
226 if ssh_died:
227 return False
228
229 self.add_master(p)
230 self._master_keys[key] = True
231 return True
232
233 def _open(self, host, port=None):
234 """Make sure a ssh master session exists for |host| & |port|.
235
236 If one doesn't exist already, we'll create it.
237
238 This will obtain any necessary locks to avoid inter-process races.
239 """
240 # Bail before grabbing the lock if we already know that we aren't going
241 # to try creating new masters below.
242 if sys.platform in ("win32", "cygwin"):
243 return False
244
245 # Acquire the lock. This is needed to prevent opening multiple masters
246 # for the same host when we're running "repo sync -jN" (for N > 1) _and_
247 # the manifest <remote fetch="ssh://xyz"> specifies a different host
248 # from the one that was passed to repo init.
249 with self._lock:
250 return self._open_unlocked(host, port)
251
252 def preconnect(self, url):
253 """If |uri| will create a ssh connection, setup the ssh master for it.""" # noqa: E501
254 m = URI_ALL.match(url)
255 if m:
256 scheme = m.group(1)
257 host = m.group(2)
258 if ":" in host:
259 host, port = host.split(":")
260 else:
261 port = None
262 if scheme in ("ssh", "git+ssh", "ssh+git"):
263 return self._open(host, port)
264 return False
265
266 m = URI_SCP.match(url)
267 if m:
268 host = m.group(1)
269 return self._open(host)
270
271 return False
272
273 def sock(self, create=True):
274 """Return the path to the ssh socket dir.
275
276 This has all the master sockets so clients can talk to them.
277 """
278 if self._sock_path is None:
279 if not create:
280 return None
281 tmp_dir = "/tmp"
282 if not os.path.exists(tmp_dir):
283 tmp_dir = tempfile.gettempdir()
284 if version() < (6, 7):
285 tokens = "%r@%h:%p"
286 else:
287 tokens = "%C" # hash of %l%h%p%r
288 self._sock_path = os.path.join(
289 tempfile.mkdtemp("", "ssh-", tmp_dir), "master-" + tokens
290 )
291 return self._sock_path
diff --git a/subcmds/__init__.py b/subcmds/__init__.py
index 051dda06..4e41afc0 100644
--- a/subcmds/__init__.py
+++ b/subcmds/__init__.py
@@ -19,31 +19,29 @@ all_commands = {}
19 19
20my_dir = os.path.dirname(__file__) 20my_dir = os.path.dirname(__file__)
21for py in os.listdir(my_dir): 21for py in os.listdir(my_dir):
22 if py == '__init__.py': 22 if py == "__init__.py":
23 continue 23 continue
24 24
25 if py.endswith('.py'): 25 if py.endswith(".py"):
26 name = py[:-3] 26 name = py[:-3]
27 27
28 clsn = name.capitalize() 28 clsn = name.capitalize()
29 while clsn.find('_') > 0: 29 while clsn.find("_") > 0:
30 h = clsn.index('_') 30 h = clsn.index("_")
31 clsn = clsn[0:h] + clsn[h + 1:].capitalize() 31 clsn = clsn[0:h] + clsn[h + 1 :].capitalize()
32 32
33 mod = __import__(__name__, 33 mod = __import__(__name__, globals(), locals(), ["%s" % name])
34 globals(), 34 mod = getattr(mod, name)
35 locals(), 35 try:
36 ['%s' % name]) 36 cmd = getattr(mod, clsn)
37 mod = getattr(mod, name) 37 except AttributeError:
38 try: 38 raise SyntaxError(
39 cmd = getattr(mod, clsn) 39 "%s/%s does not define class %s" % (__name__, py, clsn)
40 except AttributeError: 40 )
41 raise SyntaxError('%s/%s does not define class %s' % ( 41
42 __name__, py, clsn)) 42 name = name.replace("_", "-")
43 43 cmd.NAME = name
44 name = name.replace('_', '-') 44 all_commands[name] = cmd
45 cmd.NAME = name
46 all_commands[name] = cmd
47 45
48# Add 'branch' as an alias for 'branches'. 46# Add 'branch' as an alias for 'branches'.
49all_commands['branch'] = all_commands['branches'] 47all_commands["branch"] = all_commands["branches"]
diff --git a/subcmds/abandon.py b/subcmds/abandon.py
index c3d2d5b7..1f687f53 100644
--- a/subcmds/abandon.py
+++ b/subcmds/abandon.py
@@ -23,9 +23,9 @@ from progress import Progress
23 23
24 24
25class Abandon(Command): 25class Abandon(Command):
26 COMMON = True 26 COMMON = True
27 helpSummary = "Permanently abandon a development branch" 27 helpSummary = "Permanently abandon a development branch"
28 helpUsage = """ 28 helpUsage = """
29%prog [--all | <branchname>] [<project>...] 29%prog [--all | <branchname>] [<project>...]
30 30
31This subcommand permanently abandons a development branch by 31This subcommand permanently abandons a development branch by
@@ -33,83 +33,104 @@ deleting it (and all its history) from your local repository.
33 33
34It is equivalent to "git branch -D <branchname>". 34It is equivalent to "git branch -D <branchname>".
35""" 35"""
36 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS 36 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
37 37
38 def _Options(self, p): 38 def _Options(self, p):
39 p.add_option('--all', 39 p.add_option(
40 dest='all', action='store_true', 40 "--all",
41 help='delete all branches in all projects') 41 dest="all",
42 action="store_true",
43 help="delete all branches in all projects",
44 )
42 45
43 def ValidateOptions(self, opt, args): 46 def ValidateOptions(self, opt, args):
44 if not opt.all and not args: 47 if not opt.all and not args:
45 self.Usage() 48 self.Usage()
46 49
47 if not opt.all: 50 if not opt.all:
48 nb = args[0] 51 nb = args[0]
49 if not git.check_ref_format('heads/%s' % nb): 52 if not git.check_ref_format("heads/%s" % nb):
50 self.OptionParser.error("'%s' is not a valid branch name" % nb) 53 self.OptionParser.error("'%s' is not a valid branch name" % nb)
51 else: 54 else:
52 args.insert(0, "'All local branches'") 55 args.insert(0, "'All local branches'")
53 56
54 def _ExecuteOne(self, all_branches, nb, project): 57 def _ExecuteOne(self, all_branches, nb, project):
55 """Abandon one project.""" 58 """Abandon one project."""
56 if all_branches: 59 if all_branches:
57 branches = project.GetBranches() 60 branches = project.GetBranches()
58 else: 61 else:
59 branches = [nb] 62 branches = [nb]
60 63
61 ret = {} 64 ret = {}
62 for name in branches: 65 for name in branches:
63 status = project.AbandonBranch(name) 66 status = project.AbandonBranch(name)
64 if status is not None: 67 if status is not None:
65 ret[name] = status 68 ret[name] = status
66 return (ret, project) 69 return (ret, project)
67 70
68 def Execute(self, opt, args): 71 def Execute(self, opt, args):
69 nb = args[0] 72 nb = args[0]
70 err = defaultdict(list) 73 err = defaultdict(list)
71 success = defaultdict(list) 74 success = defaultdict(list)
72 all_projects = self.GetProjects(args[1:], all_manifests=not opt.this_manifest_only) 75 all_projects = self.GetProjects(
73 _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only) 76 args[1:], all_manifests=not opt.this_manifest_only
77 )
78 _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
74 79
75 def _ProcessResults(_pool, pm, states): 80 def _ProcessResults(_pool, pm, states):
76 for (results, project) in states: 81 for results, project in states:
77 for branch, status in results.items(): 82 for branch, status in results.items():
78 if status: 83 if status:
79 success[branch].append(project) 84 success[branch].append(project)
80 else: 85 else:
81 err[branch].append(project) 86 err[branch].append(project)
82 pm.update() 87 pm.update()
83 88
84 self.ExecuteInParallel( 89 self.ExecuteInParallel(
85 opt.jobs, 90 opt.jobs,
86 functools.partial(self._ExecuteOne, opt.all, nb), 91 functools.partial(self._ExecuteOne, opt.all, nb),
87 all_projects, 92 all_projects,
88 callback=_ProcessResults, 93 callback=_ProcessResults,
89 output=Progress('Abandon %s' % (nb,), len(all_projects), quiet=opt.quiet)) 94 output=Progress(
95 "Abandon %s" % (nb,), len(all_projects), quiet=opt.quiet
96 ),
97 )
90 98
91 width = max(itertools.chain( 99 width = max(
92 [25], (len(x) for x in itertools.chain(success, err)))) 100 itertools.chain(
93 if err: 101 [25], (len(x) for x in itertools.chain(success, err))
94 for br in err.keys(): 102 )
95 err_msg = "error: cannot abandon %s" % br 103 )
96 print(err_msg, file=sys.stderr) 104 if err:
97 for proj in err[br]: 105 for br in err.keys():
98 print(' ' * len(err_msg) + " | %s" % _RelPath(proj), file=sys.stderr) 106 err_msg = "error: cannot abandon %s" % br
99 sys.exit(1) 107 print(err_msg, file=sys.stderr)
100 elif not success: 108 for proj in err[br]:
101 print('error: no project has local branch(es) : %s' % nb, 109 print(
102 file=sys.stderr) 110 " " * len(err_msg) + " | %s" % _RelPath(proj),
103 sys.exit(1) 111 file=sys.stderr,
104 else: 112 )
105 # Everything below here is displaying status. 113 sys.exit(1)
106 if opt.quiet: 114 elif not success:
107 return 115 print(
108 print('Abandoned branches:') 116 "error: no project has local branch(es) : %s" % nb,
109 for br in success.keys(): 117 file=sys.stderr,
110 if len(all_projects) > 1 and len(all_projects) == len(success[br]): 118 )
111 result = "all project" 119 sys.exit(1)
112 else: 120 else:
113 result = "%s" % ( 121 # Everything below here is displaying status.
114 ('\n' + ' ' * width + '| ').join(_RelPath(p) for p in success[br])) 122 if opt.quiet:
115 print("%s%s| %s\n" % (br, ' ' * (width - len(br)), result)) 123 return
124 print("Abandoned branches:")
125 for br in success.keys():
126 if len(all_projects) > 1 and len(all_projects) == len(
127 success[br]
128 ):
129 result = "all project"
130 else:
131 result = "%s" % (
132 ("\n" + " " * width + "| ").join(
133 _RelPath(p) for p in success[br]
134 )
135 )
136 print("%s%s| %s\n" % (br, " " * (width - len(br)), result))
diff --git a/subcmds/branches.py b/subcmds/branches.py
index fcf67ef5..4d5bb196 100644
--- a/subcmds/branches.py
+++ b/subcmds/branches.py
@@ -20,51 +20,51 @@ from command import Command, DEFAULT_LOCAL_JOBS
20 20
21 21
22class BranchColoring(Coloring): 22class BranchColoring(Coloring):
23 def __init__(self, config): 23 def __init__(self, config):
24 Coloring.__init__(self, config, 'branch') 24 Coloring.__init__(self, config, "branch")
25 self.current = self.printer('current', fg='green') 25 self.current = self.printer("current", fg="green")
26 self.local = self.printer('local') 26 self.local = self.printer("local")
27 self.notinproject = self.printer('notinproject', fg='red') 27 self.notinproject = self.printer("notinproject", fg="red")
28 28
29 29
30class BranchInfo(object): 30class BranchInfo(object):
31 def __init__(self, name): 31 def __init__(self, name):
32 self.name = name 32 self.name = name
33 self.current = 0 33 self.current = 0
34 self.published = 0 34 self.published = 0
35 self.published_equal = 0 35 self.published_equal = 0
36 self.projects = [] 36 self.projects = []
37 37
38 def add(self, b): 38 def add(self, b):
39 if b.current: 39 if b.current:
40 self.current += 1 40 self.current += 1
41 if b.published: 41 if b.published:
42 self.published += 1 42 self.published += 1
43 if b.revision == b.published: 43 if b.revision == b.published:
44 self.published_equal += 1 44 self.published_equal += 1
45 self.projects.append(b) 45 self.projects.append(b)
46 46
47 @property 47 @property
48 def IsCurrent(self): 48 def IsCurrent(self):
49 return self.current > 0 49 return self.current > 0
50 50
51 @property 51 @property
52 def IsSplitCurrent(self): 52 def IsSplitCurrent(self):
53 return self.current != 0 and self.current != len(self.projects) 53 return self.current != 0 and self.current != len(self.projects)
54 54
55 @property 55 @property
56 def IsPublished(self): 56 def IsPublished(self):
57 return self.published > 0 57 return self.published > 0
58 58
59 @property 59 @property
60 def IsPublishedEqual(self): 60 def IsPublishedEqual(self):
61 return self.published_equal == len(self.projects) 61 return self.published_equal == len(self.projects)
62 62
63 63
64class Branches(Command): 64class Branches(Command):
65 COMMON = True 65 COMMON = True
66 helpSummary = "View current topic branches" 66 helpSummary = "View current topic branches"
67 helpUsage = """ 67 helpUsage = """
68%prog [<project>...] 68%prog [<project>...]
69 69
70Summarizes the currently available topic branches. 70Summarizes the currently available topic branches.
@@ -95,111 +95,114 @@ the branch appears in, or does not appear in. If no project list
95is shown, then the branch appears in all projects. 95is shown, then the branch appears in all projects.
96 96
97""" 97"""
98 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS 98 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
99 99
100 def Execute(self, opt, args): 100 def Execute(self, opt, args):
101 projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only) 101 projects = self.GetProjects(
102 out = BranchColoring(self.manifest.manifestProject.config) 102 args, all_manifests=not opt.this_manifest_only
103 all_branches = {} 103 )
104 project_cnt = len(projects) 104 out = BranchColoring(self.manifest.manifestProject.config)
105 105 all_branches = {}
106 def _ProcessResults(_pool, _output, results): 106 project_cnt = len(projects)
107 for name, b in itertools.chain.from_iterable(results): 107
108 if name not in all_branches: 108 def _ProcessResults(_pool, _output, results):
109 all_branches[name] = BranchInfo(name) 109 for name, b in itertools.chain.from_iterable(results):
110 all_branches[name].add(b) 110 if name not in all_branches:
111 111 all_branches[name] = BranchInfo(name)
112 self.ExecuteInParallel( 112 all_branches[name].add(b)
113 opt.jobs, 113
114 expand_project_to_branches, 114 self.ExecuteInParallel(
115 projects, 115 opt.jobs,
116 callback=_ProcessResults) 116 expand_project_to_branches,
117 117 projects,
118 names = sorted(all_branches) 118 callback=_ProcessResults,
119 119 )
120 if not names: 120
121 print(' (no branches)', file=sys.stderr) 121 names = sorted(all_branches)
122 return 122
123 123 if not names:
124 width = 25 124 print(" (no branches)", file=sys.stderr)
125 for name in names: 125 return
126 if width < len(name): 126
127 width = len(name) 127 width = 25
128 128 for name in names:
129 for name in names: 129 if width < len(name):
130 i = all_branches[name] 130 width = len(name)
131 in_cnt = len(i.projects) 131
132 132 for name in names:
133 if i.IsCurrent: 133 i = all_branches[name]
134 current = '*' 134 in_cnt = len(i.projects)
135 hdr = out.current 135
136 else: 136 if i.IsCurrent:
137 current = ' ' 137 current = "*"
138 hdr = out.local 138 hdr = out.current
139
140 if i.IsPublishedEqual:
141 published = 'P'
142 elif i.IsPublished:
143 published = 'p'
144 else:
145 published = ' '
146
147 hdr('%c%c %-*s' % (current, published, width, name))
148 out.write(' |')
149
150 _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
151 if in_cnt < project_cnt:
152 fmt = out.write
153 paths = []
154 non_cur_paths = []
155 if i.IsSplitCurrent or (in_cnt <= project_cnt - in_cnt):
156 in_type = 'in'
157 for b in i.projects:
158 relpath = _RelPath(b.project)
159 if not i.IsSplitCurrent or b.current:
160 paths.append(relpath)
161 else: 139 else:
162 non_cur_paths.append(relpath) 140 current = " "
163 else: 141 hdr = out.local
164 fmt = out.notinproject 142
165 in_type = 'not in' 143 if i.IsPublishedEqual:
166 have = set() 144 published = "P"
167 for b in i.projects: 145 elif i.IsPublished:
168 have.add(_RelPath(b.project)) 146 published = "p"
169 for p in projects: 147 else:
170 if _RelPath(p) not in have: 148 published = " "
171 paths.append(_RelPath(p)) 149
172 150 hdr("%c%c %-*s" % (current, published, width, name))
173 s = ' %s %s' % (in_type, ', '.join(paths)) 151 out.write(" |")
174 if not i.IsSplitCurrent and (width + 7 + len(s) < 80): 152
175 fmt = out.current if i.IsCurrent else fmt 153 _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
176 fmt(s) 154 if in_cnt < project_cnt:
177 else: 155 fmt = out.write
178 fmt(' %s:' % in_type) 156 paths = []
179 fmt = out.current if i.IsCurrent else out.write 157 non_cur_paths = []
180 for p in paths: 158 if i.IsSplitCurrent or (in_cnt <= project_cnt - in_cnt):
181 out.nl() 159 in_type = "in"
182 fmt(width * ' ' + ' %s' % p) 160 for b in i.projects:
183 fmt = out.write 161 relpath = _RelPath(b.project)
184 for p in non_cur_paths: 162 if not i.IsSplitCurrent or b.current:
163 paths.append(relpath)
164 else:
165 non_cur_paths.append(relpath)
166 else:
167 fmt = out.notinproject
168 in_type = "not in"
169 have = set()
170 for b in i.projects:
171 have.add(_RelPath(b.project))
172 for p in projects:
173 if _RelPath(p) not in have:
174 paths.append(_RelPath(p))
175
176 s = " %s %s" % (in_type, ", ".join(paths))
177 if not i.IsSplitCurrent and (width + 7 + len(s) < 80):
178 fmt = out.current if i.IsCurrent else fmt
179 fmt(s)
180 else:
181 fmt(" %s:" % in_type)
182 fmt = out.current if i.IsCurrent else out.write
183 for p in paths:
184 out.nl()
185 fmt(width * " " + " %s" % p)
186 fmt = out.write
187 for p in non_cur_paths:
188 out.nl()
189 fmt(width * " " + " %s" % p)
190 else:
191 out.write(" in all projects")
185 out.nl() 192 out.nl()
186 fmt(width * ' ' + ' %s' % p)
187 else:
188 out.write(' in all projects')
189 out.nl()
190 193
191 194
192def expand_project_to_branches(project): 195def expand_project_to_branches(project):
193 """Expands a project into a list of branch names & associated information. 196 """Expands a project into a list of branch names & associated information.
194 197
195 Args: 198 Args:
196 project: project.Project 199 project: project.Project
197 200
198 Returns: 201 Returns:
199 List[Tuple[str, git_config.Branch]] 202 List[Tuple[str, git_config.Branch]]
200 """ 203 """
201 branches = [] 204 branches = []
202 for name, b in project.GetBranches().items(): 205 for name, b in project.GetBranches().items():
203 b.project = project 206 b.project = project
204 branches.append((name, b)) 207 branches.append((name, b))
205 return branches 208 return branches
diff --git a/subcmds/checkout.py b/subcmds/checkout.py
index 768b6027..08012a82 100644
--- a/subcmds/checkout.py
+++ b/subcmds/checkout.py
@@ -20,12 +20,12 @@ from progress import Progress
20 20
21 21
22class Checkout(Command): 22class Checkout(Command):
23 COMMON = True 23 COMMON = True
24 helpSummary = "Checkout a branch for development" 24 helpSummary = "Checkout a branch for development"
25 helpUsage = """ 25 helpUsage = """
26%prog <branchname> [<project>...] 26%prog <branchname> [<project>...]
27""" 27"""
28 helpDescription = """ 28 helpDescription = """
29The '%prog' command checks out an existing branch that was previously 29The '%prog' command checks out an existing branch that was previously
30created by 'repo start'. 30created by 'repo start'.
31 31
@@ -33,43 +33,50 @@ The command is equivalent to:
33 33
34 repo forall [<project>...] -c git checkout <branchname> 34 repo forall [<project>...] -c git checkout <branchname>
35""" 35"""
36 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS 36 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
37 37
38 def ValidateOptions(self, opt, args): 38 def ValidateOptions(self, opt, args):
39 if not args: 39 if not args:
40 self.Usage() 40 self.Usage()
41 41
42 def _ExecuteOne(self, nb, project): 42 def _ExecuteOne(self, nb, project):
43 """Checkout one project.""" 43 """Checkout one project."""
44 return (project.CheckoutBranch(nb), project) 44 return (project.CheckoutBranch(nb), project)
45 45
46 def Execute(self, opt, args): 46 def Execute(self, opt, args):
47 nb = args[0] 47 nb = args[0]
48 err = [] 48 err = []
49 success = [] 49 success = []
50 all_projects = self.GetProjects(args[1:], all_manifests=not opt.this_manifest_only) 50 all_projects = self.GetProjects(
51 args[1:], all_manifests=not opt.this_manifest_only
52 )
51 53
52 def _ProcessResults(_pool, pm, results): 54 def _ProcessResults(_pool, pm, results):
53 for status, project in results: 55 for status, project in results:
54 if status is not None: 56 if status is not None:
55 if status: 57 if status:
56 success.append(project) 58 success.append(project)
57 else: 59 else:
58 err.append(project) 60 err.append(project)
59 pm.update() 61 pm.update()
60 62
61 self.ExecuteInParallel( 63 self.ExecuteInParallel(
62 opt.jobs, 64 opt.jobs,
63 functools.partial(self._ExecuteOne, nb), 65 functools.partial(self._ExecuteOne, nb),
64 all_projects, 66 all_projects,
65 callback=_ProcessResults, 67 callback=_ProcessResults,
66 output=Progress('Checkout %s' % (nb,), len(all_projects), quiet=opt.quiet)) 68 output=Progress(
69 "Checkout %s" % (nb,), len(all_projects), quiet=opt.quiet
70 ),
71 )
67 72
68 if err: 73 if err:
69 for p in err: 74 for p in err:
70 print("error: %s/: cannot checkout %s" % (p.relpath, nb), 75 print(
71 file=sys.stderr) 76 "error: %s/: cannot checkout %s" % (p.relpath, nb),
72 sys.exit(1) 77 file=sys.stderr,
73 elif not success: 78 )
74 print('error: no project has branch %s' % nb, file=sys.stderr) 79 sys.exit(1)
75 sys.exit(1) 80 elif not success:
81 print("error: no project has branch %s" % nb, file=sys.stderr)
82 sys.exit(1)
diff --git a/subcmds/cherry_pick.py b/subcmds/cherry_pick.py
index eecf4e17..4cfb8c88 100644
--- a/subcmds/cherry_pick.py
+++ b/subcmds/cherry_pick.py
@@ -17,96 +17,107 @@ import sys
17from command import Command 17from command import Command
18from git_command import GitCommand 18from git_command import GitCommand
19 19
20CHANGE_ID_RE = re.compile(r'^\s*Change-Id: I([0-9a-f]{40})\s*$') 20CHANGE_ID_RE = re.compile(r"^\s*Change-Id: I([0-9a-f]{40})\s*$")
21 21
22 22
23class CherryPick(Command): 23class CherryPick(Command):
24 COMMON = True 24 COMMON = True
25 helpSummary = "Cherry-pick a change." 25 helpSummary = "Cherry-pick a change."
26 helpUsage = """ 26 helpUsage = """
27%prog <sha1> 27%prog <sha1>
28""" 28"""
29 helpDescription = """ 29 helpDescription = """
30'%prog' cherry-picks a change from one branch to another. 30'%prog' cherry-picks a change from one branch to another.
31The change id will be updated, and a reference to the old 31The change id will be updated, and a reference to the old
32change id will be added. 32change id will be added.
33""" 33"""
34 34
35 def ValidateOptions(self, opt, args): 35 def ValidateOptions(self, opt, args):
36 if len(args) != 1: 36 if len(args) != 1:
37 self.Usage() 37 self.Usage()
38 38
39 def Execute(self, opt, args): 39 def Execute(self, opt, args):
40 reference = args[0] 40 reference = args[0]
41 41
42 p = GitCommand(None, 42 p = GitCommand(
43 ['rev-parse', '--verify', reference], 43 None,
44 capture_stdout=True, 44 ["rev-parse", "--verify", reference],
45 capture_stderr=True) 45 capture_stdout=True,
46 if p.Wait() != 0: 46 capture_stderr=True,
47 print(p.stderr, file=sys.stderr) 47 )
48 sys.exit(1) 48 if p.Wait() != 0:
49 sha1 = p.stdout.strip() 49 print(p.stderr, file=sys.stderr)
50 50 sys.exit(1)
51 p = GitCommand(None, ['cat-file', 'commit', sha1], capture_stdout=True) 51 sha1 = p.stdout.strip()
52 if p.Wait() != 0: 52
53 print("error: Failed to retrieve old commit message", file=sys.stderr) 53 p = GitCommand(None, ["cat-file", "commit", sha1], capture_stdout=True)
54 sys.exit(1) 54 if p.Wait() != 0:
55 old_msg = self._StripHeader(p.stdout) 55 print(
56 56 "error: Failed to retrieve old commit message", file=sys.stderr
57 p = GitCommand(None, 57 )
58 ['cherry-pick', sha1], 58 sys.exit(1)
59 capture_stdout=True, 59 old_msg = self._StripHeader(p.stdout)
60 capture_stderr=True) 60
61 status = p.Wait() 61 p = GitCommand(
62 62 None,
63 if p.stdout: 63 ["cherry-pick", sha1],
64 print(p.stdout.strip(), file=sys.stdout) 64 capture_stdout=True,
65 if p.stderr: 65 capture_stderr=True,
66 print(p.stderr.strip(), file=sys.stderr) 66 )
67 67 status = p.Wait()
68 if status == 0: 68
69 # The cherry-pick was applied correctly. We just need to edit the 69 if p.stdout:
70 # commit message. 70 print(p.stdout.strip(), file=sys.stdout)
71 new_msg = self._Reformat(old_msg, sha1) 71 if p.stderr:
72 72 print(p.stderr.strip(), file=sys.stderr)
73 p = GitCommand(None, ['commit', '--amend', '-F', '-'], 73
74 input=new_msg, 74 if status == 0:
75 capture_stdout=True, 75 # The cherry-pick was applied correctly. We just need to edit the
76 capture_stderr=True) 76 # commit message.
77 if p.Wait() != 0: 77 new_msg = self._Reformat(old_msg, sha1)
78 print("error: Failed to update commit message", file=sys.stderr) 78
79 sys.exit(1) 79 p = GitCommand(
80 80 None,
81 else: 81 ["commit", "--amend", "-F", "-"],
82 print('NOTE: When committing (please see above) and editing the commit ' 82 input=new_msg,
83 'message, please remove the old Change-Id-line and add:') 83 capture_stdout=True,
84 print(self._GetReference(sha1), file=sys.stderr) 84 capture_stderr=True,
85 print(file=sys.stderr) 85 )
86 86 if p.Wait() != 0:
87 def _IsChangeId(self, line): 87 print("error: Failed to update commit message", file=sys.stderr)
88 return CHANGE_ID_RE.match(line) 88 sys.exit(1)
89 89
90 def _GetReference(self, sha1): 90 else:
91 return "(cherry picked from commit %s)" % sha1 91 print(
92 92 "NOTE: When committing (please see above) and editing the "
93 def _StripHeader(self, commit_msg): 93 "commit message, please remove the old Change-Id-line and add:"
94 lines = commit_msg.splitlines() 94 )
95 return "\n".join(lines[lines.index("") + 1:]) 95 print(self._GetReference(sha1), file=sys.stderr)
96 96 print(file=sys.stderr)
97 def _Reformat(self, old_msg, sha1): 97
98 new_msg = [] 98 def _IsChangeId(self, line):
99 99 return CHANGE_ID_RE.match(line)
100 for line in old_msg.splitlines(): 100
101 if not self._IsChangeId(line): 101 def _GetReference(self, sha1):
102 new_msg.append(line) 102 return "(cherry picked from commit %s)" % sha1
103 103
104 # Add a blank line between the message and the change id/reference 104 def _StripHeader(self, commit_msg):
105 try: 105 lines = commit_msg.splitlines()
106 if new_msg[-1].strip() != "": 106 return "\n".join(lines[lines.index("") + 1 :])
107 new_msg.append("") 107
108 except IndexError: 108 def _Reformat(self, old_msg, sha1):
109 pass 109 new_msg = []
110 110
111 new_msg.append(self._GetReference(sha1)) 111 for line in old_msg.splitlines():
112 return "\n".join(new_msg) 112 if not self._IsChangeId(line):
113 new_msg.append(line)
114
115 # Add a blank line between the message and the change id/reference.
116 try:
117 if new_msg[-1].strip() != "":
118 new_msg.append("")
119 except IndexError:
120 pass
121
122 new_msg.append(self._GetReference(sha1))
123 return "\n".join(new_msg)
diff --git a/subcmds/diff.py b/subcmds/diff.py
index a606ee9a..5c627c0c 100644
--- a/subcmds/diff.py
+++ b/subcmds/diff.py
@@ -19,54 +19,63 @@ from command import DEFAULT_LOCAL_JOBS, PagedCommand
19 19
20 20
21class Diff(PagedCommand): 21class Diff(PagedCommand):
22 COMMON = True 22 COMMON = True
23 helpSummary = "Show changes between commit and working tree" 23 helpSummary = "Show changes between commit and working tree"
24 helpUsage = """ 24 helpUsage = """
25%prog [<project>...] 25%prog [<project>...]
26 26
27The -u option causes '%prog' to generate diff output with file paths 27The -u option causes '%prog' to generate diff output with file paths
28relative to the repository root, so the output can be applied 28relative to the repository root, so the output can be applied
29to the Unix 'patch' command. 29to the Unix 'patch' command.
30""" 30"""
31 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS 31 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
32 32
33 def _Options(self, p): 33 def _Options(self, p):
34 p.add_option('-u', '--absolute', 34 p.add_option(
35 dest='absolute', action='store_true', 35 "-u",
36 help='paths are relative to the repository root') 36 "--absolute",
37 dest="absolute",
38 action="store_true",
39 help="paths are relative to the repository root",
40 )
37 41
38 def _ExecuteOne(self, absolute, local, project): 42 def _ExecuteOne(self, absolute, local, project):
39 """Obtains the diff for a specific project. 43 """Obtains the diff for a specific project.
40 44
41 Args: 45 Args:
42 absolute: Paths are relative to the root. 46 absolute: Paths are relative to the root.
43 local: a boolean, if True, the path is relative to the local 47 local: a boolean, if True, the path is relative to the local
44 (sub)manifest. If false, the path is relative to the 48 (sub)manifest. If false, the path is relative to the outermost
45 outermost manifest. 49 manifest.
46 project: Project to get status of. 50 project: Project to get status of.
47 51
48 Returns: 52 Returns:
49 The status of the project. 53 The status of the project.
50 """ 54 """
51 buf = io.StringIO() 55 buf = io.StringIO()
52 ret = project.PrintWorkTreeDiff(absolute, output_redir=buf, local=local) 56 ret = project.PrintWorkTreeDiff(absolute, output_redir=buf, local=local)
53 return (ret, buf.getvalue()) 57 return (ret, buf.getvalue())
54 58
55 def Execute(self, opt, args): 59 def Execute(self, opt, args):
56 all_projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only) 60 all_projects = self.GetProjects(
61 args, all_manifests=not opt.this_manifest_only
62 )
57 63
58 def _ProcessResults(_pool, _output, results): 64 def _ProcessResults(_pool, _output, results):
59 ret = 0 65 ret = 0
60 for (state, output) in results: 66 for state, output in results:
61 if output: 67 if output:
62 print(output, end='') 68 print(output, end="")
63 if not state: 69 if not state:
64 ret = 1 70 ret = 1
65 return ret 71 return ret
66 72
67 return self.ExecuteInParallel( 73 return self.ExecuteInParallel(
68 opt.jobs, 74 opt.jobs,
69 functools.partial(self._ExecuteOne, opt.absolute, opt.this_manifest_only), 75 functools.partial(
70 all_projects, 76 self._ExecuteOne, opt.absolute, opt.this_manifest_only
71 callback=_ProcessResults, 77 ),
72 ordered=True) 78 all_projects,
79 callback=_ProcessResults,
80 ordered=True,
81 )
diff --git a/subcmds/diffmanifests.py b/subcmds/diffmanifests.py
index 4f9f5b0f..b446dbd8 100644
--- a/subcmds/diffmanifests.py
+++ b/subcmds/diffmanifests.py
@@ -18,24 +18,24 @@ from manifest_xml import RepoClient
18 18
19 19
20class _Coloring(Coloring): 20class _Coloring(Coloring):
21 def __init__(self, config): 21 def __init__(self, config):
22 Coloring.__init__(self, config, "status") 22 Coloring.__init__(self, config, "status")
23 23
24 24
25class Diffmanifests(PagedCommand): 25class Diffmanifests(PagedCommand):
26 """ A command to see logs in projects represented by manifests 26 """A command to see logs in projects represented by manifests
27 27
28 This is used to see deeper differences between manifests. Where a simple 28 This is used to see deeper differences between manifests. Where a simple
29 diff would only show a diff of sha1s for example, this command will display 29 diff would only show a diff of sha1s for example, this command will display
30 the logs of the project between both sha1s, allowing user to see diff at a 30 the logs of the project between both sha1s, allowing user to see diff at a
31 deeper level. 31 deeper level.
32 """ 32 """
33 33
34 COMMON = True 34 COMMON = True
35 helpSummary = "Manifest diff utility" 35 helpSummary = "Manifest diff utility"
36 helpUsage = """%prog manifest1.xml [manifest2.xml] [options]""" 36 helpUsage = """%prog manifest1.xml [manifest2.xml] [options]"""
37 37
38 helpDescription = """ 38 helpDescription = """
39The %prog command shows differences between project revisions of manifest1 and 39The %prog command shows differences between project revisions of manifest1 and
40manifest2. if manifest2 is not specified, current manifest.xml will be used 40manifest2. if manifest2 is not specified, current manifest.xml will be used
41instead. Both absolute and relative paths may be used for manifests. Relative 41instead. Both absolute and relative paths may be used for manifests. Relative
@@ -65,159 +65,209 @@ synced and their revisions won't be found.
65 65
66""" 66"""
67 67
68 def _Options(self, p): 68 def _Options(self, p):
69 p.add_option('--raw', 69 p.add_option(
70 dest='raw', action='store_true', 70 "--raw", dest="raw", action="store_true", help="display raw diff"
71 help='display raw diff') 71 )
72 p.add_option('--no-color', 72 p.add_option(
73 dest='color', action='store_false', default=True, 73 "--no-color",
74 help='does not display the diff in color') 74 dest="color",
75 p.add_option('--pretty-format', 75 action="store_false",
76 dest='pretty_format', action='store', 76 default=True,
77 metavar='<FORMAT>', 77 help="does not display the diff in color",
78 help='print the log using a custom git pretty format string') 78 )
79 79 p.add_option(
80 def _printRawDiff(self, diff, pretty_format=None, local=False): 80 "--pretty-format",
81 _RelPath = lambda p: p.RelPath(local=local) 81 dest="pretty_format",
82 for project in diff['added']: 82 action="store",
83 self.printText("A %s %s" % (_RelPath(project), project.revisionExpr)) 83 metavar="<FORMAT>",
84 self.out.nl() 84 help="print the log using a custom git pretty format string",
85 85 )
86 for project in diff['removed']: 86
87 self.printText("R %s %s" % (_RelPath(project), project.revisionExpr)) 87 def _printRawDiff(self, diff, pretty_format=None, local=False):
88 self.out.nl() 88 _RelPath = lambda p: p.RelPath(local=local)
89 89 for project in diff["added"]:
90 for project, otherProject in diff['changed']: 90 self.printText(
91 self.printText("C %s %s %s" % (_RelPath(project), project.revisionExpr, 91 "A %s %s" % (_RelPath(project), project.revisionExpr)
92 otherProject.revisionExpr)) 92 )
93 self.out.nl()
94 self._printLogs(project, otherProject, raw=True, color=False, pretty_format=pretty_format)
95
96 for project, otherProject in diff['unreachable']:
97 self.printText("U %s %s %s" % (_RelPath(project), project.revisionExpr,
98 otherProject.revisionExpr))
99 self.out.nl()
100
101 def _printDiff(self, diff, color=True, pretty_format=None, local=False):
102 _RelPath = lambda p: p.RelPath(local=local)
103 if diff['added']:
104 self.out.nl()
105 self.printText('added projects : \n')
106 self.out.nl()
107 for project in diff['added']:
108 self.printProject('\t%s' % (_RelPath(project)))
109 self.printText(' at revision ')
110 self.printRevision(project.revisionExpr)
111 self.out.nl()
112
113 if diff['removed']:
114 self.out.nl()
115 self.printText('removed projects : \n')
116 self.out.nl()
117 for project in diff['removed']:
118 self.printProject('\t%s' % (_RelPath(project)))
119 self.printText(' at revision ')
120 self.printRevision(project.revisionExpr)
121 self.out.nl()
122
123 if diff['missing']:
124 self.out.nl()
125 self.printText('missing projects : \n')
126 self.out.nl()
127 for project in diff['missing']:
128 self.printProject('\t%s' % (_RelPath(project)))
129 self.printText(' at revision ')
130 self.printRevision(project.revisionExpr)
131 self.out.nl()
132
133 if diff['changed']:
134 self.out.nl()
135 self.printText('changed projects : \n')
136 self.out.nl()
137 for project, otherProject in diff['changed']:
138 self.printProject('\t%s' % (_RelPath(project)))
139 self.printText(' changed from ')
140 self.printRevision(project.revisionExpr)
141 self.printText(' to ')
142 self.printRevision(otherProject.revisionExpr)
143 self.out.nl()
144 self._printLogs(project, otherProject, raw=False, color=color,
145 pretty_format=pretty_format)
146 self.out.nl()
147
148 if diff['unreachable']:
149 self.out.nl()
150 self.printText('projects with unreachable revisions : \n')
151 self.out.nl()
152 for project, otherProject in diff['unreachable']:
153 self.printProject('\t%s ' % (_RelPath(project)))
154 self.printRevision(project.revisionExpr)
155 self.printText(' or ')
156 self.printRevision(otherProject.revisionExpr)
157 self.printText(' not found')
158 self.out.nl()
159
160 def _printLogs(self, project, otherProject, raw=False, color=True,
161 pretty_format=None):
162
163 logs = project.getAddedAndRemovedLogs(otherProject,
164 oneline=(pretty_format is None),
165 color=color,
166 pretty_format=pretty_format)
167 if logs['removed']:
168 removedLogs = logs['removed'].split('\n')
169 for log in removedLogs:
170 if log.strip():
171 if raw:
172 self.printText(' R ' + log)
173 self.out.nl() 93 self.out.nl()
174 else: 94
175 self.printRemoved('\t\t[-] ') 95 for project in diff["removed"]:
176 self.printText(log) 96 self.printText(
97 "R %s %s" % (_RelPath(project), project.revisionExpr)
98 )
99 self.out.nl()
100
101 for project, otherProject in diff["changed"]:
102 self.printText(
103 "C %s %s %s"
104 % (
105 _RelPath(project),
106 project.revisionExpr,
107 otherProject.revisionExpr,
108 )
109 )
110 self.out.nl()
111 self._printLogs(
112 project,
113 otherProject,
114 raw=True,
115 color=False,
116 pretty_format=pretty_format,
117 )
118
119 for project, otherProject in diff["unreachable"]:
120 self.printText(
121 "U %s %s %s"
122 % (
123 _RelPath(project),
124 project.revisionExpr,
125 otherProject.revisionExpr,
126 )
127 )
128 self.out.nl()
129
130 def _printDiff(self, diff, color=True, pretty_format=None, local=False):
131 _RelPath = lambda p: p.RelPath(local=local)
132 if diff["added"]:
133 self.out.nl()
134 self.printText("added projects : \n")
177 self.out.nl() 135 self.out.nl()
136 for project in diff["added"]:
137 self.printProject("\t%s" % (_RelPath(project)))
138 self.printText(" at revision ")
139 self.printRevision(project.revisionExpr)
140 self.out.nl()
178 141
179 if logs['added']: 142 if diff["removed"]:
180 addedLogs = logs['added'].split('\n')
181 for log in addedLogs:
182 if log.strip():
183 if raw:
184 self.printText(' A ' + log)
185 self.out.nl() 143 self.out.nl()
186 else: 144 self.printText("removed projects : \n")
187 self.printAdded('\t\t[+] ')
188 self.printText(log)
189 self.out.nl() 145 self.out.nl()
146 for project in diff["removed"]:
147 self.printProject("\t%s" % (_RelPath(project)))
148 self.printText(" at revision ")
149 self.printRevision(project.revisionExpr)
150 self.out.nl()
190 151
191 def ValidateOptions(self, opt, args): 152 if diff["missing"]:
192 if not args or len(args) > 2: 153 self.out.nl()
193 self.OptionParser.error('missing manifests to diff') 154 self.printText("missing projects : \n")
194 if opt.this_manifest_only is False: 155 self.out.nl()
195 raise self.OptionParser.error( 156 for project in diff["missing"]:
196 '`diffmanifest` only supports the current tree') 157 self.printProject("\t%s" % (_RelPath(project)))
197 158 self.printText(" at revision ")
198 def Execute(self, opt, args): 159 self.printRevision(project.revisionExpr)
199 self.out = _Coloring(self.client.globalConfig) 160 self.out.nl()
200 self.printText = self.out.nofmt_printer('text') 161
201 if opt.color: 162 if diff["changed"]:
202 self.printProject = self.out.nofmt_printer('project', attr='bold') 163 self.out.nl()
203 self.printAdded = self.out.nofmt_printer('green', fg='green', attr='bold') 164 self.printText("changed projects : \n")
204 self.printRemoved = self.out.nofmt_printer('red', fg='red', attr='bold') 165 self.out.nl()
205 self.printRevision = self.out.nofmt_printer('revision', fg='yellow') 166 for project, otherProject in diff["changed"]:
206 else: 167 self.printProject("\t%s" % (_RelPath(project)))
207 self.printProject = self.printAdded = self.printRemoved = self.printRevision = self.printText 168 self.printText(" changed from ")
208 169 self.printRevision(project.revisionExpr)
209 manifest1 = RepoClient(self.repodir) 170 self.printText(" to ")
210 manifest1.Override(args[0], load_local_manifests=False) 171 self.printRevision(otherProject.revisionExpr)
211 if len(args) == 1: 172 self.out.nl()
212 manifest2 = self.manifest 173 self._printLogs(
213 else: 174 project,
214 manifest2 = RepoClient(self.repodir) 175 otherProject,
215 manifest2.Override(args[1], load_local_manifests=False) 176 raw=False,
216 177 color=color,
217 diff = manifest1.projectsDiff(manifest2) 178 pretty_format=pretty_format,
218 if opt.raw: 179 )
219 self._printRawDiff(diff, pretty_format=opt.pretty_format, 180 self.out.nl()
220 local=opt.this_manifest_only) 181
221 else: 182 if diff["unreachable"]:
222 self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format, 183 self.out.nl()
223 local=opt.this_manifest_only) 184 self.printText("projects with unreachable revisions : \n")
185 self.out.nl()
186 for project, otherProject in diff["unreachable"]:
187 self.printProject("\t%s " % (_RelPath(project)))
188 self.printRevision(project.revisionExpr)
189 self.printText(" or ")
190 self.printRevision(otherProject.revisionExpr)
191 self.printText(" not found")
192 self.out.nl()
193
194 def _printLogs(
195 self, project, otherProject, raw=False, color=True, pretty_format=None
196 ):
197 logs = project.getAddedAndRemovedLogs(
198 otherProject,
199 oneline=(pretty_format is None),
200 color=color,
201 pretty_format=pretty_format,
202 )
203 if logs["removed"]:
204 removedLogs = logs["removed"].split("\n")
205 for log in removedLogs:
206 if log.strip():
207 if raw:
208 self.printText(" R " + log)
209 self.out.nl()
210 else:
211 self.printRemoved("\t\t[-] ")
212 self.printText(log)
213 self.out.nl()
214
215 if logs["added"]:
216 addedLogs = logs["added"].split("\n")
217 for log in addedLogs:
218 if log.strip():
219 if raw:
220 self.printText(" A " + log)
221 self.out.nl()
222 else:
223 self.printAdded("\t\t[+] ")
224 self.printText(log)
225 self.out.nl()
226
227 def ValidateOptions(self, opt, args):
228 if not args or len(args) > 2:
229 self.OptionParser.error("missing manifests to diff")
230 if opt.this_manifest_only is False:
231 raise self.OptionParser.error(
232 "`diffmanifest` only supports the current tree"
233 )
234
235 def Execute(self, opt, args):
236 self.out = _Coloring(self.client.globalConfig)
237 self.printText = self.out.nofmt_printer("text")
238 if opt.color:
239 self.printProject = self.out.nofmt_printer("project", attr="bold")
240 self.printAdded = self.out.nofmt_printer(
241 "green", fg="green", attr="bold"
242 )
243 self.printRemoved = self.out.nofmt_printer(
244 "red", fg="red", attr="bold"
245 )
246 self.printRevision = self.out.nofmt_printer("revision", fg="yellow")
247 else:
248 self.printProject = (
249 self.printAdded
250 ) = self.printRemoved = self.printRevision = self.printText
251
252 manifest1 = RepoClient(self.repodir)
253 manifest1.Override(args[0], load_local_manifests=False)
254 if len(args) == 1:
255 manifest2 = self.manifest
256 else:
257 manifest2 = RepoClient(self.repodir)
258 manifest2.Override(args[1], load_local_manifests=False)
259
260 diff = manifest1.projectsDiff(manifest2)
261 if opt.raw:
262 self._printRawDiff(
263 diff,
264 pretty_format=opt.pretty_format,
265 local=opt.this_manifest_only,
266 )
267 else:
268 self._printDiff(
269 diff,
270 color=opt.color,
271 pretty_format=opt.pretty_format,
272 local=opt.this_manifest_only,
273 )
diff --git a/subcmds/download.py b/subcmds/download.py
index 15824843..d81d1f8c 100644
--- a/subcmds/download.py
+++ b/subcmds/download.py
@@ -18,143 +18,187 @@ import sys
18from command import Command 18from command import Command
19from error import GitError, NoSuchProjectError 19from error import GitError, NoSuchProjectError
20 20
21CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$') 21CHANGE_RE = re.compile(r"^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$")
22 22
23 23
24class Download(Command): 24class Download(Command):
25 COMMON = True 25 COMMON = True
26 helpSummary = "Download and checkout a change" 26 helpSummary = "Download and checkout a change"
27 helpUsage = """ 27 helpUsage = """
28%prog {[project] change[/patchset]}... 28%prog {[project] change[/patchset]}...
29""" 29"""
30 helpDescription = """ 30 helpDescription = """
31The '%prog' command downloads a change from the review system and 31The '%prog' command downloads a change from the review system and
32makes it available in your project's local working directory. 32makes it available in your project's local working directory.
33If no project is specified try to use current directory as a project. 33If no project is specified try to use current directory as a project.
34""" 34"""
35 35
36 def _Options(self, p): 36 def _Options(self, p):
37 p.add_option('-b', '--branch', 37 p.add_option("-b", "--branch", help="create a new branch first")
38 help='create a new branch first') 38 p.add_option(
39 p.add_option('-c', '--cherry-pick', 39 "-c",
40 dest='cherrypick', action='store_true', 40 "--cherry-pick",
41 help="cherry-pick instead of checkout") 41 dest="cherrypick",
42 p.add_option('-x', '--record-origin', action='store_true', 42 action="store_true",
43 help='pass -x when cherry-picking') 43 help="cherry-pick instead of checkout",
44 p.add_option('-r', '--revert', 44 )
45 dest='revert', action='store_true', 45 p.add_option(
46 help="revert instead of checkout") 46 "-x",
47 p.add_option('-f', '--ff-only', 47 "--record-origin",
48 dest='ffonly', action='store_true', 48 action="store_true",
49 help="force fast-forward merge") 49 help="pass -x when cherry-picking",
50 50 )
51 def _ParseChangeIds(self, opt, args): 51 p.add_option(
52 if not args: 52 "-r",
53 self.Usage() 53 "--revert",
54 54 dest="revert",
55 to_get = [] 55 action="store_true",
56 project = None 56 help="revert instead of checkout",
57 57 )
58 for a in args: 58 p.add_option(
59 m = CHANGE_RE.match(a) 59 "-f",
60 if m: 60 "--ff-only",
61 if not project: 61 dest="ffonly",
62 project = self.GetProjects(".")[0] 62 action="store_true",
63 print('Defaulting to cwd project', project.name) 63 help="force fast-forward merge",
64 chg_id = int(m.group(1)) 64 )
65 if m.group(2): 65
66 ps_id = int(m.group(2)) 66 def _ParseChangeIds(self, opt, args):
67 else: 67 if not args:
68 ps_id = 1 68 self.Usage()
69 refs = 'refs/changes/%2.2d/%d/' % (chg_id % 100, chg_id) 69
70 output = project._LsRemote(refs + '*') 70 to_get = []
71 if output: 71 project = None
72 regex = refs + r'(\d+)' 72
73 rcomp = re.compile(regex, re.I) 73 for a in args:
74 for line in output.splitlines(): 74 m = CHANGE_RE.match(a)
75 match = rcomp.search(line) 75 if m:
76 if match: 76 if not project:
77 ps_id = max(int(match.group(1)), ps_id) 77 project = self.GetProjects(".")[0]
78 to_get.append((project, chg_id, ps_id)) 78 print("Defaulting to cwd project", project.name)
79 else: 79 chg_id = int(m.group(1))
80 projects = self.GetProjects([a], all_manifests=not opt.this_manifest_only) 80 if m.group(2):
81 if len(projects) > 1: 81 ps_id = int(m.group(2))
82 # If the cwd is one of the projects, assume they want that. 82 else:
83 try: 83 ps_id = 1
84 project = self.GetProjects('.')[0] 84 refs = "refs/changes/%2.2d/%d/" % (chg_id % 100, chg_id)
85 except NoSuchProjectError: 85 output = project._LsRemote(refs + "*")
86 project = None 86 if output:
87 if project not in projects: 87 regex = refs + r"(\d+)"
88 print('error: %s matches too many projects; please re-run inside ' 88 rcomp = re.compile(regex, re.I)
89 'the project checkout.' % (a,), file=sys.stderr) 89 for line in output.splitlines():
90 for project in projects: 90 match = rcomp.search(line)
91 print(' %s/ @ %s' % (project.RelPath(local=opt.this_manifest_only), 91 if match:
92 project.revisionExpr), file=sys.stderr) 92 ps_id = max(int(match.group(1)), ps_id)
93 sys.exit(1) 93 to_get.append((project, chg_id, ps_id))
94 else: 94 else:
95 project = projects[0] 95 projects = self.GetProjects(
96 print('Defaulting to cwd project', project.name) 96 [a], all_manifests=not opt.this_manifest_only
97 return to_get 97 )
98 98 if len(projects) > 1:
99 def ValidateOptions(self, opt, args): 99 # If the cwd is one of the projects, assume they want that.
100 if opt.record_origin: 100 try:
101 if not opt.cherrypick: 101 project = self.GetProjects(".")[0]
102 self.OptionParser.error('-x only makes sense with --cherry-pick') 102 except NoSuchProjectError:
103 103 project = None
104 if opt.ffonly: 104 if project not in projects:
105 self.OptionParser.error('-x and --ff are mutually exclusive options') 105 print(
106 106 "error: %s matches too many projects; please "
107 def Execute(self, opt, args): 107 "re-run inside the project checkout." % (a,),
108 for project, change_id, ps_id in self._ParseChangeIds(opt, args): 108 file=sys.stderr,
109 dl = project.DownloadPatchSet(change_id, ps_id) 109 )
110 if not dl: 110 for project in projects:
111 print('[%s] change %d/%d not found' 111 print(
112 % (project.name, change_id, ps_id), 112 " %s/ @ %s"
113 file=sys.stderr) 113 % (
114 sys.exit(1) 114 project.RelPath(
115 115 local=opt.this_manifest_only
116 if not opt.revert and not dl.commits: 116 ),
117 print('[%s] change %d/%d has already been merged' 117 project.revisionExpr,
118 % (project.name, change_id, ps_id), 118 ),
119 file=sys.stderr) 119 file=sys.stderr,
120 continue 120 )
121 121 sys.exit(1)
122 if len(dl.commits) > 1: 122 else:
123 print('[%s] %d/%d depends on %d unmerged changes:' 123 project = projects[0]
124 % (project.name, change_id, ps_id, len(dl.commits)), 124 print("Defaulting to cwd project", project.name)
125 file=sys.stderr) 125 return to_get
126 for c in dl.commits: 126
127 print(' %s' % (c), file=sys.stderr) 127 def ValidateOptions(self, opt, args):
128 128 if opt.record_origin:
129 if opt.cherrypick: 129 if not opt.cherrypick:
130 mode = 'cherry-pick' 130 self.OptionParser.error(
131 elif opt.revert: 131 "-x only makes sense with --cherry-pick"
132 mode = 'revert' 132 )
133 elif opt.ffonly: 133
134 mode = 'fast-forward merge' 134 if opt.ffonly:
135 else: 135 self.OptionParser.error(
136 mode = 'checkout' 136 "-x and --ff are mutually exclusive options"
137 137 )
138 # We'll combine the branch+checkout operation, but all the rest need a 138
139 # dedicated branch start. 139 def Execute(self, opt, args):
140 if opt.branch and mode != 'checkout': 140 for project, change_id, ps_id in self._ParseChangeIds(opt, args):
141 project.StartBranch(opt.branch) 141 dl = project.DownloadPatchSet(change_id, ps_id)
142 142 if not dl:
143 try: 143 print(
144 if opt.cherrypick: 144 "[%s] change %d/%d not found"
145 project._CherryPick(dl.commit, ffonly=opt.ffonly, 145 % (project.name, change_id, ps_id),
146 record_origin=opt.record_origin) 146 file=sys.stderr,
147 elif opt.revert: 147 )
148 project._Revert(dl.commit) 148 sys.exit(1)
149 elif opt.ffonly: 149
150 project._FastForward(dl.commit, ffonly=True) 150 if not opt.revert and not dl.commits:
151 else: 151 print(
152 if opt.branch: 152 "[%s] change %d/%d has already been merged"
153 project.StartBranch(opt.branch, revision=dl.commit) 153 % (project.name, change_id, ps_id),
154 else: 154 file=sys.stderr,
155 project._Checkout(dl.commit) 155 )
156 156 continue
157 except GitError: 157
158 print('[%s] Could not complete the %s of %s' 158 if len(dl.commits) > 1:
159 % (project.name, mode, dl.commit), file=sys.stderr) 159 print(
160 sys.exit(1) 160 "[%s] %d/%d depends on %d unmerged changes:"
161 % (project.name, change_id, ps_id, len(dl.commits)),
162 file=sys.stderr,
163 )
164 for c in dl.commits:
165 print(" %s" % (c), file=sys.stderr)
166
167 if opt.cherrypick:
168 mode = "cherry-pick"
169 elif opt.revert:
170 mode = "revert"
171 elif opt.ffonly:
172 mode = "fast-forward merge"
173 else:
174 mode = "checkout"
175
176 # We'll combine the branch+checkout operation, but all the rest need
177 # a dedicated branch start.
178 if opt.branch and mode != "checkout":
179 project.StartBranch(opt.branch)
180
181 try:
182 if opt.cherrypick:
183 project._CherryPick(
184 dl.commit,
185 ffonly=opt.ffonly,
186 record_origin=opt.record_origin,
187 )
188 elif opt.revert:
189 project._Revert(dl.commit)
190 elif opt.ffonly:
191 project._FastForward(dl.commit, ffonly=True)
192 else:
193 if opt.branch:
194 project.StartBranch(opt.branch, revision=dl.commit)
195 else:
196 project._Checkout(dl.commit)
197
198 except GitError:
199 print(
200 "[%s] Could not complete the %s of %s"
201 % (project.name, mode, dl.commit),
202 file=sys.stderr,
203 )
204 sys.exit(1)
diff --git a/subcmds/forall.py b/subcmds/forall.py
index f9f34e33..0a897357 100644
--- a/subcmds/forall.py
+++ b/subcmds/forall.py
@@ -23,31 +23,36 @@ import sys
23import subprocess 23import subprocess
24 24
25from color import Coloring 25from color import Coloring
26from command import DEFAULT_LOCAL_JOBS, Command, MirrorSafeCommand, WORKER_BATCH_SIZE 26from command import (
27 DEFAULT_LOCAL_JOBS,
28 Command,
29 MirrorSafeCommand,
30 WORKER_BATCH_SIZE,
31)
27from error import ManifestInvalidRevisionError 32from error import ManifestInvalidRevisionError
28 33
29_CAN_COLOR = [ 34_CAN_COLOR = [
30 'branch', 35 "branch",
31 'diff', 36 "diff",
32 'grep', 37 "grep",
33 'log', 38 "log",
34] 39]
35 40
36 41
37class ForallColoring(Coloring): 42class ForallColoring(Coloring):
38 def __init__(self, config): 43 def __init__(self, config):
39 Coloring.__init__(self, config, 'forall') 44 Coloring.__init__(self, config, "forall")
40 self.project = self.printer('project', attr='bold') 45 self.project = self.printer("project", attr="bold")
41 46
42 47
43class Forall(Command, MirrorSafeCommand): 48class Forall(Command, MirrorSafeCommand):
44 COMMON = False 49 COMMON = False
45 helpSummary = "Run a shell command in each project" 50 helpSummary = "Run a shell command in each project"
46 helpUsage = """ 51 helpUsage = """
47%prog [<project>...] -c <command> [<arg>...] 52%prog [<project>...] -c <command> [<arg>...]
48%prog -r str1 [str2] ... -c <command> [<arg>...] 53%prog -r str1 [str2] ... -c <command> [<arg>...]
49""" 54"""
50 helpDescription = """ 55 helpDescription = """
51Executes the same shell command in each project. 56Executes the same shell command in each project.
52 57
53The -r option allows running the command only on projects matching 58The -r option allows running the command only on projects matching
@@ -125,236 +130,285 @@ terminal and are not redirected.
125If -e is used, when a command exits unsuccessfully, '%prog' will abort 130If -e is used, when a command exits unsuccessfully, '%prog' will abort
126without iterating through the remaining projects. 131without iterating through the remaining projects.
127""" 132"""
128 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS 133 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
129 134
130 @staticmethod 135 @staticmethod
131 def _cmd_option(option, _opt_str, _value, parser): 136 def _cmd_option(option, _opt_str, _value, parser):
132 setattr(parser.values, option.dest, list(parser.rargs)) 137 setattr(parser.values, option.dest, list(parser.rargs))
133 while parser.rargs: 138 while parser.rargs:
134 del parser.rargs[0] 139 del parser.rargs[0]
135 140
136 def _Options(self, p): 141 def _Options(self, p):
137 p.add_option('-r', '--regex', 142 p.add_option(
138 dest='regex', action='store_true', 143 "-r",
139 help='execute the command only on projects matching regex or wildcard expression') 144 "--regex",
140 p.add_option('-i', '--inverse-regex', 145 dest="regex",
141 dest='inverse_regex', action='store_true', 146 action="store_true",
142 help='execute the command only on projects not matching regex or ' 147 help="execute the command only on projects matching regex or "
143 'wildcard expression') 148 "wildcard expression",
144 p.add_option('-g', '--groups', 149 )
145 dest='groups', 150 p.add_option(
146 help='execute the command only on projects matching the specified groups') 151 "-i",
147 p.add_option('-c', '--command', 152 "--inverse-regex",
148 help='command (and arguments) to execute', 153 dest="inverse_regex",
149 dest='command', 154 action="store_true",
150 action='callback', 155 help="execute the command only on projects not matching regex or "
151 callback=self._cmd_option) 156 "wildcard expression",
152 p.add_option('-e', '--abort-on-errors', 157 )
153 dest='abort_on_errors', action='store_true', 158 p.add_option(
154 help='abort if a command exits unsuccessfully') 159 "-g",
155 p.add_option('--ignore-missing', action='store_true', 160 "--groups",
156 help='silently skip & do not exit non-zero due missing ' 161 dest="groups",
157 'checkouts') 162 help="execute the command only on projects matching the specified "
158 163 "groups",
159 g = p.get_option_group('--quiet') 164 )
160 g.add_option('-p', 165 p.add_option(
161 dest='project_header', action='store_true', 166 "-c",
162 help='show project headers before output') 167 "--command",
163 p.add_option('--interactive', 168 help="command (and arguments) to execute",
164 action='store_true', 169 dest="command",
165 help='force interactive usage') 170 action="callback",
166 171 callback=self._cmd_option,
167 def WantPager(self, opt): 172 )
168 return opt.project_header and opt.jobs == 1 173 p.add_option(
169 174 "-e",
170 def ValidateOptions(self, opt, args): 175 "--abort-on-errors",
171 if not opt.command: 176 dest="abort_on_errors",
172 self.Usage() 177 action="store_true",
173 178 help="abort if a command exits unsuccessfully",
174 def Execute(self, opt, args): 179 )
175 cmd = [opt.command[0]] 180 p.add_option(
176 all_trees = not opt.this_manifest_only 181 "--ignore-missing",
177 182 action="store_true",
178 shell = True 183 help="silently skip & do not exit non-zero due missing "
179 if re.compile(r'^[a-z0-9A-Z_/\.-]+$').match(cmd[0]): 184 "checkouts",
180 shell = False 185 )
181 186
182 if shell: 187 g = p.get_option_group("--quiet")
183 cmd.append(cmd[0]) 188 g.add_option(
184 cmd.extend(opt.command[1:]) 189 "-p",
185 190 dest="project_header",
186 # Historically, forall operated interactively, and in serial. If the user 191 action="store_true",
187 # has selected 1 job, then default to interacive mode. 192 help="show project headers before output",
188 if opt.jobs == 1: 193 )
189 opt.interactive = True 194 p.add_option(
190 195 "--interactive", action="store_true", help="force interactive usage"
191 if opt.project_header \ 196 )
192 and not shell \ 197
193 and cmd[0] == 'git': 198 def WantPager(self, opt):
194 # If this is a direct git command that can enable colorized 199 return opt.project_header and opt.jobs == 1
195 # output and the user prefers coloring, add --color into the 200
196 # command line because we are going to wrap the command into 201 def ValidateOptions(self, opt, args):
197 # a pipe and git won't know coloring should activate. 202 if not opt.command:
198 # 203 self.Usage()
199 for cn in cmd[1:]: 204
200 if not cn.startswith('-'): 205 def Execute(self, opt, args):
201 break 206 cmd = [opt.command[0]]
202 else: 207 all_trees = not opt.this_manifest_only
203 cn = None 208
204 if cn and cn in _CAN_COLOR: 209 shell = True
205 class ColorCmd(Coloring): 210 if re.compile(r"^[a-z0-9A-Z_/\.-]+$").match(cmd[0]):
206 def __init__(self, config, cmd): 211 shell = False
207 Coloring.__init__(self, config, cmd) 212
208 if ColorCmd(self.manifest.manifestProject.config, cn).is_on: 213 if shell:
209 cmd.insert(cmd.index(cn) + 1, '--color') 214 cmd.append(cmd[0])
210 215 cmd.extend(opt.command[1:])
211 mirror = self.manifest.IsMirror 216
212 rc = 0 217 # Historically, forall operated interactively, and in serial. If the
213 218 # user has selected 1 job, then default to interacive mode.
214 smart_sync_manifest_name = "smart_sync_override.xml" 219 if opt.jobs == 1:
215 smart_sync_manifest_path = os.path.join( 220 opt.interactive = True
216 self.manifest.manifestProject.worktree, smart_sync_manifest_name) 221
217 222 if opt.project_header and not shell and cmd[0] == "git":
218 if os.path.isfile(smart_sync_manifest_path): 223 # If this is a direct git command that can enable colorized
219 self.manifest.Override(smart_sync_manifest_path) 224 # output and the user prefers coloring, add --color into the
220 225 # command line because we are going to wrap the command into
221 if opt.regex: 226 # a pipe and git won't know coloring should activate.
222 projects = self.FindProjects(args, all_manifests=all_trees) 227 #
223 elif opt.inverse_regex: 228 for cn in cmd[1:]:
224 projects = self.FindProjects(args, inverse=True, all_manifests=all_trees) 229 if not cn.startswith("-"):
225 else: 230 break
226 projects = self.GetProjects(args, groups=opt.groups, all_manifests=all_trees) 231 else:
227 232 cn = None
228 os.environ['REPO_COUNT'] = str(len(projects)) 233 if cn and cn in _CAN_COLOR:
229 234
230 try: 235 class ColorCmd(Coloring):
231 config = self.manifest.manifestProject.config 236 def __init__(self, config, cmd):
232 with multiprocessing.Pool(opt.jobs, InitWorker) as pool: 237 Coloring.__init__(self, config, cmd)
233 results_it = pool.imap( 238
234 functools.partial(DoWorkWrapper, mirror, opt, cmd, shell, config), 239 if ColorCmd(self.manifest.manifestProject.config, cn).is_on:
235 enumerate(projects), 240 cmd.insert(cmd.index(cn) + 1, "--color")
236 chunksize=WORKER_BATCH_SIZE) 241
237 first = True 242 mirror = self.manifest.IsMirror
238 for (r, output) in results_it: 243 rc = 0
239 if output: 244
240 if first: 245 smart_sync_manifest_name = "smart_sync_override.xml"
241 first = False 246 smart_sync_manifest_path = os.path.join(
242 elif opt.project_header: 247 self.manifest.manifestProject.worktree, smart_sync_manifest_name
243 print() 248 )
244 # To simplify the DoWorkWrapper, take care of automatic newlines. 249
245 end = '\n' 250 if os.path.isfile(smart_sync_manifest_path):
246 if output[-1] == '\n': 251 self.manifest.Override(smart_sync_manifest_path)
247 end = '' 252
248 print(output, end=end) 253 if opt.regex:
249 rc = rc or r 254 projects = self.FindProjects(args, all_manifests=all_trees)
250 if r != 0 and opt.abort_on_errors: 255 elif opt.inverse_regex:
251 raise Exception('Aborting due to previous error') 256 projects = self.FindProjects(
252 except (KeyboardInterrupt, WorkerKeyboardInterrupt): 257 args, inverse=True, all_manifests=all_trees
253 # Catch KeyboardInterrupt raised inside and outside of workers 258 )
254 rc = rc or errno.EINTR 259 else:
255 except Exception as e: 260 projects = self.GetProjects(
256 # Catch any other exceptions raised 261 args, groups=opt.groups, all_manifests=all_trees
257 print('forall: unhandled error, terminating the pool: %s: %s' % 262 )
258 (type(e).__name__, e), 263
259 file=sys.stderr) 264 os.environ["REPO_COUNT"] = str(len(projects))
260 rc = rc or getattr(e, 'errno', 1) 265
261 if rc != 0: 266 try:
262 sys.exit(rc) 267 config = self.manifest.manifestProject.config
268 with multiprocessing.Pool(opt.jobs, InitWorker) as pool:
269 results_it = pool.imap(
270 functools.partial(
271 DoWorkWrapper, mirror, opt, cmd, shell, config
272 ),
273 enumerate(projects),
274 chunksize=WORKER_BATCH_SIZE,
275 )
276 first = True
277 for r, output in results_it:
278 if output:
279 if first:
280 first = False
281 elif opt.project_header:
282 print()
283 # To simplify the DoWorkWrapper, take care of automatic
284 # newlines.
285 end = "\n"
286 if output[-1] == "\n":
287 end = ""
288 print(output, end=end)
289 rc = rc or r
290 if r != 0 and opt.abort_on_errors:
291 raise Exception("Aborting due to previous error")
292 except (KeyboardInterrupt, WorkerKeyboardInterrupt):
293 # Catch KeyboardInterrupt raised inside and outside of workers
294 rc = rc or errno.EINTR
295 except Exception as e:
296 # Catch any other exceptions raised
297 print(
298 "forall: unhandled error, terminating the pool: %s: %s"
299 % (type(e).__name__, e),
300 file=sys.stderr,
301 )
302 rc = rc or getattr(e, "errno", 1)
303 if rc != 0:
304 sys.exit(rc)
263 305
264 306
265class WorkerKeyboardInterrupt(Exception): 307class WorkerKeyboardInterrupt(Exception):
266 """ Keyboard interrupt exception for worker processes. """ 308 """Keyboard interrupt exception for worker processes."""
267 309
268 310
269def InitWorker(): 311def InitWorker():
270 signal.signal(signal.SIGINT, signal.SIG_IGN) 312 signal.signal(signal.SIGINT, signal.SIG_IGN)
271 313
272 314
273def DoWorkWrapper(mirror, opt, cmd, shell, config, args): 315def DoWorkWrapper(mirror, opt, cmd, shell, config, args):
274 """ A wrapper around the DoWork() method. 316 """A wrapper around the DoWork() method.
275 317
276 Catch the KeyboardInterrupt exceptions here and re-raise them as a different, 318 Catch the KeyboardInterrupt exceptions here and re-raise them as a
277 ``Exception``-based exception to stop it flooding the console with stacktraces 319 different, ``Exception``-based exception to stop it flooding the console
278 and making the parent hang indefinitely. 320 with stacktraces and making the parent hang indefinitely.
279 321
280 """ 322 """
281 cnt, project = args 323 cnt, project = args
282 try: 324 try:
283 return DoWork(project, mirror, opt, cmd, shell, cnt, config) 325 return DoWork(project, mirror, opt, cmd, shell, cnt, config)
284 except KeyboardInterrupt: 326 except KeyboardInterrupt:
285 print('%s: Worker interrupted' % project.name) 327 print("%s: Worker interrupted" % project.name)
286 raise WorkerKeyboardInterrupt() 328 raise WorkerKeyboardInterrupt()
287 329
288 330
289def DoWork(project, mirror, opt, cmd, shell, cnt, config): 331def DoWork(project, mirror, opt, cmd, shell, cnt, config):
290 env = os.environ.copy() 332 env = os.environ.copy()
291 333
292 def setenv(name, val): 334 def setenv(name, val):
293 if val is None: 335 if val is None:
294 val = '' 336 val = ""
295 env[name] = val 337 env[name] = val
296 338
297 setenv('REPO_PROJECT', project.name) 339 setenv("REPO_PROJECT", project.name)
298 setenv('REPO_OUTERPATH', project.manifest.path_prefix) 340 setenv("REPO_OUTERPATH", project.manifest.path_prefix)
299 setenv('REPO_INNERPATH', project.relpath) 341 setenv("REPO_INNERPATH", project.relpath)
300 setenv('REPO_PATH', project.RelPath(local=opt.this_manifest_only)) 342 setenv("REPO_PATH", project.RelPath(local=opt.this_manifest_only))
301 setenv('REPO_REMOTE', project.remote.name) 343 setenv("REPO_REMOTE", project.remote.name)
302 try: 344 try:
303 # If we aren't in a fully synced state and we don't have the ref the manifest 345 # If we aren't in a fully synced state and we don't have the ref the
304 # wants, then this will fail. Ignore it for the purposes of this code. 346 # manifest wants, then this will fail. Ignore it for the purposes of
305 lrev = '' if mirror else project.GetRevisionId() 347 # this code.
306 except ManifestInvalidRevisionError: 348 lrev = "" if mirror else project.GetRevisionId()
307 lrev = '' 349 except ManifestInvalidRevisionError:
308 setenv('REPO_LREV', lrev) 350 lrev = ""
309 setenv('REPO_RREV', project.revisionExpr) 351 setenv("REPO_LREV", lrev)
310 setenv('REPO_UPSTREAM', project.upstream) 352 setenv("REPO_RREV", project.revisionExpr)
311 setenv('REPO_DEST_BRANCH', project.dest_branch) 353 setenv("REPO_UPSTREAM", project.upstream)
312 setenv('REPO_I', str(cnt + 1)) 354 setenv("REPO_DEST_BRANCH", project.dest_branch)
313 for annotation in project.annotations: 355 setenv("REPO_I", str(cnt + 1))
314 setenv("REPO__%s" % (annotation.name), annotation.value) 356 for annotation in project.annotations:
315 357 setenv("REPO__%s" % (annotation.name), annotation.value)
316 if mirror: 358
317 setenv('GIT_DIR', project.gitdir) 359 if mirror:
318 cwd = project.gitdir 360 setenv("GIT_DIR", project.gitdir)
319 else: 361 cwd = project.gitdir
320 cwd = project.worktree 362 else:
321 363 cwd = project.worktree
322 if not os.path.exists(cwd): 364
323 # Allow the user to silently ignore missing checkouts so they can run on 365 if not os.path.exists(cwd):
324 # partial checkouts (good for infra recovery tools). 366 # Allow the user to silently ignore missing checkouts so they can run on
325 if opt.ignore_missing: 367 # partial checkouts (good for infra recovery tools).
326 return (0, '') 368 if opt.ignore_missing:
327 369 return (0, "")
328 output = '' 370
329 if ((opt.project_header and opt.verbose) 371 output = ""
330 or not opt.project_header): 372 if (opt.project_header and opt.verbose) or not opt.project_header:
331 output = 'skipping %s/' % project.RelPath(local=opt.this_manifest_only) 373 output = "skipping %s/" % project.RelPath(
332 return (1, output) 374 local=opt.this_manifest_only
333 375 )
334 if opt.verbose: 376 return (1, output)
335 stderr = subprocess.STDOUT 377
336 else: 378 if opt.verbose:
337 stderr = subprocess.DEVNULL 379 stderr = subprocess.STDOUT
338 380 else:
339 stdin = None if opt.interactive else subprocess.DEVNULL 381 stderr = subprocess.DEVNULL
340 382
341 result = subprocess.run( 383 stdin = None if opt.interactive else subprocess.DEVNULL
342 cmd, cwd=cwd, shell=shell, env=env, check=False, 384
343 encoding='utf-8', errors='replace', 385 result = subprocess.run(
344 stdin=stdin, stdout=subprocess.PIPE, stderr=stderr) 386 cmd,
345 387 cwd=cwd,
346 output = result.stdout 388 shell=shell,
347 if opt.project_header: 389 env=env,
348 if output: 390 check=False,
349 buf = io.StringIO() 391 encoding="utf-8",
350 out = ForallColoring(config) 392 errors="replace",
351 out.redirect(buf) 393 stdin=stdin,
352 if mirror: 394 stdout=subprocess.PIPE,
353 project_header_path = project.name 395 stderr=stderr,
354 else: 396 )
355 project_header_path = project.RelPath(local=opt.this_manifest_only) 397
356 out.project('project %s/' % project_header_path) 398 output = result.stdout
357 out.nl() 399 if opt.project_header:
358 buf.write(output) 400 if output:
359 output = buf.getvalue() 401 buf = io.StringIO()
360 return (result.returncode, output) 402 out = ForallColoring(config)
403 out.redirect(buf)
404 if mirror:
405 project_header_path = project.name
406 else:
407 project_header_path = project.RelPath(
408 local=opt.this_manifest_only
409 )
410 out.project("project %s/" % project_header_path)
411 out.nl()
412 buf.write(output)
413 output = buf.getvalue()
414 return (result.returncode, output)
diff --git a/subcmds/gitc_delete.py b/subcmds/gitc_delete.py
index df749469..ae9d4d1f 100644
--- a/subcmds/gitc_delete.py
+++ b/subcmds/gitc_delete.py
@@ -19,28 +19,34 @@ import platform_utils
19 19
20 20
21class GitcDelete(Command, GitcClientCommand): 21class GitcDelete(Command, GitcClientCommand):
22 COMMON = True 22 COMMON = True
23 visible_everywhere = False 23 visible_everywhere = False
24 helpSummary = "Delete a GITC Client." 24 helpSummary = "Delete a GITC Client."
25 helpUsage = """ 25 helpUsage = """
26%prog 26%prog
27""" 27"""
28 helpDescription = """ 28 helpDescription = """
29This subcommand deletes the current GITC client, deleting the GITC manifest 29This subcommand deletes the current GITC client, deleting the GITC manifest
30and all locally downloaded sources. 30and all locally downloaded sources.
31""" 31"""
32 32
33 def _Options(self, p): 33 def _Options(self, p):
34 p.add_option('-f', '--force', 34 p.add_option(
35 dest='force', action='store_true', 35 "-f",
36 help='force the deletion (no prompt)') 36 "--force",
37 dest="force",
38 action="store_true",
39 help="force the deletion (no prompt)",
40 )
37 41
38 def Execute(self, opt, args): 42 def Execute(self, opt, args):
39 if not opt.force: 43 if not opt.force:
40 prompt = ('This will delete GITC client: %s\nAre you sure? (yes/no) ' % 44 prompt = (
41 self.gitc_manifest.gitc_client_name) 45 "This will delete GITC client: %s\nAre you sure? (yes/no) "
42 response = input(prompt).lower() 46 % self.gitc_manifest.gitc_client_name
43 if not response == 'yes': 47 )
44 print('Response was not "yes"\n Exiting...') 48 response = input(prompt).lower()
45 sys.exit(1) 49 if not response == "yes":
46 platform_utils.rmtree(self.gitc_manifest.gitc_client_dir) 50 print('Response was not "yes"\n Exiting...')
51 sys.exit(1)
52 platform_utils.rmtree(self.gitc_manifest.gitc_client_dir)
diff --git a/subcmds/gitc_init.py b/subcmds/gitc_init.py
index e3a5813d..54791d58 100644
--- a/subcmds/gitc_init.py
+++ b/subcmds/gitc_init.py
@@ -23,13 +23,13 @@ import wrapper
23 23
24 24
25class GitcInit(init.Init, GitcAvailableCommand): 25class GitcInit(init.Init, GitcAvailableCommand):
26 COMMON = True 26 COMMON = True
27 MULTI_MANIFEST_SUPPORT = False 27 MULTI_MANIFEST_SUPPORT = False
28 helpSummary = "Initialize a GITC Client." 28 helpSummary = "Initialize a GITC Client."
29 helpUsage = """ 29 helpUsage = """
30%prog [options] [client name] 30%prog [options] [client name]
31""" 31"""
32 helpDescription = """ 32 helpDescription = """
33The '%prog' command is ran to initialize a new GITC client for use 33The '%prog' command is ran to initialize a new GITC client for use
34with the GITC file system. 34with the GITC file system.
35 35
@@ -47,30 +47,41 @@ The optional -f argument can be used to specify the manifest file to
47use for this GITC client. 47use for this GITC client.
48""" 48"""
49 49
50 def _Options(self, p): 50 def _Options(self, p):
51 super()._Options(p, gitc_init=True) 51 super()._Options(p, gitc_init=True)
52 52
53 def Execute(self, opt, args): 53 def Execute(self, opt, args):
54 gitc_client = gitc_utils.parse_clientdir(os.getcwd()) 54 gitc_client = gitc_utils.parse_clientdir(os.getcwd())
55 if not gitc_client or (opt.gitc_client and gitc_client != opt.gitc_client): 55 if not gitc_client or (
56 print('fatal: Please update your repo command. See go/gitc for instructions.', 56 opt.gitc_client and gitc_client != opt.gitc_client
57 file=sys.stderr) 57 ):
58 sys.exit(1) 58 print(
59 self.client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(), 59 "fatal: Please update your repo command. See go/gitc for "
60 gitc_client) 60 "instructions.",
61 super().Execute(opt, args) 61 file=sys.stderr,
62 )
63 sys.exit(1)
64 self.client_dir = os.path.join(
65 gitc_utils.get_gitc_manifest_dir(), gitc_client
66 )
67 super().Execute(opt, args)
62 68
63 manifest_file = self.manifest.manifestFile 69 manifest_file = self.manifest.manifestFile
64 if opt.manifest_file: 70 if opt.manifest_file:
65 if not os.path.exists(opt.manifest_file): 71 if not os.path.exists(opt.manifest_file):
66 print('fatal: Specified manifest file %s does not exist.' % 72 print(
67 opt.manifest_file) 73 "fatal: Specified manifest file %s does not exist."
68 sys.exit(1) 74 % opt.manifest_file
69 manifest_file = opt.manifest_file 75 )
76 sys.exit(1)
77 manifest_file = opt.manifest_file
70 78
71 manifest = GitcManifest(self.repodir, os.path.join(self.client_dir, 79 manifest = GitcManifest(
72 '.manifest')) 80 self.repodir, os.path.join(self.client_dir, ".manifest")
73 manifest.Override(manifest_file) 81 )
74 gitc_utils.generate_gitc_manifest(None, manifest) 82 manifest.Override(manifest_file)
75 print('Please run `cd %s` to view your GITC client.' % 83 gitc_utils.generate_gitc_manifest(None, manifest)
76 os.path.join(wrapper.Wrapper().GITC_FS_ROOT_DIR, gitc_client)) 84 print(
85 "Please run `cd %s` to view your GITC client."
86 % os.path.join(wrapper.Wrapper().GITC_FS_ROOT_DIR, gitc_client)
87 )
diff --git a/subcmds/grep.py b/subcmds/grep.py
index 93c9ae51..5cd33763 100644
--- a/subcmds/grep.py
+++ b/subcmds/grep.py
@@ -22,19 +22,19 @@ from git_command import GitCommand
22 22
23 23
24class GrepColoring(Coloring): 24class GrepColoring(Coloring):
25 def __init__(self, config): 25 def __init__(self, config):
26 Coloring.__init__(self, config, 'grep') 26 Coloring.__init__(self, config, "grep")
27 self.project = self.printer('project', attr='bold') 27 self.project = self.printer("project", attr="bold")
28 self.fail = self.printer('fail', fg='red') 28 self.fail = self.printer("fail", fg="red")
29 29
30 30
31class Grep(PagedCommand): 31class Grep(PagedCommand):
32 COMMON = True 32 COMMON = True
33 helpSummary = "Print lines matching a pattern" 33 helpSummary = "Print lines matching a pattern"
34 helpUsage = """ 34 helpUsage = """
35%prog {pattern | -e pattern} [<project>...] 35%prog {pattern | -e pattern} [<project>...]
36""" 36"""
37 helpDescription = """ 37 helpDescription = """
38Search for the specified patterns in all project files. 38Search for the specified patterns in all project files.
39 39
40# Boolean Options 40# Boolean Options
@@ -62,215 +62,304 @@ contain a line that matches both expressions:
62 repo grep --all-match -e NODE -e Unexpected 62 repo grep --all-match -e NODE -e Unexpected
63 63
64""" 64"""
65 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS 65 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
66 66
67 @staticmethod 67 @staticmethod
68 def _carry_option(_option, opt_str, value, parser): 68 def _carry_option(_option, opt_str, value, parser):
69 pt = getattr(parser.values, 'cmd_argv', None) 69 pt = getattr(parser.values, "cmd_argv", None)
70 if pt is None: 70 if pt is None:
71 pt = [] 71 pt = []
72 setattr(parser.values, 'cmd_argv', pt) 72 setattr(parser.values, "cmd_argv", pt)
73 73
74 if opt_str == '-(': 74 if opt_str == "-(":
75 pt.append('(') 75 pt.append("(")
76 elif opt_str == '-)': 76 elif opt_str == "-)":
77 pt.append(')') 77 pt.append(")")
78 else: 78 else:
79 pt.append(opt_str) 79 pt.append(opt_str)
80 80
81 if value is not None: 81 if value is not None:
82 pt.append(value) 82 pt.append(value)
83 83
84 def _CommonOptions(self, p): 84 def _CommonOptions(self, p):
85 """Override common options slightly.""" 85 """Override common options slightly."""
86 super()._CommonOptions(p, opt_v=False) 86 super()._CommonOptions(p, opt_v=False)
87 87
88 def _Options(self, p): 88 def _Options(self, p):
89 g = p.add_option_group('Sources') 89 g = p.add_option_group("Sources")
90 g.add_option('--cached', 90 g.add_option(
91 action='callback', callback=self._carry_option, 91 "--cached",
92 help='Search the index, instead of the work tree') 92 action="callback",
93 g.add_option('-r', '--revision', 93 callback=self._carry_option,
94 dest='revision', action='append', metavar='TREEish', 94 help="Search the index, instead of the work tree",
95 help='Search TREEish, instead of the work tree') 95 )
96 96 g.add_option(
97 g = p.add_option_group('Pattern') 97 "-r",
98 g.add_option('-e', 98 "--revision",
99 action='callback', callback=self._carry_option, 99 dest="revision",
100 metavar='PATTERN', type='str', 100 action="append",
101 help='Pattern to search for') 101 metavar="TREEish",
102 g.add_option('-i', '--ignore-case', 102 help="Search TREEish, instead of the work tree",
103 action='callback', callback=self._carry_option, 103 )
104 help='Ignore case differences') 104
105 g.add_option('-a', '--text', 105 g = p.add_option_group("Pattern")
106 action='callback', callback=self._carry_option, 106 g.add_option(
107 help="Process binary files as if they were text") 107 "-e",
108 g.add_option('-I', 108 action="callback",
109 action='callback', callback=self._carry_option, 109 callback=self._carry_option,
110 help="Don't match the pattern in binary files") 110 metavar="PATTERN",
111 g.add_option('-w', '--word-regexp', 111 type="str",
112 action='callback', callback=self._carry_option, 112 help="Pattern to search for",
113 help='Match the pattern only at word boundaries') 113 )
114 g.add_option('-v', '--invert-match', 114 g.add_option(
115 action='callback', callback=self._carry_option, 115 "-i",
116 help='Select non-matching lines') 116 "--ignore-case",
117 g.add_option('-G', '--basic-regexp', 117 action="callback",
118 action='callback', callback=self._carry_option, 118 callback=self._carry_option,
119 help='Use POSIX basic regexp for patterns (default)') 119 help="Ignore case differences",
120 g.add_option('-E', '--extended-regexp', 120 )
121 action='callback', callback=self._carry_option, 121 g.add_option(
122 help='Use POSIX extended regexp for patterns') 122 "-a",
123 g.add_option('-F', '--fixed-strings', 123 "--text",
124 action='callback', callback=self._carry_option, 124 action="callback",
125 help='Use fixed strings (not regexp) for pattern') 125 callback=self._carry_option,
126 126 help="Process binary files as if they were text",
127 g = p.add_option_group('Pattern Grouping') 127 )
128 g.add_option('--all-match', 128 g.add_option(
129 action='callback', callback=self._carry_option, 129 "-I",
130 help='Limit match to lines that have all patterns') 130 action="callback",
131 g.add_option('--and', '--or', '--not', 131 callback=self._carry_option,
132 action='callback', callback=self._carry_option, 132 help="Don't match the pattern in binary files",
133 help='Boolean operators to combine patterns') 133 )
134 g.add_option('-(', '-)', 134 g.add_option(
135 action='callback', callback=self._carry_option, 135 "-w",
136 help='Boolean operator grouping') 136 "--word-regexp",
137 137 action="callback",
138 g = p.add_option_group('Output') 138 callback=self._carry_option,
139 g.add_option('-n', 139 help="Match the pattern only at word boundaries",
140 action='callback', callback=self._carry_option, 140 )
141 help='Prefix the line number to matching lines') 141 g.add_option(
142 g.add_option('-C', 142 "-v",
143 action='callback', callback=self._carry_option, 143 "--invert-match",
144 metavar='CONTEXT', type='str', 144 action="callback",
145 help='Show CONTEXT lines around match') 145 callback=self._carry_option,
146 g.add_option('-B', 146 help="Select non-matching lines",
147 action='callback', callback=self._carry_option, 147 )
148 metavar='CONTEXT', type='str', 148 g.add_option(
149 help='Show CONTEXT lines before match') 149 "-G",
150 g.add_option('-A', 150 "--basic-regexp",
151 action='callback', callback=self._carry_option, 151 action="callback",
152 metavar='CONTEXT', type='str', 152 callback=self._carry_option,
153 help='Show CONTEXT lines after match') 153 help="Use POSIX basic regexp for patterns (default)",
154 g.add_option('-l', '--name-only', '--files-with-matches', 154 )
155 action='callback', callback=self._carry_option, 155 g.add_option(
156 help='Show only file names containing matching lines') 156 "-E",
157 g.add_option('-L', '--files-without-match', 157 "--extended-regexp",
158 action='callback', callback=self._carry_option, 158 action="callback",
159 help='Show only file names not containing matching lines') 159 callback=self._carry_option,
160 160 help="Use POSIX extended regexp for patterns",
161 def _ExecuteOne(self, cmd_argv, project): 161 )
162 """Process one project.""" 162 g.add_option(
163 try: 163 "-F",
164 p = GitCommand(project, 164 "--fixed-strings",
165 cmd_argv, 165 action="callback",
166 bare=False, 166 callback=self._carry_option,
167 capture_stdout=True, 167 help="Use fixed strings (not regexp) for pattern",
168 capture_stderr=True) 168 )
169 except GitError as e: 169
170 return (project, -1, None, str(e)) 170 g = p.add_option_group("Pattern Grouping")
171 171 g.add_option(
172 return (project, p.Wait(), p.stdout, p.stderr) 172 "--all-match",
173 173 action="callback",
174 @staticmethod 174 callback=self._carry_option,
175 def _ProcessResults(full_name, have_rev, opt, _pool, out, results): 175 help="Limit match to lines that have all patterns",
176 git_failed = False 176 )
177 bad_rev = False 177 g.add_option(
178 have_match = False 178 "--and",
179 _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only) 179 "--or",
180 180 "--not",
181 for project, rc, stdout, stderr in results: 181 action="callback",
182 if rc < 0: 182 callback=self._carry_option,
183 git_failed = True 183 help="Boolean operators to combine patterns",
184 out.project('--- project %s ---' % _RelPath(project)) 184 )
185 out.nl() 185 g.add_option(
186 out.fail('%s', stderr) 186 "-(",
187 out.nl() 187 "-)",
188 continue 188 action="callback",
189 189 callback=self._carry_option,
190 if rc: 190 help="Boolean operator grouping",
191 # no results 191 )
192 if stderr: 192
193 if have_rev and 'fatal: ambiguous argument' in stderr: 193 g = p.add_option_group("Output")
194 bad_rev = True 194 g.add_option(
195 else: 195 "-n",
196 out.project('--- project %s ---' % _RelPath(project)) 196 action="callback",
197 out.nl() 197 callback=self._carry_option,
198 out.fail('%s', stderr.strip()) 198 help="Prefix the line number to matching lines",
199 out.nl() 199 )
200 continue 200 g.add_option(
201 have_match = True 201 "-C",
202 202 action="callback",
203 # We cut the last element, to avoid a blank line. 203 callback=self._carry_option,
204 r = stdout.split('\n') 204 metavar="CONTEXT",
205 r = r[0:-1] 205 type="str",
206 206 help="Show CONTEXT lines around match",
207 if have_rev and full_name: 207 )
208 for line in r: 208 g.add_option(
209 rev, line = line.split(':', 1) 209 "-B",
210 out.write("%s", rev) 210 action="callback",
211 out.write(':') 211 callback=self._carry_option,
212 out.project(_RelPath(project)) 212 metavar="CONTEXT",
213 out.write('/') 213 type="str",
214 out.write("%s", line) 214 help="Show CONTEXT lines before match",
215 out.nl() 215 )
216 elif full_name: 216 g.add_option(
217 for line in r: 217 "-A",
218 out.project(_RelPath(project)) 218 action="callback",
219 out.write('/') 219 callback=self._carry_option,
220 out.write("%s", line) 220 metavar="CONTEXT",
221 out.nl() 221 type="str",
222 else: 222 help="Show CONTEXT lines after match",
223 for line in r: 223 )
224 print(line) 224 g.add_option(
225 225 "-l",
226 return (git_failed, bad_rev, have_match) 226 "--name-only",
227 227 "--files-with-matches",
228 def Execute(self, opt, args): 228 action="callback",
229 out = GrepColoring(self.manifest.manifestProject.config) 229 callback=self._carry_option,
230 230 help="Show only file names containing matching lines",
231 cmd_argv = ['grep'] 231 )
232 if out.is_on: 232 g.add_option(
233 cmd_argv.append('--color') 233 "-L",
234 cmd_argv.extend(getattr(opt, 'cmd_argv', [])) 234 "--files-without-match",
235 235 action="callback",
236 if '-e' not in cmd_argv: 236 callback=self._carry_option,
237 if not args: 237 help="Show only file names not containing matching lines",
238 self.Usage() 238 )
239 cmd_argv.append('-e') 239
240 cmd_argv.append(args[0]) 240 def _ExecuteOne(self, cmd_argv, project):
241 args = args[1:] 241 """Process one project."""
242 242 try:
243 projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only) 243 p = GitCommand(
244 244 project,
245 full_name = False 245 cmd_argv,
246 if len(projects) > 1: 246 bare=False,
247 cmd_argv.append('--full-name') 247 capture_stdout=True,
248 full_name = True 248 capture_stderr=True,
249 249 )
250 have_rev = False 250 except GitError as e:
251 if opt.revision: 251 return (project, -1, None, str(e))
252 if '--cached' in cmd_argv: 252
253 print('fatal: cannot combine --cached and --revision', file=sys.stderr) 253 return (project, p.Wait(), p.stdout, p.stderr)
254 sys.exit(1) 254
255 have_rev = True 255 @staticmethod
256 cmd_argv.extend(opt.revision) 256 def _ProcessResults(full_name, have_rev, opt, _pool, out, results):
257 cmd_argv.append('--') 257 git_failed = False
258 258 bad_rev = False
259 git_failed, bad_rev, have_match = self.ExecuteInParallel( 259 have_match = False
260 opt.jobs, 260 _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
261 functools.partial(self._ExecuteOne, cmd_argv), 261
262 projects, 262 for project, rc, stdout, stderr in results:
263 callback=functools.partial(self._ProcessResults, full_name, have_rev, opt), 263 if rc < 0:
264 output=out, 264 git_failed = True
265 ordered=True) 265 out.project("--- project %s ---" % _RelPath(project))
266 266 out.nl()
267 if git_failed: 267 out.fail("%s", stderr)
268 sys.exit(1) 268 out.nl()
269 elif have_match: 269 continue
270 sys.exit(0) 270
271 elif have_rev and bad_rev: 271 if rc:
272 for r in opt.revision: 272 # no results
273 print("error: can't search revision %s" % r, file=sys.stderr) 273 if stderr:
274 sys.exit(1) 274 if have_rev and "fatal: ambiguous argument" in stderr:
275 else: 275 bad_rev = True
276 sys.exit(1) 276 else:
277 out.project("--- project %s ---" % _RelPath(project))
278 out.nl()
279 out.fail("%s", stderr.strip())
280 out.nl()
281 continue
282 have_match = True
283
284 # We cut the last element, to avoid a blank line.
285 r = stdout.split("\n")
286 r = r[0:-1]
287
288 if have_rev and full_name:
289 for line in r:
290 rev, line = line.split(":", 1)
291 out.write("%s", rev)
292 out.write(":")
293 out.project(_RelPath(project))
294 out.write("/")
295 out.write("%s", line)
296 out.nl()
297 elif full_name:
298 for line in r:
299 out.project(_RelPath(project))
300 out.write("/")
301 out.write("%s", line)
302 out.nl()
303 else:
304 for line in r:
305 print(line)
306
307 return (git_failed, bad_rev, have_match)
308
309 def Execute(self, opt, args):
310 out = GrepColoring(self.manifest.manifestProject.config)
311
312 cmd_argv = ["grep"]
313 if out.is_on:
314 cmd_argv.append("--color")
315 cmd_argv.extend(getattr(opt, "cmd_argv", []))
316
317 if "-e" not in cmd_argv:
318 if not args:
319 self.Usage()
320 cmd_argv.append("-e")
321 cmd_argv.append(args[0])
322 args = args[1:]
323
324 projects = self.GetProjects(
325 args, all_manifests=not opt.this_manifest_only
326 )
327
328 full_name = False
329 if len(projects) > 1:
330 cmd_argv.append("--full-name")
331 full_name = True
332
333 have_rev = False
334 if opt.revision:
335 if "--cached" in cmd_argv:
336 print(
337 "fatal: cannot combine --cached and --revision",
338 file=sys.stderr,
339 )
340 sys.exit(1)
341 have_rev = True
342 cmd_argv.extend(opt.revision)
343 cmd_argv.append("--")
344
345 git_failed, bad_rev, have_match = self.ExecuteInParallel(
346 opt.jobs,
347 functools.partial(self._ExecuteOne, cmd_argv),
348 projects,
349 callback=functools.partial(
350 self._ProcessResults, full_name, have_rev, opt
351 ),
352 output=out,
353 ordered=True,
354 )
355
356 if git_failed:
357 sys.exit(1)
358 elif have_match:
359 sys.exit(0)
360 elif have_rev and bad_rev:
361 for r in opt.revision:
362 print("error: can't search revision %s" % r, file=sys.stderr)
363 sys.exit(1)
364 else:
365 sys.exit(1)
diff --git a/subcmds/help.py b/subcmds/help.py
index 1ad391db..50a48047 100644
--- a/subcmds/help.py
+++ b/subcmds/help.py
@@ -18,163 +18,193 @@ import textwrap
18 18
19from subcmds import all_commands 19from subcmds import all_commands
20from color import Coloring 20from color import Coloring
21from command import PagedCommand, MirrorSafeCommand, GitcAvailableCommand, GitcClientCommand 21from command import (
22 PagedCommand,
23 MirrorSafeCommand,
24 GitcAvailableCommand,
25 GitcClientCommand,
26)
22import gitc_utils 27import gitc_utils
23from wrapper import Wrapper 28from wrapper import Wrapper
24 29
25 30
26class Help(PagedCommand, MirrorSafeCommand): 31class Help(PagedCommand, MirrorSafeCommand):
27 COMMON = False 32 COMMON = False
28 helpSummary = "Display detailed help on a command" 33 helpSummary = "Display detailed help on a command"
29 helpUsage = """ 34 helpUsage = """
30%prog [--all|command] 35%prog [--all|command]
31""" 36"""
32 helpDescription = """ 37 helpDescription = """
33Displays detailed usage information about a command. 38Displays detailed usage information about a command.
34""" 39"""
35 40
36 def _PrintCommands(self, commandNames): 41 def _PrintCommands(self, commandNames):
37 """Helper to display |commandNames| summaries.""" 42 """Helper to display |commandNames| summaries."""
38 maxlen = 0 43 maxlen = 0
39 for name in commandNames: 44 for name in commandNames:
40 maxlen = max(maxlen, len(name)) 45 maxlen = max(maxlen, len(name))
41 fmt = ' %%-%ds %%s' % maxlen 46 fmt = " %%-%ds %%s" % maxlen
42 47
43 for name in commandNames: 48 for name in commandNames:
44 command = all_commands[name]() 49 command = all_commands[name]()
45 try: 50 try:
46 summary = command.helpSummary.strip() 51 summary = command.helpSummary.strip()
47 except AttributeError: 52 except AttributeError:
48 summary = '' 53 summary = ""
49 print(fmt % (name, summary)) 54 print(fmt % (name, summary))
50 55
51 def _PrintAllCommands(self): 56 def _PrintAllCommands(self):
52 print('usage: repo COMMAND [ARGS]') 57 print("usage: repo COMMAND [ARGS]")
53 self.PrintAllCommandsBody() 58 self.PrintAllCommandsBody()
54 59
55 def PrintAllCommandsBody(self): 60 def PrintAllCommandsBody(self):
56 print('The complete list of recognized repo commands is:') 61 print("The complete list of recognized repo commands is:")
57 commandNames = list(sorted(all_commands)) 62 commandNames = list(sorted(all_commands))
58 self._PrintCommands(commandNames) 63 self._PrintCommands(commandNames)
59 print("See 'repo help <command>' for more information on a " 64 print(
60 'specific command.') 65 "See 'repo help <command>' for more information on a "
61 print('Bug reports:', Wrapper().BUG_URL) 66 "specific command."
62 67 )
63 def _PrintCommonCommands(self): 68 print("Bug reports:", Wrapper().BUG_URL)
64 print('usage: repo COMMAND [ARGS]') 69
65 self.PrintCommonCommandsBody() 70 def _PrintCommonCommands(self):
66 71 print("usage: repo COMMAND [ARGS]")
67 def PrintCommonCommandsBody(self): 72 self.PrintCommonCommandsBody()
68 print('The most commonly used repo commands are:') 73
69 74 def PrintCommonCommandsBody(self):
70 def gitc_supported(cmd): 75 print("The most commonly used repo commands are:")
71 if not isinstance(cmd, GitcAvailableCommand) and not isinstance(cmd, GitcClientCommand): 76
72 return True 77 def gitc_supported(cmd):
73 if self.client.isGitcClient: 78 if not isinstance(cmd, GitcAvailableCommand) and not isinstance(
74 return True 79 cmd, GitcClientCommand
75 if isinstance(cmd, GitcClientCommand): 80 ):
76 return False 81 return True
77 if gitc_utils.get_gitc_manifest_dir(): 82 if self.client.isGitcClient:
78 return True 83 return True
79 return False 84 if isinstance(cmd, GitcClientCommand):
80 85 return False
81 commandNames = list(sorted([name 86 if gitc_utils.get_gitc_manifest_dir():
82 for name, command in all_commands.items() 87 return True
83 if command.COMMON and gitc_supported(command)])) 88 return False
84 self._PrintCommands(commandNames) 89
85 90 commandNames = list(
86 print( 91 sorted(
87 "See 'repo help <command>' for more information on a specific command.\n" 92 [
88 "See 'repo help --all' for a complete list of recognized commands.") 93 name
89 print('Bug reports:', Wrapper().BUG_URL) 94 for name, command in all_commands.items()
90 95 if command.COMMON and gitc_supported(command)
91 def _PrintCommandHelp(self, cmd, header_prefix=''): 96 ]
92 class _Out(Coloring): 97 )
93 def __init__(self, gc): 98 )
94 Coloring.__init__(self, gc, 'help') 99 self._PrintCommands(commandNames)
95 self.heading = self.printer('heading', attr='bold') 100
96 self._first = True 101 print(
97 102 "See 'repo help <command>' for more information on a specific "
98 def _PrintSection(self, heading, bodyAttr): 103 "command.\nSee 'repo help --all' for a complete list of recognized "
99 try: 104 "commands."
100 body = getattr(cmd, bodyAttr) 105 )
101 except AttributeError: 106 print("Bug reports:", Wrapper().BUG_URL)
102 return 107
103 if body == '' or body is None: 108 def _PrintCommandHelp(self, cmd, header_prefix=""):
104 return 109 class _Out(Coloring):
105 110 def __init__(self, gc):
106 if not self._first: 111 Coloring.__init__(self, gc, "help")
107 self.nl() 112 self.heading = self.printer("heading", attr="bold")
108 self._first = False 113 self._first = True
109 114
110 self.heading('%s%s', header_prefix, heading) 115 def _PrintSection(self, heading, bodyAttr):
111 self.nl() 116 try:
112 self.nl() 117 body = getattr(cmd, bodyAttr)
113 118 except AttributeError:
114 me = 'repo %s' % cmd.NAME 119 return
115 body = body.strip() 120 if body == "" or body is None:
116 body = body.replace('%prog', me) 121 return
117 122
118 # Extract the title, but skip any trailing {#anchors}. 123 if not self._first:
119 asciidoc_hdr = re.compile(r'^\n?#+ ([^{]+)(\{#.+\})?$') 124 self.nl()
120 for para in body.split("\n\n"): 125 self._first = False
121 if para.startswith(' '): 126
122 self.write('%s', para) 127 self.heading("%s%s", header_prefix, heading)
123 self.nl() 128 self.nl()
124 self.nl() 129 self.nl()
125 continue 130
126 131 me = "repo %s" % cmd.NAME
127 m = asciidoc_hdr.match(para) 132 body = body.strip()
128 if m: 133 body = body.replace("%prog", me)
129 self.heading('%s%s', header_prefix, m.group(1)) 134
130 self.nl() 135 # Extract the title, but skip any trailing {#anchors}.
131 self.nl() 136 asciidoc_hdr = re.compile(r"^\n?#+ ([^{]+)(\{#.+\})?$")
132 continue 137 for para in body.split("\n\n"):
133 138 if para.startswith(" "):
134 lines = textwrap.wrap(para.replace(' ', ' '), width=80, 139 self.write("%s", para)
135 break_long_words=False, break_on_hyphens=False) 140 self.nl()
136 for line in lines: 141 self.nl()
137 self.write('%s', line) 142 continue
138 self.nl() 143
139 self.nl() 144 m = asciidoc_hdr.match(para)
140 145 if m:
141 out = _Out(self.client.globalConfig) 146 self.heading("%s%s", header_prefix, m.group(1))
142 out._PrintSection('Summary', 'helpSummary') 147 self.nl()
143 cmd.OptionParser.print_help() 148 self.nl()
144 out._PrintSection('Description', 'helpDescription') 149 continue
145 150
146 def _PrintAllCommandHelp(self): 151 lines = textwrap.wrap(
147 for name in sorted(all_commands): 152 para.replace(" ", " "),
148 cmd = all_commands[name](manifest=self.manifest) 153 width=80,
149 self._PrintCommandHelp(cmd, header_prefix='[%s] ' % (name,)) 154 break_long_words=False,
150 155 break_on_hyphens=False,
151 def _Options(self, p): 156 )
152 p.add_option('-a', '--all', 157 for line in lines:
153 dest='show_all', action='store_true', 158 self.write("%s", line)
154 help='show the complete list of commands') 159 self.nl()
155 p.add_option('--help-all', 160 self.nl()
156 dest='show_all_help', action='store_true', 161
157 help='show the --help of all commands') 162 out = _Out(self.client.globalConfig)
158 163 out._PrintSection("Summary", "helpSummary")
159 def Execute(self, opt, args): 164 cmd.OptionParser.print_help()
160 if len(args) == 0: 165 out._PrintSection("Description", "helpDescription")
161 if opt.show_all_help: 166
162 self._PrintAllCommandHelp() 167 def _PrintAllCommandHelp(self):
163 elif opt.show_all: 168 for name in sorted(all_commands):
164 self._PrintAllCommands() 169 cmd = all_commands[name](manifest=self.manifest)
165 else: 170 self._PrintCommandHelp(cmd, header_prefix="[%s] " % (name,))
166 self._PrintCommonCommands() 171
167 172 def _Options(self, p):
168 elif len(args) == 1: 173 p.add_option(
169 name = args[0] 174 "-a",
170 175 "--all",
171 try: 176 dest="show_all",
172 cmd = all_commands[name](manifest=self.manifest) 177 action="store_true",
173 except KeyError: 178 help="show the complete list of commands",
174 print("repo: '%s' is not a repo command." % name, file=sys.stderr) 179 )
175 sys.exit(1) 180 p.add_option(
176 181 "--help-all",
177 self._PrintCommandHelp(cmd) 182 dest="show_all_help",
178 183 action="store_true",
179 else: 184 help="show the --help of all commands",
180 self._PrintCommandHelp(self) 185 )
186
187 def Execute(self, opt, args):
188 if len(args) == 0:
189 if opt.show_all_help:
190 self._PrintAllCommandHelp()
191 elif opt.show_all:
192 self._PrintAllCommands()
193 else:
194 self._PrintCommonCommands()
195
196 elif len(args) == 1:
197 name = args[0]
198
199 try:
200 cmd = all_commands[name](manifest=self.manifest)
201 except KeyError:
202 print(
203 "repo: '%s' is not a repo command." % name, file=sys.stderr
204 )
205 sys.exit(1)
206
207 self._PrintCommandHelp(cmd)
208
209 else:
210 self._PrintCommandHelp(self)
diff --git a/subcmds/info.py b/subcmds/info.py
index baa4c5b1..6e7f3ed2 100644
--- a/subcmds/info.py
+++ b/subcmds/info.py
@@ -20,203 +20,234 @@ from git_refs import R_M, R_HEADS
20 20
21 21
22class _Coloring(Coloring): 22class _Coloring(Coloring):
23 def __init__(self, config): 23 def __init__(self, config):
24 Coloring.__init__(self, config, "status") 24 Coloring.__init__(self, config, "status")
25 25
26 26
27class Info(PagedCommand): 27class Info(PagedCommand):
28 COMMON = True 28 COMMON = True
29 helpSummary = "Get info on the manifest branch, current branch or unmerged branches" 29 helpSummary = (
30 helpUsage = "%prog [-dl] [-o [-c]] [<project>...]" 30 "Get info on the manifest branch, current branch or unmerged branches"
31 31 )
32 def _Options(self, p): 32 helpUsage = "%prog [-dl] [-o [-c]] [<project>...]"
33 p.add_option('-d', '--diff', 33
34 dest='all', action='store_true', 34 def _Options(self, p):
35 help="show full info and commit diff including remote branches") 35 p.add_option(
36 p.add_option('-o', '--overview', 36 "-d",
37 dest='overview', action='store_true', 37 "--diff",
38 help='show overview of all local commits') 38 dest="all",
39 p.add_option('-c', '--current-branch', 39 action="store_true",
40 dest="current_branch", action="store_true", 40 help="show full info and commit diff including remote branches",
41 help="consider only checked out branches") 41 )
42 p.add_option('--no-current-branch', 42 p.add_option(
43 dest='current_branch', action='store_false', 43 "-o",
44 help='consider all local branches') 44 "--overview",
45 # Turn this into a warning & remove this someday. 45 dest="overview",
46 p.add_option('-b', 46 action="store_true",
47 dest='current_branch', action='store_true', 47 help="show overview of all local commits",
48 help=optparse.SUPPRESS_HELP) 48 )
49 p.add_option('-l', '--local-only', 49 p.add_option(
50 dest="local", action="store_true", 50 "-c",
51 help="disable all remote operations") 51 "--current-branch",
52 52 dest="current_branch",
53 def Execute(self, opt, args): 53 action="store_true",
54 self.out = _Coloring(self.client.globalConfig) 54 help="consider only checked out branches",
55 self.heading = self.out.printer('heading', attr='bold') 55 )
56 self.headtext = self.out.nofmt_printer('headtext', fg='yellow') 56 p.add_option(
57 self.redtext = self.out.printer('redtext', fg='red') 57 "--no-current-branch",
58 self.sha = self.out.printer("sha", fg='yellow') 58 dest="current_branch",
59 self.text = self.out.nofmt_printer('text') 59 action="store_false",
60 self.dimtext = self.out.printer('dimtext', attr='dim') 60 help="consider all local branches",
61 61 )
62 self.opt = opt 62 # Turn this into a warning & remove this someday.
63 63 p.add_option(
64 if not opt.this_manifest_only: 64 "-b",
65 self.manifest = self.manifest.outer_client 65 dest="current_branch",
66 manifestConfig = self.manifest.manifestProject.config 66 action="store_true",
67 mergeBranch = manifestConfig.GetBranch("default").merge 67 help=optparse.SUPPRESS_HELP,
68 manifestGroups = self.manifest.GetGroupsStr() 68 )
69 69 p.add_option(
70 self.heading("Manifest branch: ") 70 "-l",
71 if self.manifest.default.revisionExpr: 71 "--local-only",
72 self.headtext(self.manifest.default.revisionExpr) 72 dest="local",
73 self.out.nl() 73 action="store_true",
74 self.heading("Manifest merge branch: ") 74 help="disable all remote operations",
75 self.headtext(mergeBranch) 75 )
76 self.out.nl() 76
77 self.heading("Manifest groups: ") 77 def Execute(self, opt, args):
78 self.headtext(manifestGroups) 78 self.out = _Coloring(self.client.globalConfig)
79 self.out.nl() 79 self.heading = self.out.printer("heading", attr="bold")
80 80 self.headtext = self.out.nofmt_printer("headtext", fg="yellow")
81 self.printSeparator() 81 self.redtext = self.out.printer("redtext", fg="red")
82 82 self.sha = self.out.printer("sha", fg="yellow")
83 if not opt.overview: 83 self.text = self.out.nofmt_printer("text")
84 self._printDiffInfo(opt, args) 84 self.dimtext = self.out.printer("dimtext", attr="dim")
85 else: 85
86 self._printCommitOverview(opt, args) 86 self.opt = opt
87 87
88 def printSeparator(self): 88 if not opt.this_manifest_only:
89 self.text("----------------------------") 89 self.manifest = self.manifest.outer_client
90 self.out.nl() 90 manifestConfig = self.manifest.manifestProject.config
91 91 mergeBranch = manifestConfig.GetBranch("default").merge
92 def _printDiffInfo(self, opt, args): 92 manifestGroups = self.manifest.GetGroupsStr()
93 # We let exceptions bubble up to main as they'll be well structured. 93
94 projs = self.GetProjects(args, all_manifests=not opt.this_manifest_only) 94 self.heading("Manifest branch: ")
95 95 if self.manifest.default.revisionExpr:
96 for p in projs: 96 self.headtext(self.manifest.default.revisionExpr)
97 self.heading("Project: ") 97 self.out.nl()
98 self.headtext(p.name) 98 self.heading("Manifest merge branch: ")
99 self.out.nl() 99 self.headtext(mergeBranch)
100 100 self.out.nl()
101 self.heading("Mount path: ") 101 self.heading("Manifest groups: ")
102 self.headtext(p.worktree) 102 self.headtext(manifestGroups)
103 self.out.nl() 103 self.out.nl()
104 104
105 self.heading("Current revision: ") 105 self.printSeparator()
106 self.headtext(p.GetRevisionId()) 106
107 self.out.nl() 107 if not opt.overview:
108 108 self._printDiffInfo(opt, args)
109 currentBranch = p.CurrentBranch 109 else:
110 if currentBranch: 110 self._printCommitOverview(opt, args)
111 self.heading('Current branch: ') 111
112 self.headtext(currentBranch) 112 def printSeparator(self):
113 self.text("----------------------------")
113 self.out.nl() 114 self.out.nl()
114 115
115 self.heading("Manifest revision: ") 116 def _printDiffInfo(self, opt, args):
116 self.headtext(p.revisionExpr) 117 # We let exceptions bubble up to main as they'll be well structured.
117 self.out.nl() 118 projs = self.GetProjects(args, all_manifests=not opt.this_manifest_only)
118 119
119 localBranches = list(p.GetBranches().keys()) 120 for p in projs:
120 self.heading("Local Branches: ") 121 self.heading("Project: ")
121 self.redtext(str(len(localBranches))) 122 self.headtext(p.name)
122 if localBranches: 123 self.out.nl()
123 self.text(" [") 124
124 self.text(", ".join(localBranches)) 125 self.heading("Mount path: ")
125 self.text("]") 126 self.headtext(p.worktree)
126 self.out.nl() 127 self.out.nl()
127 128
128 if self.opt.all: 129 self.heading("Current revision: ")
129 self.findRemoteLocalDiff(p) 130 self.headtext(p.GetRevisionId())
130 131 self.out.nl()
131 self.printSeparator() 132
132 133 currentBranch = p.CurrentBranch
133 def findRemoteLocalDiff(self, project): 134 if currentBranch:
134 # Fetch all the latest commits. 135 self.heading("Current branch: ")
135 if not self.opt.local: 136 self.headtext(currentBranch)
136 project.Sync_NetworkHalf(quiet=True, current_branch_only=True) 137 self.out.nl()
137 138
138 branch = self.manifest.manifestProject.config.GetBranch('default').merge 139 self.heading("Manifest revision: ")
139 if branch.startswith(R_HEADS): 140 self.headtext(p.revisionExpr)
140 branch = branch[len(R_HEADS):] 141 self.out.nl()
141 logTarget = R_M + branch 142
142 143 localBranches = list(p.GetBranches().keys())
143 bareTmp = project.bare_git._bare 144 self.heading("Local Branches: ")
144 project.bare_git._bare = False 145 self.redtext(str(len(localBranches)))
145 localCommits = project.bare_git.rev_list( 146 if localBranches:
146 '--abbrev=8', 147 self.text(" [")
147 '--abbrev-commit', 148 self.text(", ".join(localBranches))
148 '--pretty=oneline', 149 self.text("]")
149 logTarget + "..", 150 self.out.nl()
150 '--') 151
151 152 if self.opt.all:
152 originCommits = project.bare_git.rev_list( 153 self.findRemoteLocalDiff(p)
153 '--abbrev=8', 154
154 '--abbrev-commit', 155 self.printSeparator()
155 '--pretty=oneline', 156
156 ".." + logTarget, 157 def findRemoteLocalDiff(self, project):
157 '--') 158 # Fetch all the latest commits.
158 project.bare_git._bare = bareTmp 159 if not self.opt.local:
159 160 project.Sync_NetworkHalf(quiet=True, current_branch_only=True)
160 self.heading("Local Commits: ") 161
161 self.redtext(str(len(localCommits))) 162 branch = self.manifest.manifestProject.config.GetBranch("default").merge
162 self.dimtext(" (on current branch)") 163 if branch.startswith(R_HEADS):
163 self.out.nl() 164 branch = branch[len(R_HEADS) :]
164 165 logTarget = R_M + branch
165 for c in localCommits: 166
166 split = c.split() 167 bareTmp = project.bare_git._bare
167 self.sha(split[0] + " ") 168 project.bare_git._bare = False
168 self.text(" ".join(split[1:])) 169 localCommits = project.bare_git.rev_list(
169 self.out.nl() 170 "--abbrev=8",
170 171 "--abbrev-commit",
171 self.printSeparator() 172 "--pretty=oneline",
172 173 logTarget + "..",
173 self.heading("Remote Commits: ") 174 "--",
174 self.redtext(str(len(originCommits))) 175 )
175 self.out.nl() 176
176 177 originCommits = project.bare_git.rev_list(
177 for c in originCommits: 178 "--abbrev=8",
178 split = c.split() 179 "--abbrev-commit",
179 self.sha(split[0] + " ") 180 "--pretty=oneline",
180 self.text(" ".join(split[1:])) 181 ".." + logTarget,
181 self.out.nl() 182 "--",
182 183 )
183 def _printCommitOverview(self, opt, args): 184 project.bare_git._bare = bareTmp
184 all_branches = [] 185
185 for project in self.GetProjects(args, all_manifests=not opt.this_manifest_only): 186 self.heading("Local Commits: ")
186 br = [project.GetUploadableBranch(x) 187 self.redtext(str(len(localCommits)))
187 for x in project.GetBranches()] 188 self.dimtext(" (on current branch)")
188 br = [x for x in br if x]
189 if self.opt.current_branch:
190 br = [x for x in br if x.name == project.CurrentBranch]
191 all_branches.extend(br)
192
193 if not all_branches:
194 return
195
196 self.out.nl()
197 self.heading('Projects Overview')
198 project = None
199
200 for branch in all_branches:
201 if project != branch.project:
202 project = branch.project
203 self.out.nl() 189 self.out.nl()
204 self.headtext(project.RelPath(local=opt.this_manifest_only)) 190
191 for c in localCommits:
192 split = c.split()
193 self.sha(split[0] + " ")
194 self.text(" ".join(split[1:]))
195 self.out.nl()
196
197 self.printSeparator()
198
199 self.heading("Remote Commits: ")
200 self.redtext(str(len(originCommits)))
205 self.out.nl() 201 self.out.nl()
206 202
207 commits = branch.commits 203 for c in originCommits:
208 date = branch.date 204 split = c.split()
209 self.text('%s %-33s (%2d commit%s, %s)' % ( 205 self.sha(split[0] + " ")
210 branch.name == project.CurrentBranch and '*' or ' ', 206 self.text(" ".join(split[1:]))
211 branch.name, 207 self.out.nl()
212 len(commits), 208
213 len(commits) != 1 and 's' or '', 209 def _printCommitOverview(self, opt, args):
214 date)) 210 all_branches = []
215 self.out.nl() 211 for project in self.GetProjects(
216 212 args, all_manifests=not opt.this_manifest_only
217 for commit in commits: 213 ):
218 split = commit.split() 214 br = [project.GetUploadableBranch(x) for x in project.GetBranches()]
219 self.text('{0:38}{1} '.format('', '-')) 215 br = [x for x in br if x]
220 self.sha(split[0] + " ") 216 if self.opt.current_branch:
221 self.text(" ".join(split[1:])) 217 br = [x for x in br if x.name == project.CurrentBranch]
218 all_branches.extend(br)
219
220 if not all_branches:
221 return
222
222 self.out.nl() 223 self.out.nl()
224 self.heading("Projects Overview")
225 project = None
226
227 for branch in all_branches:
228 if project != branch.project:
229 project = branch.project
230 self.out.nl()
231 self.headtext(project.RelPath(local=opt.this_manifest_only))
232 self.out.nl()
233
234 commits = branch.commits
235 date = branch.date
236 self.text(
237 "%s %-33s (%2d commit%s, %s)"
238 % (
239 branch.name == project.CurrentBranch and "*" or " ",
240 branch.name,
241 len(commits),
242 len(commits) != 1 and "s" or "",
243 date,
244 )
245 )
246 self.out.nl()
247
248 for commit in commits:
249 split = commit.split()
250 self.text("{0:38}{1} ".format("", "-"))
251 self.sha(split[0] + " ")
252 self.text(" ".join(split[1:]))
253 self.out.nl()
diff --git a/subcmds/init.py b/subcmds/init.py
index 813fa590..b5c2e3b5 100644
--- a/subcmds/init.py
+++ b/subcmds/init.py
@@ -22,13 +22,13 @@ from wrapper import Wrapper
22 22
23 23
24class Init(InteractiveCommand, MirrorSafeCommand): 24class Init(InteractiveCommand, MirrorSafeCommand):
25 COMMON = True 25 COMMON = True
26 MULTI_MANIFEST_SUPPORT = True 26 MULTI_MANIFEST_SUPPORT = True
27 helpSummary = "Initialize a repo client checkout in the current directory" 27 helpSummary = "Initialize a repo client checkout in the current directory"
28 helpUsage = """ 28 helpUsage = """
29%prog [options] [manifest url] 29%prog [options] [manifest url]
30""" 30"""
31 helpDescription = """ 31 helpDescription = """
32The '%prog' command is run once to install and initialize repo. 32The '%prog' command is run once to install and initialize repo.
33The latest repo source code and manifest collection is downloaded 33The latest repo source code and manifest collection is downloaded
34from the server and is installed in the .repo/ directory in the 34from the server and is installed in the .repo/ directory in the
@@ -77,243 +77,303 @@ manifest, a subsequent `repo sync` (or `repo sync -d`) is necessary
77to update the working directory files. 77to update the working directory files.
78""" 78"""
79 79
80 def _CommonOptions(self, p): 80 def _CommonOptions(self, p):
81 """Disable due to re-use of Wrapper().""" 81 """Disable due to re-use of Wrapper()."""
82 82
83 def _Options(self, p, gitc_init=False): 83 def _Options(self, p, gitc_init=False):
84 Wrapper().InitParser(p, gitc_init=gitc_init) 84 Wrapper().InitParser(p, gitc_init=gitc_init)
85 m = p.add_option_group('Multi-manifest') 85 m = p.add_option_group("Multi-manifest")
86 m.add_option('--outer-manifest', action='store_true', default=True, 86 m.add_option(
87 help='operate starting at the outermost manifest') 87 "--outer-manifest",
88 m.add_option('--no-outer-manifest', dest='outer_manifest', 88 action="store_true",
89 action='store_false', help='do not operate on outer manifests') 89 default=True,
90 m.add_option('--this-manifest-only', action='store_true', default=None, 90 help="operate starting at the outermost manifest",
91 help='only operate on this (sub)manifest') 91 )
92 m.add_option('--no-this-manifest-only', '--all-manifests', 92 m.add_option(
93 dest='this_manifest_only', action='store_false', 93 "--no-outer-manifest",
94 help='operate on this manifest and its submanifests') 94 dest="outer_manifest",
95 95 action="store_false",
96 def _RegisteredEnvironmentOptions(self): 96 help="do not operate on outer manifests",
97 return {'REPO_MANIFEST_URL': 'manifest_url', 97 )
98 'REPO_MIRROR_LOCATION': 'reference'} 98 m.add_option(
99 99 "--this-manifest-only",
100 def _SyncManifest(self, opt): 100 action="store_true",
101 """Call manifestProject.Sync with arguments from opt. 101 default=None,
102 102 help="only operate on this (sub)manifest",
103 Args: 103 )
104 opt: options from optparse. 104 m.add_option(
105 """ 105 "--no-this-manifest-only",
106 # Normally this value is set when instantiating the project, but the 106 "--all-manifests",
107 # manifest project is special and is created when instantiating the 107 dest="this_manifest_only",
108 # manifest which happens before we parse options. 108 action="store_false",
109 self.manifest.manifestProject.clone_depth = opt.manifest_depth 109 help="operate on this manifest and its submanifests",
110 if not self.manifest.manifestProject.Sync( 110 )
111 manifest_url=opt.manifest_url, 111
112 manifest_branch=opt.manifest_branch, 112 def _RegisteredEnvironmentOptions(self):
113 standalone_manifest=opt.standalone_manifest, 113 return {
114 groups=opt.groups, 114 "REPO_MANIFEST_URL": "manifest_url",
115 platform=opt.platform, 115 "REPO_MIRROR_LOCATION": "reference",
116 mirror=opt.mirror, 116 }
117 dissociate=opt.dissociate, 117
118 reference=opt.reference, 118 def _SyncManifest(self, opt):
119 worktree=opt.worktree, 119 """Call manifestProject.Sync with arguments from opt.
120 submodules=opt.submodules, 120
121 archive=opt.archive, 121 Args:
122 partial_clone=opt.partial_clone, 122 opt: options from optparse.
123 clone_filter=opt.clone_filter, 123 """
124 partial_clone_exclude=opt.partial_clone_exclude, 124 # Normally this value is set when instantiating the project, but the
125 clone_bundle=opt.clone_bundle, 125 # manifest project is special and is created when instantiating the
126 git_lfs=opt.git_lfs, 126 # manifest which happens before we parse options.
127 use_superproject=opt.use_superproject, 127 self.manifest.manifestProject.clone_depth = opt.manifest_depth
128 verbose=opt.verbose, 128 if not self.manifest.manifestProject.Sync(
129 current_branch_only=opt.current_branch_only, 129 manifest_url=opt.manifest_url,
130 tags=opt.tags, 130 manifest_branch=opt.manifest_branch,
131 depth=opt.depth, 131 standalone_manifest=opt.standalone_manifest,
132 git_event_log=self.git_event_log, 132 groups=opt.groups,
133 manifest_name=opt.manifest_name): 133 platform=opt.platform,
134 sys.exit(1) 134 mirror=opt.mirror,
135 135 dissociate=opt.dissociate,
136 def _Prompt(self, prompt, value): 136 reference=opt.reference,
137 print('%-10s [%s]: ' % (prompt, value), end='', flush=True) 137 worktree=opt.worktree,
138 a = sys.stdin.readline().strip() 138 submodules=opt.submodules,
139 if a == '': 139 archive=opt.archive,
140 return value 140 partial_clone=opt.partial_clone,
141 return a 141 clone_filter=opt.clone_filter,
142 142 partial_clone_exclude=opt.partial_clone_exclude,
143 def _ShouldConfigureUser(self, opt, existing_checkout): 143 clone_bundle=opt.clone_bundle,
144 gc = self.client.globalConfig 144 git_lfs=opt.git_lfs,
145 mp = self.manifest.manifestProject 145 use_superproject=opt.use_superproject,
146 146 verbose=opt.verbose,
147 # If we don't have local settings, get from global. 147 current_branch_only=opt.current_branch_only,
148 if not mp.config.Has('user.name') or not mp.config.Has('user.email'): 148 tags=opt.tags,
149 if not gc.Has('user.name') or not gc.Has('user.email'): 149 depth=opt.depth,
150 return True 150 git_event_log=self.git_event_log,
151 151 manifest_name=opt.manifest_name,
152 mp.config.SetString('user.name', gc.GetString('user.name')) 152 ):
153 mp.config.SetString('user.email', gc.GetString('user.email')) 153 sys.exit(1)
154 154
155 if not opt.quiet and not existing_checkout or opt.verbose: 155 def _Prompt(self, prompt, value):
156 print() 156 print("%-10s [%s]: " % (prompt, value), end="", flush=True)
157 print('Your identity is: %s <%s>' % (mp.config.GetString('user.name'), 157 a = sys.stdin.readline().strip()
158 mp.config.GetString('user.email'))) 158 if a == "":
159 print("If you want to change this, please re-run 'repo init' with --config-name") 159 return value
160 return False 160 return a
161 161
162 def _ConfigureUser(self, opt): 162 def _ShouldConfigureUser(self, opt, existing_checkout):
163 mp = self.manifest.manifestProject 163 gc = self.client.globalConfig
164 164 mp = self.manifest.manifestProject
165 while True: 165
166 if not opt.quiet: 166 # If we don't have local settings, get from global.
167 if not mp.config.Has("user.name") or not mp.config.Has("user.email"):
168 if not gc.Has("user.name") or not gc.Has("user.email"):
169 return True
170
171 mp.config.SetString("user.name", gc.GetString("user.name"))
172 mp.config.SetString("user.email", gc.GetString("user.email"))
173
174 if not opt.quiet and not existing_checkout or opt.verbose:
175 print()
176 print(
177 "Your identity is: %s <%s>"
178 % (
179 mp.config.GetString("user.name"),
180 mp.config.GetString("user.email"),
181 )
182 )
183 print(
184 "If you want to change this, please re-run 'repo init' with "
185 "--config-name"
186 )
187 return False
188
189 def _ConfigureUser(self, opt):
190 mp = self.manifest.manifestProject
191
192 while True:
193 if not opt.quiet:
194 print()
195 name = self._Prompt("Your Name", mp.UserName)
196 email = self._Prompt("Your Email", mp.UserEmail)
197
198 if not opt.quiet:
199 print()
200 print("Your identity is: %s <%s>" % (name, email))
201 print("is this correct [y/N]? ", end="", flush=True)
202 a = sys.stdin.readline().strip().lower()
203 if a in ("yes", "y", "t", "true"):
204 break
205
206 if name != mp.UserName:
207 mp.config.SetString("user.name", name)
208 if email != mp.UserEmail:
209 mp.config.SetString("user.email", email)
210
211 def _HasColorSet(self, gc):
212 for n in ["ui", "diff", "status"]:
213 if gc.Has("color.%s" % n):
214 return True
215 return False
216
217 def _ConfigureColor(self):
218 gc = self.client.globalConfig
219 if self._HasColorSet(gc):
220 return
221
222 class _Test(Coloring):
223 def __init__(self):
224 Coloring.__init__(self, gc, "test color display")
225 self._on = True
226
227 out = _Test()
228
167 print() 229 print()
168 name = self._Prompt('Your Name', mp.UserName) 230 print("Testing colorized output (for 'repo diff', 'repo status'):")
169 email = self._Prompt('Your Email', mp.UserEmail) 231
232 for c in ["black", "red", "green", "yellow", "blue", "magenta", "cyan"]:
233 out.write(" ")
234 out.printer(fg=c)(" %-6s ", c)
235 out.write(" ")
236 out.printer(fg="white", bg="black")(" %s " % "white")
237 out.nl()
238
239 for c in ["bold", "dim", "ul", "reverse"]:
240 out.write(" ")
241 out.printer(fg="black", attr=c)(" %-6s ", c)
242 out.nl()
243
244 print(
245 "Enable color display in this user account (y/N)? ",
246 end="",
247 flush=True,
248 )
249 a = sys.stdin.readline().strip().lower()
250 if a in ("y", "yes", "t", "true", "on"):
251 gc.SetString("color.ui", "auto")
252
253 def _DisplayResult(self):
254 if self.manifest.IsMirror:
255 init_type = "mirror "
256 else:
257 init_type = ""
170 258
171 if not opt.quiet:
172 print() 259 print()
173 print('Your identity is: %s <%s>' % (name, email)) 260 print(
174 print('is this correct [y/N]? ', end='', flush=True) 261 "repo %shas been initialized in %s"
175 a = sys.stdin.readline().strip().lower() 262 % (init_type, self.manifest.topdir)
176 if a in ('yes', 'y', 't', 'true'): 263 )
177 break 264
178 265 current_dir = os.getcwd()
179 if name != mp.UserName: 266 if current_dir != self.manifest.topdir:
180 mp.config.SetString('user.name', name) 267 print(
181 if email != mp.UserEmail: 268 "If this is not the directory in which you want to initialize "
182 mp.config.SetString('user.email', email) 269 "repo, please run:"
183 270 )
184 def _HasColorSet(self, gc): 271 print(" rm -r %s" % os.path.join(self.manifest.topdir, ".repo"))
185 for n in ['ui', 'diff', 'status']: 272 print("and try again.")
186 if gc.Has('color.%s' % n): 273
187 return True 274 def ValidateOptions(self, opt, args):
188 return False 275 if opt.reference:
189 276 opt.reference = os.path.expanduser(opt.reference)
190 def _ConfigureColor(self): 277
191 gc = self.client.globalConfig 278 # Check this here, else manifest will be tagged "not new" and init won't
192 if self._HasColorSet(gc): 279 # be possible anymore without removing the .repo/manifests directory.
193 return 280 if opt.mirror:
194 281 if opt.archive:
195 class _Test(Coloring): 282 self.OptionParser.error(
196 def __init__(self): 283 "--mirror and --archive cannot be used " "together."
197 Coloring.__init__(self, gc, 'test color display') 284 )
198 self._on = True 285 if opt.use_superproject is not None:
199 out = _Test() 286 self.OptionParser.error(
200 287 "--mirror and --use-superproject cannot be "
201 print() 288 "used together."
202 print("Testing colorized output (for 'repo diff', 'repo status'):") 289 )
203 290 if opt.archive and opt.use_superproject is not None:
204 for c in ['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan']: 291 self.OptionParser.error(
205 out.write(' ') 292 "--archive and --use-superproject cannot be used " "together."
206 out.printer(fg=c)(' %-6s ', c) 293 )
207 out.write(' ') 294
208 out.printer(fg='white', bg='black')(' %s ' % 'white') 295 if opt.standalone_manifest and (
209 out.nl() 296 opt.manifest_branch or opt.manifest_name != "default.xml"
210 297 ):
211 for c in ['bold', 'dim', 'ul', 'reverse']: 298 self.OptionParser.error(
212 out.write(' ') 299 "--manifest-branch and --manifest-name cannot"
213 out.printer(fg='black', attr=c)(' %-6s ', c) 300 " be used with --standalone-manifest."
214 out.nl() 301 )
215 302
216 print('Enable color display in this user account (y/N)? ', end='', flush=True) 303 if args:
217 a = sys.stdin.readline().strip().lower() 304 if opt.manifest_url:
218 if a in ('y', 'yes', 't', 'true', 'on'): 305 self.OptionParser.error(
219 gc.SetString('color.ui', 'auto') 306 "--manifest-url option and URL argument both specified: "
220 307 "only use one to select the manifest URL."
221 def _DisplayResult(self): 308 )
222 if self.manifest.IsMirror: 309
223 init_type = 'mirror ' 310 opt.manifest_url = args.pop(0)
224 else: 311
225 init_type = '' 312 if args:
226 313 self.OptionParser.error("too many arguments to init")
227 print() 314
228 print('repo %shas been initialized in %s' % (init_type, self.manifest.topdir)) 315 def Execute(self, opt, args):
229 316 git_require(MIN_GIT_VERSION_HARD, fail=True)
230 current_dir = os.getcwd() 317 if not git_require(MIN_GIT_VERSION_SOFT):
231 if current_dir != self.manifest.topdir: 318 print(
232 print('If this is not the directory in which you want to initialize ' 319 "repo: warning: git-%s+ will soon be required; please upgrade "
233 'repo, please run:') 320 "your version of git to maintain support."
234 print(' rm -r %s' % os.path.join(self.manifest.topdir, '.repo')) 321 % (".".join(str(x) for x in MIN_GIT_VERSION_SOFT),),
235 print('and try again.') 322 file=sys.stderr,
236 323 )
237 def ValidateOptions(self, opt, args): 324
238 if opt.reference: 325 rp = self.manifest.repoProject
239 opt.reference = os.path.expanduser(opt.reference) 326
240 327 # Handle new --repo-url requests.
241 # Check this here, else manifest will be tagged "not new" and init won't be 328 if opt.repo_url:
242 # possible anymore without removing the .repo/manifests directory. 329 remote = rp.GetRemote("origin")
243 if opt.mirror: 330 remote.url = opt.repo_url
244 if opt.archive: 331 remote.Save()
245 self.OptionParser.error('--mirror and --archive cannot be used ' 332
246 'together.') 333 # Handle new --repo-rev requests.
247 if opt.use_superproject is not None: 334 if opt.repo_rev:
248 self.OptionParser.error('--mirror and --use-superproject cannot be ' 335 wrapper = Wrapper()
249 'used together.') 336 try:
250 if opt.archive and opt.use_superproject is not None: 337 remote_ref, rev = wrapper.check_repo_rev(
251 self.OptionParser.error('--archive and --use-superproject cannot be used ' 338 rp.gitdir,
252 'together.') 339 opt.repo_rev,
253 340 repo_verify=opt.repo_verify,
254 if opt.standalone_manifest and (opt.manifest_branch or 341 quiet=opt.quiet,
255 opt.manifest_name != 'default.xml'): 342 )
256 self.OptionParser.error('--manifest-branch and --manifest-name cannot' 343 except wrapper.CloneFailure:
257 ' be used with --standalone-manifest.') 344 print(
258 345 "fatal: double check your --repo-rev setting.",
259 if args: 346 file=sys.stderr,
260 if opt.manifest_url: 347 )
261 self.OptionParser.error( 348 sys.exit(1)
262 '--manifest-url option and URL argument both specified: only use ' 349 branch = rp.GetBranch("default")
263 'one to select the manifest URL.') 350 branch.merge = remote_ref
264 351 rp.work_git.reset("--hard", rev)
265 opt.manifest_url = args.pop(0) 352 branch.Save()
266 353
267 if args: 354 if opt.worktree:
268 self.OptionParser.error('too many arguments to init') 355 # Older versions of git supported worktree, but had dangerous gc
269 356 # bugs.
270 def Execute(self, opt, args): 357 git_require((2, 15, 0), fail=True, msg="git gc worktree corruption")
271 git_require(MIN_GIT_VERSION_HARD, fail=True) 358
272 if not git_require(MIN_GIT_VERSION_SOFT): 359 # Provide a short notice that we're reinitializing an existing checkout.
273 print('repo: warning: git-%s+ will soon be required; please upgrade your ' 360 # Sometimes developers might not realize that they're in one, or that
274 'version of git to maintain support.' 361 # repo doesn't do nested checkouts.
275 % ('.'.join(str(x) for x in MIN_GIT_VERSION_SOFT),), 362 existing_checkout = self.manifest.manifestProject.Exists
276 file=sys.stderr) 363 if not opt.quiet and existing_checkout:
277 364 print(
278 rp = self.manifest.repoProject 365 "repo: reusing existing repo client checkout in",
279 366 self.manifest.topdir,
280 # Handle new --repo-url requests. 367 )
281 if opt.repo_url: 368
282 remote = rp.GetRemote('origin') 369 self._SyncManifest(opt)
283 remote.url = opt.repo_url 370
284 remote.Save() 371 if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror:
285 372 if opt.config_name or self._ShouldConfigureUser(
286 # Handle new --repo-rev requests. 373 opt, existing_checkout
287 if opt.repo_rev: 374 ):
288 wrapper = Wrapper() 375 self._ConfigureUser(opt)
289 try: 376 self._ConfigureColor()
290 remote_ref, rev = wrapper.check_repo_rev( 377
291 rp.gitdir, opt.repo_rev, repo_verify=opt.repo_verify, quiet=opt.quiet) 378 if not opt.quiet:
292 except wrapper.CloneFailure: 379 self._DisplayResult()
293 print('fatal: double check your --repo-rev setting.', file=sys.stderr)
294 sys.exit(1)
295 branch = rp.GetBranch('default')
296 branch.merge = remote_ref
297 rp.work_git.reset('--hard', rev)
298 branch.Save()
299
300 if opt.worktree:
301 # Older versions of git supported worktree, but had dangerous gc bugs.
302 git_require((2, 15, 0), fail=True, msg='git gc worktree corruption')
303
304 # Provide a short notice that we're reinitializing an existing checkout.
305 # Sometimes developers might not realize that they're in one, or that
306 # repo doesn't do nested checkouts.
307 existing_checkout = self.manifest.manifestProject.Exists
308 if not opt.quiet and existing_checkout:
309 print('repo: reusing existing repo client checkout in', self.manifest.topdir)
310
311 self._SyncManifest(opt)
312
313 if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror:
314 if opt.config_name or self._ShouldConfigureUser(opt, existing_checkout):
315 self._ConfigureUser(opt)
316 self._ConfigureColor()
317
318 if not opt.quiet:
319 self._DisplayResult()
diff --git a/subcmds/list.py b/subcmds/list.py
index ad8036ee..24e3e1fc 100644
--- a/subcmds/list.py
+++ b/subcmds/list.py
@@ -18,13 +18,13 @@ from command import Command, MirrorSafeCommand
18 18
19 19
20class List(Command, MirrorSafeCommand): 20class List(Command, MirrorSafeCommand):
21 COMMON = True 21 COMMON = True
22 helpSummary = "List projects and their associated directories" 22 helpSummary = "List projects and their associated directories"
23 helpUsage = """ 23 helpUsage = """
24%prog [-f] [<project>...] 24%prog [-f] [<project>...]
25%prog [-f] -r str1 [str2]... 25%prog [-f] -r str1 [str2]...
26""" 26"""
27 helpDescription = """ 27 helpDescription = """
28List all projects; pass '.' to list the project for the cwd. 28List all projects; pass '.' to list the project for the cwd.
29 29
30By default, only projects that currently exist in the checkout are shown. If 30By default, only projects that currently exist in the checkout are shown. If
@@ -35,69 +35,103 @@ groups, then also pass --groups all.
35This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'. 35This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
36""" 36"""
37 37
38 def _Options(self, p): 38 def _Options(self, p):
39 p.add_option('-r', '--regex', 39 p.add_option(
40 dest='regex', action='store_true', 40 "-r",
41 help='filter the project list based on regex or wildcard matching of strings') 41 "--regex",
42 p.add_option('-g', '--groups', 42 dest="regex",
43 dest='groups', 43 action="store_true",
44 help='filter the project list based on the groups the project is in') 44 help="filter the project list based on regex or wildcard matching "
45 p.add_option('-a', '--all', 45 "of strings",
46 action='store_true', 46 )
47 help='show projects regardless of checkout state') 47 p.add_option(
48 p.add_option('-n', '--name-only', 48 "-g",
49 dest='name_only', action='store_true', 49 "--groups",
50 help='display only the name of the repository') 50 dest="groups",
51 p.add_option('-p', '--path-only', 51 help="filter the project list based on the groups the project is "
52 dest='path_only', action='store_true', 52 "in",
53 help='display only the path of the repository') 53 )
54 p.add_option('-f', '--fullpath', 54 p.add_option(
55 dest='fullpath', action='store_true', 55 "-a",
56 help='display the full work tree path instead of the relative path') 56 "--all",
57 p.add_option('--relative-to', metavar='PATH', 57 action="store_true",
58 help='display paths relative to this one (default: top of repo client checkout)') 58 help="show projects regardless of checkout state",
59 )
60 p.add_option(
61 "-n",
62 "--name-only",
63 dest="name_only",
64 action="store_true",
65 help="display only the name of the repository",
66 )
67 p.add_option(
68 "-p",
69 "--path-only",
70 dest="path_only",
71 action="store_true",
72 help="display only the path of the repository",
73 )
74 p.add_option(
75 "-f",
76 "--fullpath",
77 dest="fullpath",
78 action="store_true",
79 help="display the full work tree path instead of the relative path",
80 )
81 p.add_option(
82 "--relative-to",
83 metavar="PATH",
84 help="display paths relative to this one (default: top of repo "
85 "client checkout)",
86 )
59 87
60 def ValidateOptions(self, opt, args): 88 def ValidateOptions(self, opt, args):
61 if opt.fullpath and opt.name_only: 89 if opt.fullpath and opt.name_only:
62 self.OptionParser.error('cannot combine -f and -n') 90 self.OptionParser.error("cannot combine -f and -n")
63 91
64 # Resolve any symlinks so the output is stable. 92 # Resolve any symlinks so the output is stable.
65 if opt.relative_to: 93 if opt.relative_to:
66 opt.relative_to = os.path.realpath(opt.relative_to) 94 opt.relative_to = os.path.realpath(opt.relative_to)
67 95
68 def Execute(self, opt, args): 96 def Execute(self, opt, args):
69 """List all projects and the associated directories. 97 """List all projects and the associated directories.
70 98
71 This may be possible to do with 'repo forall', but repo newbies have 99 This may be possible to do with 'repo forall', but repo newbies have
72 trouble figuring that out. The idea here is that it should be more 100 trouble figuring that out. The idea here is that it should be more
73 discoverable. 101 discoverable.
74 102
75 Args: 103 Args:
76 opt: The options. 104 opt: The options.
77 args: Positional args. Can be a list of projects to list, or empty. 105 args: Positional args. Can be a list of projects to list, or empty.
78 """ 106 """
79 if not opt.regex: 107 if not opt.regex:
80 projects = self.GetProjects(args, groups=opt.groups, missing_ok=opt.all, 108 projects = self.GetProjects(
81 all_manifests=not opt.this_manifest_only) 109 args,
82 else: 110 groups=opt.groups,
83 projects = self.FindProjects(args, all_manifests=not opt.this_manifest_only) 111 missing_ok=opt.all,
112 all_manifests=not opt.this_manifest_only,
113 )
114 else:
115 projects = self.FindProjects(
116 args, all_manifests=not opt.this_manifest_only
117 )
84 118
85 def _getpath(x): 119 def _getpath(x):
86 if opt.fullpath: 120 if opt.fullpath:
87 return x.worktree 121 return x.worktree
88 if opt.relative_to: 122 if opt.relative_to:
89 return os.path.relpath(x.worktree, opt.relative_to) 123 return os.path.relpath(x.worktree, opt.relative_to)
90 return x.RelPath(local=opt.this_manifest_only) 124 return x.RelPath(local=opt.this_manifest_only)
91 125
92 lines = [] 126 lines = []
93 for project in projects: 127 for project in projects:
94 if opt.name_only and not opt.path_only: 128 if opt.name_only and not opt.path_only:
95 lines.append("%s" % (project.name)) 129 lines.append("%s" % (project.name))
96 elif opt.path_only and not opt.name_only: 130 elif opt.path_only and not opt.name_only:
97 lines.append("%s" % (_getpath(project))) 131 lines.append("%s" % (_getpath(project)))
98 else: 132 else:
99 lines.append("%s : %s" % (_getpath(project), project.name)) 133 lines.append("%s : %s" % (_getpath(project), project.name))
100 134
101 if lines: 135 if lines:
102 lines.sort() 136 lines.sort()
103 print('\n'.join(lines)) 137 print("\n".join(lines))
diff --git a/subcmds/manifest.py b/subcmds/manifest.py
index f4602a59..f72df348 100644
--- a/subcmds/manifest.py
+++ b/subcmds/manifest.py
@@ -20,12 +20,12 @@ from command import PagedCommand
20 20
21 21
22class Manifest(PagedCommand): 22class Manifest(PagedCommand):
23 COMMON = False 23 COMMON = False
24 helpSummary = "Manifest inspection utility" 24 helpSummary = "Manifest inspection utility"
25 helpUsage = """ 25 helpUsage = """
26%prog [-o {-|NAME.xml}] [-m MANIFEST.xml] [-r] 26%prog [-o {-|NAME.xml}] [-m MANIFEST.xml] [-r]
27""" 27"""
28 _helpDescription = """ 28 _helpDescription = """
29 29
30With the -o option, exports the current manifest for inspection. 30With the -o option, exports the current manifest for inspection.
31The manifest and (if present) local_manifests/ are combined 31The manifest and (if present) local_manifests/ are combined
@@ -40,92 +40,136 @@ when the manifest was generated. The 'dest-branch' attribute is set
40to indicate the remote ref to push changes to via 'repo upload'. 40to indicate the remote ref to push changes to via 'repo upload'.
41""" 41"""
42 42
43 @property 43 @property
44 def helpDescription(self): 44 def helpDescription(self):
45 helptext = self._helpDescription + '\n' 45 helptext = self._helpDescription + "\n"
46 r = os.path.dirname(__file__) 46 r = os.path.dirname(__file__)
47 r = os.path.dirname(r) 47 r = os.path.dirname(r)
48 with open(os.path.join(r, 'docs', 'manifest-format.md')) as fd: 48 with open(os.path.join(r, "docs", "manifest-format.md")) as fd:
49 for line in fd: 49 for line in fd:
50 helptext += line 50 helptext += line
51 return helptext 51 return helptext
52 52
53 def _Options(self, p): 53 def _Options(self, p):
54 p.add_option('-r', '--revision-as-HEAD', 54 p.add_option(
55 dest='peg_rev', action='store_true', 55 "-r",
56 help='save revisions as current HEAD') 56 "--revision-as-HEAD",
57 p.add_option('-m', '--manifest-name', 57 dest="peg_rev",
58 help='temporary manifest to use for this sync', metavar='NAME.xml') 58 action="store_true",
59 p.add_option('--suppress-upstream-revision', dest='peg_rev_upstream', 59 help="save revisions as current HEAD",
60 default=True, action='store_false', 60 )
61 help='if in -r mode, do not write the upstream field ' 61 p.add_option(
62 '(only of use if the branch names for a sha1 manifest are ' 62 "-m",
63 'sensitive)') 63 "--manifest-name",
64 p.add_option('--suppress-dest-branch', dest='peg_rev_dest_branch', 64 help="temporary manifest to use for this sync",
65 default=True, action='store_false', 65 metavar="NAME.xml",
66 help='if in -r mode, do not write the dest-branch field ' 66 )
67 '(only of use if the branch names for a sha1 manifest are ' 67 p.add_option(
68 'sensitive)') 68 "--suppress-upstream-revision",
69 p.add_option('--json', default=False, action='store_true', 69 dest="peg_rev_upstream",
70 help='output manifest in JSON format (experimental)') 70 default=True,
71 p.add_option('--pretty', default=False, action='store_true', 71 action="store_false",
72 help='format output for humans to read') 72 help="if in -r mode, do not write the upstream field "
73 p.add_option('--no-local-manifests', default=False, action='store_true', 73 "(only of use if the branch names for a sha1 manifest are "
74 dest='ignore_local_manifests', help='ignore local manifests') 74 "sensitive)",
75 p.add_option('-o', '--output-file', 75 )
76 dest='output_file', 76 p.add_option(
77 default='-', 77 "--suppress-dest-branch",
78 help='file to save the manifest to. (Filename prefix for multi-tree.)', 78 dest="peg_rev_dest_branch",
79 metavar='-|NAME.xml') 79 default=True,
80 80 action="store_false",
81 def _Output(self, opt): 81 help="if in -r mode, do not write the dest-branch field "
82 # If alternate manifest is specified, override the manifest file that we're using. 82 "(only of use if the branch names for a sha1 manifest are "
83 if opt.manifest_name: 83 "sensitive)",
84 self.manifest.Override(opt.manifest_name, False) 84 )
85 85 p.add_option(
86 for manifest in self.ManifestList(opt): 86 "--json",
87 output_file = opt.output_file 87 default=False,
88 if output_file == '-': 88 action="store_true",
89 fd = sys.stdout 89 help="output manifest in JSON format (experimental)",
90 else: 90 )
91 if manifest.path_prefix: 91 p.add_option(
92 output_file = f'{opt.output_file}:{manifest.path_prefix.replace("/", "%2f")}' 92 "--pretty",
93 fd = open(output_file, 'w') 93 default=False,
94 94 action="store_true",
95 manifest.SetUseLocalManifests(not opt.ignore_local_manifests) 95 help="format output for humans to read",
96 96 )
97 if opt.json: 97 p.add_option(
98 print('warning: --json is experimental!', file=sys.stderr) 98 "--no-local-manifests",
99 doc = manifest.ToDict(peg_rev=opt.peg_rev, 99 default=False,
100 peg_rev_upstream=opt.peg_rev_upstream, 100 action="store_true",
101 peg_rev_dest_branch=opt.peg_rev_dest_branch) 101 dest="ignore_local_manifests",
102 102 help="ignore local manifests",
103 json_settings = { 103 )
104 # JSON style guide says Uunicode characters are fully allowed. 104 p.add_option(
105 'ensure_ascii': False, 105 "-o",
106 # We use 2 space indent to match JSON style guide. 106 "--output-file",
107 'indent': 2 if opt.pretty else None, 107 dest="output_file",
108 'separators': (',', ': ') if opt.pretty else (',', ':'), 108 default="-",
109 'sort_keys': True, 109 help="file to save the manifest to. (Filename prefix for "
110 } 110 "multi-tree.)",
111 fd.write(json.dumps(doc, **json_settings)) 111 metavar="-|NAME.xml",
112 else: 112 )
113 manifest.Save(fd, 113
114 peg_rev=opt.peg_rev, 114 def _Output(self, opt):
115 peg_rev_upstream=opt.peg_rev_upstream, 115 # If alternate manifest is specified, override the manifest file that
116 peg_rev_dest_branch=opt.peg_rev_dest_branch) 116 # we're using.
117 if output_file != '-': 117 if opt.manifest_name:
118 fd.close() 118 self.manifest.Override(opt.manifest_name, False)
119 if manifest.path_prefix: 119
120 print(f'Saved {manifest.path_prefix} submanifest to {output_file}', 120 for manifest in self.ManifestList(opt):
121 file=sys.stderr) 121 output_file = opt.output_file
122 else: 122 if output_file == "-":
123 print(f'Saved manifest to {output_file}', file=sys.stderr) 123 fd = sys.stdout
124 124 else:
125 125 if manifest.path_prefix:
126 def ValidateOptions(self, opt, args): 126 output_file = (
127 if args: 127 f"{opt.output_file}:"
128 self.Usage() 128 f'{manifest.path_prefix.replace("/", "%2f")}'
129 129 )
130 def Execute(self, opt, args): 130 fd = open(output_file, "w")
131 self._Output(opt) 131
132 manifest.SetUseLocalManifests(not opt.ignore_local_manifests)
133
134 if opt.json:
135 print("warning: --json is experimental!", file=sys.stderr)
136 doc = manifest.ToDict(
137 peg_rev=opt.peg_rev,
138 peg_rev_upstream=opt.peg_rev_upstream,
139 peg_rev_dest_branch=opt.peg_rev_dest_branch,
140 )
141
142 json_settings = {
143 # JSON style guide says Unicode characters are fully
144 # allowed.
145 "ensure_ascii": False,
146 # We use 2 space indent to match JSON style guide.
147 "indent": 2 if opt.pretty else None,
148 "separators": (",", ": ") if opt.pretty else (",", ":"),
149 "sort_keys": True,
150 }
151 fd.write(json.dumps(doc, **json_settings))
152 else:
153 manifest.Save(
154 fd,
155 peg_rev=opt.peg_rev,
156 peg_rev_upstream=opt.peg_rev_upstream,
157 peg_rev_dest_branch=opt.peg_rev_dest_branch,
158 )
159 if output_file != "-":
160 fd.close()
161 if manifest.path_prefix:
162 print(
163 f"Saved {manifest.path_prefix} submanifest to "
164 f"{output_file}",
165 file=sys.stderr,
166 )
167 else:
168 print(f"Saved manifest to {output_file}", file=sys.stderr)
169
170 def ValidateOptions(self, opt, args):
171 if args:
172 self.Usage()
173
174 def Execute(self, opt, args):
175 self._Output(opt)
diff --git a/subcmds/overview.py b/subcmds/overview.py
index 11dba95f..8ccad611 100644
--- a/subcmds/overview.py
+++ b/subcmds/overview.py
@@ -19,12 +19,12 @@ from command import PagedCommand
19 19
20 20
21class Overview(PagedCommand): 21class Overview(PagedCommand):
22 COMMON = True 22 COMMON = True
23 helpSummary = "Display overview of unmerged project branches" 23 helpSummary = "Display overview of unmerged project branches"
24 helpUsage = """ 24 helpUsage = """
25%prog [--current-branch] [<project>...] 25%prog [--current-branch] [<project>...]
26""" 26"""
27 helpDescription = """ 27 helpDescription = """
28The '%prog' command is used to display an overview of the projects branches, 28The '%prog' command is used to display an overview of the projects branches,
29and list any local commits that have not yet been merged into the project. 29and list any local commits that have not yet been merged into the project.
30 30
@@ -33,59 +33,77 @@ branches currently checked out in each project. By default, all branches
33are displayed. 33are displayed.
34""" 34"""
35 35
36 def _Options(self, p): 36 def _Options(self, p):
37 p.add_option('-c', '--current-branch', 37 p.add_option(
38 dest="current_branch", action="store_true", 38 "-c",
39 help="consider only checked out branches") 39 "--current-branch",
40 p.add_option('--no-current-branch', 40 dest="current_branch",
41 dest='current_branch', action='store_false', 41 action="store_true",
42 help='consider all local branches') 42 help="consider only checked out branches",
43 # Turn this into a warning & remove this someday. 43 )
44 p.add_option('-b', 44 p.add_option(
45 dest='current_branch', action='store_true', 45 "--no-current-branch",
46 help=optparse.SUPPRESS_HELP) 46 dest="current_branch",
47 action="store_false",
48 help="consider all local branches",
49 )
50 # Turn this into a warning & remove this someday.
51 p.add_option(
52 "-b",
53 dest="current_branch",
54 action="store_true",
55 help=optparse.SUPPRESS_HELP,
56 )
47 57
48 def Execute(self, opt, args): 58 def Execute(self, opt, args):
49 all_branches = [] 59 all_branches = []
50 for project in self.GetProjects(args, all_manifests=not opt.this_manifest_only): 60 for project in self.GetProjects(
51 br = [project.GetUploadableBranch(x) 61 args, all_manifests=not opt.this_manifest_only
52 for x in project.GetBranches()] 62 ):
53 br = [x for x in br if x] 63 br = [project.GetUploadableBranch(x) for x in project.GetBranches()]
54 if opt.current_branch: 64 br = [x for x in br if x]
55 br = [x for x in br if x.name == project.CurrentBranch] 65 if opt.current_branch:
56 all_branches.extend(br) 66 br = [x for x in br if x.name == project.CurrentBranch]
67 all_branches.extend(br)
57 68
58 if not all_branches: 69 if not all_branches:
59 return 70 return
60 71
61 class Report(Coloring): 72 class Report(Coloring):
62 def __init__(self, config): 73 def __init__(self, config):
63 Coloring.__init__(self, config, 'status') 74 Coloring.__init__(self, config, "status")
64 self.project = self.printer('header', attr='bold') 75 self.project = self.printer("header", attr="bold")
65 self.text = self.printer('text') 76 self.text = self.printer("text")
66 77
67 out = Report(all_branches[0].project.config) 78 out = Report(all_branches[0].project.config)
68 out.text("Deprecated. See repo info -o.") 79 out.text("Deprecated. See repo info -o.")
69 out.nl()
70 out.project('Projects Overview')
71 out.nl()
72
73 project = None
74
75 for branch in all_branches:
76 if project != branch.project:
77 project = branch.project
78 out.nl() 80 out.nl()
79 out.project('project %s/' % project.RelPath(local=opt.this_manifest_only)) 81 out.project("Projects Overview")
80 out.nl() 82 out.nl()
81 83
82 commits = branch.commits 84 project = None
83 date = branch.date 85
84 print('%s %-33s (%2d commit%s, %s)' % ( 86 for branch in all_branches:
85 branch.name == project.CurrentBranch and '*' or ' ', 87 if project != branch.project:
86 branch.name, 88 project = branch.project
87 len(commits), 89 out.nl()
88 len(commits) != 1 and 's' or ' ', 90 out.project(
89 date)) 91 "project %s/"
90 for commit in commits: 92 % project.RelPath(local=opt.this_manifest_only)
91 print('%-35s - %s' % ('', commit)) 93 )
94 out.nl()
95
96 commits = branch.commits
97 date = branch.date
98 print(
99 "%s %-33s (%2d commit%s, %s)"
100 % (
101 branch.name == project.CurrentBranch and "*" or " ",
102 branch.name,
103 len(commits),
104 len(commits) != 1 and "s" or " ",
105 date,
106 )
107 )
108 for commit in commits:
109 print("%-35s - %s" % ("", commit))
diff --git a/subcmds/prune.py b/subcmds/prune.py
index 251accaa..5a68c14a 100644
--- a/subcmds/prune.py
+++ b/subcmds/prune.py
@@ -19,63 +19,76 @@ from command import DEFAULT_LOCAL_JOBS, PagedCommand
19 19
20 20
21class Prune(PagedCommand): 21class Prune(PagedCommand):
22 COMMON = True 22 COMMON = True
23 helpSummary = "Prune (delete) already merged topics" 23 helpSummary = "Prune (delete) already merged topics"
24 helpUsage = """ 24 helpUsage = """
25%prog [<project>...] 25%prog [<project>...]
26""" 26"""
27 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS 27 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
28 28
29 def _ExecuteOne(self, project): 29 def _ExecuteOne(self, project):
30 """Process one project.""" 30 """Process one project."""
31 return project.PruneHeads() 31 return project.PruneHeads()
32 32
33 def Execute(self, opt, args): 33 def Execute(self, opt, args):
34 projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only) 34 projects = self.GetProjects(
35 args, all_manifests=not opt.this_manifest_only
36 )
35 37
36 # NB: Should be able to refactor this module to display summary as results 38 # NB: Should be able to refactor this module to display summary as
37 # come back from children. 39 # results come back from children.
38 def _ProcessResults(_pool, _output, results): 40 def _ProcessResults(_pool, _output, results):
39 return list(itertools.chain.from_iterable(results)) 41 return list(itertools.chain.from_iterable(results))
40 42
41 all_branches = self.ExecuteInParallel( 43 all_branches = self.ExecuteInParallel(
42 opt.jobs, 44 opt.jobs,
43 self._ExecuteOne, 45 self._ExecuteOne,
44 projects, 46 projects,
45 callback=_ProcessResults, 47 callback=_ProcessResults,
46 ordered=True) 48 ordered=True,
49 )
47 50
48 if not all_branches: 51 if not all_branches:
49 return 52 return
50 53
51 class Report(Coloring): 54 class Report(Coloring):
52 def __init__(self, config): 55 def __init__(self, config):
53 Coloring.__init__(self, config, 'status') 56 Coloring.__init__(self, config, "status")
54 self.project = self.printer('header', attr='bold') 57 self.project = self.printer("header", attr="bold")
55 58
56 out = Report(all_branches[0].project.config) 59 out = Report(all_branches[0].project.config)
57 out.project('Pending Branches') 60 out.project("Pending Branches")
58 out.nl() 61 out.nl()
59 62
60 project = None 63 project = None
61 64
62 for branch in all_branches: 65 for branch in all_branches:
63 if project != branch.project: 66 if project != branch.project:
64 project = branch.project 67 project = branch.project
65 out.nl() 68 out.nl()
66 out.project('project %s/' % project.RelPath(local=opt.this_manifest_only)) 69 out.project(
67 out.nl() 70 "project %s/"
71 % project.RelPath(local=opt.this_manifest_only)
72 )
73 out.nl()
68 74
69 print('%s %-33s ' % ( 75 print(
70 branch.name == project.CurrentBranch and '*' or ' ', 76 "%s %-33s "
71 branch.name), end='') 77 % (
78 branch.name == project.CurrentBranch and "*" or " ",
79 branch.name,
80 ),
81 end="",
82 )
72 83
73 if not branch.base_exists: 84 if not branch.base_exists:
74 print('(ignoring: tracking branch is gone: %s)' % (branch.base,)) 85 print(
75 else: 86 "(ignoring: tracking branch is gone: %s)" % (branch.base,)
76 commits = branch.commits 87 )
77 date = branch.date 88 else:
78 print('(%2d commit%s, %s)' % ( 89 commits = branch.commits
79 len(commits), 90 date = branch.date
80 len(commits) != 1 and 's' or ' ', 91 print(
81 date)) 92 "(%2d commit%s, %s)"
93 % (len(commits), len(commits) != 1 and "s" or " ", date)
94 )
diff --git a/subcmds/rebase.py b/subcmds/rebase.py
index 3d1a63e4..dc4f5805 100644
--- a/subcmds/rebase.py
+++ b/subcmds/rebase.py
@@ -20,146 +20,193 @@ from git_command import GitCommand
20 20
21 21
22class RebaseColoring(Coloring): 22class RebaseColoring(Coloring):
23 def __init__(self, config): 23 def __init__(self, config):
24 Coloring.__init__(self, config, 'rebase') 24 Coloring.__init__(self, config, "rebase")
25 self.project = self.printer('project', attr='bold') 25 self.project = self.printer("project", attr="bold")
26 self.fail = self.printer('fail', fg='red') 26 self.fail = self.printer("fail", fg="red")
27 27
28 28
29class Rebase(Command): 29class Rebase(Command):
30 COMMON = True 30 COMMON = True
31 helpSummary = "Rebase local branches on upstream branch" 31 helpSummary = "Rebase local branches on upstream branch"
32 helpUsage = """ 32 helpUsage = """
33%prog {[<project>...] | -i <project>...} 33%prog {[<project>...] | -i <project>...}
34""" 34"""
35 helpDescription = """ 35 helpDescription = """
36'%prog' uses git rebase to move local changes in the current topic branch to 36'%prog' uses git rebase to move local changes in the current topic branch to
37the HEAD of the upstream history, useful when you have made commits in a topic 37the HEAD of the upstream history, useful when you have made commits in a topic
38branch but need to incorporate new upstream changes "underneath" them. 38branch but need to incorporate new upstream changes "underneath" them.
39""" 39"""
40 40
41 def _Options(self, p): 41 def _Options(self, p):
42 g = p.get_option_group('--quiet') 42 g = p.get_option_group("--quiet")
43 g.add_option('-i', '--interactive', 43 g.add_option(
44 dest="interactive", action="store_true", 44 "-i",
45 help="interactive rebase (single project only)") 45 "--interactive",
46 46 dest="interactive",
47 p.add_option('--fail-fast', 47 action="store_true",
48 dest='fail_fast', action='store_true', 48 help="interactive rebase (single project only)",
49 help='stop rebasing after first error is hit') 49 )
50 p.add_option('-f', '--force-rebase', 50
51 dest='force_rebase', action='store_true', 51 p.add_option(
52 help='pass --force-rebase to git rebase') 52 "--fail-fast",
53 p.add_option('--no-ff', 53 dest="fail_fast",
54 dest='ff', default=True, action='store_false', 54 action="store_true",
55 help='pass --no-ff to git rebase') 55 help="stop rebasing after first error is hit",
56 p.add_option('--autosquash', 56 )
57 dest='autosquash', action='store_true', 57 p.add_option(
58 help='pass --autosquash to git rebase') 58 "-f",
59 p.add_option('--whitespace', 59 "--force-rebase",
60 dest='whitespace', action='store', metavar='WS', 60 dest="force_rebase",
61 help='pass --whitespace to git rebase') 61 action="store_true",
62 p.add_option('--auto-stash', 62 help="pass --force-rebase to git rebase",
63 dest='auto_stash', action='store_true', 63 )
64 help='stash local modifications before starting') 64 p.add_option(
65 p.add_option('-m', '--onto-manifest', 65 "--no-ff",
66 dest='onto_manifest', action='store_true', 66 dest="ff",
67 help='rebase onto the manifest version instead of upstream ' 67 default=True,
68 'HEAD (this helps to make sure the local tree stays ' 68 action="store_false",
69 'consistent if you previously synced to a manifest)') 69 help="pass --no-ff to git rebase",
70 70 )
71 def Execute(self, opt, args): 71 p.add_option(
72 all_projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only) 72 "--autosquash",
73 one_project = len(all_projects) == 1 73 dest="autosquash",
74 74 action="store_true",
75 if opt.interactive and not one_project: 75 help="pass --autosquash to git rebase",
76 print('error: interactive rebase not supported with multiple projects', 76 )
77 file=sys.stderr) 77 p.add_option(
78 if len(args) == 1: 78 "--whitespace",
79 print('note: project %s is mapped to more than one path' % (args[0],), 79 dest="whitespace",
80 file=sys.stderr) 80 action="store",
81 return 1 81 metavar="WS",
82 82 help="pass --whitespace to git rebase",
83 # Setup the common git rebase args that we use for all projects. 83 )
84 common_args = ['rebase'] 84 p.add_option(
85 if opt.whitespace: 85 "--auto-stash",
86 common_args.append('--whitespace=%s' % opt.whitespace) 86 dest="auto_stash",
87 if opt.quiet: 87 action="store_true",
88 common_args.append('--quiet') 88 help="stash local modifications before starting",
89 if opt.force_rebase: 89 )
90 common_args.append('--force-rebase') 90 p.add_option(
91 if not opt.ff: 91 "-m",
92 common_args.append('--no-ff') 92 "--onto-manifest",
93 if opt.autosquash: 93 dest="onto_manifest",
94 common_args.append('--autosquash') 94 action="store_true",
95 if opt.interactive: 95 help="rebase onto the manifest version instead of upstream "
96 common_args.append('-i') 96 "HEAD (this helps to make sure the local tree stays "
97 97 "consistent if you previously synced to a manifest)",
98 config = self.manifest.manifestProject.config 98 )
99 out = RebaseColoring(config) 99
100 out.redirect(sys.stdout) 100 def Execute(self, opt, args):
101 _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only) 101 all_projects = self.GetProjects(
102 102 args, all_manifests=not opt.this_manifest_only
103 ret = 0 103 )
104 for project in all_projects: 104 one_project = len(all_projects) == 1
105 if ret and opt.fail_fast: 105
106 break 106 if opt.interactive and not one_project:
107 107 print(
108 cb = project.CurrentBranch 108 "error: interactive rebase not supported with multiple "
109 if not cb: 109 "projects",
110 if one_project: 110 file=sys.stderr,
111 print("error: project %s has a detached HEAD" % _RelPath(project), 111 )
112 file=sys.stderr) 112 if len(args) == 1:
113 return 1 113 print(
114 # ignore branches with detatched HEADs 114 "note: project %s is mapped to more than one path"
115 continue 115 % (args[0],),
116 116 file=sys.stderr,
117 upbranch = project.GetBranch(cb) 117 )
118 if not upbranch.LocalMerge: 118 return 1
119 if one_project: 119
120 print("error: project %s does not track any remote branches" 120 # Setup the common git rebase args that we use for all projects.
121 % _RelPath(project), file=sys.stderr) 121 common_args = ["rebase"]
122 return 1 122 if opt.whitespace:
123 # ignore branches without remotes 123 common_args.append("--whitespace=%s" % opt.whitespace)
124 continue 124 if opt.quiet:
125 125 common_args.append("--quiet")
126 args = common_args[:] 126 if opt.force_rebase:
127 if opt.onto_manifest: 127 common_args.append("--force-rebase")
128 args.append('--onto') 128 if not opt.ff:
129 args.append(project.revisionExpr) 129 common_args.append("--no-ff")
130 130 if opt.autosquash:
131 args.append(upbranch.LocalMerge) 131 common_args.append("--autosquash")
132 132 if opt.interactive:
133 out.project('project %s: rebasing %s -> %s', 133 common_args.append("-i")
134 _RelPath(project), cb, upbranch.LocalMerge) 134
135 out.nl() 135 config = self.manifest.manifestProject.config
136 out.flush() 136 out = RebaseColoring(config)
137 137 out.redirect(sys.stdout)
138 needs_stash = False 138 _RelPath = lambda p: p.RelPath(local=opt.this_manifest_only)
139 if opt.auto_stash: 139
140 stash_args = ["update-index", "--refresh", "-q"] 140 ret = 0
141 141 for project in all_projects:
142 if GitCommand(project, stash_args).Wait() != 0: 142 if ret and opt.fail_fast:
143 needs_stash = True 143 break
144 # Dirty index, requires stash... 144
145 stash_args = ["stash"] 145 cb = project.CurrentBranch
146 146 if not cb:
147 if GitCommand(project, stash_args).Wait() != 0: 147 if one_project:
148 ret += 1 148 print(
149 continue 149 "error: project %s has a detached HEAD"
150 150 % _RelPath(project),
151 if GitCommand(project, args).Wait() != 0: 151 file=sys.stderr,
152 ret += 1 152 )
153 continue 153 return 1
154 154 # Ignore branches with detached HEADs.
155 if needs_stash: 155 continue
156 stash_args.append('pop') 156
157 stash_args.append('--quiet') 157 upbranch = project.GetBranch(cb)
158 if GitCommand(project, stash_args).Wait() != 0: 158 if not upbranch.LocalMerge:
159 ret += 1 159 if one_project:
160 160 print(
161 if ret: 161 "error: project %s does not track any remote branches"
162 out.fail('%i projects had errors', ret) 162 % _RelPath(project),
163 out.nl() 163 file=sys.stderr,
164 164 )
165 return ret 165 return 1
166 # Ignore branches without remotes.
167 continue
168
169 args = common_args[:]
170 if opt.onto_manifest:
171 args.append("--onto")
172 args.append(project.revisionExpr)
173
174 args.append(upbranch.LocalMerge)
175
176 out.project(
177 "project %s: rebasing %s -> %s",
178 _RelPath(project),
179 cb,
180 upbranch.LocalMerge,
181 )
182 out.nl()
183 out.flush()
184
185 needs_stash = False
186 if opt.auto_stash:
187 stash_args = ["update-index", "--refresh", "-q"]
188
189 if GitCommand(project, stash_args).Wait() != 0:
190 needs_stash = True
191 # Dirty index, requires stash...
192 stash_args = ["stash"]
193
194 if GitCommand(project, stash_args).Wait() != 0:
195 ret += 1
196 continue
197
198 if GitCommand(project, args).Wait() != 0:
199 ret += 1
200 continue
201
202 if needs_stash:
203 stash_args.append("pop")
204 stash_args.append("--quiet")
205 if GitCommand(project, stash_args).Wait() != 0:
206 ret += 1
207
208 if ret:
209 out.fail("%i projects had errors", ret)
210 out.nl()
211
212 return ret
diff --git a/subcmds/selfupdate.py b/subcmds/selfupdate.py
index 898bc3f2..d5d0a838 100644
--- a/subcmds/selfupdate.py
+++ b/subcmds/selfupdate.py
@@ -21,12 +21,12 @@ from subcmds.sync import _PostRepoFetch
21 21
22 22
23class Selfupdate(Command, MirrorSafeCommand): 23class Selfupdate(Command, MirrorSafeCommand):
24 COMMON = False 24 COMMON = False
25 helpSummary = "Update repo to the latest version" 25 helpSummary = "Update repo to the latest version"
26 helpUsage = """ 26 helpUsage = """
27%prog 27%prog
28""" 28"""
29 helpDescription = """ 29 helpDescription = """
30The '%prog' command upgrades repo to the latest version, if a 30The '%prog' command upgrades repo to the latest version, if a
31newer version is available. 31newer version is available.
32 32
@@ -34,28 +34,33 @@ Normally this is done automatically by 'repo sync' and does not
34need to be performed by an end-user. 34need to be performed by an end-user.
35""" 35"""
36 36
37 def _Options(self, p): 37 def _Options(self, p):
38 g = p.add_option_group('repo Version options') 38 g = p.add_option_group("repo Version options")
39 g.add_option('--no-repo-verify', 39 g.add_option(
40 dest='repo_verify', default=True, action='store_false', 40 "--no-repo-verify",
41 help='do not verify repo source code') 41 dest="repo_verify",
42 g.add_option('--repo-upgraded', 42 default=True,
43 dest='repo_upgraded', action='store_true', 43 action="store_false",
44 help=SUPPRESS_HELP) 44 help="do not verify repo source code",
45 45 )
46 def Execute(self, opt, args): 46 g.add_option(
47 rp = self.manifest.repoProject 47 "--repo-upgraded",
48 rp.PreSync() 48 dest="repo_upgraded",
49 49 action="store_true",
50 if opt.repo_upgraded: 50 help=SUPPRESS_HELP,
51 _PostRepoUpgrade(self.manifest) 51 )
52 52
53 else: 53 def Execute(self, opt, args):
54 if not rp.Sync_NetworkHalf().success: 54 rp = self.manifest.repoProject
55 print("error: can't update repo", file=sys.stderr) 55 rp.PreSync()
56 sys.exit(1) 56
57 57 if opt.repo_upgraded:
58 rp.bare_git.gc('--auto') 58 _PostRepoUpgrade(self.manifest)
59 _PostRepoFetch(rp, 59
60 repo_verify=opt.repo_verify, 60 else:
61 verbose=True) 61 if not rp.Sync_NetworkHalf().success:
62 print("error: can't update repo", file=sys.stderr)
63 sys.exit(1)
64
65 rp.bare_git.gc("--auto")
66 _PostRepoFetch(rp, repo_verify=opt.repo_verify, verbose=True)
diff --git a/subcmds/smartsync.py b/subcmds/smartsync.py
index d91d59c6..49d09972 100644
--- a/subcmds/smartsync.py
+++ b/subcmds/smartsync.py
@@ -16,18 +16,18 @@ from subcmds.sync import Sync
16 16
17 17
18class Smartsync(Sync): 18class Smartsync(Sync):
19 COMMON = True 19 COMMON = True
20 helpSummary = "Update working tree to the latest known good revision" 20 helpSummary = "Update working tree to the latest known good revision"
21 helpUsage = """ 21 helpUsage = """
22%prog [<project>...] 22%prog [<project>...]
23""" 23"""
24 helpDescription = """ 24 helpDescription = """
25The '%prog' command is a shortcut for sync -s. 25The '%prog' command is a shortcut for sync -s.
26""" 26"""
27 27
28 def _Options(self, p): 28 def _Options(self, p):
29 Sync._Options(self, p, show_smart=False) 29 Sync._Options(self, p, show_smart=False)
30 30
31 def Execute(self, opt, args): 31 def Execute(self, opt, args):
32 opt.smart_sync = True 32 opt.smart_sync = True
33 Sync.Execute(self, opt, args) 33 Sync.Execute(self, opt, args)
diff --git a/subcmds/stage.py b/subcmds/stage.py
index bdb72012..4d54eb19 100644
--- a/subcmds/stage.py
+++ b/subcmds/stage.py
@@ -20,98 +20,111 @@ from git_command import GitCommand
20 20
21 21
22class _ProjectList(Coloring): 22class _ProjectList(Coloring):
23 def __init__(self, gc): 23 def __init__(self, gc):
24 Coloring.__init__(self, gc, 'interactive') 24 Coloring.__init__(self, gc, "interactive")
25 self.prompt = self.printer('prompt', fg='blue', attr='bold') 25 self.prompt = self.printer("prompt", fg="blue", attr="bold")
26 self.header = self.printer('header', attr='bold') 26 self.header = self.printer("header", attr="bold")
27 self.help = self.printer('help', fg='red', attr='bold') 27 self.help = self.printer("help", fg="red", attr="bold")
28 28
29 29
30class Stage(InteractiveCommand): 30class Stage(InteractiveCommand):
31 COMMON = True 31 COMMON = True
32 helpSummary = "Stage file(s) for commit" 32 helpSummary = "Stage file(s) for commit"
33 helpUsage = """ 33 helpUsage = """
34%prog -i [<project>...] 34%prog -i [<project>...]
35""" 35"""
36 helpDescription = """ 36 helpDescription = """
37The '%prog' command stages files to prepare the next commit. 37The '%prog' command stages files to prepare the next commit.
38""" 38"""
39 39
40 def _Options(self, p): 40 def _Options(self, p):
41 g = p.get_option_group('--quiet') 41 g = p.get_option_group("--quiet")
42 g.add_option('-i', '--interactive', 42 g.add_option(
43 dest='interactive', action='store_true', 43 "-i",
44 help='use interactive staging') 44 "--interactive",
45 45 dest="interactive",
46 def Execute(self, opt, args): 46 action="store_true",
47 if opt.interactive: 47 help="use interactive staging",
48 self._Interactive(opt, args) 48 )
49 else: 49
50 self.Usage() 50 def Execute(self, opt, args):
51 51 if opt.interactive:
52 def _Interactive(self, opt, args): 52 self._Interactive(opt, args)
53 all_projects = [ 53 else:
54 p for p in self.GetProjects(args, all_manifests=not opt.this_manifest_only) 54 self.Usage()
55 if p.IsDirty()] 55
56 if not all_projects: 56 def _Interactive(self, opt, args):
57 print('no projects have uncommitted modifications', file=sys.stderr) 57 all_projects = [
58 return 58 p
59 59 for p in self.GetProjects(
60 out = _ProjectList(self.manifest.manifestProject.config) 60 args, all_manifests=not opt.this_manifest_only
61 while True: 61 )
62 out.header(' %s', 'project') 62 if p.IsDirty()
63 out.nl() 63 ]
64 64 if not all_projects:
65 for i in range(len(all_projects)): 65 print("no projects have uncommitted modifications", file=sys.stderr)
66 project = all_projects[i] 66 return
67 out.write('%3d: %s', i + 1, 67
68 project.RelPath(local=opt.this_manifest_only) + '/') 68 out = _ProjectList(self.manifest.manifestProject.config)
69 out.nl() 69 while True:
70 out.nl() 70 out.header(" %s", "project")
71 71 out.nl()
72 out.write('%3d: (', 0) 72
73 out.prompt('q') 73 for i in range(len(all_projects)):
74 out.write('uit)') 74 project = all_projects[i]
75 out.nl() 75 out.write(
76 76 "%3d: %s",
77 out.prompt('project> ') 77 i + 1,
78 out.flush() 78 project.RelPath(local=opt.this_manifest_only) + "/",
79 try: 79 )
80 a = sys.stdin.readline() 80 out.nl()
81 except KeyboardInterrupt: 81 out.nl()
82 out.nl() 82
83 break 83 out.write("%3d: (", 0)
84 if a == '': 84 out.prompt("q")
85 out.nl() 85 out.write("uit)")
86 break 86 out.nl()
87 87
88 a = a.strip() 88 out.prompt("project> ")
89 if a.lower() in ('q', 'quit', 'exit'): 89 out.flush()
90 break 90 try:
91 if not a: 91 a = sys.stdin.readline()
92 continue 92 except KeyboardInterrupt:
93 93 out.nl()
94 try: 94 break
95 a_index = int(a) 95 if a == "":
96 except ValueError: 96 out.nl()
97 a_index = None 97 break
98 98
99 if a_index is not None: 99 a = a.strip()
100 if a_index == 0: 100 if a.lower() in ("q", "quit", "exit"):
101 break 101 break
102 if 0 < a_index and a_index <= len(all_projects): 102 if not a:
103 _AddI(all_projects[a_index - 1]) 103 continue
104 continue 104
105 105 try:
106 projects = [ 106 a_index = int(a)
107 p for p in all_projects 107 except ValueError:
108 if a in [p.name, p.RelPath(local=opt.this_manifest_only)]] 108 a_index = None
109 if len(projects) == 1: 109
110 _AddI(projects[0]) 110 if a_index is not None:
111 continue 111 if a_index == 0:
112 print('Bye.') 112 break
113 if 0 < a_index and a_index <= len(all_projects):
114 _AddI(all_projects[a_index - 1])
115 continue
116
117 projects = [
118 p
119 for p in all_projects
120 if a in [p.name, p.RelPath(local=opt.this_manifest_only)]
121 ]
122 if len(projects) == 1:
123 _AddI(projects[0])
124 continue
125 print("Bye.")
113 126
114 127
115def _AddI(project): 128def _AddI(project):
116 p = GitCommand(project, ['add', '--interactive'], bare=False) 129 p = GitCommand(project, ["add", "--interactive"], bare=False)
117 p.Wait() 130 p.Wait()
diff --git a/subcmds/start.py b/subcmds/start.py
index 809df963..d7772b33 100644
--- a/subcmds/start.py
+++ b/subcmds/start.py
@@ -25,119 +25,147 @@ from project import SyncBuffer
25 25
26 26
27class Start(Command): 27class Start(Command):
28 COMMON = True 28 COMMON = True
29 helpSummary = "Start a new branch for development" 29 helpSummary = "Start a new branch for development"
30 helpUsage = """ 30 helpUsage = """
31%prog <newbranchname> [--all | <project>...] 31%prog <newbranchname> [--all | <project>...]
32""" 32"""
33 helpDescription = """ 33 helpDescription = """
34'%prog' begins a new branch of development, starting from the 34'%prog' begins a new branch of development, starting from the
35revision specified in the manifest. 35revision specified in the manifest.
36""" 36"""
37 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS 37 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
38 38
39 def _Options(self, p): 39 def _Options(self, p):
40 p.add_option('--all', 40 p.add_option(
41 dest='all', action='store_true', 41 "--all",
42 help='begin branch in all projects') 42 dest="all",
43 p.add_option('-r', '--rev', '--revision', dest='revision', 43 action="store_true",
44 help='point branch at this revision instead of upstream') 44 help="begin branch in all projects",
45 p.add_option('--head', '--HEAD', 45 )
46 dest='revision', action='store_const', const='HEAD', 46 p.add_option(
47 help='abbreviation for --rev HEAD') 47 "-r",
48 48 "--rev",
49 def ValidateOptions(self, opt, args): 49 "--revision",
50 if not args: 50 dest="revision",
51 self.Usage() 51 help="point branch at this revision instead of upstream",
52 52 )
53 nb = args[0] 53 p.add_option(
54 if not git.check_ref_format('heads/%s' % nb): 54 "--head",
55 self.OptionParser.error("'%s' is not a valid name" % nb) 55 "--HEAD",
56 56 dest="revision",
57 def _ExecuteOne(self, revision, nb, project): 57 action="store_const",
58 """Start one project.""" 58 const="HEAD",
59 # If the current revision is immutable, such as a SHA1, a tag or 59 help="abbreviation for --rev HEAD",
60 # a change, then we can't push back to it. Substitute with 60 )
61 # dest_branch, if defined; or with manifest default revision instead. 61
62 branch_merge = '' 62 def ValidateOptions(self, opt, args):
63 if IsImmutable(project.revisionExpr): 63 if not args:
64 if project.dest_branch: 64 self.Usage()
65 branch_merge = project.dest_branch 65
66 else: 66 nb = args[0]
67 branch_merge = self.manifest.default.revisionExpr 67 if not git.check_ref_format("heads/%s" % nb):
68 68 self.OptionParser.error("'%s' is not a valid name" % nb)
69 try: 69
70 ret = project.StartBranch( 70 def _ExecuteOne(self, revision, nb, project):
71 nb, branch_merge=branch_merge, revision=revision) 71 """Start one project."""
72 except Exception as e: 72 # If the current revision is immutable, such as a SHA1, a tag or
73 print('error: unable to checkout %s: %s' % (project.name, e), file=sys.stderr) 73 # a change, then we can't push back to it. Substitute with
74 ret = False 74 # dest_branch, if defined; or with manifest default revision instead.
75 return (ret, project) 75 branch_merge = ""
76 76 if IsImmutable(project.revisionExpr):
77 def Execute(self, opt, args): 77 if project.dest_branch:
78 nb = args[0] 78 branch_merge = project.dest_branch
79 err = [] 79 else:
80 projects = [] 80 branch_merge = self.manifest.default.revisionExpr
81 if not opt.all: 81
82 projects = args[1:] 82 try:
83 if len(projects) < 1: 83 ret = project.StartBranch(
84 projects = ['.'] # start it in the local project by default 84 nb, branch_merge=branch_merge, revision=revision
85 85 )
86 all_projects = self.GetProjects(projects, 86 except Exception as e:
87 missing_ok=bool(self.gitc_manifest), 87 print(
88 all_manifests=not opt.this_manifest_only) 88 "error: unable to checkout %s: %s" % (project.name, e),
89 89 file=sys.stderr,
90 # This must happen after we find all_projects, since GetProjects may need 90 )
91 # the local directory, which will disappear once we save the GITC manifest. 91 ret = False
92 if self.gitc_manifest: 92 return (ret, project)
93 gitc_projects = self.GetProjects(projects, manifest=self.gitc_manifest, 93
94 missing_ok=True) 94 def Execute(self, opt, args):
95 for project in gitc_projects: 95 nb = args[0]
96 if project.old_revision: 96 err = []
97 project.already_synced = True 97 projects = []
98 else: 98 if not opt.all:
99 project.already_synced = False 99 projects = args[1:]
100 project.old_revision = project.revisionExpr 100 if len(projects) < 1:
101 project.revisionExpr = None 101 projects = ["."] # start it in the local project by default
102 # Save the GITC manifest. 102
103 gitc_utils.save_manifest(self.gitc_manifest) 103 all_projects = self.GetProjects(
104 104 projects,
105 # Make sure we have a valid CWD 105 missing_ok=bool(self.gitc_manifest),
106 if not os.path.exists(os.getcwd()): 106 all_manifests=not opt.this_manifest_only,
107 os.chdir(self.manifest.topdir) 107 )
108 108
109 pm = Progress('Syncing %s' % nb, len(all_projects), quiet=opt.quiet) 109 # This must happen after we find all_projects, since GetProjects may
110 for project in all_projects: 110 # need the local directory, which will disappear once we save the GITC
111 gitc_project = self.gitc_manifest.paths[project.relpath] 111 # manifest.
112 # Sync projects that have not been opened. 112 if self.gitc_manifest:
113 if not gitc_project.already_synced: 113 gitc_projects = self.GetProjects(
114 proj_localdir = os.path.join(self.gitc_manifest.gitc_client_dir, 114 projects, manifest=self.gitc_manifest, missing_ok=True
115 project.relpath) 115 )
116 project.worktree = proj_localdir 116 for project in gitc_projects:
117 if not os.path.exists(proj_localdir): 117 if project.old_revision:
118 os.makedirs(proj_localdir) 118 project.already_synced = True
119 project.Sync_NetworkHalf() 119 else:
120 sync_buf = SyncBuffer(self.manifest.manifestProject.config) 120 project.already_synced = False
121 project.Sync_LocalHalf(sync_buf) 121 project.old_revision = project.revisionExpr
122 project.revisionId = gitc_project.old_revision 122 project.revisionExpr = None
123 pm.update() 123 # Save the GITC manifest.
124 pm.end() 124 gitc_utils.save_manifest(self.gitc_manifest)
125 125
126 def _ProcessResults(_pool, pm, results): 126 # Make sure we have a valid CWD.
127 for (result, project) in results: 127 if not os.path.exists(os.getcwd()):
128 if not result: 128 os.chdir(self.manifest.topdir)
129 err.append(project) 129
130 pm.update() 130 pm = Progress("Syncing %s" % nb, len(all_projects), quiet=opt.quiet)
131 131 for project in all_projects:
132 self.ExecuteInParallel( 132 gitc_project = self.gitc_manifest.paths[project.relpath]
133 opt.jobs, 133 # Sync projects that have not been opened.
134 functools.partial(self._ExecuteOne, opt.revision, nb), 134 if not gitc_project.already_synced:
135 all_projects, 135 proj_localdir = os.path.join(
136 callback=_ProcessResults, 136 self.gitc_manifest.gitc_client_dir, project.relpath
137 output=Progress('Starting %s' % (nb,), len(all_projects), quiet=opt.quiet)) 137 )
138 138 project.worktree = proj_localdir
139 if err: 139 if not os.path.exists(proj_localdir):
140 for p in err: 140 os.makedirs(proj_localdir)
141 print("error: %s/: cannot start %s" % (p.RelPath(local=opt.this_manifest_only), nb), 141 project.Sync_NetworkHalf()
142 file=sys.stderr) 142 sync_buf = SyncBuffer(self.manifest.manifestProject.config)
143 sys.exit(1) 143 project.Sync_LocalHalf(sync_buf)
144 project.revisionId = gitc_project.old_revision
145 pm.update()
146 pm.end()
147
148 def _ProcessResults(_pool, pm, results):
149 for result, project in results:
150 if not result:
151 err.append(project)
152 pm.update()
153
154 self.ExecuteInParallel(
155 opt.jobs,
156 functools.partial(self._ExecuteOne, opt.revision, nb),
157 all_projects,
158 callback=_ProcessResults,
159 output=Progress(
160 "Starting %s" % (nb,), len(all_projects), quiet=opt.quiet
161 ),
162 )
163
164 if err:
165 for p in err:
166 print(
167 "error: %s/: cannot start %s"
168 % (p.RelPath(local=opt.this_manifest_only), nb),
169 file=sys.stderr,
170 )
171 sys.exit(1)
diff --git a/subcmds/status.py b/subcmds/status.py
index 572c72f7..6e0026f9 100644
--- a/subcmds/status.py
+++ b/subcmds/status.py
@@ -24,12 +24,12 @@ import platform_utils
24 24
25 25
26class Status(PagedCommand): 26class Status(PagedCommand):
27 COMMON = True 27 COMMON = True
28 helpSummary = "Show the working tree status" 28 helpSummary = "Show the working tree status"
29 helpUsage = """ 29 helpUsage = """
30%prog [<project>...] 30%prog [<project>...]
31""" 31"""
32 helpDescription = """ 32 helpDescription = """
33'%prog' compares the working tree to the staging area (aka index), 33'%prog' compares the working tree to the staging area (aka index),
34and the most recent commit on this branch (HEAD), in each project 34and the most recent commit on this branch (HEAD), in each project
35specified. A summary is displayed, one line per file where there 35specified. A summary is displayed, one line per file where there
@@ -76,109 +76,128 @@ the following meanings:
76 d: deleted ( in index, not in work tree ) 76 d: deleted ( in index, not in work tree )
77 77
78""" 78"""
79 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS 79 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
80 80
81 def _Options(self, p): 81 def _Options(self, p):
82 p.add_option('-o', '--orphans', 82 p.add_option(
83 dest='orphans', action='store_true', 83 "-o",
84 help="include objects in working directory outside of repo projects") 84 "--orphans",
85 85 dest="orphans",
86 def _StatusHelper(self, quiet, local, project): 86 action="store_true",
87 """Obtains the status for a specific project. 87 help="include objects in working directory outside of repo "
88 88 "projects",
89 Obtains the status for a project, redirecting the output to 89 )
90 the specified object. 90
91 91 def _StatusHelper(self, quiet, local, project):
92 Args: 92 """Obtains the status for a specific project.
93 quiet: Where to output the status. 93
94 local: a boolean, if True, the path is relative to the local 94 Obtains the status for a project, redirecting the output to
95 (sub)manifest. If false, the path is relative to the 95 the specified object.
96 outermost manifest. 96
97 project: Project to get status of. 97 Args:
98 98 quiet: Where to output the status.
99 Returns: 99 local: a boolean, if True, the path is relative to the local
100 The status of the project. 100 (sub)manifest. If false, the path is relative to the outermost
101 """ 101 manifest.
102 buf = io.StringIO() 102 project: Project to get status of.
103 ret = project.PrintWorkTreeStatus(quiet=quiet, output_redir=buf, 103
104 local=local) 104 Returns:
105 return (ret, buf.getvalue()) 105 The status of the project.
106 106 """
107 def _FindOrphans(self, dirs, proj_dirs, proj_dirs_parents, outstring): 107 buf = io.StringIO()
108 """find 'dirs' that are present in 'proj_dirs_parents' but not in 'proj_dirs'""" 108 ret = project.PrintWorkTreeStatus(
109 status_header = ' --\t' 109 quiet=quiet, output_redir=buf, local=local
110 for item in dirs: 110 )
111 if not platform_utils.isdir(item): 111 return (ret, buf.getvalue())
112 outstring.append(''.join([status_header, item])) 112
113 continue 113 def _FindOrphans(self, dirs, proj_dirs, proj_dirs_parents, outstring):
114 if item in proj_dirs: 114 """find 'dirs' that are present in 'proj_dirs_parents' but not in 'proj_dirs'""" # noqa: E501
115 continue 115 status_header = " --\t"
116 if item in proj_dirs_parents: 116 for item in dirs:
117 self._FindOrphans(glob.glob('%s/.*' % item) + 117 if not platform_utils.isdir(item):
118 glob.glob('%s/*' % item), 118 outstring.append("".join([status_header, item]))
119 proj_dirs, proj_dirs_parents, outstring) 119 continue
120 continue 120 if item in proj_dirs:
121 outstring.append(''.join([status_header, item, '/'])) 121 continue
122 122 if item in proj_dirs_parents:
123 def Execute(self, opt, args): 123 self._FindOrphans(
124 all_projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only) 124 glob.glob("%s/.*" % item) + glob.glob("%s/*" % item),
125 125 proj_dirs,
126 def _ProcessResults(_pool, _output, results): 126 proj_dirs_parents,
127 ret = 0 127 outstring,
128 for (state, output) in results: 128 )
129 if output: 129 continue
130 print(output, end='') 130 outstring.append("".join([status_header, item, "/"]))
131 if state == 'CLEAN': 131
132 ret += 1 132 def Execute(self, opt, args):
133 return ret 133 all_projects = self.GetProjects(
134 134 args, all_manifests=not opt.this_manifest_only
135 counter = self.ExecuteInParallel( 135 )
136 opt.jobs, 136
137 functools.partial(self._StatusHelper, opt.quiet, opt.this_manifest_only), 137 def _ProcessResults(_pool, _output, results):
138 all_projects, 138 ret = 0
139 callback=_ProcessResults, 139 for state, output in results:
140 ordered=True) 140 if output:
141 141 print(output, end="")
142 if not opt.quiet and len(all_projects) == counter: 142 if state == "CLEAN":
143 print('nothing to commit (working directory clean)') 143 ret += 1
144 144 return ret
145 if opt.orphans: 145
146 proj_dirs = set() 146 counter = self.ExecuteInParallel(
147 proj_dirs_parents = set() 147 opt.jobs,
148 for project in self.GetProjects(None, missing_ok=True, all_manifests=not opt.this_manifest_only): 148 functools.partial(
149 relpath = project.RelPath(local=opt.this_manifest_only) 149 self._StatusHelper, opt.quiet, opt.this_manifest_only
150 proj_dirs.add(relpath) 150 ),
151 (head, _tail) = os.path.split(relpath) 151 all_projects,
152 while head != "": 152 callback=_ProcessResults,
153 proj_dirs_parents.add(head) 153 ordered=True,
154 (head, _tail) = os.path.split(head) 154 )
155 proj_dirs.add('.repo') 155
156 156 if not opt.quiet and len(all_projects) == counter:
157 class StatusColoring(Coloring): 157 print("nothing to commit (working directory clean)")
158 def __init__(self, config): 158
159 Coloring.__init__(self, config, 'status') 159 if opt.orphans:
160 self.project = self.printer('header', attr='bold') 160 proj_dirs = set()
161 self.untracked = self.printer('untracked', fg='red') 161 proj_dirs_parents = set()
162 162 for project in self.GetProjects(
163 orig_path = os.getcwd() 163 None, missing_ok=True, all_manifests=not opt.this_manifest_only
164 try: 164 ):
165 os.chdir(self.manifest.topdir) 165 relpath = project.RelPath(local=opt.this_manifest_only)
166 166 proj_dirs.add(relpath)
167 outstring = [] 167 (head, _tail) = os.path.split(relpath)
168 self._FindOrphans(glob.glob('.*') + 168 while head != "":
169 glob.glob('*'), 169 proj_dirs_parents.add(head)
170 proj_dirs, proj_dirs_parents, outstring) 170 (head, _tail) = os.path.split(head)
171 171 proj_dirs.add(".repo")
172 if outstring: 172
173 output = StatusColoring(self.client.globalConfig) 173 class StatusColoring(Coloring):
174 output.project('Objects not within a project (orphans)') 174 def __init__(self, config):
175 output.nl() 175 Coloring.__init__(self, config, "status")
176 for entry in outstring: 176 self.project = self.printer("header", attr="bold")
177 output.untracked(entry) 177 self.untracked = self.printer("untracked", fg="red")
178 output.nl() 178
179 else: 179 orig_path = os.getcwd()
180 print('No orphan files or directories') 180 try:
181 181 os.chdir(self.manifest.topdir)
182 finally: 182
183 # Restore CWD. 183 outstring = []
184 os.chdir(orig_path) 184 self._FindOrphans(
185 glob.glob(".*") + glob.glob("*"),
186 proj_dirs,
187 proj_dirs_parents,
188 outstring,
189 )
190
191 if outstring:
192 output = StatusColoring(self.client.globalConfig)
193 output.project("Objects not within a project (orphans)")
194 output.nl()
195 for entry in outstring:
196 output.untracked(entry)
197 output.nl()
198 else:
199 print("No orphan files or directories")
200
201 finally:
202 # Restore CWD.
203 os.chdir(orig_path)
diff --git a/subcmds/sync.py b/subcmds/sync.py
index 9a8ca8f7..eabaa68b 100644
--- a/subcmds/sync.py
+++ b/subcmds/sync.py
@@ -33,18 +33,21 @@ import xml.parsers.expat
33import xmlrpc.client 33import xmlrpc.client
34 34
35try: 35try:
36 import threading as _threading 36 import threading as _threading
37except ImportError: 37except ImportError:
38 import dummy_threading as _threading 38 import dummy_threading as _threading
39 39
40try: 40try:
41 import resource 41 import resource
42
43 def _rlimit_nofile():
44 return resource.getrlimit(resource.RLIMIT_NOFILE)
42 45
43 def _rlimit_nofile():
44 return resource.getrlimit(resource.RLIMIT_NOFILE)
45except ImportError: 46except ImportError:
46 def _rlimit_nofile(): 47
47 return (256, 256) 48 def _rlimit_nofile():
49 return (256, 256)
50
48 51
49import event_log 52import event_log
50from git_command import git_require 53from git_command import git_require
@@ -54,7 +57,12 @@ import git_superproject
54import gitc_utils 57import gitc_utils
55from project import Project 58from project import Project
56from project import RemoteSpec 59from project import RemoteSpec
57from command import Command, DEFAULT_LOCAL_JOBS, MirrorSafeCommand, WORKER_BATCH_SIZE 60from command import (
61 Command,
62 DEFAULT_LOCAL_JOBS,
63 MirrorSafeCommand,
64 WORKER_BATCH_SIZE,
65)
58from error import RepoChangedException, GitError 66from error import RepoChangedException, GitError
59import platform_utils 67import platform_utils
60from project import SyncBuffer 68from project import SyncBuffer
@@ -68,70 +76,74 @@ _ONE_DAY_S = 24 * 60 * 60
68 76
69# Env var to implicitly turn auto-gc back on. This was added to allow a user to 77# Env var to implicitly turn auto-gc back on. This was added to allow a user to
70# revert a change in default behavior in v2.29.9. Remove after 2023-04-01. 78# revert a change in default behavior in v2.29.9. Remove after 2023-04-01.
71_REPO_AUTO_GC = 'REPO_AUTO_GC' 79_REPO_AUTO_GC = "REPO_AUTO_GC"
72_AUTO_GC = os.environ.get(_REPO_AUTO_GC) == '1' 80_AUTO_GC = os.environ.get(_REPO_AUTO_GC) == "1"
73 81
74 82
75class _FetchOneResult(NamedTuple): 83class _FetchOneResult(NamedTuple):
76 """_FetchOne return value. 84 """_FetchOne return value.
77 85
78 Attributes: 86 Attributes:
79 success (bool): True if successful. 87 success (bool): True if successful.
80 project (Project): The fetched project. 88 project (Project): The fetched project.
81 start (float): The starting time.time(). 89 start (float): The starting time.time().
82 finish (float): The ending time.time(). 90 finish (float): The ending time.time().
83 remote_fetched (bool): True if the remote was actually queried. 91 remote_fetched (bool): True if the remote was actually queried.
84 """ 92 """
85 success: bool 93
86 project: Project 94 success: bool
87 start: float 95 project: Project
88 finish: float 96 start: float
89 remote_fetched: bool 97 finish: float
98 remote_fetched: bool
90 99
91 100
92class _FetchResult(NamedTuple): 101class _FetchResult(NamedTuple):
93 """_Fetch return value. 102 """_Fetch return value.
103
104 Attributes:
105 success (bool): True if successful.
106 projects (Set[str]): The names of the git directories of fetched projects.
107 """
94 108
95 Attributes: 109 success: bool
96 success (bool): True if successful. 110 projects: Set[str]
97 projects (Set[str]): The names of the git directories of fetched projects.
98 """
99 success: bool
100 projects: Set[str]
101 111
102 112
103class _FetchMainResult(NamedTuple): 113class _FetchMainResult(NamedTuple):
104 """_FetchMain return value. 114 """_FetchMain return value.
115
116 Attributes:
117 all_projects (List[Project]): The fetched projects.
118 """
105 119
106 Attributes: 120 all_projects: List[Project]
107 all_projects (List[Project]): The fetched projects.
108 """
109 all_projects: List[Project]
110 121
111 122
112class _CheckoutOneResult(NamedTuple): 123class _CheckoutOneResult(NamedTuple):
113 """_CheckoutOne return value. 124 """_CheckoutOne return value.
125
126 Attributes:
127 success (bool): True if successful.
128 project (Project): The project.
129 start (float): The starting time.time().
130 finish (float): The ending time.time().
131 """
114 132
115 Attributes: 133 success: bool
116 success (bool): True if successful. 134 project: Project
117 project (Project): The project. 135 start: float
118 start (float): The starting time.time(). 136 finish: float
119 finish (float): The ending time.time().
120 """
121 success: bool
122 project: Project
123 start: float
124 finish: float
125 137
126 138
127class Sync(Command, MirrorSafeCommand): 139class Sync(Command, MirrorSafeCommand):
128 COMMON = True 140 COMMON = True
129 MULTI_MANIFEST_SUPPORT = True 141 MULTI_MANIFEST_SUPPORT = True
130 helpSummary = "Update working tree to the latest revision" 142 helpSummary = "Update working tree to the latest revision"
131 helpUsage = """ 143 helpUsage = """
132%prog [<project>...] 144%prog [<project>...]
133""" 145"""
134 helpDescription = """ 146 helpDescription = """
135The '%prog' command synchronizes local project directories 147The '%prog' command synchronizes local project directories
136with the remote repositories specified in the manifest. If a local 148with the remote repositories specified in the manifest. If a local
137project does not yet exist, it will clone a new local directory from 149project does not yet exist, it will clone a new local directory from
@@ -230,1293 +242,1604 @@ If the remote SSH daemon is Gerrit Code Review, version 2.0.10 or
230later is required to fix a server side protocol bug. 242later is required to fix a server side protocol bug.
231 243
232""" 244"""
233 # A value of 0 means we want parallel jobs, but we'll determine the default 245 # A value of 0 means we want parallel jobs, but we'll determine the default
234 # value later on. 246 # value later on.
235 PARALLEL_JOBS = 0 247 PARALLEL_JOBS = 0
236 248
237 def _Options(self, p, show_smart=True): 249 def _Options(self, p, show_smart=True):
238 p.add_option('--jobs-network', default=None, type=int, metavar='JOBS', 250 p.add_option(
239 help='number of network jobs to run in parallel (defaults to --jobs or 1)') 251 "--jobs-network",
240 p.add_option('--jobs-checkout', default=None, type=int, metavar='JOBS', 252 default=None,
241 help='number of local checkout jobs to run in parallel (defaults to --jobs or ' 253 type=int,
242 f'{DEFAULT_LOCAL_JOBS})') 254 metavar="JOBS",
243 255 help="number of network jobs to run in parallel (defaults to "
244 p.add_option('-f', '--force-broken', 256 "--jobs or 1)",
245 dest='force_broken', action='store_true', 257 )
246 help='obsolete option (to be deleted in the future)') 258 p.add_option(
247 p.add_option('--fail-fast', 259 "--jobs-checkout",
248 dest='fail_fast', action='store_true', 260 default=None,
249 help='stop syncing after first error is hit') 261 type=int,
250 p.add_option('--force-sync', 262 metavar="JOBS",
251 dest='force_sync', action='store_true', 263 help="number of local checkout jobs to run in parallel (defaults "
252 help="overwrite an existing git directory if it needs to " 264 f"to --jobs or {DEFAULT_LOCAL_JOBS})",
253 "point to a different object directory. WARNING: this " 265 )
254 "may cause loss of data") 266
255 p.add_option('--force-remove-dirty', 267 p.add_option(
256 dest='force_remove_dirty', action='store_true', 268 "-f",
257 help="force remove projects with uncommitted modifications if " 269 "--force-broken",
258 "projects no longer exist in the manifest. " 270 dest="force_broken",
259 "WARNING: this may cause loss of data") 271 action="store_true",
260 p.add_option('-l', '--local-only', 272 help="obsolete option (to be deleted in the future)",
261 dest='local_only', action='store_true', 273 )
262 help="only update working tree, don't fetch") 274 p.add_option(
263 p.add_option('--no-manifest-update', '--nmu', 275 "--fail-fast",
264 dest='mp_update', action='store_false', default='true', 276 dest="fail_fast",
265 help='use the existing manifest checkout as-is. ' 277 action="store_true",
266 '(do not update to the latest revision)') 278 help="stop syncing after first error is hit",
267 p.add_option('-n', '--network-only', 279 )
268 dest='network_only', action='store_true', 280 p.add_option(
269 help="fetch only, don't update working tree") 281 "--force-sync",
270 p.add_option('-d', '--detach', 282 dest="force_sync",
271 dest='detach_head', action='store_true', 283 action="store_true",
272 help='detach projects back to manifest revision') 284 help="overwrite an existing git directory if it needs to "
273 p.add_option('-c', '--current-branch', 285 "point to a different object directory. WARNING: this "
274 dest='current_branch_only', action='store_true', 286 "may cause loss of data",
275 help='fetch only current branch from server') 287 )
276 p.add_option('--no-current-branch', 288 p.add_option(
277 dest='current_branch_only', action='store_false', 289 "--force-remove-dirty",
278 help='fetch all branches from server') 290 dest="force_remove_dirty",
279 p.add_option('-m', '--manifest-name', 291 action="store_true",
280 dest='manifest_name', 292 help="force remove projects with uncommitted modifications if "
281 help='temporary manifest to use for this sync', metavar='NAME.xml') 293 "projects no longer exist in the manifest. "
282 p.add_option('--clone-bundle', action='store_true', 294 "WARNING: this may cause loss of data",
283 help='enable use of /clone.bundle on HTTP/HTTPS') 295 )
284 p.add_option('--no-clone-bundle', dest='clone_bundle', action='store_false', 296 p.add_option(
285 help='disable use of /clone.bundle on HTTP/HTTPS') 297 "-l",
286 p.add_option('-u', '--manifest-server-username', action='store', 298 "--local-only",
287 dest='manifest_server_username', 299 dest="local_only",
288 help='username to authenticate with the manifest server') 300 action="store_true",
289 p.add_option('-p', '--manifest-server-password', action='store', 301 help="only update working tree, don't fetch",
290 dest='manifest_server_password', 302 )
291 help='password to authenticate with the manifest server') 303 p.add_option(
292 p.add_option('--fetch-submodules', 304 "--no-manifest-update",
293 dest='fetch_submodules', action='store_true', 305 "--nmu",
294 help='fetch submodules from server') 306 dest="mp_update",
295 p.add_option('--use-superproject', action='store_true', 307 action="store_false",
296 help='use the manifest superproject to sync projects; implies -c') 308 default="true",
297 p.add_option('--no-use-superproject', action='store_false', 309 help="use the existing manifest checkout as-is. "
298 dest='use_superproject', 310 "(do not update to the latest revision)",
299 help='disable use of manifest superprojects') 311 )
300 p.add_option('--tags', action='store_true', 312 p.add_option(
301 help='fetch tags') 313 "-n",
302 p.add_option('--no-tags', 314 "--network-only",
303 dest='tags', action='store_false', 315 dest="network_only",
304 help="don't fetch tags (default)") 316 action="store_true",
305 p.add_option('--optimized-fetch', 317 help="fetch only, don't update working tree",
306 dest='optimized_fetch', action='store_true', 318 )
307 help='only fetch projects fixed to sha1 if revision does not exist locally') 319 p.add_option(
308 p.add_option('--retry-fetches', 320 "-d",
309 default=0, action='store', type='int', 321 "--detach",
310 help='number of times to retry fetches on transient errors') 322 dest="detach_head",
311 p.add_option('--prune', action='store_true', 323 action="store_true",
312 help='delete refs that no longer exist on the remote (default)') 324 help="detach projects back to manifest revision",
313 p.add_option('--no-prune', dest='prune', action='store_false', 325 )
314 help='do not delete refs that no longer exist on the remote') 326 p.add_option(
315 p.add_option('--auto-gc', action='store_true', default=None, 327 "-c",
316 help='run garbage collection on all synced projects') 328 "--current-branch",
317 p.add_option('--no-auto-gc', dest='auto_gc', action='store_false', 329 dest="current_branch_only",
318 help='do not run garbage collection on any projects (default)') 330 action="store_true",
319 if show_smart: 331 help="fetch only current branch from server",
320 p.add_option('-s', '--smart-sync', 332 )
321 dest='smart_sync', action='store_true', 333 p.add_option(
322 help='smart sync using manifest from the latest known good build') 334 "--no-current-branch",
323 p.add_option('-t', '--smart-tag', 335 dest="current_branch_only",
324 dest='smart_tag', action='store', 336 action="store_false",
325 help='smart sync using manifest from a known tag') 337 help="fetch all branches from server",
326 338 )
327 g = p.add_option_group('repo Version options') 339 p.add_option(
328 g.add_option('--no-repo-verify', 340 "-m",
329 dest='repo_verify', default=True, action='store_false', 341 "--manifest-name",
330 help='do not verify repo source code') 342 dest="manifest_name",
331 g.add_option('--repo-upgraded', 343 help="temporary manifest to use for this sync",
332 dest='repo_upgraded', action='store_true', 344 metavar="NAME.xml",
333 help=SUPPRESS_HELP) 345 )
334 346 p.add_option(
335 def _GetBranch(self, manifest_project): 347 "--clone-bundle",
336 """Returns the branch name for getting the approved smartsync manifest. 348 action="store_true",
337 349 help="enable use of /clone.bundle on HTTP/HTTPS",
338 Args: 350 )
339 manifest_project: the manifestProject to query. 351 p.add_option(
340 """ 352 "--no-clone-bundle",
341 b = manifest_project.GetBranch(manifest_project.CurrentBranch) 353 dest="clone_bundle",
342 branch = b.merge 354 action="store_false",
343 if branch.startswith(R_HEADS): 355 help="disable use of /clone.bundle on HTTP/HTTPS",
344 branch = branch[len(R_HEADS):] 356 )
345 return branch 357 p.add_option(
346 358 "-u",
347 def _GetCurrentBranchOnly(self, opt, manifest): 359 "--manifest-server-username",
348 """Returns whether current-branch or use-superproject options are enabled. 360 action="store",
349 361 dest="manifest_server_username",
350 Args: 362 help="username to authenticate with the manifest server",
351 opt: Program options returned from optparse. See _Options(). 363 )
352 manifest: The manifest to use. 364 p.add_option(
353 365 "-p",
354 Returns: 366 "--manifest-server-password",
355 True if a superproject is requested, otherwise the value of the 367 action="store",
356 current_branch option (True, False or None). 368 dest="manifest_server_password",
357 """ 369 help="password to authenticate with the manifest server",
358 return git_superproject.UseSuperproject(opt.use_superproject, manifest) or opt.current_branch_only 370 )
359 371 p.add_option(
360 def _UpdateProjectsRevisionId(self, opt, args, superproject_logging_data, 372 "--fetch-submodules",
361 manifest): 373 dest="fetch_submodules",
362 """Update revisionId of projects with the commit hash from the superproject. 374 action="store_true",
363 375 help="fetch submodules from server",
364 This function updates each project's revisionId with the commit hash from 376 )
365 the superproject. It writes the updated manifest into a file and reloads 377 p.add_option(
366 the manifest from it. When appropriate, sub manifests are also processed. 378 "--use-superproject",
367 379 action="store_true",
368 Args: 380 help="use the manifest superproject to sync projects; implies -c",
369 opt: Program options returned from optparse. See _Options(). 381 )
370 args: Arguments to pass to GetProjects. See the GetProjects 382 p.add_option(
371 docstring for details. 383 "--no-use-superproject",
372 superproject_logging_data: A dictionary of superproject data to log. 384 action="store_false",
373 manifest: The manifest to use. 385 dest="use_superproject",
374 """ 386 help="disable use of manifest superprojects",
375 have_superproject = manifest.superproject or any( 387 )
376 m.superproject for m in manifest.all_children) 388 p.add_option("--tags", action="store_true", help="fetch tags")
377 if not have_superproject: 389 p.add_option(
378 return 390 "--no-tags",
379 391 dest="tags",
380 if opt.local_only and manifest.superproject: 392 action="store_false",
381 manifest_path = manifest.superproject.manifest_path 393 help="don't fetch tags (default)",
382 if manifest_path: 394 )
383 self._ReloadManifest(manifest_path, manifest) 395 p.add_option(
384 return 396 "--optimized-fetch",
385 397 dest="optimized_fetch",
386 all_projects = self.GetProjects(args, 398 action="store_true",
387 missing_ok=True, 399 help="only fetch projects fixed to sha1 if revision does not exist "
388 submodules_ok=opt.fetch_submodules, 400 "locally",
389 manifest=manifest, 401 )
390 all_manifests=not opt.this_manifest_only) 402 p.add_option(
391 403 "--retry-fetches",
392 per_manifest = collections.defaultdict(list) 404 default=0,
393 manifest_paths = {} 405 action="store",
394 if opt.this_manifest_only: 406 type="int",
395 per_manifest[manifest.path_prefix] = all_projects 407 help="number of times to retry fetches on transient errors",
396 else: 408 )
397 for p in all_projects: 409 p.add_option(
398 per_manifest[p.manifest.path_prefix].append(p) 410 "--prune",
399 411 action="store_true",
400 superproject_logging_data = {} 412 help="delete refs that no longer exist on the remote (default)",
401 need_unload = False 413 )
402 for m in self.ManifestList(opt): 414 p.add_option(
403 if not m.path_prefix in per_manifest: 415 "--no-prune",
404 continue 416 dest="prune",
405 use_super = git_superproject.UseSuperproject(opt.use_superproject, m) 417 action="store_false",
406 if superproject_logging_data: 418 help="do not delete refs that no longer exist on the remote",
407 superproject_logging_data['multimanifest'] = True 419 )
408 superproject_logging_data.update( 420 p.add_option(
409 superproject=use_super, 421 "--auto-gc",
410 haslocalmanifests=bool(m.HasLocalManifests), 422 action="store_true",
411 hassuperprojecttag=bool(m.superproject), 423 default=None,
412 ) 424 help="run garbage collection on all synced projects",
413 if use_super and (m.IsMirror or m.IsArchive): 425 )
414 # Don't use superproject, because we have no working tree. 426 p.add_option(
415 use_super = False 427 "--no-auto-gc",
416 superproject_logging_data['superproject'] = False 428 dest="auto_gc",
417 superproject_logging_data['noworktree'] = True 429 action="store_false",
418 if opt.use_superproject is not False: 430 help="do not run garbage collection on any projects (default)",
419 print(f'{m.path_prefix}: not using superproject because there is no ' 431 )
420 'working tree.') 432 if show_smart:
421 433 p.add_option(
422 if not use_super: 434 "-s",
423 continue 435 "--smart-sync",
424 m.superproject.SetQuiet(opt.quiet) 436 dest="smart_sync",
425 print_messages = git_superproject.PrintMessages(opt.use_superproject, m) 437 action="store_true",
426 m.superproject.SetPrintMessages(print_messages) 438 help="smart sync using manifest from the latest known good "
427 update_result = m.superproject.UpdateProjectsRevisionId( 439 "build",
428 per_manifest[m.path_prefix], git_event_log=self.git_event_log) 440 )
429 manifest_path = update_result.manifest_path 441 p.add_option(
430 superproject_logging_data['updatedrevisionid'] = bool(manifest_path) 442 "-t",
431 if manifest_path: 443 "--smart-tag",
432 m.SetManifestOverride(manifest_path) 444 dest="smart_tag",
433 need_unload = True 445 action="store",
434 else: 446 help="smart sync using manifest from a known tag",
435 if print_messages: 447 )
436 print(f'{m.path_prefix}: warning: Update of revisionId from ' 448
437 'superproject has failed, repo sync will not use superproject ' 449 g = p.add_option_group("repo Version options")
438 'to fetch the source. ', 450 g.add_option(
439 'Please resync with the --no-use-superproject option to avoid ' 451 "--no-repo-verify",
440 'this repo warning.', 452 dest="repo_verify",
441 file=sys.stderr) 453 default=True,
442 if update_result.fatal and opt.use_superproject is not None: 454 action="store_false",
443 sys.exit(1) 455 help="do not verify repo source code",
444 if need_unload: 456 )
445 m.outer_client.manifest.Unload() 457 g.add_option(
446 458 "--repo-upgraded",
447 def _FetchProjectList(self, opt, projects): 459 dest="repo_upgraded",
448 """Main function of the fetch worker. 460 action="store_true",
449 461 help=SUPPRESS_HELP,
450 The projects we're given share the same underlying git object store, so we 462 )
451 have to fetch them in serial.
452
453 Delegates most of the work to _FetchHelper.
454
455 Args:
456 opt: Program options returned from optparse. See _Options().
457 projects: Projects to fetch.
458 """
459 return [self._FetchOne(opt, x) for x in projects]
460 463
461 def _FetchOne(self, opt, project): 464 def _GetBranch(self, manifest_project):
462 """Fetch git objects for a single project. 465 """Returns the branch name for getting the approved smartsync manifest.
466
467 Args:
468 manifest_project: The manifestProject to query.
469 """
470 b = manifest_project.GetBranch(manifest_project.CurrentBranch)
471 branch = b.merge
472 if branch.startswith(R_HEADS):
473 branch = branch[len(R_HEADS) :]
474 return branch
475
476 def _GetCurrentBranchOnly(self, opt, manifest):
477 """Returns whether current-branch or use-superproject options are
478 enabled.
479
480 Args:
481 opt: Program options returned from optparse. See _Options().
482 manifest: The manifest to use.
483
484 Returns:
485 True if a superproject is requested, otherwise the value of the
486 current_branch option (True, False or None).
487 """
488 return (
489 git_superproject.UseSuperproject(opt.use_superproject, manifest)
490 or opt.current_branch_only
491 )
463 492
464 Args: 493 def _UpdateProjectsRevisionId(
465 opt: Program options returned from optparse. See _Options(). 494 self, opt, args, superproject_logging_data, manifest
466 project: Project object for the project to fetch. 495 ):
496 """Update revisionId of projects with the commit from the superproject.
497
498 This function updates each project's revisionId with the commit hash
499 from the superproject. It writes the updated manifest into a file and
500 reloads the manifest from it. When appropriate, sub manifests are also
501 processed.
502
503 Args:
504 opt: Program options returned from optparse. See _Options().
505 args: Arguments to pass to GetProjects. See the GetProjects
506 docstring for details.
507 superproject_logging_data: A dictionary of superproject data to log.
508 manifest: The manifest to use.
509 """
510 have_superproject = manifest.superproject or any(
511 m.superproject for m in manifest.all_children
512 )
513 if not have_superproject:
514 return
515
516 if opt.local_only and manifest.superproject:
517 manifest_path = manifest.superproject.manifest_path
518 if manifest_path:
519 self._ReloadManifest(manifest_path, manifest)
520 return
521
522 all_projects = self.GetProjects(
523 args,
524 missing_ok=True,
525 submodules_ok=opt.fetch_submodules,
526 manifest=manifest,
527 all_manifests=not opt.this_manifest_only,
528 )
467 529
468 Returns: 530 per_manifest = collections.defaultdict(list)
469 Whether the fetch was successful. 531 if opt.this_manifest_only:
470 """ 532 per_manifest[manifest.path_prefix] = all_projects
471 start = time.time() 533 else:
472 success = False 534 for p in all_projects:
473 remote_fetched = False 535 per_manifest[p.manifest.path_prefix].append(p)
474 buf = io.StringIO() 536
475 try: 537 superproject_logging_data = {}
476 sync_result = project.Sync_NetworkHalf( 538 need_unload = False
477 quiet=opt.quiet, 539 for m in self.ManifestList(opt):
478 verbose=opt.verbose, 540 if m.path_prefix not in per_manifest:
479 output_redir=buf, 541 continue
480 current_branch_only=self._GetCurrentBranchOnly(opt, project.manifest), 542 use_super = git_superproject.UseSuperproject(
481 force_sync=opt.force_sync, 543 opt.use_superproject, m
482 clone_bundle=opt.clone_bundle, 544 )
483 tags=opt.tags, archive=project.manifest.IsArchive, 545 if superproject_logging_data:
484 optimized_fetch=opt.optimized_fetch, 546 superproject_logging_data["multimanifest"] = True
485 retry_fetches=opt.retry_fetches, 547 superproject_logging_data.update(
486 prune=opt.prune, 548 superproject=use_super,
487 ssh_proxy=self.ssh_proxy, 549 haslocalmanifests=bool(m.HasLocalManifests),
488 clone_filter=project.manifest.CloneFilter, 550 hassuperprojecttag=bool(m.superproject),
489 partial_clone_exclude=project.manifest.PartialCloneExclude) 551 )
490 success = sync_result.success 552 if use_super and (m.IsMirror or m.IsArchive):
491 remote_fetched = sync_result.remote_fetched 553 # Don't use superproject, because we have no working tree.
492 554 use_super = False
493 output = buf.getvalue() 555 superproject_logging_data["superproject"] = False
494 if (opt.verbose or not success) and output: 556 superproject_logging_data["noworktree"] = True
495 print('\n' + output.rstrip()) 557 if opt.use_superproject is not False:
496 558 print(
497 if not success: 559 f"{m.path_prefix}: not using superproject because "
498 print('error: Cannot fetch %s from %s' 560 "there is no working tree."
499 % (project.name, project.remote.url), 561 )
500 file=sys.stderr) 562
501 except KeyboardInterrupt: 563 if not use_super:
502 print(f'Keyboard interrupt while processing {project.name}') 564 continue
503 except GitError as e: 565 m.superproject.SetQuiet(opt.quiet)
504 print('error.GitError: Cannot fetch %s' % str(e), file=sys.stderr) 566 print_messages = git_superproject.PrintMessages(
505 except Exception as e: 567 opt.use_superproject, m
506 print('error: Cannot fetch %s (%s: %s)' 568 )
507 % (project.name, type(e).__name__, str(e)), file=sys.stderr) 569 m.superproject.SetPrintMessages(print_messages)
508 raise 570 update_result = m.superproject.UpdateProjectsRevisionId(
509 571 per_manifest[m.path_prefix], git_event_log=self.git_event_log
510 finish = time.time() 572 )
511 return _FetchOneResult(success, project, start, finish, remote_fetched) 573 manifest_path = update_result.manifest_path
512 574 superproject_logging_data["updatedrevisionid"] = bool(manifest_path)
513 @classmethod 575 if manifest_path:
514 def _FetchInitChild(cls, ssh_proxy): 576 m.SetManifestOverride(manifest_path)
515 cls.ssh_proxy = ssh_proxy 577 need_unload = True
516 578 else:
517 def _Fetch(self, projects, opt, err_event, ssh_proxy): 579 if print_messages:
518 ret = True 580 print(
519 581 f"{m.path_prefix}: warning: Update of revisionId from "
520 jobs = opt.jobs_network 582 "superproject has failed, repo sync will not use "
521 fetched = set() 583 "superproject to fetch the source. ",
522 remote_fetched = set() 584 "Please resync with the --no-use-superproject option "
523 pm = Progress('Fetching', len(projects), delay=False, quiet=opt.quiet) 585 "to avoid this repo warning.",
524 586 file=sys.stderr,
525 objdir_project_map = dict() 587 )
526 for project in projects: 588 if update_result.fatal and opt.use_superproject is not None:
527 objdir_project_map.setdefault(project.objdir, []).append(project) 589 sys.exit(1)
528 projects_list = list(objdir_project_map.values()) 590 if need_unload:
529 591 m.outer_client.manifest.Unload()
530 def _ProcessResults(results_sets): 592
531 ret = True 593 def _FetchProjectList(self, opt, projects):
532 for results in results_sets: 594 """Main function of the fetch worker.
533 for result in results: 595
534 success = result.success 596 The projects we're given share the same underlying git object store, so
535 project = result.project 597 we have to fetch them in serial.
536 start = result.start 598
537 finish = result.finish 599 Delegates most of the work to _FetchHelper.
538 self._fetch_times.Set(project, finish - start) 600
539 self.event_log.AddSync(project, event_log.TASK_SYNC_NETWORK, 601 Args:
540 start, finish, success) 602 opt: Program options returned from optparse. See _Options().
541 if result.remote_fetched: 603 projects: Projects to fetch.
542 remote_fetched.add(project) 604 """
543 # Check for any errors before running any more tasks. 605 return [self._FetchOne(opt, x) for x in projects]
544 # ...we'll let existing jobs finish, though. 606
545 if not success: 607 def _FetchOne(self, opt, project):
546 ret = False 608 """Fetch git objects for a single project.
547 else: 609
548 fetched.add(project.gitdir) 610 Args:
549 pm.update(msg=f'Last synced: {project.name}') 611 opt: Program options returned from optparse. See _Options().
550 if not ret and opt.fail_fast: 612 project: Project object for the project to fetch.
551 break 613
552 return ret 614 Returns:
553 615 Whether the fetch was successful.
554 # We pass the ssh proxy settings via the class. This allows multiprocessing 616 """
555 # to pickle it up when spawning children. We can't pass it as an argument 617 start = time.time()
556 # to _FetchProjectList below as multiprocessing is unable to pickle those. 618 success = False
557 Sync.ssh_proxy = None 619 remote_fetched = False
558 620 buf = io.StringIO()
559 # NB: Multiprocessing is heavy, so don't spin it up for one job. 621 try:
560 if len(projects_list) == 1 or jobs == 1: 622 sync_result = project.Sync_NetworkHalf(
561 self._FetchInitChild(ssh_proxy) 623 quiet=opt.quiet,
562 if not _ProcessResults(self._FetchProjectList(opt, x) for x in projects_list): 624 verbose=opt.verbose,
563 ret = False 625 output_redir=buf,
564 else: 626 current_branch_only=self._GetCurrentBranchOnly(
565 # Favor throughput over responsiveness when quiet. It seems that imap() 627 opt, project.manifest
566 # will yield results in batches relative to chunksize, so even as the 628 ),
567 # children finish a sync, we won't see the result until one child finishes 629 force_sync=opt.force_sync,
568 # ~chunksize jobs. When using a large --jobs with large chunksize, this 630 clone_bundle=opt.clone_bundle,
569 # can be jarring as there will be a large initial delay where repo looks 631 tags=opt.tags,
570 # like it isn't doing anything and sits at 0%, but then suddenly completes 632 archive=project.manifest.IsArchive,
571 # a lot of jobs all at once. Since this code is more network bound, we 633 optimized_fetch=opt.optimized_fetch,
572 # can accept a bit more CPU overhead with a smaller chunksize so that the 634 retry_fetches=opt.retry_fetches,
573 # user sees more immediate & continuous feedback. 635 prune=opt.prune,
574 if opt.quiet: 636 ssh_proxy=self.ssh_proxy,
575 chunksize = WORKER_BATCH_SIZE 637 clone_filter=project.manifest.CloneFilter,
576 else: 638 partial_clone_exclude=project.manifest.PartialCloneExclude,
577 pm.update(inc=0, msg='warming up') 639 )
578 chunksize = 4 640 success = sync_result.success
579 with multiprocessing.Pool(jobs, initializer=self._FetchInitChild, 641 remote_fetched = sync_result.remote_fetched
580 initargs=(ssh_proxy,)) as pool: 642
581 results = pool.imap_unordered( 643 output = buf.getvalue()
582 functools.partial(self._FetchProjectList, opt), 644 if (opt.verbose or not success) and output:
583 projects_list, 645 print("\n" + output.rstrip())
584 chunksize=chunksize) 646
585 if not _ProcessResults(results): 647 if not success:
586 ret = False 648 print(
587 pool.close() 649 "error: Cannot fetch %s from %s"
588 650 % (project.name, project.remote.url),
589 # Cleanup the reference now that we're done with it, and we're going to 651 file=sys.stderr,
590 # release any resources it points to. If we don't, later multiprocessing 652 )
591 # usage (e.g. checkouts) will try to pickle and then crash. 653 except KeyboardInterrupt:
592 del Sync.ssh_proxy 654 print(f"Keyboard interrupt while processing {project.name}")
593 655 except GitError as e:
594 pm.end() 656 print("error.GitError: Cannot fetch %s" % str(e), file=sys.stderr)
595 self._fetch_times.Save() 657 except Exception as e:
596 658 print(
597 if not self.outer_client.manifest.IsArchive: 659 "error: Cannot fetch %s (%s: %s)"
598 self._GCProjects(projects, opt, err_event) 660 % (project.name, type(e).__name__, str(e)),
599 661 file=sys.stderr,
600 return _FetchResult(ret, fetched) 662 )
601 663 raise
602 def _FetchMain(self, opt, args, all_projects, err_event, 664
603 ssh_proxy, manifest): 665 finish = time.time()
604 """The main network fetch loop. 666 return _FetchOneResult(success, project, start, finish, remote_fetched)
605 667
606 Args: 668 @classmethod
607 opt: Program options returned from optparse. See _Options(). 669 def _FetchInitChild(cls, ssh_proxy):
608 args: Command line args used to filter out projects. 670 cls.ssh_proxy = ssh_proxy
609 all_projects: List of all projects that should be fetched. 671
610 err_event: Whether an error was hit while processing. 672 def _Fetch(self, projects, opt, err_event, ssh_proxy):
611 ssh_proxy: SSH manager for clients & masters. 673 ret = True
612 manifest: The manifest to use. 674
613 675 jobs = opt.jobs_network
614 Returns: 676 fetched = set()
615 List of all projects that should be checked out. 677 remote_fetched = set()
616 """ 678 pm = Progress("Fetching", len(projects), delay=False, quiet=opt.quiet)
617 rp = manifest.repoProject 679
618 680 objdir_project_map = dict()
619 to_fetch = [] 681 for project in projects:
620 now = time.time() 682 objdir_project_map.setdefault(project.objdir, []).append(project)
621 if _ONE_DAY_S <= (now - rp.LastFetch): 683 projects_list = list(objdir_project_map.values())
622 to_fetch.append(rp) 684
623 to_fetch.extend(all_projects) 685 def _ProcessResults(results_sets):
624 to_fetch.sort(key=self._fetch_times.Get, reverse=True) 686 ret = True
625 687 for results in results_sets:
626 result = self._Fetch(to_fetch, opt, err_event, ssh_proxy) 688 for result in results:
627 success = result.success 689 success = result.success
628 fetched = result.projects 690 project = result.project
629 if not success: 691 start = result.start
630 err_event.set() 692 finish = result.finish
631 693 self._fetch_times.Set(project, finish - start)
632 _PostRepoFetch(rp, opt.repo_verify) 694 self.event_log.AddSync(
633 if opt.network_only: 695 project,
634 # bail out now; the rest touches the working tree 696 event_log.TASK_SYNC_NETWORK,
635 if err_event.is_set(): 697 start,
636 print('\nerror: Exited sync due to fetch errors.\n', file=sys.stderr) 698 finish,
637 sys.exit(1) 699 success,
638 return _FetchMainResult([]) 700 )
639 701 if result.remote_fetched:
640 # Iteratively fetch missing and/or nested unregistered submodules 702 remote_fetched.add(project)
641 previously_missing_set = set() 703 # Check for any errors before running any more tasks.
642 while True: 704 # ...we'll let existing jobs finish, though.
643 self._ReloadManifest(None, manifest) 705 if not success:
644 all_projects = self.GetProjects(args, 706 ret = False
645 missing_ok=True, 707 else:
646 submodules_ok=opt.fetch_submodules, 708 fetched.add(project.gitdir)
647 manifest=manifest, 709 pm.update(msg=f"Last synced: {project.name}")
648 all_manifests=not opt.this_manifest_only) 710 if not ret and opt.fail_fast:
649 missing = [] 711 break
650 for project in all_projects: 712 return ret
651 if project.gitdir not in fetched:
652 missing.append(project)
653 if not missing:
654 break
655 # Stop us from non-stopped fetching actually-missing repos: If set of
656 # missing repos has not been changed from last fetch, we break.
657 missing_set = set(p.name for p in missing)
658 if previously_missing_set == missing_set:
659 break
660 previously_missing_set = missing_set
661 result = self._Fetch(missing, opt, err_event, ssh_proxy)
662 success = result.success
663 new_fetched = result.projects
664 if not success:
665 err_event.set()
666 fetched.update(new_fetched)
667
668 return _FetchMainResult(all_projects)
669
670 def _CheckoutOne(self, detach_head, force_sync, project):
671 """Checkout work tree for one project
672
673 Args:
674 detach_head: Whether to leave a detached HEAD.
675 force_sync: Force checking out of the repo.
676 project: Project object for the project to checkout.
677
678 Returns:
679 Whether the fetch was successful.
680 """
681 start = time.time()
682 syncbuf = SyncBuffer(project.manifest.manifestProject.config,
683 detach_head=detach_head)
684 success = False
685 try:
686 project.Sync_LocalHalf(syncbuf, force_sync=force_sync)
687 success = syncbuf.Finish()
688 except GitError as e:
689 print('error.GitError: Cannot checkout %s: %s' %
690 (project.name, str(e)), file=sys.stderr)
691 except Exception as e:
692 print('error: Cannot checkout %s: %s: %s' %
693 (project.name, type(e).__name__, str(e)),
694 file=sys.stderr)
695 raise
696
697 if not success:
698 print('error: Cannot checkout %s' % (project.name), file=sys.stderr)
699 finish = time.time()
700 return _CheckoutOneResult(success, project, start, finish)
701
702 def _Checkout(self, all_projects, opt, err_results):
703 """Checkout projects listed in all_projects
704
705 Args:
706 all_projects: List of all projects that should be checked out.
707 opt: Program options returned from optparse. See _Options().
708 err_results: A list of strings, paths to git repos where checkout failed.
709 """
710 # Only checkout projects with worktrees.
711 all_projects = [x for x in all_projects if x.worktree]
712 713
713 def _ProcessResults(pool, pm, results): 714 # We pass the ssh proxy settings via the class. This allows
714 ret = True 715 # multiprocessing to pickle it up when spawning children. We can't pass
715 for result in results: 716 # it as an argument to _FetchProjectList below as multiprocessing is
717 # unable to pickle those.
718 Sync.ssh_proxy = None
719
720 # NB: Multiprocessing is heavy, so don't spin it up for one job.
721 if len(projects_list) == 1 or jobs == 1:
722 self._FetchInitChild(ssh_proxy)
723 if not _ProcessResults(
724 self._FetchProjectList(opt, x) for x in projects_list
725 ):
726 ret = False
727 else:
728 # Favor throughput over responsiveness when quiet. It seems that
729 # imap() will yield results in batches relative to chunksize, so
730 # even as the children finish a sync, we won't see the result until
731 # one child finishes ~chunksize jobs. When using a large --jobs
732 # with large chunksize, this can be jarring as there will be a large
733 # initial delay where repo looks like it isn't doing anything and
734 # sits at 0%, but then suddenly completes a lot of jobs all at once.
735 # Since this code is more network bound, we can accept a bit more
736 # CPU overhead with a smaller chunksize so that the user sees more
737 # immediate & continuous feedback.
738 if opt.quiet:
739 chunksize = WORKER_BATCH_SIZE
740 else:
741 pm.update(inc=0, msg="warming up")
742 chunksize = 4
743 with multiprocessing.Pool(
744 jobs, initializer=self._FetchInitChild, initargs=(ssh_proxy,)
745 ) as pool:
746 results = pool.imap_unordered(
747 functools.partial(self._FetchProjectList, opt),
748 projects_list,
749 chunksize=chunksize,
750 )
751 if not _ProcessResults(results):
752 ret = False
753 pool.close()
754
755 # Cleanup the reference now that we're done with it, and we're going to
756 # release any resources it points to. If we don't, later
757 # multiprocessing usage (e.g. checkouts) will try to pickle and then
758 # crash.
759 del Sync.ssh_proxy
760
761 pm.end()
762 self._fetch_times.Save()
763
764 if not self.outer_client.manifest.IsArchive:
765 self._GCProjects(projects, opt, err_event)
766
767 return _FetchResult(ret, fetched)
768
769 def _FetchMain(
770 self, opt, args, all_projects, err_event, ssh_proxy, manifest
771 ):
772 """The main network fetch loop.
773
774 Args:
775 opt: Program options returned from optparse. See _Options().
776 args: Command line args used to filter out projects.
777 all_projects: List of all projects that should be fetched.
778 err_event: Whether an error was hit while processing.
779 ssh_proxy: SSH manager for clients & masters.
780 manifest: The manifest to use.
781
782 Returns:
783 List of all projects that should be checked out.
784 """
785 rp = manifest.repoProject
786
787 to_fetch = []
788 now = time.time()
789 if _ONE_DAY_S <= (now - rp.LastFetch):
790 to_fetch.append(rp)
791 to_fetch.extend(all_projects)
792 to_fetch.sort(key=self._fetch_times.Get, reverse=True)
793
794 result = self._Fetch(to_fetch, opt, err_event, ssh_proxy)
716 success = result.success 795 success = result.success
717 project = result.project 796 fetched = result.projects
718 start = result.start
719 finish = result.finish
720 self.event_log.AddSync(project, event_log.TASK_SYNC_LOCAL,
721 start, finish, success)
722 # Check for any errors before running any more tasks.
723 # ...we'll let existing jobs finish, though.
724 if not success: 797 if not success:
725 ret = False 798 err_event.set()
726 err_results.append(project.RelPath(local=opt.this_manifest_only)) 799
727 if opt.fail_fast: 800 _PostRepoFetch(rp, opt.repo_verify)
728 if pool: 801 if opt.network_only:
729 pool.close() 802 # Bail out now; the rest touches the working tree.
803 if err_event.is_set():
804 print(
805 "\nerror: Exited sync due to fetch errors.\n",
806 file=sys.stderr,
807 )
808 sys.exit(1)
809 return _FetchMainResult([])
810
811 # Iteratively fetch missing and/or nested unregistered submodules.
812 previously_missing_set = set()
813 while True:
814 self._ReloadManifest(None, manifest)
815 all_projects = self.GetProjects(
816 args,
817 missing_ok=True,
818 submodules_ok=opt.fetch_submodules,
819 manifest=manifest,
820 all_manifests=not opt.this_manifest_only,
821 )
822 missing = []
823 for project in all_projects:
824 if project.gitdir not in fetched:
825 missing.append(project)
826 if not missing:
827 break
828 # Stop us from non-stopped fetching actually-missing repos: If set
829 # of missing repos has not been changed from last fetch, we break.
830 missing_set = set(p.name for p in missing)
831 if previously_missing_set == missing_set:
832 break
833 previously_missing_set = missing_set
834 result = self._Fetch(missing, opt, err_event, ssh_proxy)
835 success = result.success
836 new_fetched = result.projects
837 if not success:
838 err_event.set()
839 fetched.update(new_fetched)
840
841 return _FetchMainResult(all_projects)
842
843 def _CheckoutOne(self, detach_head, force_sync, project):
844 """Checkout work tree for one project
845
846 Args:
847 detach_head: Whether to leave a detached HEAD.
848 force_sync: Force checking out of the repo.
849 project: Project object for the project to checkout.
850
851 Returns:
852 Whether the fetch was successful.
853 """
854 start = time.time()
855 syncbuf = SyncBuffer(
856 project.manifest.manifestProject.config, detach_head=detach_head
857 )
858 success = False
859 try:
860 project.Sync_LocalHalf(syncbuf, force_sync=force_sync)
861 success = syncbuf.Finish()
862 except GitError as e:
863 print(
864 "error.GitError: Cannot checkout %s: %s"
865 % (project.name, str(e)),
866 file=sys.stderr,
867 )
868 except Exception as e:
869 print(
870 "error: Cannot checkout %s: %s: %s"
871 % (project.name, type(e).__name__, str(e)),
872 file=sys.stderr,
873 )
874 raise
875
876 if not success:
877 print("error: Cannot checkout %s" % (project.name), file=sys.stderr)
878 finish = time.time()
879 return _CheckoutOneResult(success, project, start, finish)
880
881 def _Checkout(self, all_projects, opt, err_results):
882 """Checkout projects listed in all_projects
883
884 Args:
885 all_projects: List of all projects that should be checked out.
886 opt: Program options returned from optparse. See _Options().
887 err_results: A list of strings, paths to git repos where checkout
888 failed.
889 """
890 # Only checkout projects with worktrees.
891 all_projects = [x for x in all_projects if x.worktree]
892
893 def _ProcessResults(pool, pm, results):
894 ret = True
895 for result in results:
896 success = result.success
897 project = result.project
898 start = result.start
899 finish = result.finish
900 self.event_log.AddSync(
901 project, event_log.TASK_SYNC_LOCAL, start, finish, success
902 )
903 # Check for any errors before running any more tasks.
904 # ...we'll let existing jobs finish, though.
905 if not success:
906 ret = False
907 err_results.append(
908 project.RelPath(local=opt.this_manifest_only)
909 )
910 if opt.fail_fast:
911 if pool:
912 pool.close()
913 return ret
914 pm.update(msg=project.name)
730 return ret 915 return ret
731 pm.update(msg=project.name)
732 return ret
733
734 return self.ExecuteInParallel(
735 opt.jobs_checkout,
736 functools.partial(self._CheckoutOne, opt.detach_head, opt.force_sync),
737 all_projects,
738 callback=_ProcessResults,
739 output=Progress('Checking out', len(all_projects), quiet=opt.quiet)) and not err_results
740
741 @staticmethod
742 def _GetPreciousObjectsState(project: Project, opt):
743 """Get the preciousObjects state for the project.
744
745 Args:
746 project (Project): the project to examine, and possibly correct.
747 opt (optparse.Values): options given to sync.
748
749 Returns:
750 Expected state of extensions.preciousObjects:
751 False: Should be disabled. (not present)
752 True: Should be enabled.
753 """
754 if project.use_git_worktrees:
755 return False
756 projects = project.manifest.GetProjectsWithName(project.name,
757 all_manifests=True)
758 if len(projects) == 1:
759 return False
760 relpath = project.RelPath(local=opt.this_manifest_only)
761 if len(projects) > 1:
762 # Objects are potentially shared with another project.
763 # See the logic in Project.Sync_NetworkHalf regarding UseAlternates.
764 # - When False, shared projects share (via symlink)
765 # .repo/project-objects/{PROJECT_NAME}.git as the one-and-only objects
766 # directory. All objects are precious, since there is no project with a
767 # complete set of refs.
768 # - When True, shared projects share (via info/alternates)
769 # .repo/project-objects/{PROJECT_NAME}.git as an alternate object store,
770 # which is written only on the first clone of the project, and is not
771 # written subsequently. (When Sync_NetworkHalf sees that it exists, it
772 # makes sure that the alternates file points there, and uses a
773 # project-local .git/objects directory for all syncs going forward.
774 # We do not support switching between the options. The environment
775 # variable is present for testing and migration only.
776 return not project.UseAlternates
777
778 return False
779
780 def _SetPreciousObjectsState(self, project: Project, opt):
781 """Correct the preciousObjects state for the project.
782
783 Args:
784 project: the project to examine, and possibly correct.
785 opt: options given to sync.
786 """
787 expected = self._GetPreciousObjectsState(project, opt)
788 actual = project.config.GetBoolean('extensions.preciousObjects') or False
789 relpath = project.RelPath(local=opt.this_manifest_only)
790
791 if expected != actual:
792 # If this is unexpected, log it and repair.
793 Trace(f'{relpath} expected preciousObjects={expected}, got {actual}')
794 if expected:
795 if not opt.quiet:
796 print('\r%s: Shared project %s found, disabling pruning.' %
797 (relpath, project.name))
798 if git_require((2, 7, 0)):
799 project.EnableRepositoryExtension('preciousObjects')
800 else:
801 # This isn't perfect, but it's the best we can do with old git.
802 print('\r%s: WARNING: shared projects are unreliable when using '
803 'old versions of git; please upgrade to git-2.7.0+.'
804 % (relpath,),
805 file=sys.stderr)
806 project.config.SetString('gc.pruneExpire', 'never')
807 else:
808 if not opt.quiet:
809 print(f'\r{relpath}: not shared, disabling pruning.')
810 project.config.SetString('extensions.preciousObjects', None)
811 project.config.SetString('gc.pruneExpire', None)
812 916
813 def _GCProjects(self, projects, opt, err_event): 917 return (
814 """Perform garbage collection. 918 self.ExecuteInParallel(
919 opt.jobs_checkout,
920 functools.partial(
921 self._CheckoutOne, opt.detach_head, opt.force_sync
922 ),
923 all_projects,
924 callback=_ProcessResults,
925 output=Progress(
926 "Checking out", len(all_projects), quiet=opt.quiet
927 ),
928 )
929 and not err_results
930 )
815 931
816 If We are skipping garbage collection (opt.auto_gc not set), we still want 932 @staticmethod
817 to potentially mark objects precious, so that `git gc` does not discard 933 def _GetPreciousObjectsState(project: Project, opt):
818 shared objects. 934 """Get the preciousObjects state for the project.
819 """ 935
820 if not opt.auto_gc: 936 Args:
821 # Just repair preciousObjects state, and return. 937 project (Project): the project to examine, and possibly correct.
822 for project in projects: 938 opt (optparse.Values): options given to sync.
823 self._SetPreciousObjectsState(project, opt) 939
824 return 940 Returns:
825 941 Expected state of extensions.preciousObjects:
826 pm = Progress('Garbage collecting', len(projects), delay=False, 942 False: Should be disabled. (not present)
827 quiet=opt.quiet) 943 True: Should be enabled.
828 pm.update(inc=0, msg='prescan') 944 """
829 945 if project.use_git_worktrees:
830 tidy_dirs = {} 946 return False
831 for project in projects: 947 projects = project.manifest.GetProjectsWithName(
832 self._SetPreciousObjectsState(project, opt) 948 project.name, all_manifests=True
833
834 project.config.SetString('gc.autoDetach', 'false')
835 # Only call git gc once per objdir, but call pack-refs for the remainder.
836 if project.objdir not in tidy_dirs:
837 tidy_dirs[project.objdir] = (
838 True, # Run a full gc.
839 project.bare_git,
840 ) 949 )
841 elif project.gitdir not in tidy_dirs: 950 if len(projects) == 1:
842 tidy_dirs[project.gitdir] = ( 951 return False
843 False, # Do not run a full gc; just run pack-refs. 952 if len(projects) > 1:
844 project.bare_git, 953 # Objects are potentially shared with another project.
954 # See the logic in Project.Sync_NetworkHalf regarding UseAlternates.
955 # - When False, shared projects share (via symlink)
956 # .repo/project-objects/{PROJECT_NAME}.git as the one-and-only
957 # objects directory. All objects are precious, since there is no
958 # project with a complete set of refs.
959 # - When True, shared projects share (via info/alternates)
960 # .repo/project-objects/{PROJECT_NAME}.git as an alternate object
961 # store, which is written only on the first clone of the project,
962 # and is not written subsequently. (When Sync_NetworkHalf sees
963 # that it exists, it makes sure that the alternates file points
964 # there, and uses a project-local .git/objects directory for all
965 # syncs going forward.
966 # We do not support switching between the options. The environment
967 # variable is present for testing and migration only.
968 return not project.UseAlternates
969
970 return False
971
972 def _SetPreciousObjectsState(self, project: Project, opt):
973 """Correct the preciousObjects state for the project.
974
975 Args:
976 project: the project to examine, and possibly correct.
977 opt: options given to sync.
978 """
979 expected = self._GetPreciousObjectsState(project, opt)
980 actual = (
981 project.config.GetBoolean("extensions.preciousObjects") or False
845 ) 982 )
846 983 relpath = project.RelPath(local=opt.this_manifest_only)
847 jobs = opt.jobs 984
848 985 if expected != actual:
849 if jobs < 2: 986 # If this is unexpected, log it and repair.
850 for (run_gc, bare_git) in tidy_dirs.values(): 987 Trace(
851 pm.update(msg=bare_git._project.name) 988 f"{relpath} expected preciousObjects={expected}, got {actual}"
852 989 )
853 if run_gc: 990 if expected:
854 bare_git.gc('--auto') 991 if not opt.quiet:
992 print(
993 "\r%s: Shared project %s found, disabling pruning."
994 % (relpath, project.name)
995 )
996 if git_require((2, 7, 0)):
997 project.EnableRepositoryExtension("preciousObjects")
998 else:
999 # This isn't perfect, but it's the best we can do with old
1000 # git.
1001 print(
1002 "\r%s: WARNING: shared projects are unreliable when "
1003 "using old versions of git; please upgrade to "
1004 "git-2.7.0+." % (relpath,),
1005 file=sys.stderr,
1006 )
1007 project.config.SetString("gc.pruneExpire", "never")
1008 else:
1009 if not opt.quiet:
1010 print(f"\r{relpath}: not shared, disabling pruning.")
1011 project.config.SetString("extensions.preciousObjects", None)
1012 project.config.SetString("gc.pruneExpire", None)
1013
1014 def _GCProjects(self, projects, opt, err_event):
1015 """Perform garbage collection.
1016
1017 If We are skipping garbage collection (opt.auto_gc not set), we still
1018 want to potentially mark objects precious, so that `git gc` does not
1019 discard shared objects.
1020 """
1021 if not opt.auto_gc:
1022 # Just repair preciousObjects state, and return.
1023 for project in projects:
1024 self._SetPreciousObjectsState(project, opt)
1025 return
1026
1027 pm = Progress(
1028 "Garbage collecting", len(projects), delay=False, quiet=opt.quiet
1029 )
1030 pm.update(inc=0, msg="prescan")
1031
1032 tidy_dirs = {}
1033 for project in projects:
1034 self._SetPreciousObjectsState(project, opt)
1035
1036 project.config.SetString("gc.autoDetach", "false")
1037 # Only call git gc once per objdir, but call pack-refs for the
1038 # remainder.
1039 if project.objdir not in tidy_dirs:
1040 tidy_dirs[project.objdir] = (
1041 True, # Run a full gc.
1042 project.bare_git,
1043 )
1044 elif project.gitdir not in tidy_dirs:
1045 tidy_dirs[project.gitdir] = (
1046 False, # Do not run a full gc; just run pack-refs.
1047 project.bare_git,
1048 )
1049
1050 jobs = opt.jobs
1051
1052 if jobs < 2:
1053 for run_gc, bare_git in tidy_dirs.values():
1054 pm.update(msg=bare_git._project.name)
1055
1056 if run_gc:
1057 bare_git.gc("--auto")
1058 else:
1059 bare_git.pack_refs()
1060 pm.end()
1061 return
1062
1063 cpu_count = os.cpu_count()
1064 config = {"pack.threads": cpu_count // jobs if cpu_count > jobs else 1}
1065
1066 threads = set()
1067 sem = _threading.Semaphore(jobs)
1068
1069 def tidy_up(run_gc, bare_git):
1070 pm.start(bare_git._project.name)
1071 try:
1072 try:
1073 if run_gc:
1074 bare_git.gc("--auto", config=config)
1075 else:
1076 bare_git.pack_refs(config=config)
1077 except GitError:
1078 err_event.set()
1079 except Exception:
1080 err_event.set()
1081 raise
1082 finally:
1083 pm.finish(bare_git._project.name)
1084 sem.release()
1085
1086 for run_gc, bare_git in tidy_dirs.values():
1087 if err_event.is_set() and opt.fail_fast:
1088 break
1089 sem.acquire()
1090 t = _threading.Thread(
1091 target=tidy_up,
1092 args=(
1093 run_gc,
1094 bare_git,
1095 ),
1096 )
1097 t.daemon = True
1098 threads.add(t)
1099 t.start()
1100
1101 for t in threads:
1102 t.join()
1103 pm.end()
1104
1105 def _ReloadManifest(self, manifest_name, manifest):
1106 """Reload the manfiest from the file specified by the |manifest_name|.
1107
1108 It unloads the manifest if |manifest_name| is None.
1109
1110 Args:
1111 manifest_name: Manifest file to be reloaded.
1112 manifest: The manifest to use.
1113 """
1114 if manifest_name:
1115 # Override calls Unload already.
1116 manifest.Override(manifest_name)
855 else: 1117 else:
856 bare_git.pack_refs() 1118 manifest.Unload()
857 pm.end() 1119
858 return 1120 def UpdateProjectList(self, opt, manifest):
859 1121 """Update the cached projects list for |manifest|
860 cpu_count = os.cpu_count() 1122
861 config = {'pack.threads': cpu_count // jobs if cpu_count > jobs else 1} 1123 In a multi-manifest checkout, each manifest has its own project.list.
862 1124
863 threads = set() 1125 Args:
864 sem = _threading.Semaphore(jobs) 1126 opt: Program options returned from optparse. See _Options().
1127 manifest: The manifest to use.
1128
1129 Returns:
1130 0: success
1131 1: failure
1132 """
1133 new_project_paths = []
1134 for project in self.GetProjects(
1135 None, missing_ok=True, manifest=manifest, all_manifests=False
1136 ):
1137 if project.relpath:
1138 new_project_paths.append(project.relpath)
1139 file_name = "project.list"
1140 file_path = os.path.join(manifest.subdir, file_name)
1141 old_project_paths = []
1142
1143 if os.path.exists(file_path):
1144 with open(file_path, "r") as fd:
1145 old_project_paths = fd.read().split("\n")
1146 # In reversed order, so subfolders are deleted before parent folder.
1147 for path in sorted(old_project_paths, reverse=True):
1148 if not path:
1149 continue
1150 if path not in new_project_paths:
1151 # If the path has already been deleted, we don't need to do
1152 # it.
1153 gitdir = os.path.join(manifest.topdir, path, ".git")
1154 if os.path.exists(gitdir):
1155 project = Project(
1156 manifest=manifest,
1157 name=path,
1158 remote=RemoteSpec("origin"),
1159 gitdir=gitdir,
1160 objdir=gitdir,
1161 use_git_worktrees=os.path.isfile(gitdir),
1162 worktree=os.path.join(manifest.topdir, path),
1163 relpath=path,
1164 revisionExpr="HEAD",
1165 revisionId=None,
1166 groups=None,
1167 )
1168 if not project.DeleteWorktree(
1169 quiet=opt.quiet, force=opt.force_remove_dirty
1170 ):
1171 return 1
1172
1173 new_project_paths.sort()
1174 with open(file_path, "w") as fd:
1175 fd.write("\n".join(new_project_paths))
1176 fd.write("\n")
1177 return 0
1178
1179 def UpdateCopyLinkfileList(self, manifest):
1180 """Save all dests of copyfile and linkfile, and update them if needed.
1181
1182 Returns:
1183 Whether update was successful.
1184 """
1185 new_paths = {}
1186 new_linkfile_paths = []
1187 new_copyfile_paths = []
1188 for project in self.GetProjects(
1189 None, missing_ok=True, manifest=manifest, all_manifests=False
1190 ):
1191 new_linkfile_paths.extend(x.dest for x in project.linkfiles)
1192 new_copyfile_paths.extend(x.dest for x in project.copyfiles)
1193
1194 new_paths = {
1195 "linkfile": new_linkfile_paths,
1196 "copyfile": new_copyfile_paths,
1197 }
1198
1199 copylinkfile_name = "copy-link-files.json"
1200 copylinkfile_path = os.path.join(manifest.subdir, copylinkfile_name)
1201 old_copylinkfile_paths = {}
1202
1203 if os.path.exists(copylinkfile_path):
1204 with open(copylinkfile_path, "rb") as fp:
1205 try:
1206 old_copylinkfile_paths = json.load(fp)
1207 except Exception:
1208 print(
1209 "error: %s is not a json formatted file."
1210 % copylinkfile_path,
1211 file=sys.stderr,
1212 )
1213 platform_utils.remove(copylinkfile_path)
1214 return False
1215
1216 need_remove_files = []
1217 need_remove_files.extend(
1218 set(old_copylinkfile_paths.get("linkfile", []))
1219 - set(new_linkfile_paths)
1220 )
1221 need_remove_files.extend(
1222 set(old_copylinkfile_paths.get("copyfile", []))
1223 - set(new_copyfile_paths)
1224 )
1225
1226 for need_remove_file in need_remove_files:
1227 # Try to remove the updated copyfile or linkfile.
1228 # So, if the file is not exist, nothing need to do.
1229 platform_utils.remove(need_remove_file, missing_ok=True)
1230
1231 # Create copy-link-files.json, save dest path of "copyfile" and
1232 # "linkfile".
1233 with open(copylinkfile_path, "w", encoding="utf-8") as fp:
1234 json.dump(new_paths, fp)
1235 return True
1236
1237 def _SmartSyncSetup(self, opt, smart_sync_manifest_path, manifest):
1238 if not manifest.manifest_server:
1239 print(
1240 "error: cannot smart sync: no manifest server defined in "
1241 "manifest",
1242 file=sys.stderr,
1243 )
1244 sys.exit(1)
1245
1246 manifest_server = manifest.manifest_server
1247 if not opt.quiet:
1248 print("Using manifest server %s" % manifest_server)
1249
1250 if "@" not in manifest_server:
1251 username = None
1252 password = None
1253 if opt.manifest_server_username and opt.manifest_server_password:
1254 username = opt.manifest_server_username
1255 password = opt.manifest_server_password
1256 else:
1257 try:
1258 info = netrc.netrc()
1259 except IOError:
1260 # .netrc file does not exist or could not be opened.
1261 pass
1262 else:
1263 try:
1264 parse_result = urllib.parse.urlparse(manifest_server)
1265 if parse_result.hostname:
1266 auth = info.authenticators(parse_result.hostname)
1267 if auth:
1268 username, _account, password = auth
1269 else:
1270 print(
1271 "No credentials found for %s in .netrc"
1272 % parse_result.hostname,
1273 file=sys.stderr,
1274 )
1275 except netrc.NetrcParseError as e:
1276 print(
1277 "Error parsing .netrc file: %s" % e, file=sys.stderr
1278 )
1279
1280 if username and password:
1281 manifest_server = manifest_server.replace(
1282 "://", "://%s:%s@" % (username, password), 1
1283 )
1284
1285 transport = PersistentTransport(manifest_server)
1286 if manifest_server.startswith("persistent-"):
1287 manifest_server = manifest_server[len("persistent-") :]
865 1288
866 def tidy_up(run_gc, bare_git):
867 pm.start(bare_git._project.name)
868 try:
869 try: 1289 try:
870 if run_gc: 1290 server = xmlrpc.client.Server(manifest_server, transport=transport)
871 bare_git.gc('--auto', config=config) 1291 if opt.smart_sync:
872 else: 1292 branch = self._GetBranch(manifest.manifestProject)
873 bare_git.pack_refs(config=config) 1293
874 except GitError: 1294 if "SYNC_TARGET" in os.environ:
875 err_event.set() 1295 target = os.environ["SYNC_TARGET"]
876 except Exception: 1296 [success, manifest_str] = server.GetApprovedManifest(
877 err_event.set() 1297 branch, target
878 raise 1298 )
879 finally: 1299 elif (
880 pm.finish(bare_git._project.name) 1300 "TARGET_PRODUCT" in os.environ
881 sem.release() 1301 and "TARGET_BUILD_VARIANT" in os.environ
882 1302 ):
883 for (run_gc, bare_git) in tidy_dirs.values(): 1303 target = "%s-%s" % (
884 if err_event.is_set() and opt.fail_fast: 1304 os.environ["TARGET_PRODUCT"],
885 break 1305 os.environ["TARGET_BUILD_VARIANT"],
886 sem.acquire() 1306 )
887 t = _threading.Thread(target=tidy_up, args=(run_gc, bare_git,)) 1307 [success, manifest_str] = server.GetApprovedManifest(
888 t.daemon = True 1308 branch, target
889 threads.add(t) 1309 )
890 t.start() 1310 else:
891 1311 [success, manifest_str] = server.GetApprovedManifest(branch)
892 for t in threads: 1312 else:
893 t.join() 1313 assert opt.smart_tag
894 pm.end() 1314 [success, manifest_str] = server.GetManifest(opt.smart_tag)
895 1315
896 def _ReloadManifest(self, manifest_name, manifest): 1316 if success:
897 """Reload the manfiest from the file specified by the |manifest_name|. 1317 manifest_name = os.path.basename(smart_sync_manifest_path)
898 1318 try:
899 It unloads the manifest if |manifest_name| is None. 1319 with open(smart_sync_manifest_path, "w") as f:
900 1320 f.write(manifest_str)
901 Args: 1321 except IOError as e:
902 manifest_name: Manifest file to be reloaded. 1322 print(
903 manifest: The manifest to use. 1323 "error: cannot write manifest to %s:\n%s"
904 """ 1324 % (smart_sync_manifest_path, e),
905 if manifest_name: 1325 file=sys.stderr,
906 # Override calls Unload already 1326 )
907 manifest.Override(manifest_name) 1327 sys.exit(1)
908 else: 1328 self._ReloadManifest(manifest_name, manifest)
909 manifest.Unload() 1329 else:
910 1330 print(
911 def UpdateProjectList(self, opt, manifest): 1331 "error: manifest server RPC call failed: %s" % manifest_str,
912 """Update the cached projects list for |manifest| 1332 file=sys.stderr,
913 1333 )
914 In a multi-manifest checkout, each manifest has its own project.list. 1334 sys.exit(1)
1335 except (socket.error, IOError, xmlrpc.client.Fault) as e:
1336 print(
1337 "error: cannot connect to manifest server %s:\n%s"
1338 % (manifest.manifest_server, e),
1339 file=sys.stderr,
1340 )
1341 sys.exit(1)
1342 except xmlrpc.client.ProtocolError as e:
1343 print(
1344 "error: cannot connect to manifest server %s:\n%d %s"
1345 % (manifest.manifest_server, e.errcode, e.errmsg),
1346 file=sys.stderr,
1347 )
1348 sys.exit(1)
1349
1350 return manifest_name
1351
1352 def _UpdateAllManifestProjects(self, opt, mp, manifest_name):
1353 """Fetch & update the local manifest project.
1354
1355 After syncing the manifest project, if the manifest has any sub
1356 manifests, those are recursively processed.
1357
1358 Args:
1359 opt: Program options returned from optparse. See _Options().
1360 mp: the manifestProject to query.
1361 manifest_name: Manifest file to be reloaded.
1362 """
1363 if not mp.standalone_manifest_url:
1364 self._UpdateManifestProject(opt, mp, manifest_name)
1365
1366 if mp.manifest.submanifests:
1367 for submanifest in mp.manifest.submanifests.values():
1368 child = submanifest.repo_client.manifest
1369 child.manifestProject.SyncWithPossibleInit(
1370 submanifest,
1371 current_branch_only=self._GetCurrentBranchOnly(opt, child),
1372 verbose=opt.verbose,
1373 tags=opt.tags,
1374 git_event_log=self.git_event_log,
1375 )
1376 self._UpdateAllManifestProjects(
1377 opt, child.manifestProject, None
1378 )
1379
1380 def _UpdateManifestProject(self, opt, mp, manifest_name):
1381 """Fetch & update the local manifest project.
1382
1383 Args:
1384 opt: Program options returned from optparse. See _Options().
1385 mp: the manifestProject to query.
1386 manifest_name: Manifest file to be reloaded.
1387 """
1388 if not opt.local_only:
1389 start = time.time()
1390 success = mp.Sync_NetworkHalf(
1391 quiet=opt.quiet,
1392 verbose=opt.verbose,
1393 current_branch_only=self._GetCurrentBranchOnly(
1394 opt, mp.manifest
1395 ),
1396 force_sync=opt.force_sync,
1397 tags=opt.tags,
1398 optimized_fetch=opt.optimized_fetch,
1399 retry_fetches=opt.retry_fetches,
1400 submodules=mp.manifest.HasSubmodules,
1401 clone_filter=mp.manifest.CloneFilter,
1402 partial_clone_exclude=mp.manifest.PartialCloneExclude,
1403 )
1404 finish = time.time()
1405 self.event_log.AddSync(
1406 mp, event_log.TASK_SYNC_NETWORK, start, finish, success
1407 )
1408
1409 if mp.HasChanges:
1410 syncbuf = SyncBuffer(mp.config)
1411 start = time.time()
1412 mp.Sync_LocalHalf(syncbuf, submodules=mp.manifest.HasSubmodules)
1413 clean = syncbuf.Finish()
1414 self.event_log.AddSync(
1415 mp, event_log.TASK_SYNC_LOCAL, start, time.time(), clean
1416 )
1417 if not clean:
1418 sys.exit(1)
1419 self._ReloadManifest(manifest_name, mp.manifest)
1420
1421 def ValidateOptions(self, opt, args):
1422 if opt.force_broken:
1423 print(
1424 "warning: -f/--force-broken is now the default behavior, and "
1425 "the options are deprecated",
1426 file=sys.stderr,
1427 )
1428 if opt.network_only and opt.detach_head:
1429 self.OptionParser.error("cannot combine -n and -d")
1430 if opt.network_only and opt.local_only:
1431 self.OptionParser.error("cannot combine -n and -l")
1432 if opt.manifest_name and opt.smart_sync:
1433 self.OptionParser.error("cannot combine -m and -s")
1434 if opt.manifest_name and opt.smart_tag:
1435 self.OptionParser.error("cannot combine -m and -t")
1436 if opt.manifest_server_username or opt.manifest_server_password:
1437 if not (opt.smart_sync or opt.smart_tag):
1438 self.OptionParser.error(
1439 "-u and -p may only be combined with -s or -t"
1440 )
1441 if None in [
1442 opt.manifest_server_username,
1443 opt.manifest_server_password,
1444 ]:
1445 self.OptionParser.error("both -u and -p must be given")
1446
1447 if opt.prune is None:
1448 opt.prune = True
1449
1450 if opt.auto_gc is None and _AUTO_GC:
1451 print(
1452 f"Will run `git gc --auto` because {_REPO_AUTO_GC} is set.",
1453 f"{_REPO_AUTO_GC} is deprecated and will be removed in a ",
1454 "future release. Use `--auto-gc` instead.",
1455 file=sys.stderr,
1456 )
1457 opt.auto_gc = True
1458
1459 def _ValidateOptionsWithManifest(self, opt, mp):
1460 """Like ValidateOptions, but after we've updated the manifest.
1461
1462 Needed to handle sync-xxx option defaults in the manifest.
1463
1464 Args:
1465 opt: The options to process.
1466 mp: The manifest project to pull defaults from.
1467 """
1468 if not opt.jobs:
1469 # If the user hasn't made a choice, use the manifest value.
1470 opt.jobs = mp.manifest.default.sync_j
1471 if opt.jobs:
1472 # If --jobs has a non-default value, propagate it as the default for
1473 # --jobs-xxx flags too.
1474 if not opt.jobs_network:
1475 opt.jobs_network = opt.jobs
1476 if not opt.jobs_checkout:
1477 opt.jobs_checkout = opt.jobs
1478 else:
1479 # Neither user nor manifest have made a choice, so setup defaults.
1480 if not opt.jobs_network:
1481 opt.jobs_network = 1
1482 if not opt.jobs_checkout:
1483 opt.jobs_checkout = DEFAULT_LOCAL_JOBS
1484 opt.jobs = os.cpu_count()
1485
1486 # Try to stay under user rlimit settings.
1487 #
1488 # Since each worker requires at 3 file descriptors to run `git fetch`,
1489 # use that to scale down the number of jobs. Unfortunately there isn't
1490 # an easy way to determine this reliably as systems change, but it was
1491 # last measured by hand in 2011.
1492 soft_limit, _ = _rlimit_nofile()
1493 jobs_soft_limit = max(1, (soft_limit - 5) // 3)
1494 opt.jobs = min(opt.jobs, jobs_soft_limit)
1495 opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
1496 opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
1497
1498 def Execute(self, opt, args):
1499 manifest = self.outer_manifest
1500 if not opt.outer_manifest:
1501 manifest = self.manifest
1502
1503 if opt.manifest_name:
1504 manifest.Override(opt.manifest_name)
1505
1506 manifest_name = opt.manifest_name
1507 smart_sync_manifest_path = os.path.join(
1508 manifest.manifestProject.worktree, "smart_sync_override.xml"
1509 )
915 1510
916 Args: 1511 if opt.clone_bundle is None:
917 opt: Program options returned from optparse. See _Options(). 1512 opt.clone_bundle = manifest.CloneBundle
918 manifest: The manifest to use.
919 1513
920 Returns: 1514 if opt.smart_sync or opt.smart_tag:
921 0: success 1515 manifest_name = self._SmartSyncSetup(
922 1: failure 1516 opt, smart_sync_manifest_path, manifest
923 """ 1517 )
924 new_project_paths = []
925 for project in self.GetProjects(None, missing_ok=True, manifest=manifest,
926 all_manifests=False):
927 if project.relpath:
928 new_project_paths.append(project.relpath)
929 file_name = 'project.list'
930 file_path = os.path.join(manifest.subdir, file_name)
931 old_project_paths = []
932
933 if os.path.exists(file_path):
934 with open(file_path, 'r') as fd:
935 old_project_paths = fd.read().split('\n')
936 # In reversed order, so subfolders are deleted before parent folder.
937 for path in sorted(old_project_paths, reverse=True):
938 if not path:
939 continue
940 if path not in new_project_paths:
941 # If the path has already been deleted, we don't need to do it
942 gitdir = os.path.join(manifest.topdir, path, '.git')
943 if os.path.exists(gitdir):
944 project = Project(
945 manifest=manifest,
946 name=path,
947 remote=RemoteSpec('origin'),
948 gitdir=gitdir,
949 objdir=gitdir,
950 use_git_worktrees=os.path.isfile(gitdir),
951 worktree=os.path.join(manifest.topdir, path),
952 relpath=path,
953 revisionExpr='HEAD',
954 revisionId=None,
955 groups=None)
956 if not project.DeleteWorktree(
957 quiet=opt.quiet,
958 force=opt.force_remove_dirty):
959 return 1
960
961 new_project_paths.sort()
962 with open(file_path, 'w') as fd:
963 fd.write('\n'.join(new_project_paths))
964 fd.write('\n')
965 return 0
966
967 def UpdateCopyLinkfileList(self, manifest):
968 """Save all dests of copyfile and linkfile, and update them if needed.
969
970 Returns:
971 Whether update was successful.
972 """
973 new_paths = {}
974 new_linkfile_paths = []
975 new_copyfile_paths = []
976 for project in self.GetProjects(None, missing_ok=True,
977 manifest=manifest, all_manifests=False):
978 new_linkfile_paths.extend(x.dest for x in project.linkfiles)
979 new_copyfile_paths.extend(x.dest for x in project.copyfiles)
980
981 new_paths = {
982 'linkfile': new_linkfile_paths,
983 'copyfile': new_copyfile_paths,
984 }
985
986 copylinkfile_name = 'copy-link-files.json'
987 copylinkfile_path = os.path.join(manifest.subdir, copylinkfile_name)
988 old_copylinkfile_paths = {}
989
990 if os.path.exists(copylinkfile_path):
991 with open(copylinkfile_path, 'rb') as fp:
992 try:
993 old_copylinkfile_paths = json.load(fp)
994 except Exception:
995 print('error: %s is not a json formatted file.' %
996 copylinkfile_path, file=sys.stderr)
997 platform_utils.remove(copylinkfile_path)
998 return False
999
1000 need_remove_files = []
1001 need_remove_files.extend(
1002 set(old_copylinkfile_paths.get('linkfile', [])) -
1003 set(new_linkfile_paths))
1004 need_remove_files.extend(
1005 set(old_copylinkfile_paths.get('copyfile', [])) -
1006 set(new_copyfile_paths))
1007
1008 for need_remove_file in need_remove_files:
1009 # Try to remove the updated copyfile or linkfile.
1010 # So, if the file is not exist, nothing need to do.
1011 platform_utils.remove(need_remove_file, missing_ok=True)
1012
1013 # Create copy-link-files.json, save dest path of "copyfile" and "linkfile".
1014 with open(copylinkfile_path, 'w', encoding='utf-8') as fp:
1015 json.dump(new_paths, fp)
1016 return True
1017
1018 def _SmartSyncSetup(self, opt, smart_sync_manifest_path, manifest):
1019 if not manifest.manifest_server:
1020 print('error: cannot smart sync: no manifest server defined in '
1021 'manifest', file=sys.stderr)
1022 sys.exit(1)
1023
1024 manifest_server = manifest.manifest_server
1025 if not opt.quiet:
1026 print('Using manifest server %s' % manifest_server)
1027
1028 if '@' not in manifest_server:
1029 username = None
1030 password = None
1031 if opt.manifest_server_username and opt.manifest_server_password:
1032 username = opt.manifest_server_username
1033 password = opt.manifest_server_password
1034 else:
1035 try:
1036 info = netrc.netrc()
1037 except IOError:
1038 # .netrc file does not exist or could not be opened
1039 pass
1040 else: 1518 else:
1041 try: 1519 if os.path.isfile(smart_sync_manifest_path):
1042 parse_result = urllib.parse.urlparse(manifest_server) 1520 try:
1043 if parse_result.hostname: 1521 platform_utils.remove(smart_sync_manifest_path)
1044 auth = info.authenticators(parse_result.hostname) 1522 except OSError as e:
1045 if auth: 1523 print(
1046 username, _account, password = auth 1524 "error: failed to remove existing smart sync override "
1047 else: 1525 "manifest: %s" % e,
1048 print('No credentials found for %s in .netrc' 1526 file=sys.stderr,
1049 % parse_result.hostname, file=sys.stderr) 1527 )
1050 except netrc.NetrcParseError as e: 1528
1051 print('Error parsing .netrc file: %s' % e, file=sys.stderr) 1529 err_event = multiprocessing.Event()
1052 1530
1053 if (username and password): 1531 rp = manifest.repoProject
1054 manifest_server = manifest_server.replace('://', '://%s:%s@' % 1532 rp.PreSync()
1055 (username, password), 1533 cb = rp.CurrentBranch
1056 1) 1534 if cb:
1057 1535 base = rp.GetBranch(cb).merge
1058 transport = PersistentTransport(manifest_server) 1536 if not base or not base.startswith("refs/heads/"):
1059 if manifest_server.startswith('persistent-'): 1537 print(
1060 manifest_server = manifest_server[len('persistent-'):] 1538 "warning: repo is not tracking a remote branch, so it will "
1061 1539 "not receive updates; run `repo init --repo-rev=stable` to "
1062 try: 1540 "fix.",
1063 server = xmlrpc.client.Server(manifest_server, transport=transport) 1541 file=sys.stderr,
1064 if opt.smart_sync: 1542 )
1065 branch = self._GetBranch(manifest.manifestProject) 1543
1066 1544 for m in self.ManifestList(opt):
1067 if 'SYNC_TARGET' in os.environ: 1545 if not m.manifestProject.standalone_manifest_url:
1068 target = os.environ['SYNC_TARGET'] 1546 m.manifestProject.PreSync()
1069 [success, manifest_str] = server.GetApprovedManifest(branch, target) 1547
1070 elif ('TARGET_PRODUCT' in os.environ and 1548 if opt.repo_upgraded:
1071 'TARGET_BUILD_VARIANT' in os.environ): 1549 _PostRepoUpgrade(manifest, quiet=opt.quiet)
1072 target = '%s-%s' % (os.environ['TARGET_PRODUCT'], 1550
1073 os.environ['TARGET_BUILD_VARIANT']) 1551 mp = manifest.manifestProject
1074 [success, manifest_str] = server.GetApprovedManifest(branch, target) 1552 if opt.mp_update:
1553 self._UpdateAllManifestProjects(opt, mp, manifest_name)
1075 else: 1554 else:
1076 [success, manifest_str] = server.GetApprovedManifest(branch) 1555 print("Skipping update of local manifest project.")
1077 else:
1078 assert(opt.smart_tag)
1079 [success, manifest_str] = server.GetManifest(opt.smart_tag)
1080 1556
1081 if success: 1557 # Now that the manifests are up-to-date, setup options whose defaults
1082 manifest_name = os.path.basename(smart_sync_manifest_path) 1558 # might be in the manifest.
1083 try: 1559 self._ValidateOptionsWithManifest(opt, mp)
1084 with open(smart_sync_manifest_path, 'w') as f: 1560
1085 f.write(manifest_str) 1561 superproject_logging_data = {}
1086 except IOError as e: 1562 self._UpdateProjectsRevisionId(
1087 print('error: cannot write manifest to %s:\n%s' 1563 opt, args, superproject_logging_data, manifest
1088 % (smart_sync_manifest_path, e),
1089 file=sys.stderr)
1090 sys.exit(1)
1091 self._ReloadManifest(manifest_name, manifest)
1092 else:
1093 print('error: manifest server RPC call failed: %s' %
1094 manifest_str, file=sys.stderr)
1095 sys.exit(1)
1096 except (socket.error, IOError, xmlrpc.client.Fault) as e:
1097 print('error: cannot connect to manifest server %s:\n%s'
1098 % (manifest.manifest_server, e), file=sys.stderr)
1099 sys.exit(1)
1100 except xmlrpc.client.ProtocolError as e:
1101 print('error: cannot connect to manifest server %s:\n%d %s'
1102 % (manifest.manifest_server, e.errcode, e.errmsg),
1103 file=sys.stderr)
1104 sys.exit(1)
1105
1106 return manifest_name
1107
1108 def _UpdateAllManifestProjects(self, opt, mp, manifest_name):
1109 """Fetch & update the local manifest project.
1110
1111 After syncing the manifest project, if the manifest has any sub manifests,
1112 those are recursively processed.
1113
1114 Args:
1115 opt: Program options returned from optparse. See _Options().
1116 mp: the manifestProject to query.
1117 manifest_name: Manifest file to be reloaded.
1118 """
1119 if not mp.standalone_manifest_url:
1120 self._UpdateManifestProject(opt, mp, manifest_name)
1121
1122 if mp.manifest.submanifests:
1123 for submanifest in mp.manifest.submanifests.values():
1124 child = submanifest.repo_client.manifest
1125 child.manifestProject.SyncWithPossibleInit(
1126 submanifest,
1127 current_branch_only=self._GetCurrentBranchOnly(opt, child),
1128 verbose=opt.verbose,
1129 tags=opt.tags,
1130 git_event_log=self.git_event_log,
1131 ) 1564 )
1132 self._UpdateAllManifestProjects(opt, child.manifestProject, None)
1133 1565
1134 def _UpdateManifestProject(self, opt, mp, manifest_name): 1566 if self.gitc_manifest:
1135 """Fetch & update the local manifest project. 1567 gitc_manifest_projects = self.GetProjects(args, missing_ok=True)
1568 gitc_projects = []
1569 opened_projects = []
1570 for project in gitc_manifest_projects:
1571 if (
1572 project.relpath in self.gitc_manifest.paths
1573 and self.gitc_manifest.paths[project.relpath].old_revision
1574 ):
1575 opened_projects.append(project.relpath)
1576 else:
1577 gitc_projects.append(project.relpath)
1578
1579 if not args:
1580 gitc_projects = None
1581
1582 if gitc_projects != [] and not opt.local_only:
1583 print(
1584 "Updating GITC client: %s"
1585 % self.gitc_manifest.gitc_client_name
1586 )
1587 manifest = GitcManifest(
1588 self.repodir, self.gitc_manifest.gitc_client_name
1589 )
1590 if manifest_name:
1591 manifest.Override(manifest_name)
1592 else:
1593 manifest.Override(manifest.manifestFile)
1594 gitc_utils.generate_gitc_manifest(
1595 self.gitc_manifest, manifest, gitc_projects
1596 )
1597 print("GITC client successfully synced.")
1598
1599 # The opened projects need to be synced as normal, therefore we
1600 # generate a new args list to represent the opened projects.
1601 # TODO: make this more reliable -- if there's a project name/path
1602 # overlap, this may choose the wrong project.
1603 args = [
1604 os.path.relpath(manifest.paths[path].worktree, os.getcwd())
1605 for path in opened_projects
1606 ]
1607 if not args:
1608 return
1609
1610 all_projects = self.GetProjects(
1611 args,
1612 missing_ok=True,
1613 submodules_ok=opt.fetch_submodules,
1614 manifest=manifest,
1615 all_manifests=not opt.this_manifest_only,
1616 )
1136 1617
1137 Args: 1618 err_network_sync = False
1138 opt: Program options returned from optparse. See _Options(). 1619 err_update_projects = False
1139 mp: the manifestProject to query. 1620 err_update_linkfiles = False
1140 manifest_name: Manifest file to be reloaded. 1621
1141 """ 1622 self._fetch_times = _FetchTimes(manifest)
1142 if not opt.local_only: 1623 if not opt.local_only:
1143 start = time.time() 1624 with multiprocessing.Manager() as manager:
1144 success = mp.Sync_NetworkHalf(quiet=opt.quiet, verbose=opt.verbose, 1625 with ssh.ProxyManager(manager) as ssh_proxy:
1145 current_branch_only=self._GetCurrentBranchOnly(opt, mp.manifest), 1626 # Initialize the socket dir once in the parent.
1146 force_sync=opt.force_sync, 1627 ssh_proxy.sock()
1147 tags=opt.tags, 1628 result = self._FetchMain(
1148 optimized_fetch=opt.optimized_fetch, 1629 opt, args, all_projects, err_event, ssh_proxy, manifest
1149 retry_fetches=opt.retry_fetches, 1630 )
1150 submodules=mp.manifest.HasSubmodules, 1631 all_projects = result.all_projects
1151 clone_filter=mp.manifest.CloneFilter, 1632
1152 partial_clone_exclude=mp.manifest.PartialCloneExclude) 1633 if opt.network_only:
1153 finish = time.time() 1634 return
1154 self.event_log.AddSync(mp, event_log.TASK_SYNC_NETWORK, 1635
1155 start, finish, success) 1636 # If we saw an error, exit with code 1 so that other scripts can
1156 1637 # check.
1157 if mp.HasChanges: 1638 if err_event.is_set():
1158 syncbuf = SyncBuffer(mp.config) 1639 err_network_sync = True
1159 start = time.time() 1640 if opt.fail_fast:
1160 mp.Sync_LocalHalf(syncbuf, submodules=mp.manifest.HasSubmodules) 1641 print(
1161 clean = syncbuf.Finish() 1642 "\nerror: Exited sync due to fetch errors.\n"
1162 self.event_log.AddSync(mp, event_log.TASK_SYNC_LOCAL, 1643 "Local checkouts *not* updated. Resolve network issues "
1163 start, time.time(), clean) 1644 "& retry.\n"
1164 if not clean: 1645 "`repo sync -l` will update some local checkouts.",
1165 sys.exit(1) 1646 file=sys.stderr,
1166 self._ReloadManifest(manifest_name, mp.manifest) 1647 )
1167 1648 sys.exit(1)
1168 def ValidateOptions(self, opt, args): 1649
1169 if opt.force_broken: 1650 for m in self.ManifestList(opt):
1170 print('warning: -f/--force-broken is now the default behavior, and the ' 1651 if m.IsMirror or m.IsArchive:
1171 'options are deprecated', file=sys.stderr) 1652 # Bail out now, we have no working tree.
1172 if opt.network_only and opt.detach_head: 1653 continue
1173 self.OptionParser.error('cannot combine -n and -d') 1654
1174 if opt.network_only and opt.local_only: 1655 if self.UpdateProjectList(opt, m):
1175 self.OptionParser.error('cannot combine -n and -l') 1656 err_event.set()
1176 if opt.manifest_name and opt.smart_sync: 1657 err_update_projects = True
1177 self.OptionParser.error('cannot combine -m and -s') 1658 if opt.fail_fast:
1178 if opt.manifest_name and opt.smart_tag: 1659 print(
1179 self.OptionParser.error('cannot combine -m and -t') 1660 "\nerror: Local checkouts *not* updated.",
1180 if opt.manifest_server_username or opt.manifest_server_password: 1661 file=sys.stderr,
1181 if not (opt.smart_sync or opt.smart_tag): 1662 )
1182 self.OptionParser.error('-u and -p may only be combined with -s or -t') 1663 sys.exit(1)
1183 if None in [opt.manifest_server_username, opt.manifest_server_password]: 1664
1184 self.OptionParser.error('both -u and -p must be given') 1665 err_update_linkfiles = not self.UpdateCopyLinkfileList(m)
1185 1666 if err_update_linkfiles:
1186 if opt.prune is None: 1667 err_event.set()
1187 opt.prune = True 1668 if opt.fail_fast:
1188 1669 print(
1189 if opt.auto_gc is None and _AUTO_GC: 1670 "\nerror: Local update copyfile or linkfile failed.",
1190 print(f"Will run `git gc --auto` because {_REPO_AUTO_GC} is set.", 1671 file=sys.stderr,
1191 f'{_REPO_AUTO_GC} is deprecated and will be removed in a future', 1672 )
1192 'release. Use `--auto-gc` instead.', file=sys.stderr) 1673 sys.exit(1)
1193 opt.auto_gc = True 1674
1194 1675 err_results = []
1195 def _ValidateOptionsWithManifest(self, opt, mp): 1676 # NB: We don't exit here because this is the last step.
1196 """Like ValidateOptions, but after we've updated the manifest. 1677 err_checkout = not self._Checkout(all_projects, opt, err_results)
1197 1678 if err_checkout:
1198 Needed to handle sync-xxx option defaults in the manifest. 1679 err_event.set()
1199 1680
1200 Args: 1681 printed_notices = set()
1201 opt: The options to process. 1682 # If there's a notice that's supposed to print at the end of the sync,
1202 mp: The manifest project to pull defaults from. 1683 # print it now... But avoid printing duplicate messages, and preserve
1203 """ 1684 # order.
1204 if not opt.jobs: 1685 for m in sorted(self.ManifestList(opt), key=lambda x: x.path_prefix):
1205 # If the user hasn't made a choice, use the manifest value. 1686 if m.notice and m.notice not in printed_notices:
1206 opt.jobs = mp.manifest.default.sync_j 1687 print(m.notice)
1207 if opt.jobs: 1688 printed_notices.add(m.notice)
1208 # If --jobs has a non-default value, propagate it as the default for 1689
1209 # --jobs-xxx flags too. 1690 # If we saw an error, exit with code 1 so that other scripts can check.
1210 if not opt.jobs_network: 1691 if err_event.is_set():
1211 opt.jobs_network = opt.jobs 1692 print("\nerror: Unable to fully sync the tree.", file=sys.stderr)
1212 if not opt.jobs_checkout: 1693 if err_network_sync:
1213 opt.jobs_checkout = opt.jobs 1694 print(
1214 else: 1695 "error: Downloading network changes failed.",
1215 # Neither user nor manifest have made a choice, so setup defaults. 1696 file=sys.stderr,
1216 if not opt.jobs_network: 1697 )
1217 opt.jobs_network = 1 1698 if err_update_projects:
1218 if not opt.jobs_checkout: 1699 print(
1219 opt.jobs_checkout = DEFAULT_LOCAL_JOBS 1700 "error: Updating local project lists failed.",
1220 opt.jobs = os.cpu_count() 1701 file=sys.stderr,
1221 1702 )
1222 # Try to stay under user rlimit settings. 1703 if err_update_linkfiles:
1223 # 1704 print(
1224 # Since each worker requires at 3 file descriptors to run `git fetch`, use 1705 "error: Updating copyfiles or linkfiles failed.",
1225 # that to scale down the number of jobs. Unfortunately there isn't an easy 1706 file=sys.stderr,
1226 # way to determine this reliably as systems change, but it was last measured 1707 )
1227 # by hand in 2011. 1708 if err_checkout:
1228 soft_limit, _ = _rlimit_nofile() 1709 print(
1229 jobs_soft_limit = max(1, (soft_limit - 5) // 3) 1710 "error: Checking out local projects failed.",
1230 opt.jobs = min(opt.jobs, jobs_soft_limit) 1711 file=sys.stderr,
1231 opt.jobs_network = min(opt.jobs_network, jobs_soft_limit) 1712 )
1232 opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit) 1713 if err_results:
1233 1714 print(
1234 def Execute(self, opt, args): 1715 "Failing repos:\n%s" % "\n".join(err_results),
1235 manifest = self.outer_manifest 1716 file=sys.stderr,
1236 if not opt.outer_manifest: 1717 )
1237 manifest = self.manifest 1718 print(
1238 1719 'Try re-running with "-j1 --fail-fast" to exit at the first '
1239 if opt.manifest_name: 1720 "error.",
1240 manifest.Override(opt.manifest_name) 1721 file=sys.stderr,
1241 1722 )
1242 manifest_name = opt.manifest_name 1723 sys.exit(1)
1243 smart_sync_manifest_path = os.path.join( 1724
1244 manifest.manifestProject.worktree, 'smart_sync_override.xml') 1725 # Log the previous sync analysis state from the config.
1245 1726 self.git_event_log.LogDataConfigEvents(
1246 if opt.clone_bundle is None: 1727 mp.config.GetSyncAnalysisStateData(), "previous_sync_state"
1247 opt.clone_bundle = manifest.CloneBundle 1728 )
1248
1249 if opt.smart_sync or opt.smart_tag:
1250 manifest_name = self._SmartSyncSetup(opt, smart_sync_manifest_path, manifest)
1251 else:
1252 if os.path.isfile(smart_sync_manifest_path):
1253 try:
1254 platform_utils.remove(smart_sync_manifest_path)
1255 except OSError as e:
1256 print('error: failed to remove existing smart sync override manifest: %s' %
1257 e, file=sys.stderr)
1258
1259 err_event = multiprocessing.Event()
1260
1261 rp = manifest.repoProject
1262 rp.PreSync()
1263 cb = rp.CurrentBranch
1264 if cb:
1265 base = rp.GetBranch(cb).merge
1266 if not base or not base.startswith('refs/heads/'):
1267 print('warning: repo is not tracking a remote branch, so it will not '
1268 'receive updates; run `repo init --repo-rev=stable` to fix.',
1269 file=sys.stderr)
1270
1271 for m in self.ManifestList(opt):
1272 if not m.manifestProject.standalone_manifest_url:
1273 m.manifestProject.PreSync()
1274
1275 if opt.repo_upgraded:
1276 _PostRepoUpgrade(manifest, quiet=opt.quiet)
1277
1278 mp = manifest.manifestProject
1279 if opt.mp_update:
1280 self._UpdateAllManifestProjects(opt, mp, manifest_name)
1281 else:
1282 print('Skipping update of local manifest project.')
1283
1284 # Now that the manifests are up-to-date, setup options whose defaults might
1285 # be in the manifest.
1286 self._ValidateOptionsWithManifest(opt, mp)
1287
1288 superproject_logging_data = {}
1289 self._UpdateProjectsRevisionId(opt, args, superproject_logging_data,
1290 manifest)
1291
1292 if self.gitc_manifest:
1293 gitc_manifest_projects = self.GetProjects(args, missing_ok=True)
1294 gitc_projects = []
1295 opened_projects = []
1296 for project in gitc_manifest_projects:
1297 if project.relpath in self.gitc_manifest.paths and \
1298 self.gitc_manifest.paths[project.relpath].old_revision:
1299 opened_projects.append(project.relpath)
1300 else:
1301 gitc_projects.append(project.relpath)
1302 1729
1303 if not args: 1730 # Update and log with the new sync analysis state.
1304 gitc_projects = None 1731 mp.config.UpdateSyncAnalysisState(opt, superproject_logging_data)
1732 self.git_event_log.LogDataConfigEvents(
1733 mp.config.GetSyncAnalysisStateData(), "current_sync_state"
1734 )
1305 1735
1306 if gitc_projects != [] and not opt.local_only: 1736 if not opt.quiet:
1307 print('Updating GITC client: %s' % self.gitc_manifest.gitc_client_name) 1737 print("repo sync has finished successfully.")
1308 manifest = GitcManifest(self.repodir, self.gitc_manifest.gitc_client_name)
1309 if manifest_name:
1310 manifest.Override(manifest_name)
1311 else:
1312 manifest.Override(manifest.manifestFile)
1313 gitc_utils.generate_gitc_manifest(self.gitc_manifest,
1314 manifest,
1315 gitc_projects)
1316 print('GITC client successfully synced.')
1317
1318 # The opened projects need to be synced as normal, therefore we
1319 # generate a new args list to represent the opened projects.
1320 # TODO: make this more reliable -- if there's a project name/path overlap,
1321 # this may choose the wrong project.
1322 args = [os.path.relpath(manifest.paths[path].worktree, os.getcwd())
1323 for path in opened_projects]
1324 if not args:
1325 return
1326
1327 all_projects = self.GetProjects(args,
1328 missing_ok=True,
1329 submodules_ok=opt.fetch_submodules,
1330 manifest=manifest,
1331 all_manifests=not opt.this_manifest_only)
1332
1333 err_network_sync = False
1334 err_update_projects = False
1335 err_update_linkfiles = False
1336
1337 self._fetch_times = _FetchTimes(manifest)
1338 if not opt.local_only:
1339 with multiprocessing.Manager() as manager:
1340 with ssh.ProxyManager(manager) as ssh_proxy:
1341 # Initialize the socket dir once in the parent.
1342 ssh_proxy.sock()
1343 result = self._FetchMain(opt, args, all_projects, err_event,
1344 ssh_proxy, manifest)
1345 all_projects = result.all_projects
1346
1347 if opt.network_only:
1348 return
1349
1350 # If we saw an error, exit with code 1 so that other scripts can check.
1351 if err_event.is_set():
1352 err_network_sync = True
1353 if opt.fail_fast:
1354 print('\nerror: Exited sync due to fetch errors.\n'
1355 'Local checkouts *not* updated. Resolve network issues & '
1356 'retry.\n'
1357 '`repo sync -l` will update some local checkouts.',
1358 file=sys.stderr)
1359 sys.exit(1)
1360
1361 for m in self.ManifestList(opt):
1362 if m.IsMirror or m.IsArchive:
1363 # bail out now, we have no working tree
1364 continue
1365
1366 if self.UpdateProjectList(opt, m):
1367 err_event.set()
1368 err_update_projects = True
1369 if opt.fail_fast:
1370 print('\nerror: Local checkouts *not* updated.', file=sys.stderr)
1371 sys.exit(1)
1372
1373 err_update_linkfiles = not self.UpdateCopyLinkfileList(m)
1374 if err_update_linkfiles:
1375 err_event.set()
1376 if opt.fail_fast:
1377 print('\nerror: Local update copyfile or linkfile failed.', file=sys.stderr)
1378 sys.exit(1)
1379
1380 err_results = []
1381 # NB: We don't exit here because this is the last step.
1382 err_checkout = not self._Checkout(all_projects, opt, err_results)
1383 if err_checkout:
1384 err_event.set()
1385
1386 printed_notices = set()
1387 # If there's a notice that's supposed to print at the end of the sync,
1388 # print it now... But avoid printing duplicate messages, and preserve
1389 # order.
1390 for m in sorted(self.ManifestList(opt), key=lambda x: x.path_prefix):
1391 if m.notice and m.notice not in printed_notices:
1392 print(m.notice)
1393 printed_notices.add(m.notice)
1394
1395 # If we saw an error, exit with code 1 so that other scripts can check.
1396 if err_event.is_set():
1397 print('\nerror: Unable to fully sync the tree.', file=sys.stderr)
1398 if err_network_sync:
1399 print('error: Downloading network changes failed.', file=sys.stderr)
1400 if err_update_projects:
1401 print('error: Updating local project lists failed.', file=sys.stderr)
1402 if err_update_linkfiles:
1403 print('error: Updating copyfiles or linkfiles failed.', file=sys.stderr)
1404 if err_checkout:
1405 print('error: Checking out local projects failed.', file=sys.stderr)
1406 if err_results:
1407 print('Failing repos:\n%s' % '\n'.join(err_results), file=sys.stderr)
1408 print('Try re-running with "-j1 --fail-fast" to exit at the first error.',
1409 file=sys.stderr)
1410 sys.exit(1)
1411
1412 # Log the previous sync analysis state from the config.
1413 self.git_event_log.LogDataConfigEvents(mp.config.GetSyncAnalysisStateData(),
1414 'previous_sync_state')
1415
1416 # Update and log with the new sync analysis state.
1417 mp.config.UpdateSyncAnalysisState(opt, superproject_logging_data)
1418 self.git_event_log.LogDataConfigEvents(mp.config.GetSyncAnalysisStateData(),
1419 'current_sync_state')
1420
1421 if not opt.quiet:
1422 print('repo sync has finished successfully.')
1423 1738
1424 1739
1425def _PostRepoUpgrade(manifest, quiet=False): 1740def _PostRepoUpgrade(manifest, quiet=False):
1426 # Link the docs for the internal .repo/ layout for people 1741 # Link the docs for the internal .repo/ layout for people.
1427 link = os.path.join(manifest.repodir, 'internal-fs-layout.md') 1742 link = os.path.join(manifest.repodir, "internal-fs-layout.md")
1428 if not platform_utils.islink(link): 1743 if not platform_utils.islink(link):
1429 target = os.path.join('repo', 'docs', 'internal-fs-layout.md') 1744 target = os.path.join("repo", "docs", "internal-fs-layout.md")
1430 try: 1745 try:
1431 platform_utils.symlink(target, link) 1746 platform_utils.symlink(target, link)
1432 except Exception: 1747 except Exception:
1433 pass 1748 pass
1434 1749
1435 wrapper = Wrapper() 1750 wrapper = Wrapper()
1436 if wrapper.NeedSetupGnuPG(): 1751 if wrapper.NeedSetupGnuPG():
1437 wrapper.SetupGnuPG(quiet) 1752 wrapper.SetupGnuPG(quiet)
1438 for project in manifest.projects: 1753 for project in manifest.projects:
1439 if project.Exists: 1754 if project.Exists:
1440 project.PostRepoUpgrade() 1755 project.PostRepoUpgrade()
1441 1756
1442 1757
1443def _PostRepoFetch(rp, repo_verify=True, verbose=False): 1758def _PostRepoFetch(rp, repo_verify=True, verbose=False):
1444 if rp.HasChanges: 1759 if rp.HasChanges:
1445 print('info: A new version of repo is available', file=sys.stderr) 1760 print("info: A new version of repo is available", file=sys.stderr)
1446 wrapper = Wrapper() 1761 wrapper = Wrapper()
1447 try: 1762 try:
1448 rev = rp.bare_git.describe(rp.GetRevisionId()) 1763 rev = rp.bare_git.describe(rp.GetRevisionId())
1449 except GitError: 1764 except GitError:
1450 rev = None 1765 rev = None
1451 _, new_rev = wrapper.check_repo_rev(rp.gitdir, rev, repo_verify=repo_verify) 1766 _, new_rev = wrapper.check_repo_rev(
1452 # See if we're held back due to missing signed tag. 1767 rp.gitdir, rev, repo_verify=repo_verify
1453 current_revid = rp.bare_git.rev_parse('HEAD') 1768 )
1454 new_revid = rp.bare_git.rev_parse('--verify', new_rev) 1769 # See if we're held back due to missing signed tag.
1455 if current_revid != new_revid: 1770 current_revid = rp.bare_git.rev_parse("HEAD")
1456 # We want to switch to the new rev, but also not trash any uncommitted 1771 new_revid = rp.bare_git.rev_parse("--verify", new_rev)
1457 # changes. This helps with local testing/hacking. 1772 if current_revid != new_revid:
1458 # If a local change has been made, we will throw that away. 1773 # We want to switch to the new rev, but also not trash any
1459 # We also have to make sure this will switch to an older commit if that's 1774 # uncommitted changes. This helps with local testing/hacking.
1460 # the latest tag in order to support release rollback. 1775 # If a local change has been made, we will throw that away.
1461 try: 1776 # We also have to make sure this will switch to an older commit if
1462 rp.work_git.reset('--keep', new_rev) 1777 # that's the latest tag in order to support release rollback.
1463 except GitError as e: 1778 try:
1464 sys.exit(str(e)) 1779 rp.work_git.reset("--keep", new_rev)
1465 print('info: Restarting repo with latest version', file=sys.stderr) 1780 except GitError as e:
1466 raise RepoChangedException(['--repo-upgraded']) 1781 sys.exit(str(e))
1782 print("info: Restarting repo with latest version", file=sys.stderr)
1783 raise RepoChangedException(["--repo-upgraded"])
1784 else:
1785 print(
1786 "warning: Skipped upgrade to unverified version",
1787 file=sys.stderr,
1788 )
1467 else: 1789 else:
1468 print('warning: Skipped upgrade to unverified version', file=sys.stderr) 1790 if verbose:
1469 else: 1791 print(
1470 if verbose: 1792 "repo version %s is current" % rp.work_git.describe(HEAD),
1471 print('repo version %s is current' % rp.work_git.describe(HEAD), 1793 file=sys.stderr,
1472 file=sys.stderr) 1794 )
1473 1795
1474 1796
1475class _FetchTimes(object): 1797class _FetchTimes(object):
1476 _ALPHA = 0.5 1798 _ALPHA = 0.5
1477 1799
1478 def __init__(self, manifest): 1800 def __init__(self, manifest):
1479 self._path = os.path.join(manifest.repodir, '.repo_fetchtimes.json') 1801 self._path = os.path.join(manifest.repodir, ".repo_fetchtimes.json")
1480 self._times = None 1802 self._times = None
1481 self._seen = set() 1803 self._seen = set()
1482 1804
1483 def Get(self, project): 1805 def Get(self, project):
1484 self._Load() 1806 self._Load()
1485 return self._times.get(project.name, _ONE_DAY_S) 1807 return self._times.get(project.name, _ONE_DAY_S)
1486 1808
1487 def Set(self, project, t): 1809 def Set(self, project, t):
1488 self._Load() 1810 self._Load()
1489 name = project.name 1811 name = project.name
1490 old = self._times.get(name, t) 1812 old = self._times.get(name, t)
1491 self._seen.add(name) 1813 self._seen.add(name)
1492 a = self._ALPHA 1814 a = self._ALPHA
1493 self._times[name] = (a * t) + ((1 - a) * old) 1815 self._times[name] = (a * t) + ((1 - a) * old)
1494 1816
1495 def _Load(self): 1817 def _Load(self):
1496 if self._times is None: 1818 if self._times is None:
1497 try: 1819 try:
1498 with open(self._path) as f: 1820 with open(self._path) as f:
1499 self._times = json.load(f) 1821 self._times = json.load(f)
1500 except (IOError, ValueError): 1822 except (IOError, ValueError):
1501 platform_utils.remove(self._path, missing_ok=True) 1823 platform_utils.remove(self._path, missing_ok=True)
1502 self._times = {} 1824 self._times = {}
1503 1825
1504 def Save(self): 1826 def Save(self):
1505 if self._times is None: 1827 if self._times is None:
1506 return 1828 return
1507 1829
1508 to_delete = [] 1830 to_delete = []
1509 for name in self._times: 1831 for name in self._times:
1510 if name not in self._seen: 1832 if name not in self._seen:
1511 to_delete.append(name) 1833 to_delete.append(name)
1512 for name in to_delete: 1834 for name in to_delete:
1513 del self._times[name] 1835 del self._times[name]
1514 1836
1515 try: 1837 try:
1516 with open(self._path, 'w') as f: 1838 with open(self._path, "w") as f:
1517 json.dump(self._times, f, indent=2) 1839 json.dump(self._times, f, indent=2)
1518 except (IOError, TypeError): 1840 except (IOError, TypeError):
1519 platform_utils.remove(self._path, missing_ok=True) 1841 platform_utils.remove(self._path, missing_ok=True)
1842
1520 1843
1521# This is a replacement for xmlrpc.client.Transport using urllib2 1844# This is a replacement for xmlrpc.client.Transport using urllib2
1522# and supporting persistent-http[s]. It cannot change hosts from 1845# and supporting persistent-http[s]. It cannot change hosts from
@@ -1525,98 +1848,105 @@ class _FetchTimes(object):
1525 1848
1526 1849
1527class PersistentTransport(xmlrpc.client.Transport): 1850class PersistentTransport(xmlrpc.client.Transport):
1528 def __init__(self, orig_host): 1851 def __init__(self, orig_host):
1529 self.orig_host = orig_host 1852 self.orig_host = orig_host
1530 1853
1531 def request(self, host, handler, request_body, verbose=False): 1854 def request(self, host, handler, request_body, verbose=False):
1532 with GetUrlCookieFile(self.orig_host, not verbose) as (cookiefile, proxy): 1855 with GetUrlCookieFile(self.orig_host, not verbose) as (
1533 # Python doesn't understand cookies with the #HttpOnly_ prefix 1856 cookiefile,
1534 # Since we're only using them for HTTP, copy the file temporarily, 1857 proxy,
1535 # stripping those prefixes away. 1858 ):
1536 if cookiefile: 1859 # Python doesn't understand cookies with the #HttpOnly_ prefix
1537 tmpcookiefile = tempfile.NamedTemporaryFile(mode='w') 1860 # Since we're only using them for HTTP, copy the file temporarily,
1538 tmpcookiefile.write("# HTTP Cookie File") 1861 # stripping those prefixes away.
1539 try: 1862 if cookiefile:
1540 with open(cookiefile) as f: 1863 tmpcookiefile = tempfile.NamedTemporaryFile(mode="w")
1541 for line in f: 1864 tmpcookiefile.write("# HTTP Cookie File")
1542 if line.startswith("#HttpOnly_"): 1865 try:
1543 line = line[len("#HttpOnly_"):] 1866 with open(cookiefile) as f:
1544 tmpcookiefile.write(line) 1867 for line in f:
1545 tmpcookiefile.flush() 1868 if line.startswith("#HttpOnly_"):
1546 1869 line = line[len("#HttpOnly_") :]
1547 cookiejar = cookielib.MozillaCookieJar(tmpcookiefile.name) 1870 tmpcookiefile.write(line)
1548 try: 1871 tmpcookiefile.flush()
1549 cookiejar.load() 1872
1550 except cookielib.LoadError: 1873 cookiejar = cookielib.MozillaCookieJar(tmpcookiefile.name)
1551 cookiejar = cookielib.CookieJar() 1874 try:
1552 finally: 1875 cookiejar.load()
1553 tmpcookiefile.close() 1876 except cookielib.LoadError:
1554 else: 1877 cookiejar = cookielib.CookieJar()
1555 cookiejar = cookielib.CookieJar() 1878 finally:
1556 1879 tmpcookiefile.close()
1557 proxyhandler = urllib.request.ProxyHandler 1880 else:
1558 if proxy: 1881 cookiejar = cookielib.CookieJar()
1559 proxyhandler = urllib.request.ProxyHandler({ 1882
1560 "http": proxy, 1883 proxyhandler = urllib.request.ProxyHandler
1561 "https": proxy}) 1884 if proxy:
1562 1885 proxyhandler = urllib.request.ProxyHandler(
1563 opener = urllib.request.build_opener( 1886 {"http": proxy, "https": proxy}
1564 urllib.request.HTTPCookieProcessor(cookiejar), 1887 )
1565 proxyhandler) 1888
1566 1889 opener = urllib.request.build_opener(
1567 url = urllib.parse.urljoin(self.orig_host, handler) 1890 urllib.request.HTTPCookieProcessor(cookiejar), proxyhandler
1568 parse_results = urllib.parse.urlparse(url) 1891 )
1569 1892
1570 scheme = parse_results.scheme 1893 url = urllib.parse.urljoin(self.orig_host, handler)
1571 if scheme == 'persistent-http': 1894 parse_results = urllib.parse.urlparse(url)
1572 scheme = 'http' 1895
1573 if scheme == 'persistent-https': 1896 scheme = parse_results.scheme
1574 # If we're proxying through persistent-https, use http. The 1897 if scheme == "persistent-http":
1575 # proxy itself will do the https. 1898 scheme = "http"
1576 if proxy: 1899 if scheme == "persistent-https":
1577 scheme = 'http' 1900 # If we're proxying through persistent-https, use http. The
1578 else: 1901 # proxy itself will do the https.
1579 scheme = 'https' 1902 if proxy:
1580 1903 scheme = "http"
1581 # Parse out any authentication information using the base class 1904 else:
1582 host, extra_headers, _ = self.get_host_info(parse_results.netloc) 1905 scheme = "https"
1583 1906
1584 url = urllib.parse.urlunparse(( 1907 # Parse out any authentication information using the base class.
1585 scheme, 1908 host, extra_headers, _ = self.get_host_info(parse_results.netloc)
1586 host, 1909
1587 parse_results.path, 1910 url = urllib.parse.urlunparse(
1588 parse_results.params, 1911 (
1589 parse_results.query, 1912 scheme,
1590 parse_results.fragment)) 1913 host,
1591 1914 parse_results.path,
1592 request = urllib.request.Request(url, request_body) 1915 parse_results.params,
1593 if extra_headers is not None: 1916 parse_results.query,
1594 for (name, header) in extra_headers: 1917 parse_results.fragment,
1595 request.add_header(name, header) 1918 )
1596 request.add_header('Content-Type', 'text/xml') 1919 )
1597 try: 1920
1598 response = opener.open(request) 1921 request = urllib.request.Request(url, request_body)
1599 except urllib.error.HTTPError as e: 1922 if extra_headers is not None:
1600 if e.code == 501: 1923 for name, header in extra_headers:
1601 # We may have been redirected through a login process 1924 request.add_header(name, header)
1602 # but our POST turned into a GET. Retry. 1925 request.add_header("Content-Type", "text/xml")
1603 response = opener.open(request) 1926 try:
1604 else: 1927 response = opener.open(request)
1605 raise 1928 except urllib.error.HTTPError as e:
1606 1929 if e.code == 501:
1607 p, u = xmlrpc.client.getparser() 1930 # We may have been redirected through a login process
1608 # Response should be fairly small, so read it all at once. 1931 # but our POST turned into a GET. Retry.
1609 # This way we can show it to the user in case of error (e.g. HTML). 1932 response = opener.open(request)
1610 data = response.read() 1933 else:
1611 try: 1934 raise
1612 p.feed(data) 1935
1613 except xml.parsers.expat.ExpatError as e: 1936 p, u = xmlrpc.client.getparser()
1614 raise IOError( 1937 # Response should be fairly small, so read it all at once.
1615 f'Parsing the manifest failed: {e}\n' 1938 # This way we can show it to the user in case of error (e.g. HTML).
1616 f'Please report this to your manifest server admin.\n' 1939 data = response.read()
1617 f'Here is the full response:\n{data.decode("utf-8")}') 1940 try:
1618 p.close() 1941 p.feed(data)
1619 return u.close() 1942 except xml.parsers.expat.ExpatError as e:
1620 1943 raise IOError(
1621 def close(self): 1944 f"Parsing the manifest failed: {e}\n"
1622 pass 1945 f"Please report this to your manifest server admin.\n"
1946 f'Here is the full response:\n{data.decode("utf-8")}'
1947 )
1948 p.close()
1949 return u.close()
1950
1951 def close(self):
1952 pass
diff --git a/subcmds/upload.py b/subcmds/upload.py
index 9c279230..63216afb 100644
--- a/subcmds/upload.py
+++ b/subcmds/upload.py
@@ -32,69 +32,77 @@ _DEFAULT_UNUSUAL_COMMIT_THRESHOLD = 5
32 32
33 33
34def _VerifyPendingCommits(branches: List[ReviewableBranch]) -> bool: 34def _VerifyPendingCommits(branches: List[ReviewableBranch]) -> bool:
35 """Perform basic safety checks on the given set of branches. 35 """Perform basic safety checks on the given set of branches.
36 36
37 Ensures that each branch does not have a "large" number of commits 37 Ensures that each branch does not have a "large" number of commits
38 and, if so, prompts the user to confirm they want to proceed with 38 and, if so, prompts the user to confirm they want to proceed with
39 the upload. 39 the upload.
40 40
41 Returns true if all branches pass the safety check or the user 41 Returns true if all branches pass the safety check or the user
42 confirmed. Returns false if the upload should be aborted. 42 confirmed. Returns false if the upload should be aborted.
43 """ 43 """
44 44
45 # Determine if any branch has a suspicious number of commits. 45 # Determine if any branch has a suspicious number of commits.
46 many_commits = False 46 many_commits = False
47 for branch in branches: 47 for branch in branches:
48 # Get the user's unusual threshold for the branch. 48 # Get the user's unusual threshold for the branch.
49 # 49 #
50 # Each branch may be configured to have a different threshold. 50 # Each branch may be configured to have a different threshold.
51 remote = branch.project.GetBranch(branch.name).remote 51 remote = branch.project.GetBranch(branch.name).remote
52 key = f'review.{remote.review}.uploadwarningthreshold' 52 key = f"review.{remote.review}.uploadwarningthreshold"
53 threshold = branch.project.config.GetInt(key) 53 threshold = branch.project.config.GetInt(key)
54 if threshold is None: 54 if threshold is None:
55 threshold = _DEFAULT_UNUSUAL_COMMIT_THRESHOLD 55 threshold = _DEFAULT_UNUSUAL_COMMIT_THRESHOLD
56 56
57 # If the branch has more commits than the threshold, show a warning. 57 # If the branch has more commits than the threshold, show a warning.
58 if len(branch.commits) > threshold: 58 if len(branch.commits) > threshold:
59 many_commits = True 59 many_commits = True
60 break 60 break
61 61
62 # If any branch has many commits, prompt the user. 62 # If any branch has many commits, prompt the user.
63 if many_commits: 63 if many_commits:
64 if len(branches) > 1: 64 if len(branches) > 1:
65 print('ATTENTION: One or more branches has an unusually high number ' 65 print(
66 'of commits.') 66 "ATTENTION: One or more branches has an unusually high number "
67 else: 67 "of commits."
68 print('ATTENTION: You are uploading an unusually high number of commits.') 68 )
69 print('YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across ' 69 else:
70 'branches?)') 70 print(
71 answer = input( 71 "ATTENTION: You are uploading an unusually high number of "
72 "If you are sure you intend to do this, type 'yes': ").strip() 72 "commits."
73 return answer == 'yes' 73 )
74 74 print(
75 return True 75 "YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across "
76 "branches?)"
77 )
78 answer = input(
79 "If you are sure you intend to do this, type 'yes': "
80 ).strip()
81 return answer == "yes"
82
83 return True
76 84
77 85
78def _die(fmt, *args): 86def _die(fmt, *args):
79 msg = fmt % args 87 msg = fmt % args
80 print('error: %s' % msg, file=sys.stderr) 88 print("error: %s" % msg, file=sys.stderr)
81 sys.exit(1) 89 sys.exit(1)
82 90
83 91
84def _SplitEmails(values): 92def _SplitEmails(values):
85 result = [] 93 result = []
86 for value in values: 94 for value in values:
87 result.extend([s.strip() for s in value.split(',')]) 95 result.extend([s.strip() for s in value.split(",")])
88 return result 96 return result
89 97
90 98
91class Upload(InteractiveCommand): 99class Upload(InteractiveCommand):
92 COMMON = True 100 COMMON = True
93 helpSummary = "Upload changes for code review" 101 helpSummary = "Upload changes for code review"
94 helpUsage = """ 102 helpUsage = """
95%prog [--re --cc] [<project>]... 103%prog [--re --cc] [<project>]...
96""" 104"""
97 helpDescription = """ 105 helpDescription = """
98The '%prog' command is used to send changes to the Gerrit Code 106The '%prog' command is used to send changes to the Gerrit Code
99Review system. It searches for topic branches in local projects 107Review system. It searches for topic branches in local projects
100that have not yet been published for review. If multiple topic 108that have not yet been published for review. If multiple topic
@@ -195,443 +203,611 @@ threshold to a different value.
195Gerrit Code Review: https://www.gerritcodereview.com/ 203Gerrit Code Review: https://www.gerritcodereview.com/
196 204
197""" 205"""
198 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS 206 PARALLEL_JOBS = DEFAULT_LOCAL_JOBS
199 207
200 def _Options(self, p): 208 def _Options(self, p):
201 p.add_option('-t', 209 p.add_option(
202 dest='auto_topic', action='store_true', 210 "-t",
203 help='send local branch name to Gerrit Code Review') 211 dest="auto_topic",
204 p.add_option('--hashtag', '--ht', 212 action="store_true",
205 dest='hashtags', action='append', default=[], 213 help="send local branch name to Gerrit Code Review",
206 help='add hashtags (comma delimited) to the review') 214 )
207 p.add_option('--hashtag-branch', '--htb', 215 p.add_option(
208 action='store_true', 216 "--hashtag",
209 help='add local branch name as a hashtag') 217 "--ht",
210 p.add_option('-l', '--label', 218 dest="hashtags",
211 dest='labels', action='append', default=[], 219 action="append",
212 help='add a label when uploading') 220 default=[],
213 p.add_option('--re', '--reviewers', 221 help="add hashtags (comma delimited) to the review",
214 type='string', action='append', dest='reviewers', 222 )
215 help='request reviews from these people') 223 p.add_option(
216 p.add_option('--cc', 224 "--hashtag-branch",
217 type='string', action='append', dest='cc', 225 "--htb",
218 help='also send email to these email addresses') 226 action="store_true",
219 p.add_option('--br', '--branch', 227 help="add local branch name as a hashtag",
220 type='string', action='store', dest='branch', 228 )
221 help='(local) branch to upload') 229 p.add_option(
222 p.add_option('-c', '--current-branch', 230 "-l",
223 dest='current_branch', action='store_true', 231 "--label",
224 help='upload current git branch') 232 dest="labels",
225 p.add_option('--no-current-branch', 233 action="append",
226 dest='current_branch', action='store_false', 234 default=[],
227 help='upload all git branches') 235 help="add a label when uploading",
228 # Turn this into a warning & remove this someday. 236 )
229 p.add_option('--cbr', 237 p.add_option(
230 dest='current_branch', action='store_true', 238 "--re",
231 help=optparse.SUPPRESS_HELP) 239 "--reviewers",
232 p.add_option('--ne', '--no-emails', 240 type="string",
233 action='store_false', dest='notify', default=True, 241 action="append",
234 help='do not send e-mails on upload') 242 dest="reviewers",
235 p.add_option('-p', '--private', 243 help="request reviews from these people",
236 action='store_true', dest='private', default=False, 244 )
237 help='upload as a private change (deprecated; use --wip)') 245 p.add_option(
238 p.add_option('-w', '--wip', 246 "--cc",
239 action='store_true', dest='wip', default=False, 247 type="string",
240 help='upload as a work-in-progress change') 248 action="append",
241 p.add_option('-r', '--ready', 249 dest="cc",
242 action='store_true', default=False, 250 help="also send email to these email addresses",
243 help='mark change as ready (clears work-in-progress setting)') 251 )
244 p.add_option('-o', '--push-option', 252 p.add_option(
245 type='string', action='append', dest='push_options', 253 "--br",
246 default=[], 254 "--branch",
247 help='additional push options to transmit') 255 type="string",
248 p.add_option('-D', '--destination', '--dest', 256 action="store",
249 type='string', action='store', dest='dest_branch', 257 dest="branch",
250 metavar='BRANCH', 258 help="(local) branch to upload",
251 help='submit for review on this target branch') 259 )
252 p.add_option('-n', '--dry-run', 260 p.add_option(
253 dest='dryrun', default=False, action='store_true', 261 "-c",
254 help='do everything except actually upload the CL') 262 "--current-branch",
255 p.add_option('-y', '--yes', 263 dest="current_branch",
256 default=False, action='store_true', 264 action="store_true",
257 help='answer yes to all safe prompts') 265 help="upload current git branch",
258 p.add_option('--ignore-untracked-files', 266 )
259 action='store_true', default=False, 267 p.add_option(
260 help='ignore untracked files in the working copy') 268 "--no-current-branch",
261 p.add_option('--no-ignore-untracked-files', 269 dest="current_branch",
262 dest='ignore_untracked_files', action='store_false', 270 action="store_false",
263 help='always ask about untracked files in the working copy') 271 help="upload all git branches",
264 p.add_option('--no-cert-checks', 272 )
265 dest='validate_certs', action='store_false', default=True, 273 # Turn this into a warning & remove this someday.
266 help='disable verifying ssl certs (unsafe)') 274 p.add_option(
267 RepoHook.AddOptionGroup(p, 'pre-upload') 275 "--cbr",
268 276 dest="current_branch",
269 def _SingleBranch(self, opt, branch, people): 277 action="store_true",
270 project = branch.project 278 help=optparse.SUPPRESS_HELP,
271 name = branch.name 279 )
272 remote = project.GetBranch(name).remote 280 p.add_option(
273 281 "--ne",
274 key = 'review.%s.autoupload' % remote.review 282 "--no-emails",
275 answer = project.config.GetBoolean(key) 283 action="store_false",
276 284 dest="notify",
277 if answer is False: 285 default=True,
278 _die("upload blocked by %s = false" % key) 286 help="do not send e-mails on upload",
279 287 )
280 if answer is None: 288 p.add_option(
281 date = branch.date 289 "-p",
282 commit_list = branch.commits 290 "--private",
283 291 action="store_true",
284 destination = opt.dest_branch or project.dest_branch or project.revisionExpr 292 dest="private",
285 print('Upload project %s/ to remote branch %s%s:' % 293 default=False,
286 (project.RelPath(local=opt.this_manifest_only), destination, 294 help="upload as a private change (deprecated; use --wip)",
287 ' (private)' if opt.private else '')) 295 )
288 print(' branch %s (%2d commit%s, %s):' % ( 296 p.add_option(
289 name, 297 "-w",
290 len(commit_list), 298 "--wip",
291 len(commit_list) != 1 and 's' or '', 299 action="store_true",
292 date)) 300 dest="wip",
293 for commit in commit_list: 301 default=False,
294 print(' %s' % commit) 302 help="upload as a work-in-progress change",
295 303 )
296 print('to %s (y/N)? ' % remote.review, end='', flush=True) 304 p.add_option(
297 if opt.yes: 305 "-r",
298 print('<--yes>') 306 "--ready",
299 answer = True 307 action="store_true",
300 else: 308 default=False,
301 answer = sys.stdin.readline().strip().lower() 309 help="mark change as ready (clears work-in-progress setting)",
302 answer = answer in ('y', 'yes', '1', 'true', 't') 310 )
303 if not answer: 311 p.add_option(
304 _die("upload aborted by user") 312 "-o",
305 313 "--push-option",
306 # Perform some basic safety checks prior to uploading. 314 type="string",
307 if not opt.yes and not _VerifyPendingCommits([branch]): 315 action="append",
308 _die("upload aborted by user") 316 dest="push_options",
309 317 default=[],
310 self._UploadAndReport(opt, [branch], people) 318 help="additional push options to transmit",
311 319 )
312 def _MultipleBranches(self, opt, pending, people): 320 p.add_option(
313 projects = {} 321 "-D",
314 branches = {} 322 "--destination",
315 323 "--dest",
316 script = [] 324 type="string",
317 script.append('# Uncomment the branches to upload:') 325 action="store",
318 for project, avail in pending: 326 dest="dest_branch",
319 project_path = project.RelPath(local=opt.this_manifest_only) 327 metavar="BRANCH",
320 script.append('#') 328 help="submit for review on this target branch",
321 script.append(f'# project {project_path}/:') 329 )
322 330 p.add_option(
323 b = {} 331 "-n",
324 for branch in avail: 332 "--dry-run",
325 if branch is None: 333 dest="dryrun",
326 continue 334 default=False,
335 action="store_true",
336 help="do everything except actually upload the CL",
337 )
338 p.add_option(
339 "-y",
340 "--yes",
341 default=False,
342 action="store_true",
343 help="answer yes to all safe prompts",
344 )
345 p.add_option(
346 "--ignore-untracked-files",
347 action="store_true",
348 default=False,
349 help="ignore untracked files in the working copy",
350 )
351 p.add_option(
352 "--no-ignore-untracked-files",
353 dest="ignore_untracked_files",
354 action="store_false",
355 help="always ask about untracked files in the working copy",
356 )
357 p.add_option(
358 "--no-cert-checks",
359 dest="validate_certs",
360 action="store_false",
361 default=True,
362 help="disable verifying ssl certs (unsafe)",
363 )
364 RepoHook.AddOptionGroup(p, "pre-upload")
365
366 def _SingleBranch(self, opt, branch, people):
367 project = branch.project
327 name = branch.name 368 name = branch.name
328 date = branch.date 369 remote = project.GetBranch(name).remote
329 commit_list = branch.commits 370
330 371 key = "review.%s.autoupload" % remote.review
331 if b: 372 answer = project.config.GetBoolean(key)
332 script.append('#') 373
333 destination = opt.dest_branch or project.dest_branch or project.revisionExpr 374 if answer is False:
334 script.append('# branch %s (%2d commit%s, %s) to remote branch %s:' % ( 375 _die("upload blocked by %s = false" % key)
335 name, 376
336 len(commit_list), 377 if answer is None:
337 len(commit_list) != 1 and 's' or '', 378 date = branch.date
338 date, 379 commit_list = branch.commits
339 destination)) 380
340 for commit in commit_list: 381 destination = (
341 script.append('# %s' % commit) 382 opt.dest_branch or project.dest_branch or project.revisionExpr
342 b[name] = branch 383 )
343 384 print(
344 projects[project_path] = project 385 "Upload project %s/ to remote branch %s%s:"
345 branches[project_path] = b 386 % (
346 script.append('') 387 project.RelPath(local=opt.this_manifest_only),
347 388 destination,
348 script = Editor.EditString("\n".join(script)).split("\n") 389 " (private)" if opt.private else "",
349 390 )
350 project_re = re.compile(r'^#?\s*project\s*([^\s]+)/:$') 391 )
351 branch_re = re.compile(r'^\s*branch\s*([^\s(]+)\s*\(.*') 392 print(
352 393 " branch %s (%2d commit%s, %s):"
353 project = None 394 % (
354 todo = [] 395 name,
355 396 len(commit_list),
356 for line in script: 397 len(commit_list) != 1 and "s" or "",
357 m = project_re.match(line) 398 date,
358 if m: 399 )
359 name = m.group(1) 400 )
360 project = projects.get(name) 401 for commit in commit_list:
361 if not project: 402 print(" %s" % commit)
362 _die('project %s not available for upload', name) 403
363 continue 404 print("to %s (y/N)? " % remote.review, end="", flush=True)
364
365 m = branch_re.match(line)
366 if m:
367 name = m.group(1)
368 if not project:
369 _die('project for branch %s not in script', name)
370 project_path = project.RelPath(local=opt.this_manifest_only)
371 branch = branches[project_path].get(name)
372 if not branch:
373 _die('branch %s not in %s', name, project_path)
374 todo.append(branch)
375 if not todo:
376 _die("nothing uncommented for upload")
377
378 # Perform some basic safety checks prior to uploading.
379 if not opt.yes and not _VerifyPendingCommits(todo):
380 _die("upload aborted by user")
381
382 self._UploadAndReport(opt, todo, people)
383
384 def _AppendAutoList(self, branch, people):
385 """
386 Appends the list of reviewers in the git project's config.
387 Appends the list of users in the CC list in the git project's config if a
388 non-empty reviewer list was found.
389 """
390 name = branch.name
391 project = branch.project
392
393 key = 'review.%s.autoreviewer' % project.GetBranch(name).remote.review
394 raw_list = project.config.GetString(key)
395 if raw_list is not None:
396 people[0].extend([entry.strip() for entry in raw_list.split(',')])
397
398 key = 'review.%s.autocopy' % project.GetBranch(name).remote.review
399 raw_list = project.config.GetString(key)
400 if raw_list is not None and len(people[0]) > 0:
401 people[1].extend([entry.strip() for entry in raw_list.split(',')])
402
403 def _FindGerritChange(self, branch):
404 last_pub = branch.project.WasPublished(branch.name)
405 if last_pub is None:
406 return ""
407
408 refs = branch.GetPublishedRefs()
409 try:
410 # refs/changes/XYZ/N --> XYZ
411 return refs.get(last_pub).split('/')[-2]
412 except (AttributeError, IndexError):
413 return ""
414
415 def _UploadAndReport(self, opt, todo, original_people):
416 have_errors = False
417 for branch in todo:
418 try:
419 people = copy.deepcopy(original_people)
420 self._AppendAutoList(branch, people)
421
422 # Check if there are local changes that may have been forgotten
423 changes = branch.project.UncommitedFiles()
424 if opt.ignore_untracked_files:
425 untracked = set(branch.project.UntrackedFiles())
426 changes = [x for x in changes if x not in untracked]
427
428 if changes:
429 key = 'review.%s.autoupload' % branch.project.remote.review
430 answer = branch.project.config.GetBoolean(key)
431
432 # if they want to auto upload, let's not ask because it could be automated
433 if answer is None:
434 print()
435 print('Uncommitted changes in %s (did you forget to amend?):'
436 % branch.project.name)
437 print('\n'.join(changes))
438 print('Continue uploading? (y/N) ', end='', flush=True)
439 if opt.yes: 405 if opt.yes:
440 print('<--yes>') 406 print("<--yes>")
441 a = 'yes' 407 answer = True
408 else:
409 answer = sys.stdin.readline().strip().lower()
410 answer = answer in ("y", "yes", "1", "true", "t")
411 if not answer:
412 _die("upload aborted by user")
413
414 # Perform some basic safety checks prior to uploading.
415 if not opt.yes and not _VerifyPendingCommits([branch]):
416 _die("upload aborted by user")
417
418 self._UploadAndReport(opt, [branch], people)
419
420 def _MultipleBranches(self, opt, pending, people):
421 projects = {}
422 branches = {}
423
424 script = []
425 script.append("# Uncomment the branches to upload:")
426 for project, avail in pending:
427 project_path = project.RelPath(local=opt.this_manifest_only)
428 script.append("#")
429 script.append(f"# project {project_path}/:")
430
431 b = {}
432 for branch in avail:
433 if branch is None:
434 continue
435 name = branch.name
436 date = branch.date
437 commit_list = branch.commits
438
439 if b:
440 script.append("#")
441 destination = (
442 opt.dest_branch
443 or project.dest_branch
444 or project.revisionExpr
445 )
446 script.append(
447 "# branch %s (%2d commit%s, %s) to remote branch %s:"
448 % (
449 name,
450 len(commit_list),
451 len(commit_list) != 1 and "s" or "",
452 date,
453 destination,
454 )
455 )
456 for commit in commit_list:
457 script.append("# %s" % commit)
458 b[name] = branch
459
460 projects[project_path] = project
461 branches[project_path] = b
462 script.append("")
463
464 script = Editor.EditString("\n".join(script)).split("\n")
465
466 project_re = re.compile(r"^#?\s*project\s*([^\s]+)/:$")
467 branch_re = re.compile(r"^\s*branch\s*([^\s(]+)\s*\(.*")
468
469 project = None
470 todo = []
471
472 for line in script:
473 m = project_re.match(line)
474 if m:
475 name = m.group(1)
476 project = projects.get(name)
477 if not project:
478 _die("project %s not available for upload", name)
479 continue
480
481 m = branch_re.match(line)
482 if m:
483 name = m.group(1)
484 if not project:
485 _die("project for branch %s not in script", name)
486 project_path = project.RelPath(local=opt.this_manifest_only)
487 branch = branches[project_path].get(name)
488 if not branch:
489 _die("branch %s not in %s", name, project_path)
490 todo.append(branch)
491 if not todo:
492 _die("nothing uncommented for upload")
493
494 # Perform some basic safety checks prior to uploading.
495 if not opt.yes and not _VerifyPendingCommits(todo):
496 _die("upload aborted by user")
497
498 self._UploadAndReport(opt, todo, people)
499
500 def _AppendAutoList(self, branch, people):
501 """
502 Appends the list of reviewers in the git project's config.
503 Appends the list of users in the CC list in the git project's config if
504 a non-empty reviewer list was found.
505 """
506 name = branch.name
507 project = branch.project
508
509 key = "review.%s.autoreviewer" % project.GetBranch(name).remote.review
510 raw_list = project.config.GetString(key)
511 if raw_list is not None:
512 people[0].extend([entry.strip() for entry in raw_list.split(",")])
513
514 key = "review.%s.autocopy" % project.GetBranch(name).remote.review
515 raw_list = project.config.GetString(key)
516 if raw_list is not None and len(people[0]) > 0:
517 people[1].extend([entry.strip() for entry in raw_list.split(",")])
518
519 def _FindGerritChange(self, branch):
520 last_pub = branch.project.WasPublished(branch.name)
521 if last_pub is None:
522 return ""
523
524 refs = branch.GetPublishedRefs()
525 try:
526 # refs/changes/XYZ/N --> XYZ
527 return refs.get(last_pub).split("/")[-2]
528 except (AttributeError, IndexError):
529 return ""
530
531 def _UploadAndReport(self, opt, todo, original_people):
532 have_errors = False
533 for branch in todo:
534 try:
535 people = copy.deepcopy(original_people)
536 self._AppendAutoList(branch, people)
537
538 # Check if there are local changes that may have been forgotten.
539 changes = branch.project.UncommitedFiles()
540 if opt.ignore_untracked_files:
541 untracked = set(branch.project.UntrackedFiles())
542 changes = [x for x in changes if x not in untracked]
543
544 if changes:
545 key = "review.%s.autoupload" % branch.project.remote.review
546 answer = branch.project.config.GetBoolean(key)
547
548 # If they want to auto upload, let's not ask because it
549 # could be automated.
550 if answer is None:
551 print()
552 print(
553 "Uncommitted changes in %s (did you forget to "
554 "amend?):" % branch.project.name
555 )
556 print("\n".join(changes))
557 print("Continue uploading? (y/N) ", end="", flush=True)
558 if opt.yes:
559 print("<--yes>")
560 a = "yes"
561 else:
562 a = sys.stdin.readline().strip().lower()
563 if a not in ("y", "yes", "t", "true", "on"):
564 print("skipping upload", file=sys.stderr)
565 branch.uploaded = False
566 branch.error = "User aborted"
567 continue
568
569 # Check if topic branches should be sent to the server during
570 # upload.
571 if opt.auto_topic is not True:
572 key = "review.%s.uploadtopic" % branch.project.remote.review
573 opt.auto_topic = branch.project.config.GetBoolean(key)
574
575 def _ExpandCommaList(value):
576 """Split |value| up into comma delimited entries."""
577 if not value:
578 return
579 for ret in value.split(","):
580 ret = ret.strip()
581 if ret:
582 yield ret
583
584 # Check if hashtags should be included.
585 key = "review.%s.uploadhashtags" % branch.project.remote.review
586 hashtags = set(
587 _ExpandCommaList(branch.project.config.GetString(key))
588 )
589 for tag in opt.hashtags:
590 hashtags.update(_ExpandCommaList(tag))
591 if opt.hashtag_branch:
592 hashtags.add(branch.name)
593
594 # Check if labels should be included.
595 key = "review.%s.uploadlabels" % branch.project.remote.review
596 labels = set(
597 _ExpandCommaList(branch.project.config.GetString(key))
598 )
599 for label in opt.labels:
600 labels.update(_ExpandCommaList(label))
601
602 # Handle e-mail notifications.
603 if opt.notify is False:
604 notify = "NONE"
605 else:
606 key = (
607 "review.%s.uploadnotify" % branch.project.remote.review
608 )
609 notify = branch.project.config.GetString(key)
610
611 destination = opt.dest_branch or branch.project.dest_branch
612
613 if branch.project.dest_branch and not opt.dest_branch:
614 merge_branch = self._GetMergeBranch(
615 branch.project, local_branch=branch.name
616 )
617
618 full_dest = destination
619 if not full_dest.startswith(R_HEADS):
620 full_dest = R_HEADS + full_dest
621
622 # If the merge branch of the local branch is different from
623 # the project's revision AND destination, this might not be
624 # intentional.
625 if (
626 merge_branch
627 and merge_branch != branch.project.revisionExpr
628 and merge_branch != full_dest
629 ):
630 print(
631 f"For local branch {branch.name}: merge branch "
632 f"{merge_branch} does not match destination branch "
633 f"{destination}"
634 )
635 print("skipping upload.")
636 print(
637 f"Please use `--destination {destination}` if this "
638 "is intentional"
639 )
640 branch.uploaded = False
641 continue
642
643 branch.UploadForReview(
644 people,
645 dryrun=opt.dryrun,
646 auto_topic=opt.auto_topic,
647 hashtags=hashtags,
648 labels=labels,
649 private=opt.private,
650 notify=notify,
651 wip=opt.wip,
652 ready=opt.ready,
653 dest_branch=destination,
654 validate_certs=opt.validate_certs,
655 push_options=opt.push_options,
656 )
657
658 branch.uploaded = True
659 except UploadError as e:
660 branch.error = e
661 branch.uploaded = False
662 have_errors = True
663
664 print(file=sys.stderr)
665 print("-" * 70, file=sys.stderr)
666
667 if have_errors:
668 for branch in todo:
669 if not branch.uploaded:
670 if len(str(branch.error)) <= 30:
671 fmt = " (%s)"
672 else:
673 fmt = "\n (%s)"
674 print(
675 ("[FAILED] %-15s %-15s" + fmt)
676 % (
677 branch.project.RelPath(local=opt.this_manifest_only)
678 + "/",
679 branch.name,
680 str(branch.error),
681 ),
682 file=sys.stderr,
683 )
684 print()
685
686 for branch in todo:
687 if branch.uploaded:
688 print(
689 "[OK ] %-15s %s"
690 % (
691 branch.project.RelPath(local=opt.this_manifest_only)
692 + "/",
693 branch.name,
694 ),
695 file=sys.stderr,
696 )
697
698 if have_errors:
699 sys.exit(1)
700
701 def _GetMergeBranch(self, project, local_branch=None):
702 if local_branch is None:
703 p = GitCommand(
704 project,
705 ["rev-parse", "--abbrev-ref", "HEAD"],
706 capture_stdout=True,
707 capture_stderr=True,
708 )
709 p.Wait()
710 local_branch = p.stdout.strip()
711 p = GitCommand(
712 project,
713 ["config", "--get", "branch.%s.merge" % local_branch],
714 capture_stdout=True,
715 capture_stderr=True,
716 )
717 p.Wait()
718 merge_branch = p.stdout.strip()
719 return merge_branch
720
721 @staticmethod
722 def _GatherOne(opt, project):
723 """Figure out the upload status for |project|."""
724 if opt.current_branch:
725 cbr = project.CurrentBranch
726 up_branch = project.GetUploadableBranch(cbr)
727 avail = [up_branch] if up_branch else None
728 else:
729 avail = project.GetUploadableBranches(opt.branch)
730 return (project, avail)
731
732 def Execute(self, opt, args):
733 projects = self.GetProjects(
734 args, all_manifests=not opt.this_manifest_only
735 )
736
737 def _ProcessResults(_pool, _out, results):
738 pending = []
739 for result in results:
740 project, avail = result
741 if avail is None:
742 print(
743 'repo: error: %s: Unable to upload branch "%s". '
744 "You might be able to fix the branch by running:\n"
745 " git branch --set-upstream-to m/%s"
746 % (
747 project.RelPath(local=opt.this_manifest_only),
748 project.CurrentBranch,
749 project.manifest.branch,
750 ),
751 file=sys.stderr,
752 )
753 elif avail:
754 pending.append(result)
755 return pending
756
757 pending = self.ExecuteInParallel(
758 opt.jobs,
759 functools.partial(self._GatherOne, opt),
760 projects,
761 callback=_ProcessResults,
762 )
763
764 if not pending:
765 if opt.branch is None:
766 print(
767 "repo: error: no branches ready for upload", file=sys.stderr
768 )
442 else: 769 else:
443 a = sys.stdin.readline().strip().lower() 770 print(
444 if a not in ('y', 'yes', 't', 'true', 'on'): 771 'repo: error: no branches named "%s" ready for upload'
445 print("skipping upload", file=sys.stderr) 772 % (opt.branch,),
446 branch.uploaded = False 773 file=sys.stderr,
447 branch.error = 'User aborted' 774 )
448 continue 775 return 1
449 776
450 # Check if topic branches should be sent to the server during upload 777 manifests = {
451 if opt.auto_topic is not True: 778 project.manifest.topdir: project.manifest
452 key = 'review.%s.uploadtopic' % branch.project.remote.review 779 for (project, available) in pending
453 opt.auto_topic = branch.project.config.GetBoolean(key) 780 }
454 781 ret = 0
455 def _ExpandCommaList(value): 782 for manifest in manifests.values():
456 """Split |value| up into comma delimited entries.""" 783 pending_proj_names = [
457 if not value: 784 project.name
458 return 785 for (project, available) in pending
459 for ret in value.split(','): 786 if project.manifest.topdir == manifest.topdir
460 ret = ret.strip() 787 ]
461 if ret: 788 pending_worktrees = [
462 yield ret 789 project.worktree
463 790 for (project, available) in pending
464 # Check if hashtags should be included. 791 if project.manifest.topdir == manifest.topdir
465 key = 'review.%s.uploadhashtags' % branch.project.remote.review 792 ]
466 hashtags = set(_ExpandCommaList(branch.project.config.GetString(key))) 793 hook = RepoHook.FromSubcmd(
467 for tag in opt.hashtags: 794 hook_type="pre-upload",
468 hashtags.update(_ExpandCommaList(tag)) 795 manifest=manifest,
469 if opt.hashtag_branch: 796 opt=opt,
470 hashtags.add(branch.name) 797 abort_if_user_denies=True,
471 798 )
472 # Check if labels should be included. 799 if not hook.Run(
473 key = 'review.%s.uploadlabels' % branch.project.remote.review 800 project_list=pending_proj_names, worktree_list=pending_worktrees
474 labels = set(_ExpandCommaList(branch.project.config.GetString(key))) 801 ):
475 for label in opt.labels: 802 ret = 1
476 labels.update(_ExpandCommaList(label)) 803 if ret:
477 804 return ret
478 # Handle e-mail notifications. 805
479 if opt.notify is False: 806 reviewers = _SplitEmails(opt.reviewers) if opt.reviewers else []
480 notify = 'NONE' 807 cc = _SplitEmails(opt.cc) if opt.cc else []
808 people = (reviewers, cc)
809
810 if len(pending) == 1 and len(pending[0][1]) == 1:
811 self._SingleBranch(opt, pending[0][1][0], people)
481 else: 812 else:
482 key = 'review.%s.uploadnotify' % branch.project.remote.review 813 self._MultipleBranches(opt, pending, people)
483 notify = branch.project.config.GetString(key)
484
485 destination = opt.dest_branch or branch.project.dest_branch
486
487 if branch.project.dest_branch and not opt.dest_branch:
488
489 merge_branch = self._GetMergeBranch(
490 branch.project, local_branch=branch.name)
491
492 full_dest = destination
493 if not full_dest.startswith(R_HEADS):
494 full_dest = R_HEADS + full_dest
495
496 # If the merge branch of the local branch is different from the
497 # project's revision AND destination, this might not be intentional.
498 if (merge_branch and merge_branch != branch.project.revisionExpr
499 and merge_branch != full_dest):
500 print(f'For local branch {branch.name}: merge branch '
501 f'{merge_branch} does not match destination branch '
502 f'{destination}')
503 print('skipping upload.')
504 print(f'Please use `--destination {destination}` if this is intentional')
505 branch.uploaded = False
506 continue
507
508 branch.UploadForReview(people,
509 dryrun=opt.dryrun,
510 auto_topic=opt.auto_topic,
511 hashtags=hashtags,
512 labels=labels,
513 private=opt.private,
514 notify=notify,
515 wip=opt.wip,
516 ready=opt.ready,
517 dest_branch=destination,
518 validate_certs=opt.validate_certs,
519 push_options=opt.push_options)
520
521 branch.uploaded = True
522 except UploadError as e:
523 branch.error = e
524 branch.uploaded = False
525 have_errors = True
526
527 print(file=sys.stderr)
528 print('----------------------------------------------------------------------', file=sys.stderr)
529
530 if have_errors:
531 for branch in todo:
532 if not branch.uploaded:
533 if len(str(branch.error)) <= 30:
534 fmt = ' (%s)'
535 else:
536 fmt = '\n (%s)'
537 print(('[FAILED] %-15s %-15s' + fmt) % (
538 branch.project.RelPath(local=opt.this_manifest_only) + '/',
539 branch.name,
540 str(branch.error)),
541 file=sys.stderr)
542 print()
543
544 for branch in todo:
545 if branch.uploaded:
546 print('[OK ] %-15s %s' % (
547 branch.project.RelPath(local=opt.this_manifest_only) + '/',
548 branch.name),
549 file=sys.stderr)
550
551 if have_errors:
552 sys.exit(1)
553
554 def _GetMergeBranch(self, project, local_branch=None):
555 if local_branch is None:
556 p = GitCommand(project,
557 ['rev-parse', '--abbrev-ref', 'HEAD'],
558 capture_stdout=True,
559 capture_stderr=True)
560 p.Wait()
561 local_branch = p.stdout.strip()
562 p = GitCommand(project,
563 ['config', '--get', 'branch.%s.merge' % local_branch],
564 capture_stdout=True,
565 capture_stderr=True)
566 p.Wait()
567 merge_branch = p.stdout.strip()
568 return merge_branch
569
570 @staticmethod
571 def _GatherOne(opt, project):
572 """Figure out the upload status for |project|."""
573 if opt.current_branch:
574 cbr = project.CurrentBranch
575 up_branch = project.GetUploadableBranch(cbr)
576 avail = [up_branch] if up_branch else None
577 else:
578 avail = project.GetUploadableBranches(opt.branch)
579 return (project, avail)
580
581 def Execute(self, opt, args):
582 projects = self.GetProjects(args, all_manifests=not opt.this_manifest_only)
583
584 def _ProcessResults(_pool, _out, results):
585 pending = []
586 for result in results:
587 project, avail = result
588 if avail is None:
589 print('repo: error: %s: Unable to upload branch "%s". '
590 'You might be able to fix the branch by running:\n'
591 ' git branch --set-upstream-to m/%s' %
592 (project.RelPath(local=opt.this_manifest_only), project.CurrentBranch,
593 project.manifest.branch),
594 file=sys.stderr)
595 elif avail:
596 pending.append(result)
597 return pending
598
599 pending = self.ExecuteInParallel(
600 opt.jobs,
601 functools.partial(self._GatherOne, opt),
602 projects,
603 callback=_ProcessResults)
604
605 if not pending:
606 if opt.branch is None:
607 print('repo: error: no branches ready for upload', file=sys.stderr)
608 else:
609 print('repo: error: no branches named "%s" ready for upload' %
610 (opt.branch,), file=sys.stderr)
611 return 1
612
613 manifests = {project.manifest.topdir: project.manifest
614 for (project, available) in pending}
615 ret = 0
616 for manifest in manifests.values():
617 pending_proj_names = [project.name for (project, available) in pending
618 if project.manifest.topdir == manifest.topdir]
619 pending_worktrees = [project.worktree for (project, available) in pending
620 if project.manifest.topdir == manifest.topdir]
621 hook = RepoHook.FromSubcmd(
622 hook_type='pre-upload', manifest=manifest,
623 opt=opt, abort_if_user_denies=True)
624 if not hook.Run(project_list=pending_proj_names,
625 worktree_list=pending_worktrees):
626 ret = 1
627 if ret:
628 return ret
629
630 reviewers = _SplitEmails(opt.reviewers) if opt.reviewers else []
631 cc = _SplitEmails(opt.cc) if opt.cc else []
632 people = (reviewers, cc)
633
634 if len(pending) == 1 and len(pending[0][1]) == 1:
635 self._SingleBranch(opt, pending[0][1][0], people)
636 else:
637 self._MultipleBranches(opt, pending, people)
diff --git a/subcmds/version.py b/subcmds/version.py
index c68cb0af..c539db63 100644
--- a/subcmds/version.py
+++ b/subcmds/version.py
@@ -22,45 +22,52 @@ from wrapper import Wrapper
22 22
23 23
24class Version(Command, MirrorSafeCommand): 24class Version(Command, MirrorSafeCommand):
25 wrapper_version = None 25 wrapper_version = None
26 wrapper_path = None 26 wrapper_path = None
27 27
28 COMMON = False 28 COMMON = False
29 helpSummary = "Display the version of repo" 29 helpSummary = "Display the version of repo"
30 helpUsage = """ 30 helpUsage = """
31%prog 31%prog
32""" 32"""
33 33
34 def Execute(self, opt, args): 34 def Execute(self, opt, args):
35 rp = self.manifest.repoProject 35 rp = self.manifest.repoProject
36 rem = rp.GetRemote() 36 rem = rp.GetRemote()
37 branch = rp.GetBranch('default') 37 branch = rp.GetBranch("default")
38 38
39 # These might not be the same. Report them both. 39 # These might not be the same. Report them both.
40 src_ver = RepoSourceVersion() 40 src_ver = RepoSourceVersion()
41 rp_ver = rp.bare_git.describe(HEAD) 41 rp_ver = rp.bare_git.describe(HEAD)
42 print('repo version %s' % rp_ver) 42 print("repo version %s" % rp_ver)
43 print(' (from %s)' % rem.url) 43 print(" (from %s)" % rem.url)
44 print(' (tracking %s)' % branch.merge) 44 print(" (tracking %s)" % branch.merge)
45 print(' (%s)' % rp.bare_git.log('-1', '--format=%cD', HEAD)) 45 print(" (%s)" % rp.bare_git.log("-1", "--format=%cD", HEAD))
46 46
47 if self.wrapper_path is not None: 47 if self.wrapper_path is not None:
48 print('repo launcher version %s' % self.wrapper_version) 48 print("repo launcher version %s" % self.wrapper_version)
49 print(' (from %s)' % self.wrapper_path) 49 print(" (from %s)" % self.wrapper_path)
50 50
51 if src_ver != rp_ver: 51 if src_ver != rp_ver:
52 print(' (currently at %s)' % src_ver) 52 print(" (currently at %s)" % src_ver)
53 53
54 print('repo User-Agent %s' % user_agent.repo) 54 print("repo User-Agent %s" % user_agent.repo)
55 print('git %s' % git.version_tuple().full) 55 print("git %s" % git.version_tuple().full)
56 print('git User-Agent %s' % user_agent.git) 56 print("git User-Agent %s" % user_agent.git)
57 print('Python %s' % sys.version) 57 print("Python %s" % sys.version)
58 uname = platform.uname() 58 uname = platform.uname()
59 if sys.version_info.major < 3: 59 if sys.version_info.major < 3:
60 # Python 3 returns a named tuple, but Python 2 is simpler. 60 # Python 3 returns a named tuple, but Python 2 is simpler.
61 print(uname) 61 print(uname)
62 else: 62 else:
63 print('OS %s %s (%s)' % (uname.system, uname.release, uname.version)) 63 print(
64 print('CPU %s (%s)' % 64 "OS %s %s (%s)" % (uname.system, uname.release, uname.version)
65 (uname.machine, uname.processor if uname.processor else 'unknown')) 65 )
66 print('Bug reports:', Wrapper().BUG_URL) 66 print(
67 "CPU %s (%s)"
68 % (
69 uname.machine,
70 uname.processor if uname.processor else "unknown",
71 )
72 )
73 print("Bug reports:", Wrapper().BUG_URL)
diff --git a/tests/conftest.py b/tests/conftest.py
index 3e43f6d3..e1a2292a 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -21,5 +21,5 @@ import repo_trace
21 21
22@pytest.fixture(autouse=True) 22@pytest.fixture(autouse=True)
23def disable_repo_trace(tmp_path): 23def disable_repo_trace(tmp_path):
24 """Set an environment marker to relax certain strict checks for test code.""" 24 """Set an environment marker to relax certain strict checks for test code.""" # noqa: E501
25 repo_trace._TRACE_FILE = str(tmp_path / 'TRACE_FILE_from_test') 25 repo_trace._TRACE_FILE = str(tmp_path / "TRACE_FILE_from_test")
diff --git a/tests/test_editor.py b/tests/test_editor.py
index cfd4f5ed..8f5d160e 100644
--- a/tests/test_editor.py
+++ b/tests/test_editor.py
@@ -20,37 +20,37 @@ from editor import Editor
20 20
21 21
22class EditorTestCase(unittest.TestCase): 22class EditorTestCase(unittest.TestCase):
23 """Take care of resetting Editor state across tests.""" 23 """Take care of resetting Editor state across tests."""
24 24
25 def setUp(self): 25 def setUp(self):
26 self.setEditor(None) 26 self.setEditor(None)
27 27
28 def tearDown(self): 28 def tearDown(self):
29 self.setEditor(None) 29 self.setEditor(None)
30 30
31 @staticmethod 31 @staticmethod
32 def setEditor(editor): 32 def setEditor(editor):
33 Editor._editor = editor 33 Editor._editor = editor
34 34
35 35
36class GetEditor(EditorTestCase): 36class GetEditor(EditorTestCase):
37 """Check GetEditor behavior.""" 37 """Check GetEditor behavior."""
38 38
39 def test_basic(self): 39 def test_basic(self):
40 """Basic checking of _GetEditor.""" 40 """Basic checking of _GetEditor."""
41 self.setEditor(':') 41 self.setEditor(":")
42 self.assertEqual(':', Editor._GetEditor()) 42 self.assertEqual(":", Editor._GetEditor())
43 43
44 44
45class EditString(EditorTestCase): 45class EditString(EditorTestCase):
46 """Check EditString behavior.""" 46 """Check EditString behavior."""
47 47
48 def test_no_editor(self): 48 def test_no_editor(self):
49 """Check behavior when no editor is available.""" 49 """Check behavior when no editor is available."""
50 self.setEditor(':') 50 self.setEditor(":")
51 self.assertEqual('foo', Editor.EditString('foo')) 51 self.assertEqual("foo", Editor.EditString("foo"))
52 52
53 def test_cat_editor(self): 53 def test_cat_editor(self):
54 """Check behavior when editor is `cat`.""" 54 """Check behavior when editor is `cat`."""
55 self.setEditor('cat') 55 self.setEditor("cat")
56 self.assertEqual('foo', Editor.EditString('foo')) 56 self.assertEqual("foo", Editor.EditString("foo"))
diff --git a/tests/test_error.py b/tests/test_error.py
index 82b00c24..784e2d57 100644
--- a/tests/test_error.py
+++ b/tests/test_error.py
@@ -22,32 +22,34 @@ import error
22 22
23 23
24class PickleTests(unittest.TestCase): 24class PickleTests(unittest.TestCase):
25 """Make sure all our custom exceptions can be pickled.""" 25 """Make sure all our custom exceptions can be pickled."""
26 26
27 def getExceptions(self): 27 def getExceptions(self):
28 """Return all our custom exceptions.""" 28 """Return all our custom exceptions."""
29 for name in dir(error): 29 for name in dir(error):
30 cls = getattr(error, name) 30 cls = getattr(error, name)
31 if isinstance(cls, type) and issubclass(cls, Exception): 31 if isinstance(cls, type) and issubclass(cls, Exception):
32 yield cls 32 yield cls
33 33
34 def testExceptionLookup(self): 34 def testExceptionLookup(self):
35 """Make sure our introspection logic works.""" 35 """Make sure our introspection logic works."""
36 classes = list(self.getExceptions()) 36 classes = list(self.getExceptions())
37 self.assertIn(error.HookError, classes) 37 self.assertIn(error.HookError, classes)
38 # Don't assert the exact number to avoid being a change-detector test. 38 # Don't assert the exact number to avoid being a change-detector test.
39 self.assertGreater(len(classes), 10) 39 self.assertGreater(len(classes), 10)
40 40
41 def testPickle(self): 41 def testPickle(self):
42 """Try to pickle all the exceptions.""" 42 """Try to pickle all the exceptions."""
43 for cls in self.getExceptions(): 43 for cls in self.getExceptions():
44 args = inspect.getfullargspec(cls.__init__).args[1:] 44 args = inspect.getfullargspec(cls.__init__).args[1:]
45 obj = cls(*args) 45 obj = cls(*args)
46 p = pickle.dumps(obj) 46 p = pickle.dumps(obj)
47 try: 47 try:
48 newobj = pickle.loads(p) 48 newobj = pickle.loads(p)
49 except Exception as e: # pylint: disable=broad-except 49 except Exception as e: # pylint: disable=broad-except
50 self.fail('Class %s is unable to be pickled: %s\n' 50 self.fail(
51 'Incomplete super().__init__(...) call?' % (cls, e)) 51 "Class %s is unable to be pickled: %s\n"
52 self.assertIsInstance(newobj, cls) 52 "Incomplete super().__init__(...) call?" % (cls, e)
53 self.assertEqual(str(obj), str(newobj)) 53 )
54 self.assertIsInstance(newobj, cls)
55 self.assertEqual(str(obj), str(newobj))
diff --git a/tests/test_git_command.py b/tests/test_git_command.py
index 96408a23..c4c3a4c5 100644
--- a/tests/test_git_command.py
+++ b/tests/test_git_command.py
@@ -19,138 +19,146 @@ import os
19import unittest 19import unittest
20 20
21try: 21try:
22 from unittest import mock 22 from unittest import mock
23except ImportError: 23except ImportError:
24 import mock 24 import mock
25 25
26import git_command 26import git_command
27import wrapper 27import wrapper
28 28
29 29
30class GitCommandTest(unittest.TestCase): 30class GitCommandTest(unittest.TestCase):
31 """Tests the GitCommand class (via git_command.git).""" 31 """Tests the GitCommand class (via git_command.git)."""
32 32
33 def setUp(self): 33 def setUp(self):
34 def realpath_mock(val):
35 return val
34 36
35 def realpath_mock(val): 37 mock.patch.object(
36 return val 38 os.path, "realpath", side_effect=realpath_mock
39 ).start()
37 40
38 mock.patch.object(os.path, 'realpath', side_effect=realpath_mock).start() 41 def tearDown(self):
42 mock.patch.stopall()
39 43
40 def tearDown(self): 44 def test_alternative_setting_when_matching(self):
41 mock.patch.stopall() 45 r = git_command._build_env(
46 objdir=os.path.join("zap", "objects"), gitdir="zap"
47 )
42 48
43 def test_alternative_setting_when_matching(self): 49 self.assertIsNone(r.get("GIT_ALTERNATE_OBJECT_DIRECTORIES"))
44 r = git_command._build_env( 50 self.assertEqual(
45 objdir = os.path.join('zap', 'objects'), 51 r.get("GIT_OBJECT_DIRECTORY"), os.path.join("zap", "objects")
46 gitdir = 'zap' 52 )
47 )
48 53
49 self.assertIsNone(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES')) 54 def test_alternative_setting_when_different(self):
50 self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), os.path.join('zap', 'objects')) 55 r = git_command._build_env(
56 objdir=os.path.join("wow", "objects"), gitdir="zap"
57 )
51 58
52 def test_alternative_setting_when_different(self): 59 self.assertEqual(
53 r = git_command._build_env( 60 r.get("GIT_ALTERNATE_OBJECT_DIRECTORIES"),
54 objdir = os.path.join('wow', 'objects'), 61 os.path.join("zap", "objects"),
55 gitdir = 'zap' 62 )
56 ) 63 self.assertEqual(
57 64 r.get("GIT_OBJECT_DIRECTORY"), os.path.join("wow", "objects")
58 self.assertEqual(r.get('GIT_ALTERNATE_OBJECT_DIRECTORIES'), os.path.join('zap', 'objects')) 65 )
59 self.assertEqual(r.get('GIT_OBJECT_DIRECTORY'), os.path.join('wow', 'objects'))
60 66
61 67
62class GitCallUnitTest(unittest.TestCase): 68class GitCallUnitTest(unittest.TestCase):
63 """Tests the _GitCall class (via git_command.git).""" 69 """Tests the _GitCall class (via git_command.git)."""
64 70
65 def test_version_tuple(self): 71 def test_version_tuple(self):
66 """Check git.version_tuple() handling.""" 72 """Check git.version_tuple() handling."""
67 ver = git_command.git.version_tuple() 73 ver = git_command.git.version_tuple()
68 self.assertIsNotNone(ver) 74 self.assertIsNotNone(ver)
69 75
70 # We don't dive too deep into the values here to avoid having to update 76 # We don't dive too deep into the values here to avoid having to update
71 # whenever git versions change. We do check relative to this min version 77 # whenever git versions change. We do check relative to this min
72 # as this is what `repo` itself requires via MIN_GIT_VERSION. 78 # version as this is what `repo` itself requires via MIN_GIT_VERSION.
73 MIN_GIT_VERSION = (2, 10, 2) 79 MIN_GIT_VERSION = (2, 10, 2)
74 self.assertTrue(isinstance(ver.major, int)) 80 self.assertTrue(isinstance(ver.major, int))
75 self.assertTrue(isinstance(ver.minor, int)) 81 self.assertTrue(isinstance(ver.minor, int))
76 self.assertTrue(isinstance(ver.micro, int)) 82 self.assertTrue(isinstance(ver.micro, int))
77 83
78 self.assertGreater(ver.major, MIN_GIT_VERSION[0] - 1) 84 self.assertGreater(ver.major, MIN_GIT_VERSION[0] - 1)
79 self.assertGreaterEqual(ver.micro, 0) 85 self.assertGreaterEqual(ver.micro, 0)
80 self.assertGreaterEqual(ver.major, 0) 86 self.assertGreaterEqual(ver.major, 0)
81 87
82 self.assertGreaterEqual(ver, MIN_GIT_VERSION) 88 self.assertGreaterEqual(ver, MIN_GIT_VERSION)
83 self.assertLess(ver, (9999, 9999, 9999)) 89 self.assertLess(ver, (9999, 9999, 9999))
84 90
85 self.assertNotEqual('', ver.full) 91 self.assertNotEqual("", ver.full)
86 92
87 93
88class UserAgentUnitTest(unittest.TestCase): 94class UserAgentUnitTest(unittest.TestCase):
89 """Tests the UserAgent function.""" 95 """Tests the UserAgent function."""
90 96
91 def test_smoke_os(self): 97 def test_smoke_os(self):
92 """Make sure UA OS setting returns something useful.""" 98 """Make sure UA OS setting returns something useful."""
93 os_name = git_command.user_agent.os 99 os_name = git_command.user_agent.os
94 # We can't dive too deep because of OS/tool differences, but we can check 100 # We can't dive too deep because of OS/tool differences, but we can
95 # the general form. 101 # check the general form.
96 m = re.match(r'^[^ ]+$', os_name) 102 m = re.match(r"^[^ ]+$", os_name)
97 self.assertIsNotNone(m) 103 self.assertIsNotNone(m)
98 104
99 def test_smoke_repo(self): 105 def test_smoke_repo(self):
100 """Make sure repo UA returns something useful.""" 106 """Make sure repo UA returns something useful."""
101 ua = git_command.user_agent.repo 107 ua = git_command.user_agent.repo
102 # We can't dive too deep because of OS/tool differences, but we can check 108 # We can't dive too deep because of OS/tool differences, but we can
103 # the general form. 109 # check the general form.
104 m = re.match(r'^git-repo/[^ ]+ ([^ ]+) git/[^ ]+ Python/[0-9.]+', ua) 110 m = re.match(r"^git-repo/[^ ]+ ([^ ]+) git/[^ ]+ Python/[0-9.]+", ua)
105 self.assertIsNotNone(m) 111 self.assertIsNotNone(m)
106 112
107 def test_smoke_git(self): 113 def test_smoke_git(self):
108 """Make sure git UA returns something useful.""" 114 """Make sure git UA returns something useful."""
109 ua = git_command.user_agent.git 115 ua = git_command.user_agent.git
110 # We can't dive too deep because of OS/tool differences, but we can check 116 # We can't dive too deep because of OS/tool differences, but we can
111 # the general form. 117 # check the general form.
112 m = re.match(r'^git/[^ ]+ ([^ ]+) git-repo/[^ ]+', ua) 118 m = re.match(r"^git/[^ ]+ ([^ ]+) git-repo/[^ ]+", ua)
113 self.assertIsNotNone(m) 119 self.assertIsNotNone(m)
114 120
115 121
116class GitRequireTests(unittest.TestCase): 122class GitRequireTests(unittest.TestCase):
117 """Test the git_require helper.""" 123 """Test the git_require helper."""
118 124
119 def setUp(self): 125 def setUp(self):
120 self.wrapper = wrapper.Wrapper() 126 self.wrapper = wrapper.Wrapper()
121 ver = self.wrapper.GitVersion(1, 2, 3, 4) 127 ver = self.wrapper.GitVersion(1, 2, 3, 4)
122 mock.patch.object(git_command.git, 'version_tuple', return_value=ver).start() 128 mock.patch.object(
123 129 git_command.git, "version_tuple", return_value=ver
124 def tearDown(self): 130 ).start()
125 mock.patch.stopall() 131
126 132 def tearDown(self):
127 def test_older_nonfatal(self): 133 mock.patch.stopall()
128 """Test non-fatal require calls with old versions.""" 134
129 self.assertFalse(git_command.git_require((2,))) 135 def test_older_nonfatal(self):
130 self.assertFalse(git_command.git_require((1, 3))) 136 """Test non-fatal require calls with old versions."""
131 self.assertFalse(git_command.git_require((1, 2, 4))) 137 self.assertFalse(git_command.git_require((2,)))
132 self.assertFalse(git_command.git_require((1, 2, 3, 5))) 138 self.assertFalse(git_command.git_require((1, 3)))
133 139 self.assertFalse(git_command.git_require((1, 2, 4)))
134 def test_newer_nonfatal(self): 140 self.assertFalse(git_command.git_require((1, 2, 3, 5)))
135 """Test non-fatal require calls with newer versions.""" 141
136 self.assertTrue(git_command.git_require((0,))) 142 def test_newer_nonfatal(self):
137 self.assertTrue(git_command.git_require((1, 0))) 143 """Test non-fatal require calls with newer versions."""
138 self.assertTrue(git_command.git_require((1, 2, 0))) 144 self.assertTrue(git_command.git_require((0,)))
139 self.assertTrue(git_command.git_require((1, 2, 3, 0))) 145 self.assertTrue(git_command.git_require((1, 0)))
140 146 self.assertTrue(git_command.git_require((1, 2, 0)))
141 def test_equal_nonfatal(self): 147 self.assertTrue(git_command.git_require((1, 2, 3, 0)))
142 """Test require calls with equal values.""" 148
143 self.assertTrue(git_command.git_require((1, 2, 3, 4), fail=False)) 149 def test_equal_nonfatal(self):
144 self.assertTrue(git_command.git_require((1, 2, 3, 4), fail=True)) 150 """Test require calls with equal values."""
145 151 self.assertTrue(git_command.git_require((1, 2, 3, 4), fail=False))
146 def test_older_fatal(self): 152 self.assertTrue(git_command.git_require((1, 2, 3, 4), fail=True))
147 """Test fatal require calls with old versions.""" 153
148 with self.assertRaises(SystemExit) as e: 154 def test_older_fatal(self):
149 git_command.git_require((2,), fail=True) 155 """Test fatal require calls with old versions."""
150 self.assertNotEqual(0, e.code) 156 with self.assertRaises(SystemExit) as e:
151 157 git_command.git_require((2,), fail=True)
152 def test_older_fatal_msg(self): 158 self.assertNotEqual(0, e.code)
153 """Test fatal require calls with old versions and message.""" 159
154 with self.assertRaises(SystemExit) as e: 160 def test_older_fatal_msg(self):
155 git_command.git_require((2,), fail=True, msg='so sad') 161 """Test fatal require calls with old versions and message."""
156 self.assertNotEqual(0, e.code) 162 with self.assertRaises(SystemExit) as e:
163 git_command.git_require((2,), fail=True, msg="so sad")
164 self.assertNotEqual(0, e.code)
diff --git a/tests/test_git_config.py b/tests/test_git_config.py
index 3b0aa8b4..a44dca0f 100644
--- a/tests/test_git_config.py
+++ b/tests/test_git_config.py
@@ -22,167 +22,169 @@ import git_config
22 22
23 23
24def fixture(*paths): 24def fixture(*paths):
25 """Return a path relative to test/fixtures. 25 """Return a path relative to test/fixtures."""
26 """ 26 return os.path.join(os.path.dirname(__file__), "fixtures", *paths)
27 return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)
28 27
29 28
30class GitConfigReadOnlyTests(unittest.TestCase): 29class GitConfigReadOnlyTests(unittest.TestCase):
31 """Read-only tests of the GitConfig class.""" 30 """Read-only tests of the GitConfig class."""
32 31
33 def setUp(self): 32 def setUp(self):
34 """Create a GitConfig object using the test.gitconfig fixture. 33 """Create a GitConfig object using the test.gitconfig fixture."""
35 """ 34 config_fixture = fixture("test.gitconfig")
36 config_fixture = fixture('test.gitconfig') 35 self.config = git_config.GitConfig(config_fixture)
37 self.config = git_config.GitConfig(config_fixture) 36
38 37 def test_GetString_with_empty_config_values(self):
39 def test_GetString_with_empty_config_values(self): 38 """
40 """ 39 Test config entries with no value.
41 Test config entries with no value. 40
42 41 [section]
43 [section] 42 empty
44 empty 43
45 44 """
46 """ 45 val = self.config.GetString("section.empty")
47 val = self.config.GetString('section.empty') 46 self.assertEqual(val, None)
48 self.assertEqual(val, None) 47
49 48 def test_GetString_with_true_value(self):
50 def test_GetString_with_true_value(self): 49 """
51 """ 50 Test config entries with a string value.
52 Test config entries with a string value. 51
53 52 [section]
54 [section] 53 nonempty = true
55 nonempty = true 54
56 55 """
57 """ 56 val = self.config.GetString("section.nonempty")
58 val = self.config.GetString('section.nonempty') 57 self.assertEqual(val, "true")
59 self.assertEqual(val, 'true') 58
60 59 def test_GetString_from_missing_file(self):
61 def test_GetString_from_missing_file(self): 60 """
62 """ 61 Test missing config file
63 Test missing config file 62 """
64 """ 63 config_fixture = fixture("not.present.gitconfig")
65 config_fixture = fixture('not.present.gitconfig') 64 config = git_config.GitConfig(config_fixture)
66 config = git_config.GitConfig(config_fixture) 65 val = config.GetString("empty")
67 val = config.GetString('empty') 66 self.assertEqual(val, None)
68 self.assertEqual(val, None) 67
69 68 def test_GetBoolean_undefined(self):
70 def test_GetBoolean_undefined(self): 69 """Test GetBoolean on key that doesn't exist."""
71 """Test GetBoolean on key that doesn't exist.""" 70 self.assertIsNone(self.config.GetBoolean("section.missing"))
72 self.assertIsNone(self.config.GetBoolean('section.missing')) 71
73 72 def test_GetBoolean_invalid(self):
74 def test_GetBoolean_invalid(self): 73 """Test GetBoolean on invalid boolean value."""
75 """Test GetBoolean on invalid boolean value.""" 74 self.assertIsNone(self.config.GetBoolean("section.boolinvalid"))
76 self.assertIsNone(self.config.GetBoolean('section.boolinvalid')) 75
77 76 def test_GetBoolean_true(self):
78 def test_GetBoolean_true(self): 77 """Test GetBoolean on valid true boolean."""
79 """Test GetBoolean on valid true boolean.""" 78 self.assertTrue(self.config.GetBoolean("section.booltrue"))
80 self.assertTrue(self.config.GetBoolean('section.booltrue')) 79
81 80 def test_GetBoolean_false(self):
82 def test_GetBoolean_false(self): 81 """Test GetBoolean on valid false boolean."""
83 """Test GetBoolean on valid false boolean.""" 82 self.assertFalse(self.config.GetBoolean("section.boolfalse"))
84 self.assertFalse(self.config.GetBoolean('section.boolfalse')) 83
85 84 def test_GetInt_undefined(self):
86 def test_GetInt_undefined(self): 85 """Test GetInt on key that doesn't exist."""
87 """Test GetInt on key that doesn't exist.""" 86 self.assertIsNone(self.config.GetInt("section.missing"))
88 self.assertIsNone(self.config.GetInt('section.missing')) 87
89 88 def test_GetInt_invalid(self):
90 def test_GetInt_invalid(self): 89 """Test GetInt on invalid integer value."""
91 """Test GetInt on invalid integer value.""" 90 self.assertIsNone(self.config.GetBoolean("section.intinvalid"))
92 self.assertIsNone(self.config.GetBoolean('section.intinvalid')) 91
93 92 def test_GetInt_valid(self):
94 def test_GetInt_valid(self): 93 """Test GetInt on valid integers."""
95 """Test GetInt on valid integers.""" 94 TESTS = (
96 TESTS = ( 95 ("inthex", 16),
97 ('inthex', 16), 96 ("inthexk", 16384),
98 ('inthexk', 16384), 97 ("int", 10),
99 ('int', 10), 98 ("intk", 10240),
100 ('intk', 10240), 99 ("intm", 10485760),
101 ('intm', 10485760), 100 ("intg", 10737418240),
102 ('intg', 10737418240), 101 )
103 ) 102 for key, value in TESTS:
104 for key, value in TESTS: 103 self.assertEqual(value, self.config.GetInt("section.%s" % (key,)))
105 self.assertEqual(value, self.config.GetInt('section.%s' % (key,)))
106 104
107 105
108class GitConfigReadWriteTests(unittest.TestCase): 106class GitConfigReadWriteTests(unittest.TestCase):
109 """Read/write tests of the GitConfig class.""" 107 """Read/write tests of the GitConfig class."""
110 108
111 def setUp(self): 109 def setUp(self):
112 self.tmpfile = tempfile.NamedTemporaryFile() 110 self.tmpfile = tempfile.NamedTemporaryFile()
113 self.config = self.get_config() 111 self.config = self.get_config()
114 112
115 def get_config(self): 113 def get_config(self):
116 """Get a new GitConfig instance.""" 114 """Get a new GitConfig instance."""
117 return git_config.GitConfig(self.tmpfile.name) 115 return git_config.GitConfig(self.tmpfile.name)
118 116
119 def test_SetString(self): 117 def test_SetString(self):
120 """Test SetString behavior.""" 118 """Test SetString behavior."""
121 # Set a value. 119 # Set a value.
122 self.assertIsNone(self.config.GetString('foo.bar')) 120 self.assertIsNone(self.config.GetString("foo.bar"))
123 self.config.SetString('foo.bar', 'val') 121 self.config.SetString("foo.bar", "val")
124 self.assertEqual('val', self.config.GetString('foo.bar')) 122 self.assertEqual("val", self.config.GetString("foo.bar"))
125 123
126 # Make sure the value was actually written out. 124 # Make sure the value was actually written out.
127 config = self.get_config() 125 config = self.get_config()
128 self.assertEqual('val', config.GetString('foo.bar')) 126 self.assertEqual("val", config.GetString("foo.bar"))
129 127
130 # Update the value. 128 # Update the value.
131 self.config.SetString('foo.bar', 'valll') 129 self.config.SetString("foo.bar", "valll")
132 self.assertEqual('valll', self.config.GetString('foo.bar')) 130 self.assertEqual("valll", self.config.GetString("foo.bar"))
133 config = self.get_config() 131 config = self.get_config()
134 self.assertEqual('valll', config.GetString('foo.bar')) 132 self.assertEqual("valll", config.GetString("foo.bar"))
135 133
136 # Delete the value. 134 # Delete the value.
137 self.config.SetString('foo.bar', None) 135 self.config.SetString("foo.bar", None)
138 self.assertIsNone(self.config.GetString('foo.bar')) 136 self.assertIsNone(self.config.GetString("foo.bar"))
139 config = self.get_config() 137 config = self.get_config()
140 self.assertIsNone(config.GetString('foo.bar')) 138 self.assertIsNone(config.GetString("foo.bar"))
141 139
142 def test_SetBoolean(self): 140 def test_SetBoolean(self):
143 """Test SetBoolean behavior.""" 141 """Test SetBoolean behavior."""
144 # Set a true value. 142 # Set a true value.
145 self.assertIsNone(self.config.GetBoolean('foo.bar')) 143 self.assertIsNone(self.config.GetBoolean("foo.bar"))
146 for val in (True, 1): 144 for val in (True, 1):
147 self.config.SetBoolean('foo.bar', val) 145 self.config.SetBoolean("foo.bar", val)
148 self.assertTrue(self.config.GetBoolean('foo.bar')) 146 self.assertTrue(self.config.GetBoolean("foo.bar"))
149 147
150 # Make sure the value was actually written out. 148 # Make sure the value was actually written out.
151 config = self.get_config() 149 config = self.get_config()
152 self.assertTrue(config.GetBoolean('foo.bar')) 150 self.assertTrue(config.GetBoolean("foo.bar"))
153 self.assertEqual('true', config.GetString('foo.bar')) 151 self.assertEqual("true", config.GetString("foo.bar"))
154 152
155 # Set a false value. 153 # Set a false value.
156 for val in (False, 0): 154 for val in (False, 0):
157 self.config.SetBoolean('foo.bar', val) 155 self.config.SetBoolean("foo.bar", val)
158 self.assertFalse(self.config.GetBoolean('foo.bar')) 156 self.assertFalse(self.config.GetBoolean("foo.bar"))
159 157
160 # Make sure the value was actually written out. 158 # Make sure the value was actually written out.
161 config = self.get_config() 159 config = self.get_config()
162 self.assertFalse(config.GetBoolean('foo.bar')) 160 self.assertFalse(config.GetBoolean("foo.bar"))
163 self.assertEqual('false', config.GetString('foo.bar')) 161 self.assertEqual("false", config.GetString("foo.bar"))
164 162
165 # Delete the value. 163 # Delete the value.
166 self.config.SetBoolean('foo.bar', None) 164 self.config.SetBoolean("foo.bar", None)
167 self.assertIsNone(self.config.GetBoolean('foo.bar')) 165 self.assertIsNone(self.config.GetBoolean("foo.bar"))
168 config = self.get_config() 166 config = self.get_config()
169 self.assertIsNone(config.GetBoolean('foo.bar')) 167 self.assertIsNone(config.GetBoolean("foo.bar"))
170 168
171 def test_GetSyncAnalysisStateData(self): 169 def test_GetSyncAnalysisStateData(self):
172 """Test config entries with a sync state analysis data.""" 170 """Test config entries with a sync state analysis data."""
173 superproject_logging_data = {} 171 superproject_logging_data = {}
174 superproject_logging_data['test'] = False 172 superproject_logging_data["test"] = False
175 options = type('options', (object,), {})() 173 options = type("options", (object,), {})()
176 options.verbose = 'true' 174 options.verbose = "true"
177 options.mp_update = 'false' 175 options.mp_update = "false"
178 TESTS = ( 176 TESTS = (
179 ('superproject.test', 'false'), 177 ("superproject.test", "false"),
180 ('options.verbose', 'true'), 178 ("options.verbose", "true"),
181 ('options.mpupdate', 'false'), 179 ("options.mpupdate", "false"),
182 ('main.version', '1'), 180 ("main.version", "1"),
183 ) 181 )
184 self.config.UpdateSyncAnalysisState(options, superproject_logging_data) 182 self.config.UpdateSyncAnalysisState(options, superproject_logging_data)
185 sync_data = self.config.GetSyncAnalysisStateData() 183 sync_data = self.config.GetSyncAnalysisStateData()
186 for key, value in TESTS: 184 for key, value in TESTS:
187 self.assertEqual(sync_data[f'{git_config.SYNC_STATE_PREFIX}{key}'], value) 185 self.assertEqual(
188 self.assertTrue(sync_data[f'{git_config.SYNC_STATE_PREFIX}main.synctime']) 186 sync_data[f"{git_config.SYNC_STATE_PREFIX}{key}"], value
187 )
188 self.assertTrue(
189 sync_data[f"{git_config.SYNC_STATE_PREFIX}main.synctime"]
190 )
diff --git a/tests/test_git_superproject.py b/tests/test_git_superproject.py
index b9b597a6..eb542c60 100644
--- a/tests/test_git_superproject.py
+++ b/tests/test_git_superproject.py
@@ -28,297 +28,369 @@ from test_manifest_xml import sort_attributes
28 28
29 29
30class SuperprojectTestCase(unittest.TestCase): 30class SuperprojectTestCase(unittest.TestCase):
31 """TestCase for the Superproject module.""" 31 """TestCase for the Superproject module."""
32 32
33 PARENT_SID_KEY = 'GIT_TRACE2_PARENT_SID' 33 PARENT_SID_KEY = "GIT_TRACE2_PARENT_SID"
34 PARENT_SID_VALUE = 'parent_sid' 34 PARENT_SID_VALUE = "parent_sid"
35 SELF_SID_REGEX = r'repo-\d+T\d+Z-.*' 35 SELF_SID_REGEX = r"repo-\d+T\d+Z-.*"
36 FULL_SID_REGEX = r'^%s/%s' % (PARENT_SID_VALUE, SELF_SID_REGEX) 36 FULL_SID_REGEX = r"^%s/%s" % (PARENT_SID_VALUE, SELF_SID_REGEX)
37 37
38 def setUp(self): 38 def setUp(self):
39 """Set up superproject every time.""" 39 """Set up superproject every time."""
40 self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests') 40 self.tempdirobj = tempfile.TemporaryDirectory(prefix="repo_tests")
41 self.tempdir = self.tempdirobj.name 41 self.tempdir = self.tempdirobj.name
42 self.repodir = os.path.join(self.tempdir, '.repo') 42 self.repodir = os.path.join(self.tempdir, ".repo")
43 self.manifest_file = os.path.join( 43 self.manifest_file = os.path.join(
44 self.repodir, manifest_xml.MANIFEST_FILE_NAME) 44 self.repodir, manifest_xml.MANIFEST_FILE_NAME
45 os.mkdir(self.repodir) 45 )
46 self.platform = platform.system().lower() 46 os.mkdir(self.repodir)
47 47 self.platform = platform.system().lower()
48 # By default we initialize with the expected case where 48
49 # repo launches us (so GIT_TRACE2_PARENT_SID is set). 49 # By default we initialize with the expected case where
50 env = { 50 # repo launches us (so GIT_TRACE2_PARENT_SID is set).
51 self.PARENT_SID_KEY: self.PARENT_SID_VALUE, 51 env = {
52 } 52 self.PARENT_SID_KEY: self.PARENT_SID_VALUE,
53 self.git_event_log = git_trace2_event_log.EventLog(env=env) 53 }
54 54 self.git_event_log = git_trace2_event_log.EventLog(env=env)
55 # The manifest parsing really wants a git repo currently. 55
56 gitdir = os.path.join(self.repodir, 'manifests.git') 56 # The manifest parsing really wants a git repo currently.
57 os.mkdir(gitdir) 57 gitdir = os.path.join(self.repodir, "manifests.git")
58 with open(os.path.join(gitdir, 'config'), 'w') as fp: 58 os.mkdir(gitdir)
59 fp.write("""[remote "origin"] 59 with open(os.path.join(gitdir, "config"), "w") as fp:
60 fp.write(
61 """[remote "origin"]
60 url = https://localhost:0/manifest 62 url = https://localhost:0/manifest
61""") 63"""
64 )
62 65
63 manifest = self.getXmlManifest(""" 66 manifest = self.getXmlManifest(
67 """
64<manifest> 68<manifest>
65 <remote name="default-remote" fetch="http://localhost" /> 69 <remote name="default-remote" fetch="http://localhost" />
66 <default remote="default-remote" revision="refs/heads/main" /> 70 <default remote="default-remote" revision="refs/heads/main" />
67 <superproject name="superproject"/> 71 <superproject name="superproject"/>
68 <project path="art" name="platform/art" groups="notdefault,platform-""" + self.platform + """ 72 <project path="art" name="platform/art" groups="notdefault,platform-"""
73 + self.platform
74 + """
69 " /></manifest> 75 " /></manifest>
70""") 76"""
71 self._superproject = git_superproject.Superproject( 77 )
72 manifest, name='superproject', 78 self._superproject = git_superproject.Superproject(
73 remote=manifest.remotes.get('default-remote').ToRemoteSpec('superproject'), 79 manifest,
74 revision='refs/heads/main') 80 name="superproject",
75 81 remote=manifest.remotes.get("default-remote").ToRemoteSpec(
76 def tearDown(self): 82 "superproject"
77 """Tear down superproject every time.""" 83 ),
78 self.tempdirobj.cleanup() 84 revision="refs/heads/main",
79 85 )
80 def getXmlManifest(self, data): 86
81 """Helper to initialize a manifest for testing.""" 87 def tearDown(self):
82 with open(self.manifest_file, 'w') as fp: 88 """Tear down superproject every time."""
83 fp.write(data) 89 self.tempdirobj.cleanup()
84 return manifest_xml.XmlManifest(self.repodir, self.manifest_file) 90
85 91 def getXmlManifest(self, data):
86 def verifyCommonKeys(self, log_entry, expected_event_name, full_sid=True): 92 """Helper to initialize a manifest for testing."""
87 """Helper function to verify common event log keys.""" 93 with open(self.manifest_file, "w") as fp:
88 self.assertIn('event', log_entry) 94 fp.write(data)
89 self.assertIn('sid', log_entry) 95 return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
90 self.assertIn('thread', log_entry) 96
91 self.assertIn('time', log_entry) 97 def verifyCommonKeys(self, log_entry, expected_event_name, full_sid=True):
92 98 """Helper function to verify common event log keys."""
93 # Do basic data format validation. 99 self.assertIn("event", log_entry)
94 self.assertEqual(expected_event_name, log_entry['event']) 100 self.assertIn("sid", log_entry)
95 if full_sid: 101 self.assertIn("thread", log_entry)
96 self.assertRegex(log_entry['sid'], self.FULL_SID_REGEX) 102 self.assertIn("time", log_entry)
97 else: 103
98 self.assertRegex(log_entry['sid'], self.SELF_SID_REGEX) 104 # Do basic data format validation.
99 self.assertRegex(log_entry['time'], r'^\d+-\d+-\d+T\d+:\d+:\d+\.\d+Z$') 105 self.assertEqual(expected_event_name, log_entry["event"])
100 106 if full_sid:
101 def readLog(self, log_path): 107 self.assertRegex(log_entry["sid"], self.FULL_SID_REGEX)
102 """Helper function to read log data into a list.""" 108 else:
103 log_data = [] 109 self.assertRegex(log_entry["sid"], self.SELF_SID_REGEX)
104 with open(log_path, mode='rb') as f: 110 self.assertRegex(log_entry["time"], r"^\d+-\d+-\d+T\d+:\d+:\d+\.\d+Z$")
105 for line in f: 111
106 log_data.append(json.loads(line)) 112 def readLog(self, log_path):
107 return log_data 113 """Helper function to read log data into a list."""
108 114 log_data = []
109 def verifyErrorEvent(self): 115 with open(log_path, mode="rb") as f:
110 """Helper to verify that error event is written.""" 116 for line in f:
111 117 log_data.append(json.loads(line))
112 with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir: 118 return log_data
113 log_path = self.git_event_log.Write(path=tempdir) 119
114 self.log_data = self.readLog(log_path) 120 def verifyErrorEvent(self):
115 121 """Helper to verify that error event is written."""
116 self.assertEqual(len(self.log_data), 2) 122
117 error_event = self.log_data[1] 123 with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
118 self.verifyCommonKeys(self.log_data[0], expected_event_name='version') 124 log_path = self.git_event_log.Write(path=tempdir)
119 self.verifyCommonKeys(error_event, expected_event_name='error') 125 self.log_data = self.readLog(log_path)
120 # Check for 'error' event specific fields. 126
121 self.assertIn('msg', error_event) 127 self.assertEqual(len(self.log_data), 2)
122 self.assertIn('fmt', error_event) 128 error_event = self.log_data[1]
123 129 self.verifyCommonKeys(self.log_data[0], expected_event_name="version")
124 def test_superproject_get_superproject_no_superproject(self): 130 self.verifyCommonKeys(error_event, expected_event_name="error")
125 """Test with no url.""" 131 # Check for 'error' event specific fields.
126 manifest = self.getXmlManifest(""" 132 self.assertIn("msg", error_event)
133 self.assertIn("fmt", error_event)
134
135 def test_superproject_get_superproject_no_superproject(self):
136 """Test with no url."""
137 manifest = self.getXmlManifest(
138 """
127<manifest> 139<manifest>
128</manifest> 140</manifest>
129""") 141"""
130 self.assertIsNone(manifest.superproject) 142 )
131 143 self.assertIsNone(manifest.superproject)
132 def test_superproject_get_superproject_invalid_url(self): 144
133 """Test with an invalid url.""" 145 def test_superproject_get_superproject_invalid_url(self):
134 manifest = self.getXmlManifest(""" 146 """Test with an invalid url."""
147 manifest = self.getXmlManifest(
148 """
135<manifest> 149<manifest>
136 <remote name="test-remote" fetch="localhost" /> 150 <remote name="test-remote" fetch="localhost" />
137 <default remote="test-remote" revision="refs/heads/main" /> 151 <default remote="test-remote" revision="refs/heads/main" />
138 <superproject name="superproject"/> 152 <superproject name="superproject"/>
139</manifest> 153</manifest>
140""") 154"""
141 superproject = git_superproject.Superproject( 155 )
142 manifest, name='superproject', 156 superproject = git_superproject.Superproject(
143 remote=manifest.remotes.get('test-remote').ToRemoteSpec('superproject'), 157 manifest,
144 revision='refs/heads/main') 158 name="superproject",
145 sync_result = superproject.Sync(self.git_event_log) 159 remote=manifest.remotes.get("test-remote").ToRemoteSpec(
146 self.assertFalse(sync_result.success) 160 "superproject"
147 self.assertTrue(sync_result.fatal) 161 ),
148 162 revision="refs/heads/main",
149 def test_superproject_get_superproject_invalid_branch(self): 163 )
150 """Test with an invalid branch.""" 164 sync_result = superproject.Sync(self.git_event_log)
151 manifest = self.getXmlManifest(""" 165 self.assertFalse(sync_result.success)
166 self.assertTrue(sync_result.fatal)
167
168 def test_superproject_get_superproject_invalid_branch(self):
169 """Test with an invalid branch."""
170 manifest = self.getXmlManifest(
171 """
152<manifest> 172<manifest>
153 <remote name="test-remote" fetch="localhost" /> 173 <remote name="test-remote" fetch="localhost" />
154 <default remote="test-remote" revision="refs/heads/main" /> 174 <default remote="test-remote" revision="refs/heads/main" />
155 <superproject name="superproject"/> 175 <superproject name="superproject"/>
156</manifest> 176</manifest>
157""") 177"""
158 self._superproject = git_superproject.Superproject( 178 )
159 manifest, name='superproject', 179 self._superproject = git_superproject.Superproject(
160 remote=manifest.remotes.get('test-remote').ToRemoteSpec('superproject'), 180 manifest,
161 revision='refs/heads/main') 181 name="superproject",
162 with mock.patch.object(self._superproject, '_branch', 'junk'): 182 remote=manifest.remotes.get("test-remote").ToRemoteSpec(
163 sync_result = self._superproject.Sync(self.git_event_log) 183 "superproject"
164 self.assertFalse(sync_result.success) 184 ),
165 self.assertTrue(sync_result.fatal) 185 revision="refs/heads/main",
166 self.verifyErrorEvent() 186 )
167 187 with mock.patch.object(self._superproject, "_branch", "junk"):
168 def test_superproject_get_superproject_mock_init(self): 188 sync_result = self._superproject.Sync(self.git_event_log)
169 """Test with _Init failing.""" 189 self.assertFalse(sync_result.success)
170 with mock.patch.object(self._superproject, '_Init', return_value=False): 190 self.assertTrue(sync_result.fatal)
171 sync_result = self._superproject.Sync(self.git_event_log) 191 self.verifyErrorEvent()
172 self.assertFalse(sync_result.success) 192
173 self.assertTrue(sync_result.fatal) 193 def test_superproject_get_superproject_mock_init(self):
174 194 """Test with _Init failing."""
175 def test_superproject_get_superproject_mock_fetch(self): 195 with mock.patch.object(self._superproject, "_Init", return_value=False):
176 """Test with _Fetch failing.""" 196 sync_result = self._superproject.Sync(self.git_event_log)
177 with mock.patch.object(self._superproject, '_Init', return_value=True): 197 self.assertFalse(sync_result.success)
178 os.mkdir(self._superproject._superproject_path) 198 self.assertTrue(sync_result.fatal)
179 with mock.patch.object(self._superproject, '_Fetch', return_value=False): 199
180 sync_result = self._superproject.Sync(self.git_event_log) 200 def test_superproject_get_superproject_mock_fetch(self):
181 self.assertFalse(sync_result.success) 201 """Test with _Fetch failing."""
182 self.assertTrue(sync_result.fatal) 202 with mock.patch.object(self._superproject, "_Init", return_value=True):
183 203 os.mkdir(self._superproject._superproject_path)
184 def test_superproject_get_all_project_commit_ids_mock_ls_tree(self): 204 with mock.patch.object(
185 """Test with LsTree being a mock.""" 205 self._superproject, "_Fetch", return_value=False
186 data = ('120000 blob 158258bdf146f159218e2b90f8b699c4d85b5804\tAndroid.bp\x00' 206 ):
187 '160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00' 207 sync_result = self._superproject.Sync(self.git_event_log)
188 '160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00' 208 self.assertFalse(sync_result.success)
189 '120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00' 209 self.assertTrue(sync_result.fatal)
190 '160000 commit ade9b7a0d874e25fff4bf2552488825c6f111928\tbuild/bazel\x00') 210
191 with mock.patch.object(self._superproject, '_Init', return_value=True): 211 def test_superproject_get_all_project_commit_ids_mock_ls_tree(self):
192 with mock.patch.object(self._superproject, '_Fetch', return_value=True): 212 """Test with LsTree being a mock."""
193 with mock.patch.object(self._superproject, '_LsTree', return_value=data): 213 data = (
194 commit_ids_result = self._superproject._GetAllProjectsCommitIds() 214 "120000 blob 158258bdf146f159218e2b90f8b699c4d85b5804\tAndroid.bp\x00"
195 self.assertEqual(commit_ids_result.commit_ids, { 215 "160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00"
196 'art': '2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea', 216 "160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00"
197 'bootable/recovery': 'e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06', 217 "120000 blob acc2cbdf438f9d2141f0ae424cec1d8fc4b5d97f\tbootstrap.bash\x00"
198 'build/bazel': 'ade9b7a0d874e25fff4bf2552488825c6f111928' 218 "160000 commit ade9b7a0d874e25fff4bf2552488825c6f111928\tbuild/bazel\x00"
199 }) 219 )
200 self.assertFalse(commit_ids_result.fatal) 220 with mock.patch.object(self._superproject, "_Init", return_value=True):
201 221 with mock.patch.object(
202 def test_superproject_write_manifest_file(self): 222 self._superproject, "_Fetch", return_value=True
203 """Test with writing manifest to a file after setting revisionId.""" 223 ):
204 self.assertEqual(len(self._superproject._manifest.projects), 1) 224 with mock.patch.object(
205 project = self._superproject._manifest.projects[0] 225 self._superproject, "_LsTree", return_value=data
206 project.SetRevisionId('ABCDEF') 226 ):
207 # Create temporary directory so that it can write the file. 227 commit_ids_result = (
208 os.mkdir(self._superproject._superproject_path) 228 self._superproject._GetAllProjectsCommitIds()
209 manifest_path = self._superproject._WriteManifestFile() 229 )
210 self.assertIsNotNone(manifest_path) 230 self.assertEqual(
211 with open(manifest_path, 'r') as fp: 231 commit_ids_result.commit_ids,
212 manifest_xml_data = fp.read() 232 {
213 self.assertEqual( 233 "art": "2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea",
214 sort_attributes(manifest_xml_data), 234 "bootable/recovery": "e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06",
215 '<?xml version="1.0" ?><manifest>' 235 "build/bazel": "ade9b7a0d874e25fff4bf2552488825c6f111928",
216 '<remote fetch="http://localhost" name="default-remote"/>' 236 },
217 '<default remote="default-remote" revision="refs/heads/main"/>' 237 )
218 '<project groups="notdefault,platform-' + self.platform + '" ' 238 self.assertFalse(commit_ids_result.fatal)
219 'name="platform/art" path="art" revision="ABCDEF" upstream="refs/heads/main"/>' 239
220 '<superproject name="superproject"/>' 240 def test_superproject_write_manifest_file(self):
221 '</manifest>') 241 """Test with writing manifest to a file after setting revisionId."""
222 242 self.assertEqual(len(self._superproject._manifest.projects), 1)
223 def test_superproject_update_project_revision_id(self): 243 project = self._superproject._manifest.projects[0]
224 """Test with LsTree being a mock.""" 244 project.SetRevisionId("ABCDEF")
225 self.assertEqual(len(self._superproject._manifest.projects), 1) 245 # Create temporary directory so that it can write the file.
226 projects = self._superproject._manifest.projects 246 os.mkdir(self._superproject._superproject_path)
227 data = ('160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00' 247 manifest_path = self._superproject._WriteManifestFile()
228 '160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00') 248 self.assertIsNotNone(manifest_path)
229 with mock.patch.object(self._superproject, '_Init', return_value=True): 249 with open(manifest_path, "r") as fp:
230 with mock.patch.object(self._superproject, '_Fetch', return_value=True):
231 with mock.patch.object(self._superproject,
232 '_LsTree',
233 return_value=data):
234 # Create temporary directory so that it can write the file.
235 os.mkdir(self._superproject._superproject_path)
236 update_result = self._superproject.UpdateProjectsRevisionId(projects, self.git_event_log)
237 self.assertIsNotNone(update_result.manifest_path)
238 self.assertFalse(update_result.fatal)
239 with open(update_result.manifest_path, 'r') as fp:
240 manifest_xml_data = fp.read() 250 manifest_xml_data = fp.read()
241 self.assertEqual( 251 self.assertEqual(
242 sort_attributes(manifest_xml_data), 252 sort_attributes(manifest_xml_data),
243 '<?xml version="1.0" ?><manifest>' 253 '<?xml version="1.0" ?><manifest>'
244 '<remote fetch="http://localhost" name="default-remote"/>' 254 '<remote fetch="http://localhost" name="default-remote"/>'
245 '<default remote="default-remote" revision="refs/heads/main"/>' 255 '<default remote="default-remote" revision="refs/heads/main"/>'
246 '<project groups="notdefault,platform-' + self.platform + '" ' 256 '<project groups="notdefault,platform-' + self.platform + '" '
247 'name="platform/art" path="art" ' 257 'name="platform/art" path="art" revision="ABCDEF" upstream="refs/heads/main"/>'
248 'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>' 258 '<superproject name="superproject"/>'
249 '<superproject name="superproject"/>' 259 "</manifest>",
250 '</manifest>') 260 )
251 261
252 def test_superproject_update_project_revision_id_no_superproject_tag(self): 262 def test_superproject_update_project_revision_id(self):
253 """Test update of commit ids of a manifest without superproject tag.""" 263 """Test with LsTree being a mock."""
254 manifest = self.getXmlManifest(""" 264 self.assertEqual(len(self._superproject._manifest.projects), 1)
265 projects = self._superproject._manifest.projects
266 data = (
267 "160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00"
268 "160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tbootable/recovery\x00"
269 )
270 with mock.patch.object(self._superproject, "_Init", return_value=True):
271 with mock.patch.object(
272 self._superproject, "_Fetch", return_value=True
273 ):
274 with mock.patch.object(
275 self._superproject, "_LsTree", return_value=data
276 ):
277 # Create temporary directory so that it can write the file.
278 os.mkdir(self._superproject._superproject_path)
279 update_result = self._superproject.UpdateProjectsRevisionId(
280 projects, self.git_event_log
281 )
282 self.assertIsNotNone(update_result.manifest_path)
283 self.assertFalse(update_result.fatal)
284 with open(update_result.manifest_path, "r") as fp:
285 manifest_xml_data = fp.read()
286 self.assertEqual(
287 sort_attributes(manifest_xml_data),
288 '<?xml version="1.0" ?><manifest>'
289 '<remote fetch="http://localhost" name="default-remote"/>'
290 '<default remote="default-remote" revision="refs/heads/main"/>'
291 '<project groups="notdefault,platform-'
292 + self.platform
293 + '" '
294 'name="platform/art" path="art" '
295 'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
296 '<superproject name="superproject"/>'
297 "</manifest>",
298 )
299
300 def test_superproject_update_project_revision_id_no_superproject_tag(self):
301 """Test update of commit ids of a manifest without superproject tag."""
302 manifest = self.getXmlManifest(
303 """
255<manifest> 304<manifest>
256 <remote name="default-remote" fetch="http://localhost" /> 305 <remote name="default-remote" fetch="http://localhost" />
257 <default remote="default-remote" revision="refs/heads/main" /> 306 <default remote="default-remote" revision="refs/heads/main" />
258 <project name="test-name"/> 307 <project name="test-name"/>
259</manifest> 308</manifest>
260""") 309"""
261 self.maxDiff = None 310 )
262 self.assertIsNone(manifest.superproject) 311 self.maxDiff = None
263 self.assertEqual( 312 self.assertIsNone(manifest.superproject)
264 sort_attributes(manifest.ToXml().toxml()), 313 self.assertEqual(
265 '<?xml version="1.0" ?><manifest>' 314 sort_attributes(manifest.ToXml().toxml()),
266 '<remote fetch="http://localhost" name="default-remote"/>' 315 '<?xml version="1.0" ?><manifest>'
267 '<default remote="default-remote" revision="refs/heads/main"/>' 316 '<remote fetch="http://localhost" name="default-remote"/>'
268 '<project name="test-name"/>' 317 '<default remote="default-remote" revision="refs/heads/main"/>'
269 '</manifest>') 318 '<project name="test-name"/>'
270 319 "</manifest>",
271 def test_superproject_update_project_revision_id_from_local_manifest_group(self): 320 )
272 """Test update of commit ids of a manifest that have local manifest no superproject group.""" 321
273 local_group = manifest_xml.LOCAL_MANIFEST_GROUP_PREFIX + ':local' 322 def test_superproject_update_project_revision_id_from_local_manifest_group(
274 manifest = self.getXmlManifest(""" 323 self,
324 ):
325 """Test update of commit ids of a manifest that have local manifest no superproject group."""
326 local_group = manifest_xml.LOCAL_MANIFEST_GROUP_PREFIX + ":local"
327 manifest = self.getXmlManifest(
328 """
275<manifest> 329<manifest>
276 <remote name="default-remote" fetch="http://localhost" /> 330 <remote name="default-remote" fetch="http://localhost" />
277 <remote name="goog" fetch="http://localhost2" /> 331 <remote name="goog" fetch="http://localhost2" />
278 <default remote="default-remote" revision="refs/heads/main" /> 332 <default remote="default-remote" revision="refs/heads/main" />
279 <superproject name="superproject"/> 333 <superproject name="superproject"/>
280 <project path="vendor/x" name="platform/vendor/x" remote="goog" 334 <project path="vendor/x" name="platform/vendor/x" remote="goog"
281 groups=\"""" + local_group + """ 335 groups=\""""
336 + local_group
337 + """
282 " revision="master-with-vendor" clone-depth="1" /> 338 " revision="master-with-vendor" clone-depth="1" />
283 <project path="art" name="platform/art" groups="notdefault,platform-""" + self.platform + """ 339 <project path="art" name="platform/art" groups="notdefault,platform-"""
340 + self.platform
341 + """
284 " /></manifest> 342 " /></manifest>
285""") 343"""
286 self.maxDiff = None 344 )
287 self._superproject = git_superproject.Superproject( 345 self.maxDiff = None
288 manifest, name='superproject', 346 self._superproject = git_superproject.Superproject(
289 remote=manifest.remotes.get('default-remote').ToRemoteSpec('superproject'), 347 manifest,
290 revision='refs/heads/main') 348 name="superproject",
291 self.assertEqual(len(self._superproject._manifest.projects), 2) 349 remote=manifest.remotes.get("default-remote").ToRemoteSpec(
292 projects = self._superproject._manifest.projects 350 "superproject"
293 data = ('160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00') 351 ),
294 with mock.patch.object(self._superproject, '_Init', return_value=True): 352 revision="refs/heads/main",
295 with mock.patch.object(self._superproject, '_Fetch', return_value=True): 353 )
296 with mock.patch.object(self._superproject, 354 self.assertEqual(len(self._superproject._manifest.projects), 2)
297 '_LsTree', 355 projects = self._superproject._manifest.projects
298 return_value=data): 356 data = "160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00"
299 # Create temporary directory so that it can write the file. 357 with mock.patch.object(self._superproject, "_Init", return_value=True):
300 os.mkdir(self._superproject._superproject_path) 358 with mock.patch.object(
301 update_result = self._superproject.UpdateProjectsRevisionId(projects, self.git_event_log) 359 self._superproject, "_Fetch", return_value=True
302 self.assertIsNotNone(update_result.manifest_path) 360 ):
303 self.assertFalse(update_result.fatal) 361 with mock.patch.object(
304 with open(update_result.manifest_path, 'r') as fp: 362 self._superproject, "_LsTree", return_value=data
305 manifest_xml_data = fp.read() 363 ):
306 # Verify platform/vendor/x's project revision hasn't changed. 364 # Create temporary directory so that it can write the file.
307 self.assertEqual( 365 os.mkdir(self._superproject._superproject_path)
308 sort_attributes(manifest_xml_data), 366 update_result = self._superproject.UpdateProjectsRevisionId(
309 '<?xml version="1.0" ?><manifest>' 367 projects, self.git_event_log
310 '<remote fetch="http://localhost" name="default-remote"/>' 368 )
311 '<remote fetch="http://localhost2" name="goog"/>' 369 self.assertIsNotNone(update_result.manifest_path)
312 '<default remote="default-remote" revision="refs/heads/main"/>' 370 self.assertFalse(update_result.fatal)
313 '<project groups="notdefault,platform-' + self.platform + '" ' 371 with open(update_result.manifest_path, "r") as fp:
314 'name="platform/art" path="art" ' 372 manifest_xml_data = fp.read()
315 'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>' 373 # Verify platform/vendor/x's project revision hasn't
316 '<superproject name="superproject"/>' 374 # changed.
317 '</manifest>') 375 self.assertEqual(
318 376 sort_attributes(manifest_xml_data),
319 def test_superproject_update_project_revision_id_with_pinned_manifest(self): 377 '<?xml version="1.0" ?><manifest>'
320 """Test update of commit ids of a pinned manifest.""" 378 '<remote fetch="http://localhost" name="default-remote"/>'
321 manifest = self.getXmlManifest(""" 379 '<remote fetch="http://localhost2" name="goog"/>'
380 '<default remote="default-remote" revision="refs/heads/main"/>'
381 '<project groups="notdefault,platform-'
382 + self.platform
383 + '" '
384 'name="platform/art" path="art" '
385 'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
386 '<superproject name="superproject"/>'
387 "</manifest>",
388 )
389
390 def test_superproject_update_project_revision_id_with_pinned_manifest(self):
391 """Test update of commit ids of a pinned manifest."""
392 manifest = self.getXmlManifest(
393 """
322<manifest> 394<manifest>
323 <remote name="default-remote" fetch="http://localhost" /> 395 <remote name="default-remote" fetch="http://localhost" />
324 <default remote="default-remote" revision="refs/heads/main" /> 396 <default remote="default-remote" revision="refs/heads/main" />
@@ -326,80 +398,132 @@ class SuperprojectTestCase(unittest.TestCase):
326 <project path="vendor/x" name="platform/vendor/x" revision="" /> 398 <project path="vendor/x" name="platform/vendor/x" revision="" />
327 <project path="vendor/y" name="platform/vendor/y" 399 <project path="vendor/y" name="platform/vendor/y"
328 revision="52d3c9f7c107839ece2319d077de0cd922aa9d8f" /> 400 revision="52d3c9f7c107839ece2319d077de0cd922aa9d8f" />
329 <project path="art" name="platform/art" groups="notdefault,platform-""" + self.platform + """ 401 <project path="art" name="platform/art" groups="notdefault,platform-"""
402 + self.platform
403 + """
330 " /></manifest> 404 " /></manifest>
331""") 405"""
332 self.maxDiff = None 406 )
333 self._superproject = git_superproject.Superproject( 407 self.maxDiff = None
334 manifest, name='superproject', 408 self._superproject = git_superproject.Superproject(
335 remote=manifest.remotes.get('default-remote').ToRemoteSpec('superproject'), 409 manifest,
336 revision='refs/heads/main') 410 name="superproject",
337 self.assertEqual(len(self._superproject._manifest.projects), 3) 411 remote=manifest.remotes.get("default-remote").ToRemoteSpec(
338 projects = self._superproject._manifest.projects 412 "superproject"
339 data = ('160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00' 413 ),
340 '160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tvendor/x\x00') 414 revision="refs/heads/main",
341 with mock.patch.object(self._superproject, '_Init', return_value=True): 415 )
342 with mock.patch.object(self._superproject, '_Fetch', return_value=True): 416 self.assertEqual(len(self._superproject._manifest.projects), 3)
343 with mock.patch.object(self._superproject, 417 projects = self._superproject._manifest.projects
344 '_LsTree', 418 data = (
345 return_value=data): 419 "160000 commit 2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea\tart\x00"
346 # Create temporary directory so that it can write the file. 420 "160000 commit e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06\tvendor/x\x00"
347 os.mkdir(self._superproject._superproject_path) 421 )
348 update_result = self._superproject.UpdateProjectsRevisionId(projects, self.git_event_log) 422 with mock.patch.object(self._superproject, "_Init", return_value=True):
349 self.assertIsNotNone(update_result.manifest_path) 423 with mock.patch.object(
350 self.assertFalse(update_result.fatal) 424 self._superproject, "_Fetch", return_value=True
351 with open(update_result.manifest_path, 'r') as fp: 425 ):
352 manifest_xml_data = fp.read() 426 with mock.patch.object(
353 # Verify platform/vendor/x's project revision hasn't changed. 427 self._superproject, "_LsTree", return_value=data
354 self.assertEqual( 428 ):
355 sort_attributes(manifest_xml_data), 429 # Create temporary directory so that it can write the file.
356 '<?xml version="1.0" ?><manifest>' 430 os.mkdir(self._superproject._superproject_path)
357 '<remote fetch="http://localhost" name="default-remote"/>' 431 update_result = self._superproject.UpdateProjectsRevisionId(
358 '<default remote="default-remote" revision="refs/heads/main"/>' 432 projects, self.git_event_log
359 '<project groups="notdefault,platform-' + self.platform + '" ' 433 )
360 'name="platform/art" path="art" ' 434 self.assertIsNotNone(update_result.manifest_path)
361 'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>' 435 self.assertFalse(update_result.fatal)
362 '<project name="platform/vendor/x" path="vendor/x" ' 436 with open(update_result.manifest_path, "r") as fp:
363 'revision="e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06" upstream="refs/heads/main"/>' 437 manifest_xml_data = fp.read()
364 '<project name="platform/vendor/y" path="vendor/y" ' 438 # Verify platform/vendor/x's project revision hasn't
365 'revision="52d3c9f7c107839ece2319d077de0cd922aa9d8f"/>' 439 # changed.
366 '<superproject name="superproject"/>' 440 self.assertEqual(
367 '</manifest>') 441 sort_attributes(manifest_xml_data),
368 442 '<?xml version="1.0" ?><manifest>'
369 def test_Fetch(self): 443 '<remote fetch="http://localhost" name="default-remote"/>'
370 manifest = self.getXmlManifest(""" 444 '<default remote="default-remote" revision="refs/heads/main"/>'
445 '<project groups="notdefault,platform-'
446 + self.platform
447 + '" '
448 'name="platform/art" path="art" '
449 'revision="2c2724cb36cd5a9cec6c852c681efc3b7c6b86ea" upstream="refs/heads/main"/>'
450 '<project name="platform/vendor/x" path="vendor/x" '
451 'revision="e9d25da64d8d365dbba7c8ee00fe8c4473fe9a06" upstream="refs/heads/main"/>'
452 '<project name="platform/vendor/y" path="vendor/y" '
453 'revision="52d3c9f7c107839ece2319d077de0cd922aa9d8f"/>'
454 '<superproject name="superproject"/>'
455 "</manifest>",
456 )
457
458 def test_Fetch(self):
459 manifest = self.getXmlManifest(
460 """
371<manifest> 461<manifest>
372 <remote name="default-remote" fetch="http://localhost" /> 462 <remote name="default-remote" fetch="http://localhost" />
373 <default remote="default-remote" revision="refs/heads/main" /> 463 <default remote="default-remote" revision="refs/heads/main" />
374 <superproject name="superproject"/> 464 <superproject name="superproject"/>
375 " /></manifest> 465 " /></manifest>
376""") 466"""
377 self.maxDiff = None 467 )
378 self._superproject = git_superproject.Superproject( 468 self.maxDiff = None
379 manifest, name='superproject', 469 self._superproject = git_superproject.Superproject(
380 remote=manifest.remotes.get('default-remote').ToRemoteSpec('superproject'), 470 manifest,
381 revision='refs/heads/main') 471 name="superproject",
382 os.mkdir(self._superproject._superproject_path) 472 remote=manifest.remotes.get("default-remote").ToRemoteSpec(
383 os.mkdir(self._superproject._work_git) 473 "superproject"
384 with mock.patch.object(self._superproject, '_Init', return_value=True): 474 ),
385 with mock.patch('git_superproject.GitCommand', autospec=True) as mock_git_command: 475 revision="refs/heads/main",
386 with mock.patch('git_superproject.GitRefs.get', autospec=True) as mock_git_refs: 476 )
387 instance = mock_git_command.return_value 477 os.mkdir(self._superproject._superproject_path)
388 instance.Wait.return_value = 0 478 os.mkdir(self._superproject._work_git)
389 mock_git_refs.side_effect = ['', '1234'] 479 with mock.patch.object(self._superproject, "_Init", return_value=True):
390 480 with mock.patch(
391 self.assertTrue(self._superproject._Fetch()) 481 "git_superproject.GitCommand", autospec=True
392 self.assertEqual(mock_git_command.call_args.args,(None, [ 482 ) as mock_git_command:
393 'fetch', 'http://localhost/superproject', '--depth', '1', 483 with mock.patch(
394 '--force', '--no-tags', '--filter', 'blob:none', 484 "git_superproject.GitRefs.get", autospec=True
395 'refs/heads/main:refs/heads/main' 485 ) as mock_git_refs:
396 ])) 486 instance = mock_git_command.return_value
397 487 instance.Wait.return_value = 0
398 # If branch for revision exists, set as --negotiation-tip. 488 mock_git_refs.side_effect = ["", "1234"]
399 self.assertTrue(self._superproject._Fetch()) 489
400 self.assertEqual(mock_git_command.call_args.args,(None, [ 490 self.assertTrue(self._superproject._Fetch())
401 'fetch', 'http://localhost/superproject', '--depth', '1', 491 self.assertEqual(
402 '--force', '--no-tags', '--filter', 'blob:none', 492 mock_git_command.call_args.args,
403 '--negotiation-tip', '1234', 493 (
404 'refs/heads/main:refs/heads/main' 494 None,
405 ])) 495 [
496 "fetch",
497 "http://localhost/superproject",
498 "--depth",
499 "1",
500 "--force",
501 "--no-tags",
502 "--filter",
503 "blob:none",
504 "refs/heads/main:refs/heads/main",
505 ],
506 ),
507 )
508
509 # If branch for revision exists, set as --negotiation-tip.
510 self.assertTrue(self._superproject._Fetch())
511 self.assertEqual(
512 mock_git_command.call_args.args,
513 (
514 None,
515 [
516 "fetch",
517 "http://localhost/superproject",
518 "--depth",
519 "1",
520 "--force",
521 "--no-tags",
522 "--filter",
523 "blob:none",
524 "--negotiation-tip",
525 "1234",
526 "refs/heads/main:refs/heads/main",
527 ],
528 ),
529 )
diff --git a/tests/test_git_trace2_event_log.py b/tests/test_git_trace2_event_log.py
index 7e7dfb7a..a6078d38 100644
--- a/tests/test_git_trace2_event_log.py
+++ b/tests/test_git_trace2_event_log.py
@@ -27,361 +27,382 @@ import platform_utils
27 27
28 28
29def serverLoggingThread(socket_path, server_ready, received_traces): 29def serverLoggingThread(socket_path, server_ready, received_traces):
30 """Helper function to receive logs over a Unix domain socket. 30 """Helper function to receive logs over a Unix domain socket.
31
32 Appends received messages on the provided socket and appends to received_traces.
33
34 Args:
35 socket_path: path to a Unix domain socket on which to listen for traces
36 server_ready: a threading.Condition used to signal to the caller that this thread is ready to
37 accept connections
38 received_traces: a list to which received traces will be appended (after decoding to a utf-8
39 string).
40 """
41 platform_utils.remove(socket_path, missing_ok=True)
42 data = b''
43 with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
44 sock.bind(socket_path)
45 sock.listen(0)
46 with server_ready:
47 server_ready.notify()
48 with sock.accept()[0] as conn:
49 while True:
50 recved = conn.recv(4096)
51 if not recved:
52 break
53 data += recved
54 received_traces.extend(data.decode('utf-8').splitlines())
55 31
32 Appends received messages on the provided socket and appends to
33 received_traces.
56 34
57class EventLogTestCase(unittest.TestCase): 35 Args:
58 """TestCase for the EventLog module.""" 36 socket_path: path to a Unix domain socket on which to listen for traces
59 37 server_ready: a threading.Condition used to signal to the caller that
60 PARENT_SID_KEY = 'GIT_TRACE2_PARENT_SID' 38 this thread is ready to accept connections
61 PARENT_SID_VALUE = 'parent_sid' 39 received_traces: a list to which received traces will be appended (after
62 SELF_SID_REGEX = r'repo-\d+T\d+Z-.*' 40 decoding to a utf-8 string).
63 FULL_SID_REGEX = r'^%s/%s' % (PARENT_SID_VALUE, SELF_SID_REGEX)
64
65 def setUp(self):
66 """Load the event_log module every time."""
67 self._event_log_module = None
68 # By default we initialize with the expected case where
69 # repo launches us (so GIT_TRACE2_PARENT_SID is set).
70 env = {
71 self.PARENT_SID_KEY: self.PARENT_SID_VALUE,
72 }
73 self._event_log_module = git_trace2_event_log.EventLog(env=env)
74 self._log_data = None
75
76 def verifyCommonKeys(self, log_entry, expected_event_name=None, full_sid=True):
77 """Helper function to verify common event log keys."""
78 self.assertIn('event', log_entry)
79 self.assertIn('sid', log_entry)
80 self.assertIn('thread', log_entry)
81 self.assertIn('time', log_entry)
82
83 # Do basic data format validation.
84 if expected_event_name:
85 self.assertEqual(expected_event_name, log_entry['event'])
86 if full_sid:
87 self.assertRegex(log_entry['sid'], self.FULL_SID_REGEX)
88 else:
89 self.assertRegex(log_entry['sid'], self.SELF_SID_REGEX)
90 self.assertRegex(log_entry['time'], r'^\d+-\d+-\d+T\d+:\d+:\d+\.\d+Z$')
91
92 def readLog(self, log_path):
93 """Helper function to read log data into a list."""
94 log_data = []
95 with open(log_path, mode='rb') as f:
96 for line in f:
97 log_data.append(json.loads(line))
98 return log_data
99
100 def remove_prefix(self, s, prefix):
101 """Return a copy string after removing |prefix| from |s|, if present or the original string."""
102 if s.startswith(prefix):
103 return s[len(prefix):]
104 else:
105 return s
106
107 def test_initial_state_with_parent_sid(self):
108 """Test initial state when 'GIT_TRACE2_PARENT_SID' is set by parent."""
109 self.assertRegex(self._event_log_module.full_sid, self.FULL_SID_REGEX)
110
111 def test_initial_state_no_parent_sid(self):
112 """Test initial state when 'GIT_TRACE2_PARENT_SID' is not set."""
113 # Setup an empty environment dict (no parent sid).
114 self._event_log_module = git_trace2_event_log.EventLog(env={})
115 self.assertRegex(self._event_log_module.full_sid, self.SELF_SID_REGEX)
116
117 def test_version_event(self):
118 """Test 'version' event data is valid.
119
120 Verify that the 'version' event is written even when no other
121 events are addded.
122
123 Expected event log:
124 <version event>
125 """
126 with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
127 log_path = self._event_log_module.Write(path=tempdir)
128 self._log_data = self.readLog(log_path)
129
130 # A log with no added events should only have the version entry.
131 self.assertEqual(len(self._log_data), 1)
132 version_event = self._log_data[0]
133 self.verifyCommonKeys(version_event, expected_event_name='version')
134 # Check for 'version' event specific fields.
135 self.assertIn('evt', version_event)
136 self.assertIn('exe', version_event)
137 # Verify "evt" version field is a string.
138 self.assertIsInstance(version_event['evt'], str)
139
140 def test_start_event(self):
141 """Test and validate 'start' event data is valid.
142
143 Expected event log:
144 <version event>
145 <start event>
146 """
147 self._event_log_module.StartEvent()
148 with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
149 log_path = self._event_log_module.Write(path=tempdir)
150 self._log_data = self.readLog(log_path)
151
152 self.assertEqual(len(self._log_data), 2)
153 start_event = self._log_data[1]
154 self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
155 self.verifyCommonKeys(start_event, expected_event_name='start')
156 # Check for 'start' event specific fields.
157 self.assertIn('argv', start_event)
158 self.assertTrue(isinstance(start_event['argv'], list))
159
160 def test_exit_event_result_none(self):
161 """Test 'exit' event data is valid when result is None.
162
163 We expect None result to be converted to 0 in the exit event data.
164
165 Expected event log:
166 <version event>
167 <exit event>
168 """
169 self._event_log_module.ExitEvent(None)
170 with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
171 log_path = self._event_log_module.Write(path=tempdir)
172 self._log_data = self.readLog(log_path)
173
174 self.assertEqual(len(self._log_data), 2)
175 exit_event = self._log_data[1]
176 self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
177 self.verifyCommonKeys(exit_event, expected_event_name='exit')
178 # Check for 'exit' event specific fields.
179 self.assertIn('code', exit_event)
180 # 'None' result should convert to 0 (successful) return code.
181 self.assertEqual(exit_event['code'], 0)
182
183 def test_exit_event_result_integer(self):
184 """Test 'exit' event data is valid when result is an integer.
185
186 Expected event log:
187 <version event>
188 <exit event>
189 """
190 self._event_log_module.ExitEvent(2)
191 with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
192 log_path = self._event_log_module.Write(path=tempdir)
193 self._log_data = self.readLog(log_path)
194
195 self.assertEqual(len(self._log_data), 2)
196 exit_event = self._log_data[1]
197 self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
198 self.verifyCommonKeys(exit_event, expected_event_name='exit')
199 # Check for 'exit' event specific fields.
200 self.assertIn('code', exit_event)
201 self.assertEqual(exit_event['code'], 2)
202
203 def test_command_event(self):
204 """Test and validate 'command' event data is valid.
205
206 Expected event log:
207 <version event>
208 <command event>
209 """
210 name = 'repo'
211 subcommands = ['init' 'this']
212 self._event_log_module.CommandEvent(name='repo', subcommands=subcommands)
213 with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
214 log_path = self._event_log_module.Write(path=tempdir)
215 self._log_data = self.readLog(log_path)
216
217 self.assertEqual(len(self._log_data), 2)
218 command_event = self._log_data[1]
219 self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
220 self.verifyCommonKeys(command_event, expected_event_name='command')
221 # Check for 'command' event specific fields.
222 self.assertIn('name', command_event)
223 self.assertIn('subcommands', command_event)
224 self.assertEqual(command_event['name'], name)
225 self.assertEqual(command_event['subcommands'], subcommands)
226
227 def test_def_params_event_repo_config(self):
228 """Test 'def_params' event data outputs only repo config keys.
229
230 Expected event log:
231 <version event>
232 <def_param event>
233 <def_param event>
234 """ 41 """
235 config = { 42 platform_utils.remove(socket_path, missing_ok=True)
236 'git.foo': 'bar', 43 data = b""
237 'repo.partialclone': 'true', 44 with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as sock:
238 'repo.partialclonefilter': 'blob:none', 45 sock.bind(socket_path)
239 } 46 sock.listen(0)
240 self._event_log_module.DefParamRepoEvents(config)
241
242 with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
243 log_path = self._event_log_module.Write(path=tempdir)
244 self._log_data = self.readLog(log_path)
245
246 self.assertEqual(len(self._log_data), 3)
247 def_param_events = self._log_data[1:]
248 self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
249
250 for event in def_param_events:
251 self.verifyCommonKeys(event, expected_event_name='def_param')
252 # Check for 'def_param' event specific fields.
253 self.assertIn('param', event)
254 self.assertIn('value', event)
255 self.assertTrue(event['param'].startswith('repo.'))
256
257 def test_def_params_event_no_repo_config(self):
258 """Test 'def_params' event data won't output non-repo config keys.
259
260 Expected event log:
261 <version event>
262 """
263 config = {
264 'git.foo': 'bar',
265 'git.core.foo2': 'baz',
266 }
267 self._event_log_module.DefParamRepoEvents(config)
268
269 with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
270 log_path = self._event_log_module.Write(path=tempdir)
271 self._log_data = self.readLog(log_path)
272
273 self.assertEqual(len(self._log_data), 1)
274 self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
275
276 def test_data_event_config(self):
277 """Test 'data' event data outputs all config keys.
278
279 Expected event log:
280 <version event>
281 <data event>
282 <data event>
283 """
284 config = {
285 'git.foo': 'bar',
286 'repo.partialclone': 'false',
287 'repo.syncstate.superproject.hassuperprojecttag': 'true',
288 'repo.syncstate.superproject.sys.argv': ['--', 'sync', 'protobuf'],
289 }
290 prefix_value = 'prefix'
291 self._event_log_module.LogDataConfigEvents(config, prefix_value)
292
293 with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
294 log_path = self._event_log_module.Write(path=tempdir)
295 self._log_data = self.readLog(log_path)
296
297 self.assertEqual(len(self._log_data), 5)
298 data_events = self._log_data[1:]
299 self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
300
301 for event in data_events:
302 self.verifyCommonKeys(event)
303 # Check for 'data' event specific fields.
304 self.assertIn('key', event)
305 self.assertIn('value', event)
306 key = event['key']
307 key = self.remove_prefix(key, f'{prefix_value}/')
308 value = event['value']
309 self.assertEqual(self._event_log_module.GetDataEventName(value), event['event'])
310 self.assertTrue(key in config and value == config[key])
311
312 def test_error_event(self):
313 """Test and validate 'error' event data is valid.
314
315 Expected event log:
316 <version event>
317 <error event>
318 """
319 msg = 'invalid option: --cahced'
320 fmt = 'invalid option: %s'
321 self._event_log_module.ErrorEvent(msg, fmt)
322 with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
323 log_path = self._event_log_module.Write(path=tempdir)
324 self._log_data = self.readLog(log_path)
325
326 self.assertEqual(len(self._log_data), 2)
327 error_event = self._log_data[1]
328 self.verifyCommonKeys(self._log_data[0], expected_event_name='version')
329 self.verifyCommonKeys(error_event, expected_event_name='error')
330 # Check for 'error' event specific fields.
331 self.assertIn('msg', error_event)
332 self.assertIn('fmt', error_event)
333 self.assertEqual(error_event['msg'], msg)
334 self.assertEqual(error_event['fmt'], fmt)
335
336 def test_write_with_filename(self):
337 """Test Write() with a path to a file exits with None."""
338 self.assertIsNone(self._event_log_module.Write(path='path/to/file'))
339
340 def test_write_with_git_config(self):
341 """Test Write() uses the git config path when 'git config' call succeeds."""
342 with tempfile.TemporaryDirectory(prefix='event_log_tests') as tempdir:
343 with mock.patch.object(self._event_log_module,
344 '_GetEventTargetPath', return_value=tempdir):
345 self.assertEqual(os.path.dirname(self._event_log_module.Write()), tempdir)
346
347 def test_write_no_git_config(self):
348 """Test Write() with no git config variable present exits with None."""
349 with mock.patch.object(self._event_log_module,
350 '_GetEventTargetPath', return_value=None):
351 self.assertIsNone(self._event_log_module.Write())
352
353 def test_write_non_string(self):
354 """Test Write() with non-string type for |path| throws TypeError."""
355 with self.assertRaises(TypeError):
356 self._event_log_module.Write(path=1234)
357
358 def test_write_socket(self):
359 """Test Write() with Unix domain socket for |path| and validate received traces."""
360 received_traces = []
361 with tempfile.TemporaryDirectory(prefix='test_server_sockets') as tempdir:
362 socket_path = os.path.join(tempdir, "server.sock")
363 server_ready = threading.Condition()
364 # Start "server" listening on Unix domain socket at socket_path.
365 try:
366 server_thread = threading.Thread(
367 target=serverLoggingThread,
368 args=(socket_path, server_ready, received_traces))
369 server_thread.start()
370
371 with server_ready: 47 with server_ready:
372 server_ready.wait(timeout=120) 48 server_ready.notify()
49 with sock.accept()[0] as conn:
50 while True:
51 recved = conn.recv(4096)
52 if not recved:
53 break
54 data += recved
55 received_traces.extend(data.decode("utf-8").splitlines())
56
373 57
58class EventLogTestCase(unittest.TestCase):
59 """TestCase for the EventLog module."""
60
61 PARENT_SID_KEY = "GIT_TRACE2_PARENT_SID"
62 PARENT_SID_VALUE = "parent_sid"
63 SELF_SID_REGEX = r"repo-\d+T\d+Z-.*"
64 FULL_SID_REGEX = r"^%s/%s" % (PARENT_SID_VALUE, SELF_SID_REGEX)
65
66 def setUp(self):
67 """Load the event_log module every time."""
68 self._event_log_module = None
69 # By default we initialize with the expected case where
70 # repo launches us (so GIT_TRACE2_PARENT_SID is set).
71 env = {
72 self.PARENT_SID_KEY: self.PARENT_SID_VALUE,
73 }
74 self._event_log_module = git_trace2_event_log.EventLog(env=env)
75 self._log_data = None
76
77 def verifyCommonKeys(
78 self, log_entry, expected_event_name=None, full_sid=True
79 ):
80 """Helper function to verify common event log keys."""
81 self.assertIn("event", log_entry)
82 self.assertIn("sid", log_entry)
83 self.assertIn("thread", log_entry)
84 self.assertIn("time", log_entry)
85
86 # Do basic data format validation.
87 if expected_event_name:
88 self.assertEqual(expected_event_name, log_entry["event"])
89 if full_sid:
90 self.assertRegex(log_entry["sid"], self.FULL_SID_REGEX)
91 else:
92 self.assertRegex(log_entry["sid"], self.SELF_SID_REGEX)
93 self.assertRegex(log_entry["time"], r"^\d+-\d+-\d+T\d+:\d+:\d+\.\d+Z$")
94
95 def readLog(self, log_path):
96 """Helper function to read log data into a list."""
97 log_data = []
98 with open(log_path, mode="rb") as f:
99 for line in f:
100 log_data.append(json.loads(line))
101 return log_data
102
103 def remove_prefix(self, s, prefix):
104 """Return a copy string after removing |prefix| from |s|, if present or
105 the original string."""
106 if s.startswith(prefix):
107 return s[len(prefix) :]
108 else:
109 return s
110
111 def test_initial_state_with_parent_sid(self):
112 """Test initial state when 'GIT_TRACE2_PARENT_SID' is set by parent."""
113 self.assertRegex(self._event_log_module.full_sid, self.FULL_SID_REGEX)
114
115 def test_initial_state_no_parent_sid(self):
116 """Test initial state when 'GIT_TRACE2_PARENT_SID' is not set."""
117 # Setup an empty environment dict (no parent sid).
118 self._event_log_module = git_trace2_event_log.EventLog(env={})
119 self.assertRegex(self._event_log_module.full_sid, self.SELF_SID_REGEX)
120
121 def test_version_event(self):
122 """Test 'version' event data is valid.
123
124 Verify that the 'version' event is written even when no other
125 events are addded.
126
127 Expected event log:
128 <version event>
129 """
130 with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
131 log_path = self._event_log_module.Write(path=tempdir)
132 self._log_data = self.readLog(log_path)
133
134 # A log with no added events should only have the version entry.
135 self.assertEqual(len(self._log_data), 1)
136 version_event = self._log_data[0]
137 self.verifyCommonKeys(version_event, expected_event_name="version")
138 # Check for 'version' event specific fields.
139 self.assertIn("evt", version_event)
140 self.assertIn("exe", version_event)
141 # Verify "evt" version field is a string.
142 self.assertIsInstance(version_event["evt"], str)
143
144 def test_start_event(self):
145 """Test and validate 'start' event data is valid.
146
147 Expected event log:
148 <version event>
149 <start event>
150 """
374 self._event_log_module.StartEvent() 151 self._event_log_module.StartEvent()
375 path = self._event_log_module.Write(path=f'af_unix:{socket_path}') 152 with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
376 finally: 153 log_path = self._event_log_module.Write(path=tempdir)
377 server_thread.join(timeout=5) 154 self._log_data = self.readLog(log_path)
378 155
379 self.assertEqual(path, f'af_unix:stream:{socket_path}') 156 self.assertEqual(len(self._log_data), 2)
380 self.assertEqual(len(received_traces), 2) 157 start_event = self._log_data[1]
381 version_event = json.loads(received_traces[0]) 158 self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
382 start_event = json.loads(received_traces[1]) 159 self.verifyCommonKeys(start_event, expected_event_name="start")
383 self.verifyCommonKeys(version_event, expected_event_name='version') 160 # Check for 'start' event specific fields.
384 self.verifyCommonKeys(start_event, expected_event_name='start') 161 self.assertIn("argv", start_event)
385 # Check for 'start' event specific fields. 162 self.assertTrue(isinstance(start_event["argv"], list))
386 self.assertIn('argv', start_event) 163
387 self.assertIsInstance(start_event['argv'], list) 164 def test_exit_event_result_none(self):
165 """Test 'exit' event data is valid when result is None.
166
167 We expect None result to be converted to 0 in the exit event data.
168
169 Expected event log:
170 <version event>
171 <exit event>
172 """
173 self._event_log_module.ExitEvent(None)
174 with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
175 log_path = self._event_log_module.Write(path=tempdir)
176 self._log_data = self.readLog(log_path)
177
178 self.assertEqual(len(self._log_data), 2)
179 exit_event = self._log_data[1]
180 self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
181 self.verifyCommonKeys(exit_event, expected_event_name="exit")
182 # Check for 'exit' event specific fields.
183 self.assertIn("code", exit_event)
184 # 'None' result should convert to 0 (successful) return code.
185 self.assertEqual(exit_event["code"], 0)
186
187 def test_exit_event_result_integer(self):
188 """Test 'exit' event data is valid when result is an integer.
189
190 Expected event log:
191 <version event>
192 <exit event>
193 """
194 self._event_log_module.ExitEvent(2)
195 with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
196 log_path = self._event_log_module.Write(path=tempdir)
197 self._log_data = self.readLog(log_path)
198
199 self.assertEqual(len(self._log_data), 2)
200 exit_event = self._log_data[1]
201 self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
202 self.verifyCommonKeys(exit_event, expected_event_name="exit")
203 # Check for 'exit' event specific fields.
204 self.assertIn("code", exit_event)
205 self.assertEqual(exit_event["code"], 2)
206
207 def test_command_event(self):
208 """Test and validate 'command' event data is valid.
209
210 Expected event log:
211 <version event>
212 <command event>
213 """
214 name = "repo"
215 subcommands = ["init" "this"]
216 self._event_log_module.CommandEvent(
217 name="repo", subcommands=subcommands
218 )
219 with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
220 log_path = self._event_log_module.Write(path=tempdir)
221 self._log_data = self.readLog(log_path)
222
223 self.assertEqual(len(self._log_data), 2)
224 command_event = self._log_data[1]
225 self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
226 self.verifyCommonKeys(command_event, expected_event_name="command")
227 # Check for 'command' event specific fields.
228 self.assertIn("name", command_event)
229 self.assertIn("subcommands", command_event)
230 self.assertEqual(command_event["name"], name)
231 self.assertEqual(command_event["subcommands"], subcommands)
232
233 def test_def_params_event_repo_config(self):
234 """Test 'def_params' event data outputs only repo config keys.
235
236 Expected event log:
237 <version event>
238 <def_param event>
239 <def_param event>
240 """
241 config = {
242 "git.foo": "bar",
243 "repo.partialclone": "true",
244 "repo.partialclonefilter": "blob:none",
245 }
246 self._event_log_module.DefParamRepoEvents(config)
247
248 with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
249 log_path = self._event_log_module.Write(path=tempdir)
250 self._log_data = self.readLog(log_path)
251
252 self.assertEqual(len(self._log_data), 3)
253 def_param_events = self._log_data[1:]
254 self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
255
256 for event in def_param_events:
257 self.verifyCommonKeys(event, expected_event_name="def_param")
258 # Check for 'def_param' event specific fields.
259 self.assertIn("param", event)
260 self.assertIn("value", event)
261 self.assertTrue(event["param"].startswith("repo."))
262
263 def test_def_params_event_no_repo_config(self):
264 """Test 'def_params' event data won't output non-repo config keys.
265
266 Expected event log:
267 <version event>
268 """
269 config = {
270 "git.foo": "bar",
271 "git.core.foo2": "baz",
272 }
273 self._event_log_module.DefParamRepoEvents(config)
274
275 with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
276 log_path = self._event_log_module.Write(path=tempdir)
277 self._log_data = self.readLog(log_path)
278
279 self.assertEqual(len(self._log_data), 1)
280 self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
281
282 def test_data_event_config(self):
283 """Test 'data' event data outputs all config keys.
284
285 Expected event log:
286 <version event>
287 <data event>
288 <data event>
289 """
290 config = {
291 "git.foo": "bar",
292 "repo.partialclone": "false",
293 "repo.syncstate.superproject.hassuperprojecttag": "true",
294 "repo.syncstate.superproject.sys.argv": ["--", "sync", "protobuf"],
295 }
296 prefix_value = "prefix"
297 self._event_log_module.LogDataConfigEvents(config, prefix_value)
298
299 with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
300 log_path = self._event_log_module.Write(path=tempdir)
301 self._log_data = self.readLog(log_path)
302
303 self.assertEqual(len(self._log_data), 5)
304 data_events = self._log_data[1:]
305 self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
306
307 for event in data_events:
308 self.verifyCommonKeys(event)
309 # Check for 'data' event specific fields.
310 self.assertIn("key", event)
311 self.assertIn("value", event)
312 key = event["key"]
313 key = self.remove_prefix(key, f"{prefix_value}/")
314 value = event["value"]
315 self.assertEqual(
316 self._event_log_module.GetDataEventName(value), event["event"]
317 )
318 self.assertTrue(key in config and value == config[key])
319
320 def test_error_event(self):
321 """Test and validate 'error' event data is valid.
322
323 Expected event log:
324 <version event>
325 <error event>
326 """
327 msg = "invalid option: --cahced"
328 fmt = "invalid option: %s"
329 self._event_log_module.ErrorEvent(msg, fmt)
330 with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
331 log_path = self._event_log_module.Write(path=tempdir)
332 self._log_data = self.readLog(log_path)
333
334 self.assertEqual(len(self._log_data), 2)
335 error_event = self._log_data[1]
336 self.verifyCommonKeys(self._log_data[0], expected_event_name="version")
337 self.verifyCommonKeys(error_event, expected_event_name="error")
338 # Check for 'error' event specific fields.
339 self.assertIn("msg", error_event)
340 self.assertIn("fmt", error_event)
341 self.assertEqual(error_event["msg"], msg)
342 self.assertEqual(error_event["fmt"], fmt)
343
344 def test_write_with_filename(self):
345 """Test Write() with a path to a file exits with None."""
346 self.assertIsNone(self._event_log_module.Write(path="path/to/file"))
347
348 def test_write_with_git_config(self):
349 """Test Write() uses the git config path when 'git config' call
350 succeeds."""
351 with tempfile.TemporaryDirectory(prefix="event_log_tests") as tempdir:
352 with mock.patch.object(
353 self._event_log_module,
354 "_GetEventTargetPath",
355 return_value=tempdir,
356 ):
357 self.assertEqual(
358 os.path.dirname(self._event_log_module.Write()), tempdir
359 )
360
361 def test_write_no_git_config(self):
362 """Test Write() with no git config variable present exits with None."""
363 with mock.patch.object(
364 self._event_log_module, "_GetEventTargetPath", return_value=None
365 ):
366 self.assertIsNone(self._event_log_module.Write())
367
368 def test_write_non_string(self):
369 """Test Write() with non-string type for |path| throws TypeError."""
370 with self.assertRaises(TypeError):
371 self._event_log_module.Write(path=1234)
372
373 def test_write_socket(self):
374 """Test Write() with Unix domain socket for |path| and validate received
375 traces."""
376 received_traces = []
377 with tempfile.TemporaryDirectory(
378 prefix="test_server_sockets"
379 ) as tempdir:
380 socket_path = os.path.join(tempdir, "server.sock")
381 server_ready = threading.Condition()
382 # Start "server" listening on Unix domain socket at socket_path.
383 try:
384 server_thread = threading.Thread(
385 target=serverLoggingThread,
386 args=(socket_path, server_ready, received_traces),
387 )
388 server_thread.start()
389
390 with server_ready:
391 server_ready.wait(timeout=120)
392
393 self._event_log_module.StartEvent()
394 path = self._event_log_module.Write(
395 path=f"af_unix:{socket_path}"
396 )
397 finally:
398 server_thread.join(timeout=5)
399
400 self.assertEqual(path, f"af_unix:stream:{socket_path}")
401 self.assertEqual(len(received_traces), 2)
402 version_event = json.loads(received_traces[0])
403 start_event = json.loads(received_traces[1])
404 self.verifyCommonKeys(version_event, expected_event_name="version")
405 self.verifyCommonKeys(start_event, expected_event_name="start")
406 # Check for 'start' event specific fields.
407 self.assertIn("argv", start_event)
408 self.assertIsInstance(start_event["argv"], list)
diff --git a/tests/test_hooks.py b/tests/test_hooks.py
index 6632b3e5..78277128 100644
--- a/tests/test_hooks.py
+++ b/tests/test_hooks.py
@@ -17,39 +17,38 @@
17import hooks 17import hooks
18import unittest 18import unittest
19 19
20
20class RepoHookShebang(unittest.TestCase): 21class RepoHookShebang(unittest.TestCase):
21 """Check shebang parsing in RepoHook.""" 22 """Check shebang parsing in RepoHook."""
22 23
23 def test_no_shebang(self): 24 def test_no_shebang(self):
24 """Lines w/out shebangs should be rejected.""" 25 """Lines w/out shebangs should be rejected."""
25 DATA = ( 26 DATA = ("", "#\n# foo\n", "# Bad shebang in script\n#!/foo\n")
26 '', 27 for data in DATA:
27 '#\n# foo\n', 28 self.assertIsNone(hooks.RepoHook._ExtractInterpFromShebang(data))
28 '# Bad shebang in script\n#!/foo\n'
29 )
30 for data in DATA:
31 self.assertIsNone(hooks.RepoHook._ExtractInterpFromShebang(data))
32 29
33 def test_direct_interp(self): 30 def test_direct_interp(self):
34 """Lines whose shebang points directly to the interpreter.""" 31 """Lines whose shebang points directly to the interpreter."""
35 DATA = ( 32 DATA = (
36 ('#!/foo', '/foo'), 33 ("#!/foo", "/foo"),
37 ('#! /foo', '/foo'), 34 ("#! /foo", "/foo"),
38 ('#!/bin/foo ', '/bin/foo'), 35 ("#!/bin/foo ", "/bin/foo"),
39 ('#! /usr/foo ', '/usr/foo'), 36 ("#! /usr/foo ", "/usr/foo"),
40 ('#! /usr/foo -args', '/usr/foo'), 37 ("#! /usr/foo -args", "/usr/foo"),
41 ) 38 )
42 for shebang, interp in DATA: 39 for shebang, interp in DATA:
43 self.assertEqual(hooks.RepoHook._ExtractInterpFromShebang(shebang), 40 self.assertEqual(
44 interp) 41 hooks.RepoHook._ExtractInterpFromShebang(shebang), interp
42 )
45 43
46 def test_env_interp(self): 44 def test_env_interp(self):
47 """Lines whose shebang launches through `env`.""" 45 """Lines whose shebang launches through `env`."""
48 DATA = ( 46 DATA = (
49 ('#!/usr/bin/env foo', 'foo'), 47 ("#!/usr/bin/env foo", "foo"),
50 ('#!/bin/env foo', 'foo'), 48 ("#!/bin/env foo", "foo"),
51 ('#! /bin/env /bin/foo ', '/bin/foo'), 49 ("#! /bin/env /bin/foo ", "/bin/foo"),
52 ) 50 )
53 for shebang, interp in DATA: 51 for shebang, interp in DATA:
54 self.assertEqual(hooks.RepoHook._ExtractInterpFromShebang(shebang), 52 self.assertEqual(
55 interp) 53 hooks.RepoHook._ExtractInterpFromShebang(shebang), interp
54 )
diff --git a/tests/test_manifest_xml.py b/tests/test_manifest_xml.py
index 3634701f..648acde8 100644
--- a/tests/test_manifest_xml.py
+++ b/tests/test_manifest_xml.py
@@ -27,291 +27,318 @@ import manifest_xml
27 27
28# Invalid paths that we don't want in the filesystem. 28# Invalid paths that we don't want in the filesystem.
29INVALID_FS_PATHS = ( 29INVALID_FS_PATHS = (
30 '', 30 "",
31 '.', 31 ".",
32 '..', 32 "..",
33 '../', 33 "../",
34 './', 34 "./",
35 './/', 35 ".//",
36 'foo/', 36 "foo/",
37 './foo', 37 "./foo",
38 '../foo', 38 "../foo",
39 'foo/./bar', 39 "foo/./bar",
40 'foo/../../bar', 40 "foo/../../bar",
41 '/foo', 41 "/foo",
42 './../foo', 42 "./../foo",
43 '.git/foo', 43 ".git/foo",
44 # Check case folding. 44 # Check case folding.
45 '.GIT/foo', 45 ".GIT/foo",
46 'blah/.git/foo', 46 "blah/.git/foo",
47 '.repo/foo', 47 ".repo/foo",
48 '.repoconfig', 48 ".repoconfig",
49 # Block ~ due to 8.3 filenames on Windows filesystems. 49 # Block ~ due to 8.3 filenames on Windows filesystems.
50 '~', 50 "~",
51 'foo~', 51 "foo~",
52 'blah/foo~', 52 "blah/foo~",
53 # Block Unicode characters that get normalized out by filesystems. 53 # Block Unicode characters that get normalized out by filesystems.
54 u'foo\u200Cbar', 54 "foo\u200Cbar",
55 # Block newlines. 55 # Block newlines.
56 'f\n/bar', 56 "f\n/bar",
57 'f\r/bar', 57 "f\r/bar",
58) 58)
59 59
60# Make sure platforms that use path separators (e.g. Windows) are also 60# Make sure platforms that use path separators (e.g. Windows) are also
61# rejected properly. 61# rejected properly.
62if os.path.sep != '/': 62if os.path.sep != "/":
63 INVALID_FS_PATHS += tuple(x.replace('/', os.path.sep) for x in INVALID_FS_PATHS) 63 INVALID_FS_PATHS += tuple(
64 x.replace("/", os.path.sep) for x in INVALID_FS_PATHS
65 )
64 66
65 67
66def sort_attributes(manifest): 68def sort_attributes(manifest):
67 """Sort the attributes of all elements alphabetically. 69 """Sort the attributes of all elements alphabetically.
68 70
69 This is needed because different versions of the toxml() function from 71 This is needed because different versions of the toxml() function from
70 xml.dom.minidom outputs the attributes of elements in different orders. 72 xml.dom.minidom outputs the attributes of elements in different orders.
71 Before Python 3.8 they were output alphabetically, later versions preserve 73 Before Python 3.8 they were output alphabetically, later versions preserve
72 the order specified by the user. 74 the order specified by the user.
73 75
74 Args: 76 Args:
75 manifest: String containing an XML manifest. 77 manifest: String containing an XML manifest.
76 78
77 Returns: 79 Returns:
78 The XML manifest with the attributes of all elements sorted alphabetically. 80 The XML manifest with the attributes of all elements sorted
79 """ 81 alphabetically.
80 new_manifest = '' 82 """
81 # This will find every element in the XML manifest, whether they have 83 new_manifest = ""
82 # attributes or not. This simplifies recreating the manifest below. 84 # This will find every element in the XML manifest, whether they have
83 matches = re.findall(r'(<[/?]?[a-z-]+\s*)((?:\S+?="[^"]+"\s*?)*)(\s*[/?]?>)', manifest) 85 # attributes or not. This simplifies recreating the manifest below.
84 for head, attrs, tail in matches: 86 matches = re.findall(
85 m = re.findall(r'\S+?="[^"]+"', attrs) 87 r'(<[/?]?[a-z-]+\s*)((?:\S+?="[^"]+"\s*?)*)(\s*[/?]?>)', manifest
86 new_manifest += head + ' '.join(sorted(m)) + tail 88 )
87 return new_manifest 89 for head, attrs, tail in matches:
90 m = re.findall(r'\S+?="[^"]+"', attrs)
91 new_manifest += head + " ".join(sorted(m)) + tail
92 return new_manifest
88 93
89 94
90class ManifestParseTestCase(unittest.TestCase): 95class ManifestParseTestCase(unittest.TestCase):
91 """TestCase for parsing manifests.""" 96 """TestCase for parsing manifests."""
92 97
93 def setUp(self): 98 def setUp(self):
94 self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests') 99 self.tempdirobj = tempfile.TemporaryDirectory(prefix="repo_tests")
95 self.tempdir = self.tempdirobj.name 100 self.tempdir = self.tempdirobj.name
96 self.repodir = os.path.join(self.tempdir, '.repo') 101 self.repodir = os.path.join(self.tempdir, ".repo")
97 self.manifest_dir = os.path.join(self.repodir, 'manifests') 102 self.manifest_dir = os.path.join(self.repodir, "manifests")
98 self.manifest_file = os.path.join( 103 self.manifest_file = os.path.join(
99 self.repodir, manifest_xml.MANIFEST_FILE_NAME) 104 self.repodir, manifest_xml.MANIFEST_FILE_NAME
100 self.local_manifest_dir = os.path.join( 105 )
101 self.repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME) 106 self.local_manifest_dir = os.path.join(
102 os.mkdir(self.repodir) 107 self.repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME
103 os.mkdir(self.manifest_dir) 108 )
104 109 os.mkdir(self.repodir)
105 # The manifest parsing really wants a git repo currently. 110 os.mkdir(self.manifest_dir)
106 gitdir = os.path.join(self.repodir, 'manifests.git') 111
107 os.mkdir(gitdir) 112 # The manifest parsing really wants a git repo currently.
108 with open(os.path.join(gitdir, 'config'), 'w') as fp: 113 gitdir = os.path.join(self.repodir, "manifests.git")
109 fp.write("""[remote "origin"] 114 os.mkdir(gitdir)
115 with open(os.path.join(gitdir, "config"), "w") as fp:
116 fp.write(
117 """[remote "origin"]
110 url = https://localhost:0/manifest 118 url = https://localhost:0/manifest
111""") 119"""
120 )
112 121
113 def tearDown(self): 122 def tearDown(self):
114 self.tempdirobj.cleanup() 123 self.tempdirobj.cleanup()
115 124
116 def getXmlManifest(self, data): 125 def getXmlManifest(self, data):
117 """Helper to initialize a manifest for testing.""" 126 """Helper to initialize a manifest for testing."""
118 with open(self.manifest_file, 'w', encoding="utf-8") as fp: 127 with open(self.manifest_file, "w", encoding="utf-8") as fp:
119 fp.write(data) 128 fp.write(data)
120 return manifest_xml.XmlManifest(self.repodir, self.manifest_file) 129 return manifest_xml.XmlManifest(self.repodir, self.manifest_file)
121 130
122 @staticmethod 131 @staticmethod
123 def encodeXmlAttr(attr): 132 def encodeXmlAttr(attr):
124 """Encode |attr| using XML escape rules.""" 133 """Encode |attr| using XML escape rules."""
125 return attr.replace('\r', '&#x000d;').replace('\n', '&#x000a;') 134 return attr.replace("\r", "&#x000d;").replace("\n", "&#x000a;")
126 135
127 136
128class ManifestValidateFilePaths(unittest.TestCase): 137class ManifestValidateFilePaths(unittest.TestCase):
129 """Check _ValidateFilePaths helper. 138 """Check _ValidateFilePaths helper.
130 139
131 This doesn't access a real filesystem. 140 This doesn't access a real filesystem.
132 """ 141 """
133 142
134 def check_both(self, *args): 143 def check_both(self, *args):
135 manifest_xml.XmlManifest._ValidateFilePaths('copyfile', *args) 144 manifest_xml.XmlManifest._ValidateFilePaths("copyfile", *args)
136 manifest_xml.XmlManifest._ValidateFilePaths('linkfile', *args) 145 manifest_xml.XmlManifest._ValidateFilePaths("linkfile", *args)
137 146
138 def test_normal_path(self): 147 def test_normal_path(self):
139 """Make sure good paths are accepted.""" 148 """Make sure good paths are accepted."""
140 self.check_both('foo', 'bar') 149 self.check_both("foo", "bar")
141 self.check_both('foo/bar', 'bar') 150 self.check_both("foo/bar", "bar")
142 self.check_both('foo', 'bar/bar') 151 self.check_both("foo", "bar/bar")
143 self.check_both('foo/bar', 'bar/bar') 152 self.check_both("foo/bar", "bar/bar")
144 153
145 def test_symlink_targets(self): 154 def test_symlink_targets(self):
146 """Some extra checks for symlinks.""" 155 """Some extra checks for symlinks."""
147 def check(*args): 156
148 manifest_xml.XmlManifest._ValidateFilePaths('linkfile', *args) 157 def check(*args):
149 158 manifest_xml.XmlManifest._ValidateFilePaths("linkfile", *args)
150 # We allow symlinks to end in a slash since we allow them to point to dirs 159
151 # in general. Technically the slash isn't necessary. 160 # We allow symlinks to end in a slash since we allow them to point to
152 check('foo/', 'bar') 161 # dirs in general. Technically the slash isn't necessary.
153 # We allow a single '.' to get a reference to the project itself. 162 check("foo/", "bar")
154 check('.', 'bar') 163 # We allow a single '.' to get a reference to the project itself.
155 164 check(".", "bar")
156 def test_bad_paths(self): 165
157 """Make sure bad paths (src & dest) are rejected.""" 166 def test_bad_paths(self):
158 for path in INVALID_FS_PATHS: 167 """Make sure bad paths (src & dest) are rejected."""
159 self.assertRaises( 168 for path in INVALID_FS_PATHS:
160 error.ManifestInvalidPathError, self.check_both, path, 'a') 169 self.assertRaises(
161 self.assertRaises( 170 error.ManifestInvalidPathError, self.check_both, path, "a"
162 error.ManifestInvalidPathError, self.check_both, 'a', path) 171 )
172 self.assertRaises(
173 error.ManifestInvalidPathError, self.check_both, "a", path
174 )
163 175
164 176
165class ValueTests(unittest.TestCase): 177class ValueTests(unittest.TestCase):
166 """Check utility parsing code.""" 178 """Check utility parsing code."""
167 179
168 def _get_node(self, text): 180 def _get_node(self, text):
169 return xml.dom.minidom.parseString(text).firstChild 181 return xml.dom.minidom.parseString(text).firstChild
170 182
171 def test_bool_default(self): 183 def test_bool_default(self):
172 """Check XmlBool default handling.""" 184 """Check XmlBool default handling."""
173 node = self._get_node('<node/>') 185 node = self._get_node("<node/>")
174 self.assertIsNone(manifest_xml.XmlBool(node, 'a')) 186 self.assertIsNone(manifest_xml.XmlBool(node, "a"))
175 self.assertIsNone(manifest_xml.XmlBool(node, 'a', None)) 187 self.assertIsNone(manifest_xml.XmlBool(node, "a", None))
176 self.assertEqual(123, manifest_xml.XmlBool(node, 'a', 123)) 188 self.assertEqual(123, manifest_xml.XmlBool(node, "a", 123))
177 189
178 node = self._get_node('<node a=""/>') 190 node = self._get_node('<node a=""/>')
179 self.assertIsNone(manifest_xml.XmlBool(node, 'a')) 191 self.assertIsNone(manifest_xml.XmlBool(node, "a"))
180 192
181 def test_bool_invalid(self): 193 def test_bool_invalid(self):
182 """Check XmlBool invalid handling.""" 194 """Check XmlBool invalid handling."""
183 node = self._get_node('<node a="moo"/>') 195 node = self._get_node('<node a="moo"/>')
184 self.assertEqual(123, manifest_xml.XmlBool(node, 'a', 123)) 196 self.assertEqual(123, manifest_xml.XmlBool(node, "a", 123))
185 197
186 def test_bool_true(self): 198 def test_bool_true(self):
187 """Check XmlBool true values.""" 199 """Check XmlBool true values."""
188 for value in ('yes', 'true', '1'): 200 for value in ("yes", "true", "1"):
189 node = self._get_node('<node a="%s"/>' % (value,)) 201 node = self._get_node('<node a="%s"/>' % (value,))
190 self.assertTrue(manifest_xml.XmlBool(node, 'a')) 202 self.assertTrue(manifest_xml.XmlBool(node, "a"))
191 203
192 def test_bool_false(self): 204 def test_bool_false(self):
193 """Check XmlBool false values.""" 205 """Check XmlBool false values."""
194 for value in ('no', 'false', '0'): 206 for value in ("no", "false", "0"):
195 node = self._get_node('<node a="%s"/>' % (value,)) 207 node = self._get_node('<node a="%s"/>' % (value,))
196 self.assertFalse(manifest_xml.XmlBool(node, 'a')) 208 self.assertFalse(manifest_xml.XmlBool(node, "a"))
197 209
198 def test_int_default(self): 210 def test_int_default(self):
199 """Check XmlInt default handling.""" 211 """Check XmlInt default handling."""
200 node = self._get_node('<node/>') 212 node = self._get_node("<node/>")
201 self.assertIsNone(manifest_xml.XmlInt(node, 'a')) 213 self.assertIsNone(manifest_xml.XmlInt(node, "a"))
202 self.assertIsNone(manifest_xml.XmlInt(node, 'a', None)) 214 self.assertIsNone(manifest_xml.XmlInt(node, "a", None))
203 self.assertEqual(123, manifest_xml.XmlInt(node, 'a', 123)) 215 self.assertEqual(123, manifest_xml.XmlInt(node, "a", 123))
204 216
205 node = self._get_node('<node a=""/>') 217 node = self._get_node('<node a=""/>')
206 self.assertIsNone(manifest_xml.XmlInt(node, 'a')) 218 self.assertIsNone(manifest_xml.XmlInt(node, "a"))
207 219
208 def test_int_good(self): 220 def test_int_good(self):
209 """Check XmlInt numeric handling.""" 221 """Check XmlInt numeric handling."""
210 for value in (-1, 0, 1, 50000): 222 for value in (-1, 0, 1, 50000):
211 node = self._get_node('<node a="%s"/>' % (value,)) 223 node = self._get_node('<node a="%s"/>' % (value,))
212 self.assertEqual(value, manifest_xml.XmlInt(node, 'a')) 224 self.assertEqual(value, manifest_xml.XmlInt(node, "a"))
213 225
214 def test_int_invalid(self): 226 def test_int_invalid(self):
215 """Check XmlInt invalid handling.""" 227 """Check XmlInt invalid handling."""
216 with self.assertRaises(error.ManifestParseError): 228 with self.assertRaises(error.ManifestParseError):
217 node = self._get_node('<node a="xx"/>') 229 node = self._get_node('<node a="xx"/>')
218 manifest_xml.XmlInt(node, 'a') 230 manifest_xml.XmlInt(node, "a")
219 231
220 232
221class XmlManifestTests(ManifestParseTestCase): 233class XmlManifestTests(ManifestParseTestCase):
222 """Check manifest processing.""" 234 """Check manifest processing."""
223 235
224 def test_empty(self): 236 def test_empty(self):
225 """Parse an 'empty' manifest file.""" 237 """Parse an 'empty' manifest file."""
226 manifest = self.getXmlManifest( 238 manifest = self.getXmlManifest(
227 '<?xml version="1.0" encoding="UTF-8"?>' 239 '<?xml version="1.0" encoding="UTF-8"?>' "<manifest></manifest>"
228 '<manifest></manifest>') 240 )
229 self.assertEqual(manifest.remotes, {}) 241 self.assertEqual(manifest.remotes, {})
230 self.assertEqual(manifest.projects, []) 242 self.assertEqual(manifest.projects, [])
231 243
232 def test_link(self): 244 def test_link(self):
233 """Verify Link handling with new names.""" 245 """Verify Link handling with new names."""
234 manifest = manifest_xml.XmlManifest(self.repodir, self.manifest_file) 246 manifest = manifest_xml.XmlManifest(self.repodir, self.manifest_file)
235 with open(os.path.join(self.manifest_dir, 'foo.xml'), 'w') as fp: 247 with open(os.path.join(self.manifest_dir, "foo.xml"), "w") as fp:
236 fp.write('<manifest></manifest>') 248 fp.write("<manifest></manifest>")
237 manifest.Link('foo.xml') 249 manifest.Link("foo.xml")
238 with open(self.manifest_file) as fp: 250 with open(self.manifest_file) as fp:
239 self.assertIn('<include name="foo.xml" />', fp.read()) 251 self.assertIn('<include name="foo.xml" />', fp.read())
240 252
241 def test_toxml_empty(self): 253 def test_toxml_empty(self):
242 """Verify the ToXml() helper.""" 254 """Verify the ToXml() helper."""
243 manifest = self.getXmlManifest( 255 manifest = self.getXmlManifest(
244 '<?xml version="1.0" encoding="UTF-8"?>' 256 '<?xml version="1.0" encoding="UTF-8"?>' "<manifest></manifest>"
245 '<manifest></manifest>') 257 )
246 self.assertEqual(manifest.ToXml().toxml(), '<?xml version="1.0" ?><manifest/>') 258 self.assertEqual(
247 259 manifest.ToXml().toxml(), '<?xml version="1.0" ?><manifest/>'
248 def test_todict_empty(self): 260 )
249 """Verify the ToDict() helper.""" 261
250 manifest = self.getXmlManifest( 262 def test_todict_empty(self):
251 '<?xml version="1.0" encoding="UTF-8"?>' 263 """Verify the ToDict() helper."""
252 '<manifest></manifest>') 264 manifest = self.getXmlManifest(
253 self.assertEqual(manifest.ToDict(), {}) 265 '<?xml version="1.0" encoding="UTF-8"?>' "<manifest></manifest>"
254 266 )
255 def test_toxml_omit_local(self): 267 self.assertEqual(manifest.ToDict(), {})
256 """Does not include local_manifests projects when omit_local=True.""" 268
257 manifest = self.getXmlManifest( 269 def test_toxml_omit_local(self):
258 '<?xml version="1.0" encoding="UTF-8"?><manifest>' 270 """Does not include local_manifests projects when omit_local=True."""
259 '<remote name="a" fetch=".."/><default remote="a" revision="r"/>' 271 manifest = self.getXmlManifest(
260 '<project name="p" groups="local::me"/>' 272 '<?xml version="1.0" encoding="UTF-8"?><manifest>'
261 '<project name="q"/>' 273 '<remote name="a" fetch=".."/><default remote="a" revision="r"/>'
262 '<project name="r" groups="keep"/>' 274 '<project name="p" groups="local::me"/>'
263 '</manifest>') 275 '<project name="q"/>'
264 self.assertEqual( 276 '<project name="r" groups="keep"/>'
265 sort_attributes(manifest.ToXml(omit_local=True).toxml()), 277 "</manifest>"
266 '<?xml version="1.0" ?><manifest>' 278 )
267 '<remote fetch=".." name="a"/><default remote="a" revision="r"/>' 279 self.assertEqual(
268 '<project name="q"/><project groups="keep" name="r"/></manifest>') 280 sort_attributes(manifest.ToXml(omit_local=True).toxml()),
269 281 '<?xml version="1.0" ?><manifest>'
270 def test_toxml_with_local(self): 282 '<remote fetch=".." name="a"/><default remote="a" revision="r"/>'
271 """Does include local_manifests projects when omit_local=False.""" 283 '<project name="q"/><project groups="keep" name="r"/></manifest>',
272 manifest = self.getXmlManifest( 284 )
273 '<?xml version="1.0" encoding="UTF-8"?><manifest>' 285
274 '<remote name="a" fetch=".."/><default remote="a" revision="r"/>' 286 def test_toxml_with_local(self):
275 '<project name="p" groups="local::me"/>' 287 """Does include local_manifests projects when omit_local=False."""
276 '<project name="q"/>' 288 manifest = self.getXmlManifest(
277 '<project name="r" groups="keep"/>' 289 '<?xml version="1.0" encoding="UTF-8"?><manifest>'
278 '</manifest>') 290 '<remote name="a" fetch=".."/><default remote="a" revision="r"/>'
279 self.assertEqual( 291 '<project name="p" groups="local::me"/>'
280 sort_attributes(manifest.ToXml(omit_local=False).toxml()), 292 '<project name="q"/>'
281 '<?xml version="1.0" ?><manifest>' 293 '<project name="r" groups="keep"/>'
282 '<remote fetch=".." name="a"/><default remote="a" revision="r"/>' 294 "</manifest>"
283 '<project groups="local::me" name="p"/>' 295 )
284 '<project name="q"/><project groups="keep" name="r"/></manifest>') 296 self.assertEqual(
285 297 sort_attributes(manifest.ToXml(omit_local=False).toxml()),
286 def test_repo_hooks(self): 298 '<?xml version="1.0" ?><manifest>'
287 """Check repo-hooks settings.""" 299 '<remote fetch=".." name="a"/><default remote="a" revision="r"/>'
288 manifest = self.getXmlManifest(""" 300 '<project groups="local::me" name="p"/>'
301 '<project name="q"/><project groups="keep" name="r"/></manifest>',
302 )
303
304 def test_repo_hooks(self):
305 """Check repo-hooks settings."""
306 manifest = self.getXmlManifest(
307 """
289<manifest> 308<manifest>
290 <remote name="test-remote" fetch="http://localhost" /> 309 <remote name="test-remote" fetch="http://localhost" />
291 <default remote="test-remote" revision="refs/heads/main" /> 310 <default remote="test-remote" revision="refs/heads/main" />
292 <project name="repohooks" path="src/repohooks"/> 311 <project name="repohooks" path="src/repohooks"/>
293 <repo-hooks in-project="repohooks" enabled-list="a, b"/> 312 <repo-hooks in-project="repohooks" enabled-list="a, b"/>
294</manifest> 313</manifest>
295""") 314"""
296 self.assertEqual(manifest.repo_hooks_project.name, 'repohooks') 315 )
297 self.assertEqual(manifest.repo_hooks_project.enabled_repo_hooks, ['a', 'b']) 316 self.assertEqual(manifest.repo_hooks_project.name, "repohooks")
298 317 self.assertEqual(
299 def test_repo_hooks_unordered(self): 318 manifest.repo_hooks_project.enabled_repo_hooks, ["a", "b"]
300 """Check repo-hooks settings work even if the project def comes second.""" 319 )
301 manifest = self.getXmlManifest(""" 320
321 def test_repo_hooks_unordered(self):
322 """Check repo-hooks settings work even if the project def comes second.""" # noqa: E501
323 manifest = self.getXmlManifest(
324 """
302<manifest> 325<manifest>
303 <remote name="test-remote" fetch="http://localhost" /> 326 <remote name="test-remote" fetch="http://localhost" />
304 <default remote="test-remote" revision="refs/heads/main" /> 327 <default remote="test-remote" revision="refs/heads/main" />
305 <repo-hooks in-project="repohooks" enabled-list="a, b"/> 328 <repo-hooks in-project="repohooks" enabled-list="a, b"/>
306 <project name="repohooks" path="src/repohooks"/> 329 <project name="repohooks" path="src/repohooks"/>
307</manifest> 330</manifest>
308""") 331"""
309 self.assertEqual(manifest.repo_hooks_project.name, 'repohooks') 332 )
310 self.assertEqual(manifest.repo_hooks_project.enabled_repo_hooks, ['a', 'b']) 333 self.assertEqual(manifest.repo_hooks_project.name, "repohooks")
311 334 self.assertEqual(
312 def test_unknown_tags(self): 335 manifest.repo_hooks_project.enabled_repo_hooks, ["a", "b"]
313 """Check superproject settings.""" 336 )
314 manifest = self.getXmlManifest(""" 337
338 def test_unknown_tags(self):
339 """Check superproject settings."""
340 manifest = self.getXmlManifest(
341 """
315<manifest> 342<manifest>
316 <remote name="test-remote" fetch="http://localhost" /> 343 <remote name="test-remote" fetch="http://localhost" />
317 <default remote="test-remote" revision="refs/heads/main" /> 344 <default remote="test-remote" revision="refs/heads/main" />
@@ -319,44 +346,54 @@ class XmlManifestTests(ManifestParseTestCase):
319 <iankaz value="unknown (possible) future tags are ignored"/> 346 <iankaz value="unknown (possible) future tags are ignored"/>
320 <x-custom-tag>X tags are always ignored</x-custom-tag> 347 <x-custom-tag>X tags are always ignored</x-custom-tag>
321</manifest> 348</manifest>
322""") 349"""
323 self.assertEqual(manifest.superproject.name, 'superproject') 350 )
324 self.assertEqual(manifest.superproject.remote.name, 'test-remote') 351 self.assertEqual(manifest.superproject.name, "superproject")
325 self.assertEqual( 352 self.assertEqual(manifest.superproject.remote.name, "test-remote")
326 sort_attributes(manifest.ToXml().toxml()), 353 self.assertEqual(
327 '<?xml version="1.0" ?><manifest>' 354 sort_attributes(manifest.ToXml().toxml()),
328 '<remote fetch="http://localhost" name="test-remote"/>' 355 '<?xml version="1.0" ?><manifest>'
329 '<default remote="test-remote" revision="refs/heads/main"/>' 356 '<remote fetch="http://localhost" name="test-remote"/>'
330 '<superproject name="superproject"/>' 357 '<default remote="test-remote" revision="refs/heads/main"/>'
331 '</manifest>') 358 '<superproject name="superproject"/>'
332 359 "</manifest>",
333 def test_remote_annotations(self): 360 )
334 """Check remote settings.""" 361
335 manifest = self.getXmlManifest(""" 362 def test_remote_annotations(self):
363 """Check remote settings."""
364 manifest = self.getXmlManifest(
365 """
336<manifest> 366<manifest>
337 <remote name="test-remote" fetch="http://localhost"> 367 <remote name="test-remote" fetch="http://localhost">
338 <annotation name="foo" value="bar"/> 368 <annotation name="foo" value="bar"/>
339 </remote> 369 </remote>
340</manifest> 370</manifest>
341""") 371"""
342 self.assertEqual(manifest.remotes['test-remote'].annotations[0].name, 'foo') 372 )
343 self.assertEqual(manifest.remotes['test-remote'].annotations[0].value, 'bar') 373 self.assertEqual(
344 self.assertEqual( 374 manifest.remotes["test-remote"].annotations[0].name, "foo"
345 sort_attributes(manifest.ToXml().toxml()), 375 )
346 '<?xml version="1.0" ?><manifest>' 376 self.assertEqual(
347 '<remote fetch="http://localhost" name="test-remote">' 377 manifest.remotes["test-remote"].annotations[0].value, "bar"
348 '<annotation name="foo" value="bar"/>' 378 )
349 '</remote>' 379 self.assertEqual(
350 '</manifest>') 380 sort_attributes(manifest.ToXml().toxml()),
381 '<?xml version="1.0" ?><manifest>'
382 '<remote fetch="http://localhost" name="test-remote">'
383 '<annotation name="foo" value="bar"/>'
384 "</remote>"
385 "</manifest>",
386 )
351 387
352 388
353class IncludeElementTests(ManifestParseTestCase): 389class IncludeElementTests(ManifestParseTestCase):
354 """Tests for <include>.""" 390 """Tests for <include>."""
355 391
356 def test_group_levels(self): 392 def test_group_levels(self):
357 root_m = os.path.join(self.manifest_dir, 'root.xml') 393 root_m = os.path.join(self.manifest_dir, "root.xml")
358 with open(root_m, 'w') as fp: 394 with open(root_m, "w") as fp:
359 fp.write(""" 395 fp.write(
396 """
360<manifest> 397<manifest>
361 <remote name="test-remote" fetch="http://localhost" /> 398 <remote name="test-remote" fetch="http://localhost" />
362 <default remote="test-remote" revision="refs/heads/main" /> 399 <default remote="test-remote" revision="refs/heads/main" />
@@ -364,438 +401,524 @@ class IncludeElementTests(ManifestParseTestCase):
364 <project name="root-name1" path="root-path1" /> 401 <project name="root-name1" path="root-path1" />
365 <project name="root-name2" path="root-path2" groups="r2g1,r2g2" /> 402 <project name="root-name2" path="root-path2" groups="r2g1,r2g2" />
366</manifest> 403</manifest>
367""") 404"""
368 with open(os.path.join(self.manifest_dir, 'level1.xml'), 'w') as fp: 405 )
369 fp.write(""" 406 with open(os.path.join(self.manifest_dir, "level1.xml"), "w") as fp:
407 fp.write(
408 """
370<manifest> 409<manifest>
371 <include name="level2.xml" groups="level2-group" /> 410 <include name="level2.xml" groups="level2-group" />
372 <project name="level1-name1" path="level1-path1" /> 411 <project name="level1-name1" path="level1-path1" />
373</manifest> 412</manifest>
374""") 413"""
375 with open(os.path.join(self.manifest_dir, 'level2.xml'), 'w') as fp: 414 )
376 fp.write(""" 415 with open(os.path.join(self.manifest_dir, "level2.xml"), "w") as fp:
416 fp.write(
417 """
377<manifest> 418<manifest>
378 <project name="level2-name1" path="level2-path1" groups="l2g1,l2g2" /> 419 <project name="level2-name1" path="level2-path1" groups="l2g1,l2g2" />
379</manifest> 420</manifest>
380""") 421"""
381 include_m = manifest_xml.XmlManifest(self.repodir, root_m) 422 )
382 for proj in include_m.projects: 423 include_m = manifest_xml.XmlManifest(self.repodir, root_m)
383 if proj.name == 'root-name1': 424 for proj in include_m.projects:
384 # Check include group not set on root level proj. 425 if proj.name == "root-name1":
385 self.assertNotIn('level1-group', proj.groups) 426 # Check include group not set on root level proj.
386 if proj.name == 'root-name2': 427 self.assertNotIn("level1-group", proj.groups)
387 # Check root proj group not removed. 428 if proj.name == "root-name2":
388 self.assertIn('r2g1', proj.groups) 429 # Check root proj group not removed.
389 if proj.name == 'level1-name1': 430 self.assertIn("r2g1", proj.groups)
390 # Check level1 proj has inherited group level 1. 431 if proj.name == "level1-name1":
391 self.assertIn('level1-group', proj.groups) 432 # Check level1 proj has inherited group level 1.
392 if proj.name == 'level2-name1': 433 self.assertIn("level1-group", proj.groups)
393 # Check level2 proj has inherited group levels 1 and 2. 434 if proj.name == "level2-name1":
394 self.assertIn('level1-group', proj.groups) 435 # Check level2 proj has inherited group levels 1 and 2.
395 self.assertIn('level2-group', proj.groups) 436 self.assertIn("level1-group", proj.groups)
396 # Check level2 proj group not removed. 437 self.assertIn("level2-group", proj.groups)
397 self.assertIn('l2g1', proj.groups) 438 # Check level2 proj group not removed.
398 439 self.assertIn("l2g1", proj.groups)
399 def test_allow_bad_name_from_user(self): 440
400 """Check handling of bad name attribute from the user's input.""" 441 def test_allow_bad_name_from_user(self):
401 def parse(name): 442 """Check handling of bad name attribute from the user's input."""
402 name = self.encodeXmlAttr(name) 443
403 manifest = self.getXmlManifest(f""" 444 def parse(name):
445 name = self.encodeXmlAttr(name)
446 manifest = self.getXmlManifest(
447 f"""
404<manifest> 448<manifest>
405 <remote name="default-remote" fetch="http://localhost" /> 449 <remote name="default-remote" fetch="http://localhost" />
406 <default remote="default-remote" revision="refs/heads/main" /> 450 <default remote="default-remote" revision="refs/heads/main" />
407 <include name="{name}" /> 451 <include name="{name}" />
408</manifest> 452</manifest>
409""") 453"""
410 # Force the manifest to be parsed. 454 )
411 manifest.ToXml() 455 # Force the manifest to be parsed.
412 456 manifest.ToXml()
413 # Setup target of the include. 457
414 target = os.path.join(self.tempdir, 'target.xml') 458 # Setup target of the include.
415 with open(target, 'w') as fp: 459 target = os.path.join(self.tempdir, "target.xml")
416 fp.write('<manifest></manifest>') 460 with open(target, "w") as fp:
417 461 fp.write("<manifest></manifest>")
418 # Include with absolute path. 462
419 parse(os.path.abspath(target)) 463 # Include with absolute path.
420 464 parse(os.path.abspath(target))
421 # Include with relative path. 465
422 parse(os.path.relpath(target, self.manifest_dir)) 466 # Include with relative path.
423 467 parse(os.path.relpath(target, self.manifest_dir))
424 def test_bad_name_checks(self): 468
425 """Check handling of bad name attribute.""" 469 def test_bad_name_checks(self):
426 def parse(name): 470 """Check handling of bad name attribute."""
427 name = self.encodeXmlAttr(name) 471
428 # Setup target of the include. 472 def parse(name):
429 with open(os.path.join(self.manifest_dir, 'target.xml'), 'w', encoding="utf-8") as fp: 473 name = self.encodeXmlAttr(name)
430 fp.write(f'<manifest><include name="{name}"/></manifest>') 474 # Setup target of the include.
431 475 with open(
432 manifest = self.getXmlManifest(""" 476 os.path.join(self.manifest_dir, "target.xml"),
477 "w",
478 encoding="utf-8",
479 ) as fp:
480 fp.write(f'<manifest><include name="{name}"/></manifest>')
481
482 manifest = self.getXmlManifest(
483 """
433<manifest> 484<manifest>
434 <remote name="default-remote" fetch="http://localhost" /> 485 <remote name="default-remote" fetch="http://localhost" />
435 <default remote="default-remote" revision="refs/heads/main" /> 486 <default remote="default-remote" revision="refs/heads/main" />
436 <include name="target.xml" /> 487 <include name="target.xml" />
437</manifest> 488</manifest>
438""") 489"""
439 # Force the manifest to be parsed. 490 )
440 manifest.ToXml() 491 # Force the manifest to be parsed.
492 manifest.ToXml()
441 493
442 # Handle empty name explicitly because a different codepath rejects it. 494 # Handle empty name explicitly because a different codepath rejects it.
443 with self.assertRaises(error.ManifestParseError): 495 with self.assertRaises(error.ManifestParseError):
444 parse('') 496 parse("")
445 497
446 for path in INVALID_FS_PATHS: 498 for path in INVALID_FS_PATHS:
447 if not path: 499 if not path:
448 continue 500 continue
449 501
450 with self.assertRaises(error.ManifestInvalidPathError): 502 with self.assertRaises(error.ManifestInvalidPathError):
451 parse(path) 503 parse(path)
452 504
453 505
454class ProjectElementTests(ManifestParseTestCase): 506class ProjectElementTests(ManifestParseTestCase):
455 """Tests for <project>.""" 507 """Tests for <project>."""
456 508
457 def test_group(self): 509 def test_group(self):
458 """Check project group settings.""" 510 """Check project group settings."""
459 manifest = self.getXmlManifest(""" 511 manifest = self.getXmlManifest(
512 """
460<manifest> 513<manifest>
461 <remote name="test-remote" fetch="http://localhost" /> 514 <remote name="test-remote" fetch="http://localhost" />
462 <default remote="test-remote" revision="refs/heads/main" /> 515 <default remote="test-remote" revision="refs/heads/main" />
463 <project name="test-name" path="test-path"/> 516 <project name="test-name" path="test-path"/>
464 <project name="extras" path="path" groups="g1,g2,g1"/> 517 <project name="extras" path="path" groups="g1,g2,g1"/>
465</manifest> 518</manifest>
466""") 519"""
467 self.assertEqual(len(manifest.projects), 2) 520 )
468 # Ordering isn't guaranteed. 521 self.assertEqual(len(manifest.projects), 2)
469 result = { 522 # Ordering isn't guaranteed.
470 manifest.projects[0].name: manifest.projects[0].groups, 523 result = {
471 manifest.projects[1].name: manifest.projects[1].groups, 524 manifest.projects[0].name: manifest.projects[0].groups,
472 } 525 manifest.projects[1].name: manifest.projects[1].groups,
473 project = manifest.projects[0] 526 }
474 self.assertCountEqual( 527 self.assertCountEqual(
475 result['test-name'], 528 result["test-name"], ["name:test-name", "all", "path:test-path"]
476 ['name:test-name', 'all', 'path:test-path']) 529 )
477 self.assertCountEqual( 530 self.assertCountEqual(
478 result['extras'], 531 result["extras"],
479 ['g1', 'g2', 'g1', 'name:extras', 'all', 'path:path']) 532 ["g1", "g2", "g1", "name:extras", "all", "path:path"],
480 groupstr = 'default,platform-' + platform.system().lower() 533 )
481 self.assertEqual(groupstr, manifest.GetGroupsStr()) 534 groupstr = "default,platform-" + platform.system().lower()
482 groupstr = 'g1,g2,g1' 535 self.assertEqual(groupstr, manifest.GetGroupsStr())
483 manifest.manifestProject.config.SetString('manifest.groups', groupstr) 536 groupstr = "g1,g2,g1"
484 self.assertEqual(groupstr, manifest.GetGroupsStr()) 537 manifest.manifestProject.config.SetString("manifest.groups", groupstr)
485 538 self.assertEqual(groupstr, manifest.GetGroupsStr())
486 def test_set_revision_id(self): 539
487 """Check setting of project's revisionId.""" 540 def test_set_revision_id(self):
488 manifest = self.getXmlManifest(""" 541 """Check setting of project's revisionId."""
542 manifest = self.getXmlManifest(
543 """
489<manifest> 544<manifest>
490 <remote name="default-remote" fetch="http://localhost" /> 545 <remote name="default-remote" fetch="http://localhost" />
491 <default remote="default-remote" revision="refs/heads/main" /> 546 <default remote="default-remote" revision="refs/heads/main" />
492 <project name="test-name"/> 547 <project name="test-name"/>
493</manifest> 548</manifest>
494""") 549"""
495 self.assertEqual(len(manifest.projects), 1) 550 )
496 project = manifest.projects[0] 551 self.assertEqual(len(manifest.projects), 1)
497 project.SetRevisionId('ABCDEF') 552 project = manifest.projects[0]
498 self.assertEqual( 553 project.SetRevisionId("ABCDEF")
499 sort_attributes(manifest.ToXml().toxml()), 554 self.assertEqual(
500 '<?xml version="1.0" ?><manifest>' 555 sort_attributes(manifest.ToXml().toxml()),
501 '<remote fetch="http://localhost" name="default-remote"/>' 556 '<?xml version="1.0" ?><manifest>'
502 '<default remote="default-remote" revision="refs/heads/main"/>' 557 '<remote fetch="http://localhost" name="default-remote"/>'
503 '<project name="test-name" revision="ABCDEF" upstream="refs/heads/main"/>' 558 '<default remote="default-remote" revision="refs/heads/main"/>'
504 '</manifest>') 559 '<project name="test-name" revision="ABCDEF" upstream="refs/heads/main"/>' # noqa: E501
505 560 "</manifest>",
506 def test_trailing_slash(self): 561 )
507 """Check handling of trailing slashes in attributes.""" 562
508 def parse(name, path): 563 def test_trailing_slash(self):
509 name = self.encodeXmlAttr(name) 564 """Check handling of trailing slashes in attributes."""
510 path = self.encodeXmlAttr(path) 565
511 return self.getXmlManifest(f""" 566 def parse(name, path):
567 name = self.encodeXmlAttr(name)
568 path = self.encodeXmlAttr(path)
569 return self.getXmlManifest(
570 f"""
512<manifest> 571<manifest>
513 <remote name="default-remote" fetch="http://localhost" /> 572 <remote name="default-remote" fetch="http://localhost" />
514 <default remote="default-remote" revision="refs/heads/main" /> 573 <default remote="default-remote" revision="refs/heads/main" />
515 <project name="{name}" path="{path}" /> 574 <project name="{name}" path="{path}" />
516</manifest> 575</manifest>
517""") 576"""
518 577 )
519 manifest = parse('a/path/', 'foo') 578
520 self.assertEqual(os.path.normpath(manifest.projects[0].gitdir), 579 manifest = parse("a/path/", "foo")
521 os.path.join(self.tempdir, '.repo', 'projects', 'foo.git')) 580 self.assertEqual(
522 self.assertEqual(os.path.normpath(manifest.projects[0].objdir), 581 os.path.normpath(manifest.projects[0].gitdir),
523 os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git')) 582 os.path.join(self.tempdir, ".repo", "projects", "foo.git"),
524 583 )
525 manifest = parse('a/path', 'foo/') 584 self.assertEqual(
526 self.assertEqual(os.path.normpath(manifest.projects[0].gitdir), 585 os.path.normpath(manifest.projects[0].objdir),
527 os.path.join(self.tempdir, '.repo', 'projects', 'foo.git')) 586 os.path.join(
528 self.assertEqual(os.path.normpath(manifest.projects[0].objdir), 587 self.tempdir, ".repo", "project-objects", "a", "path.git"
529 os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git')) 588 ),
530 589 )
531 manifest = parse('a/path', 'foo//////') 590
532 self.assertEqual(os.path.normpath(manifest.projects[0].gitdir), 591 manifest = parse("a/path", "foo/")
533 os.path.join(self.tempdir, '.repo', 'projects', 'foo.git')) 592 self.assertEqual(
534 self.assertEqual(os.path.normpath(manifest.projects[0].objdir), 593 os.path.normpath(manifest.projects[0].gitdir),
535 os.path.join(self.tempdir, '.repo', 'project-objects', 'a', 'path.git')) 594 os.path.join(self.tempdir, ".repo", "projects", "foo.git"),
536 595 )
537 def test_toplevel_path(self): 596 self.assertEqual(
538 """Check handling of path=. specially.""" 597 os.path.normpath(manifest.projects[0].objdir),
539 def parse(name, path): 598 os.path.join(
540 name = self.encodeXmlAttr(name) 599 self.tempdir, ".repo", "project-objects", "a", "path.git"
541 path = self.encodeXmlAttr(path) 600 ),
542 return self.getXmlManifest(f""" 601 )
602
603 manifest = parse("a/path", "foo//////")
604 self.assertEqual(
605 os.path.normpath(manifest.projects[0].gitdir),
606 os.path.join(self.tempdir, ".repo", "projects", "foo.git"),
607 )
608 self.assertEqual(
609 os.path.normpath(manifest.projects[0].objdir),
610 os.path.join(
611 self.tempdir, ".repo", "project-objects", "a", "path.git"
612 ),
613 )
614
615 def test_toplevel_path(self):
616 """Check handling of path=. specially."""
617
618 def parse(name, path):
619 name = self.encodeXmlAttr(name)
620 path = self.encodeXmlAttr(path)
621 return self.getXmlManifest(
622 f"""
543<manifest> 623<manifest>
544 <remote name="default-remote" fetch="http://localhost" /> 624 <remote name="default-remote" fetch="http://localhost" />
545 <default remote="default-remote" revision="refs/heads/main" /> 625 <default remote="default-remote" revision="refs/heads/main" />
546 <project name="{name}" path="{path}" /> 626 <project name="{name}" path="{path}" />
547</manifest> 627</manifest>
548""") 628"""
549 629 )
550 for path in ('.', './', './/', './//'): 630
551 manifest = parse('server/path', path) 631 for path in (".", "./", ".//", ".///"):
552 self.assertEqual(os.path.normpath(manifest.projects[0].gitdir), 632 manifest = parse("server/path", path)
553 os.path.join(self.tempdir, '.repo', 'projects', '..git')) 633 self.assertEqual(
554 634 os.path.normpath(manifest.projects[0].gitdir),
555 def test_bad_path_name_checks(self): 635 os.path.join(self.tempdir, ".repo", "projects", "..git"),
556 """Check handling of bad path & name attributes.""" 636 )
557 def parse(name, path): 637
558 name = self.encodeXmlAttr(name) 638 def test_bad_path_name_checks(self):
559 path = self.encodeXmlAttr(path) 639 """Check handling of bad path & name attributes."""
560 manifest = self.getXmlManifest(f""" 640
641 def parse(name, path):
642 name = self.encodeXmlAttr(name)
643 path = self.encodeXmlAttr(path)
644 manifest = self.getXmlManifest(
645 f"""
561<manifest> 646<manifest>
562 <remote name="default-remote" fetch="http://localhost" /> 647 <remote name="default-remote" fetch="http://localhost" />
563 <default remote="default-remote" revision="refs/heads/main" /> 648 <default remote="default-remote" revision="refs/heads/main" />
564 <project name="{name}" path="{path}" /> 649 <project name="{name}" path="{path}" />
565</manifest> 650</manifest>
566""") 651"""
567 # Force the manifest to be parsed. 652 )
568 manifest.ToXml() 653 # Force the manifest to be parsed.
654 manifest.ToXml()
569 655
570 # Verify the parser is valid by default to avoid buggy tests below. 656 # Verify the parser is valid by default to avoid buggy tests below.
571 parse('ok', 'ok') 657 parse("ok", "ok")
572 658
573 # Handle empty name explicitly because a different codepath rejects it. 659 # Handle empty name explicitly because a different codepath rejects it.
574 # Empty path is OK because it defaults to the name field. 660 # Empty path is OK because it defaults to the name field.
575 with self.assertRaises(error.ManifestParseError): 661 with self.assertRaises(error.ManifestParseError):
576 parse('', 'ok') 662 parse("", "ok")
577 663
578 for path in INVALID_FS_PATHS: 664 for path in INVALID_FS_PATHS:
579 if not path or path.endswith('/') or path.endswith(os.path.sep): 665 if not path or path.endswith("/") or path.endswith(os.path.sep):
580 continue 666 continue
581 667
582 with self.assertRaises(error.ManifestInvalidPathError): 668 with self.assertRaises(error.ManifestInvalidPathError):
583 parse(path, 'ok') 669 parse(path, "ok")
584 670
585 # We have a dedicated test for path=".". 671 # We have a dedicated test for path=".".
586 if path not in {'.'}: 672 if path not in {"."}:
587 with self.assertRaises(error.ManifestInvalidPathError): 673 with self.assertRaises(error.ManifestInvalidPathError):
588 parse('ok', path) 674 parse("ok", path)
589 675
590 676
591class SuperProjectElementTests(ManifestParseTestCase): 677class SuperProjectElementTests(ManifestParseTestCase):
592 """Tests for <superproject>.""" 678 """Tests for <superproject>."""
593 679
594 def test_superproject(self): 680 def test_superproject(self):
595 """Check superproject settings.""" 681 """Check superproject settings."""
596 manifest = self.getXmlManifest(""" 682 manifest = self.getXmlManifest(
683 """
597<manifest> 684<manifest>
598 <remote name="test-remote" fetch="http://localhost" /> 685 <remote name="test-remote" fetch="http://localhost" />
599 <default remote="test-remote" revision="refs/heads/main" /> 686 <default remote="test-remote" revision="refs/heads/main" />
600 <superproject name="superproject"/> 687 <superproject name="superproject"/>
601</manifest> 688</manifest>
602""") 689"""
603 self.assertEqual(manifest.superproject.name, 'superproject') 690 )
604 self.assertEqual(manifest.superproject.remote.name, 'test-remote') 691 self.assertEqual(manifest.superproject.name, "superproject")
605 self.assertEqual(manifest.superproject.remote.url, 'http://localhost/superproject') 692 self.assertEqual(manifest.superproject.remote.name, "test-remote")
606 self.assertEqual(manifest.superproject.revision, 'refs/heads/main') 693 self.assertEqual(
607 self.assertEqual( 694 manifest.superproject.remote.url, "http://localhost/superproject"
608 sort_attributes(manifest.ToXml().toxml()), 695 )
609 '<?xml version="1.0" ?><manifest>' 696 self.assertEqual(manifest.superproject.revision, "refs/heads/main")
610 '<remote fetch="http://localhost" name="test-remote"/>' 697 self.assertEqual(
611 '<default remote="test-remote" revision="refs/heads/main"/>' 698 sort_attributes(manifest.ToXml().toxml()),
612 '<superproject name="superproject"/>' 699 '<?xml version="1.0" ?><manifest>'
613 '</manifest>') 700 '<remote fetch="http://localhost" name="test-remote"/>'
614 701 '<default remote="test-remote" revision="refs/heads/main"/>'
615 def test_superproject_revision(self): 702 '<superproject name="superproject"/>'
616 """Check superproject settings with a different revision attribute""" 703 "</manifest>",
617 self.maxDiff = None 704 )
618 manifest = self.getXmlManifest(""" 705
706 def test_superproject_revision(self):
707 """Check superproject settings with a different revision attribute"""
708 self.maxDiff = None
709 manifest = self.getXmlManifest(
710 """
619<manifest> 711<manifest>
620 <remote name="test-remote" fetch="http://localhost" /> 712 <remote name="test-remote" fetch="http://localhost" />
621 <default remote="test-remote" revision="refs/heads/main" /> 713 <default remote="test-remote" revision="refs/heads/main" />
622 <superproject name="superproject" revision="refs/heads/stable" /> 714 <superproject name="superproject" revision="refs/heads/stable" />
623</manifest> 715</manifest>
624""") 716"""
625 self.assertEqual(manifest.superproject.name, 'superproject') 717 )
626 self.assertEqual(manifest.superproject.remote.name, 'test-remote') 718 self.assertEqual(manifest.superproject.name, "superproject")
627 self.assertEqual(manifest.superproject.remote.url, 'http://localhost/superproject') 719 self.assertEqual(manifest.superproject.remote.name, "test-remote")
628 self.assertEqual(manifest.superproject.revision, 'refs/heads/stable') 720 self.assertEqual(
629 self.assertEqual( 721 manifest.superproject.remote.url, "http://localhost/superproject"
630 sort_attributes(manifest.ToXml().toxml()), 722 )
631 '<?xml version="1.0" ?><manifest>' 723 self.assertEqual(manifest.superproject.revision, "refs/heads/stable")
632 '<remote fetch="http://localhost" name="test-remote"/>' 724 self.assertEqual(
633 '<default remote="test-remote" revision="refs/heads/main"/>' 725 sort_attributes(manifest.ToXml().toxml()),
634 '<superproject name="superproject" revision="refs/heads/stable"/>' 726 '<?xml version="1.0" ?><manifest>'
635 '</manifest>') 727 '<remote fetch="http://localhost" name="test-remote"/>'
636 728 '<default remote="test-remote" revision="refs/heads/main"/>'
637 def test_superproject_revision_default_negative(self): 729 '<superproject name="superproject" revision="refs/heads/stable"/>'
638 """Check superproject settings with a same revision attribute""" 730 "</manifest>",
639 self.maxDiff = None 731 )
640 manifest = self.getXmlManifest(""" 732
733 def test_superproject_revision_default_negative(self):
734 """Check superproject settings with a same revision attribute"""
735 self.maxDiff = None
736 manifest = self.getXmlManifest(
737 """
641<manifest> 738<manifest>
642 <remote name="test-remote" fetch="http://localhost" /> 739 <remote name="test-remote" fetch="http://localhost" />
643 <default remote="test-remote" revision="refs/heads/stable" /> 740 <default remote="test-remote" revision="refs/heads/stable" />
644 <superproject name="superproject" revision="refs/heads/stable" /> 741 <superproject name="superproject" revision="refs/heads/stable" />
645</manifest> 742</manifest>
646""") 743"""
647 self.assertEqual(manifest.superproject.name, 'superproject') 744 )
648 self.assertEqual(manifest.superproject.remote.name, 'test-remote') 745 self.assertEqual(manifest.superproject.name, "superproject")
649 self.assertEqual(manifest.superproject.remote.url, 'http://localhost/superproject') 746 self.assertEqual(manifest.superproject.remote.name, "test-remote")
650 self.assertEqual(manifest.superproject.revision, 'refs/heads/stable') 747 self.assertEqual(
651 self.assertEqual( 748 manifest.superproject.remote.url, "http://localhost/superproject"
652 sort_attributes(manifest.ToXml().toxml()), 749 )
653 '<?xml version="1.0" ?><manifest>' 750 self.assertEqual(manifest.superproject.revision, "refs/heads/stable")
654 '<remote fetch="http://localhost" name="test-remote"/>' 751 self.assertEqual(
655 '<default remote="test-remote" revision="refs/heads/stable"/>' 752 sort_attributes(manifest.ToXml().toxml()),
656 '<superproject name="superproject"/>' 753 '<?xml version="1.0" ?><manifest>'
657 '</manifest>') 754 '<remote fetch="http://localhost" name="test-remote"/>'
658 755 '<default remote="test-remote" revision="refs/heads/stable"/>'
659 def test_superproject_revision_remote(self): 756 '<superproject name="superproject"/>'
660 """Check superproject settings with a same revision attribute""" 757 "</manifest>",
661 self.maxDiff = None 758 )
662 manifest = self.getXmlManifest(""" 759
760 def test_superproject_revision_remote(self):
761 """Check superproject settings with a same revision attribute"""
762 self.maxDiff = None
763 manifest = self.getXmlManifest(
764 """
663<manifest> 765<manifest>
664 <remote name="test-remote" fetch="http://localhost" revision="refs/heads/main" /> 766 <remote name="test-remote" fetch="http://localhost" revision="refs/heads/main" />
665 <default remote="test-remote" /> 767 <default remote="test-remote" />
666 <superproject name="superproject" revision="refs/heads/stable" /> 768 <superproject name="superproject" revision="refs/heads/stable" />
667</manifest> 769</manifest>
668""") 770""" # noqa: E501
669 self.assertEqual(manifest.superproject.name, 'superproject') 771 )
670 self.assertEqual(manifest.superproject.remote.name, 'test-remote') 772 self.assertEqual(manifest.superproject.name, "superproject")
671 self.assertEqual(manifest.superproject.remote.url, 'http://localhost/superproject') 773 self.assertEqual(manifest.superproject.remote.name, "test-remote")
672 self.assertEqual(manifest.superproject.revision, 'refs/heads/stable') 774 self.assertEqual(
673 self.assertEqual( 775 manifest.superproject.remote.url, "http://localhost/superproject"
674 sort_attributes(manifest.ToXml().toxml()), 776 )
675 '<?xml version="1.0" ?><manifest>' 777 self.assertEqual(manifest.superproject.revision, "refs/heads/stable")
676 '<remote fetch="http://localhost" name="test-remote" revision="refs/heads/main"/>' 778 self.assertEqual(
677 '<default remote="test-remote"/>' 779 sort_attributes(manifest.ToXml().toxml()),
678 '<superproject name="superproject" revision="refs/heads/stable"/>' 780 '<?xml version="1.0" ?><manifest>'
679 '</manifest>') 781 '<remote fetch="http://localhost" name="test-remote" revision="refs/heads/main"/>' # noqa: E501
680 782 '<default remote="test-remote"/>'
681 def test_remote(self): 783 '<superproject name="superproject" revision="refs/heads/stable"/>'
682 """Check superproject settings with a remote.""" 784 "</manifest>",
683 manifest = self.getXmlManifest(""" 785 )
786
787 def test_remote(self):
788 """Check superproject settings with a remote."""
789 manifest = self.getXmlManifest(
790 """
684<manifest> 791<manifest>
685 <remote name="default-remote" fetch="http://localhost" /> 792 <remote name="default-remote" fetch="http://localhost" />
686 <remote name="superproject-remote" fetch="http://localhost" /> 793 <remote name="superproject-remote" fetch="http://localhost" />
687 <default remote="default-remote" revision="refs/heads/main" /> 794 <default remote="default-remote" revision="refs/heads/main" />
688 <superproject name="platform/superproject" remote="superproject-remote"/> 795 <superproject name="platform/superproject" remote="superproject-remote"/>
689</manifest> 796</manifest>
690""") 797"""
691 self.assertEqual(manifest.superproject.name, 'platform/superproject') 798 )
692 self.assertEqual(manifest.superproject.remote.name, 'superproject-remote') 799 self.assertEqual(manifest.superproject.name, "platform/superproject")
693 self.assertEqual(manifest.superproject.remote.url, 'http://localhost/platform/superproject') 800 self.assertEqual(
694 self.assertEqual(manifest.superproject.revision, 'refs/heads/main') 801 manifest.superproject.remote.name, "superproject-remote"
695 self.assertEqual( 802 )
696 sort_attributes(manifest.ToXml().toxml()), 803 self.assertEqual(
697 '<?xml version="1.0" ?><manifest>' 804 manifest.superproject.remote.url,
698 '<remote fetch="http://localhost" name="default-remote"/>' 805 "http://localhost/platform/superproject",
699 '<remote fetch="http://localhost" name="superproject-remote"/>' 806 )
700 '<default remote="default-remote" revision="refs/heads/main"/>' 807 self.assertEqual(manifest.superproject.revision, "refs/heads/main")
701 '<superproject name="platform/superproject" remote="superproject-remote"/>' 808 self.assertEqual(
702 '</manifest>') 809 sort_attributes(manifest.ToXml().toxml()),
703 810 '<?xml version="1.0" ?><manifest>'
704 def test_defalut_remote(self): 811 '<remote fetch="http://localhost" name="default-remote"/>'
705 """Check superproject settings with a default remote.""" 812 '<remote fetch="http://localhost" name="superproject-remote"/>'
706 manifest = self.getXmlManifest(""" 813 '<default remote="default-remote" revision="refs/heads/main"/>'
814 '<superproject name="platform/superproject" remote="superproject-remote"/>' # noqa: E501
815 "</manifest>",
816 )
817
818 def test_defalut_remote(self):
819 """Check superproject settings with a default remote."""
820 manifest = self.getXmlManifest(
821 """
707<manifest> 822<manifest>
708 <remote name="default-remote" fetch="http://localhost" /> 823 <remote name="default-remote" fetch="http://localhost" />
709 <default remote="default-remote" revision="refs/heads/main" /> 824 <default remote="default-remote" revision="refs/heads/main" />
710 <superproject name="superproject" remote="default-remote"/> 825 <superproject name="superproject" remote="default-remote"/>
711</manifest> 826</manifest>
712""") 827"""
713 self.assertEqual(manifest.superproject.name, 'superproject') 828 )
714 self.assertEqual(manifest.superproject.remote.name, 'default-remote') 829 self.assertEqual(manifest.superproject.name, "superproject")
715 self.assertEqual(manifest.superproject.revision, 'refs/heads/main') 830 self.assertEqual(manifest.superproject.remote.name, "default-remote")
716 self.assertEqual( 831 self.assertEqual(manifest.superproject.revision, "refs/heads/main")
717 sort_attributes(manifest.ToXml().toxml()), 832 self.assertEqual(
718 '<?xml version="1.0" ?><manifest>' 833 sort_attributes(manifest.ToXml().toxml()),
719 '<remote fetch="http://localhost" name="default-remote"/>' 834 '<?xml version="1.0" ?><manifest>'
720 '<default remote="default-remote" revision="refs/heads/main"/>' 835 '<remote fetch="http://localhost" name="default-remote"/>'
721 '<superproject name="superproject"/>' 836 '<default remote="default-remote" revision="refs/heads/main"/>'
722 '</manifest>') 837 '<superproject name="superproject"/>'
838 "</manifest>",
839 )
723 840
724 841
725class ContactinfoElementTests(ManifestParseTestCase): 842class ContactinfoElementTests(ManifestParseTestCase):
726 """Tests for <contactinfo>.""" 843 """Tests for <contactinfo>."""
727 844
728 def test_contactinfo(self): 845 def test_contactinfo(self):
729 """Check contactinfo settings.""" 846 """Check contactinfo settings."""
730 bugurl = 'http://localhost/contactinfo' 847 bugurl = "http://localhost/contactinfo"
731 manifest = self.getXmlManifest(f""" 848 manifest = self.getXmlManifest(
849 f"""
732<manifest> 850<manifest>
733 <contactinfo bugurl="{bugurl}"/> 851 <contactinfo bugurl="{bugurl}"/>
734</manifest> 852</manifest>
735""") 853"""
736 self.assertEqual(manifest.contactinfo.bugurl, bugurl) 854 )
737 self.assertEqual( 855 self.assertEqual(manifest.contactinfo.bugurl, bugurl)
738 manifest.ToXml().toxml(), 856 self.assertEqual(
739 '<?xml version="1.0" ?><manifest>' 857 manifest.ToXml().toxml(),
740 f'<contactinfo bugurl="{bugurl}"/>' 858 '<?xml version="1.0" ?><manifest>'
741 '</manifest>') 859 f'<contactinfo bugurl="{bugurl}"/>'
860 "</manifest>",
861 )
742 862
743 863
744class DefaultElementTests(ManifestParseTestCase): 864class DefaultElementTests(ManifestParseTestCase):
745 """Tests for <default>.""" 865 """Tests for <default>."""
746 866
747 def test_default(self): 867 def test_default(self):
748 """Check default settings.""" 868 """Check default settings."""
749 a = manifest_xml._Default() 869 a = manifest_xml._Default()
750 a.revisionExpr = 'foo' 870 a.revisionExpr = "foo"
751 a.remote = manifest_xml._XmlRemote(name='remote') 871 a.remote = manifest_xml._XmlRemote(name="remote")
752 b = manifest_xml._Default() 872 b = manifest_xml._Default()
753 b.revisionExpr = 'bar' 873 b.revisionExpr = "bar"
754 self.assertEqual(a, a) 874 self.assertEqual(a, a)
755 self.assertNotEqual(a, b) 875 self.assertNotEqual(a, b)
756 self.assertNotEqual(b, a.remote) 876 self.assertNotEqual(b, a.remote)
757 self.assertNotEqual(a, 123) 877 self.assertNotEqual(a, 123)
758 self.assertNotEqual(a, None) 878 self.assertNotEqual(a, None)
759 879
760 880
761class RemoteElementTests(ManifestParseTestCase): 881class RemoteElementTests(ManifestParseTestCase):
762 """Tests for <remote>.""" 882 """Tests for <remote>."""
763 883
764 def test_remote(self): 884 def test_remote(self):
765 """Check remote settings.""" 885 """Check remote settings."""
766 a = manifest_xml._XmlRemote(name='foo') 886 a = manifest_xml._XmlRemote(name="foo")
767 a.AddAnnotation('key1', 'value1', 'true') 887 a.AddAnnotation("key1", "value1", "true")
768 b = manifest_xml._XmlRemote(name='foo') 888 b = manifest_xml._XmlRemote(name="foo")
769 b.AddAnnotation('key2', 'value1', 'true') 889 b.AddAnnotation("key2", "value1", "true")
770 c = manifest_xml._XmlRemote(name='foo') 890 c = manifest_xml._XmlRemote(name="foo")
771 c.AddAnnotation('key1', 'value2', 'true') 891 c.AddAnnotation("key1", "value2", "true")
772 d = manifest_xml._XmlRemote(name='foo') 892 d = manifest_xml._XmlRemote(name="foo")
773 d.AddAnnotation('key1', 'value1', 'false') 893 d.AddAnnotation("key1", "value1", "false")
774 self.assertEqual(a, a) 894 self.assertEqual(a, a)
775 self.assertNotEqual(a, b) 895 self.assertNotEqual(a, b)
776 self.assertNotEqual(a, c) 896 self.assertNotEqual(a, c)
777 self.assertNotEqual(a, d) 897 self.assertNotEqual(a, d)
778 self.assertNotEqual(a, manifest_xml._Default()) 898 self.assertNotEqual(a, manifest_xml._Default())
779 self.assertNotEqual(a, 123) 899 self.assertNotEqual(a, 123)
780 self.assertNotEqual(a, None) 900 self.assertNotEqual(a, None)
781 901
782 902
783class RemoveProjectElementTests(ManifestParseTestCase): 903class RemoveProjectElementTests(ManifestParseTestCase):
784 """Tests for <remove-project>.""" 904 """Tests for <remove-project>."""
785 905
786 def test_remove_one_project(self): 906 def test_remove_one_project(self):
787 manifest = self.getXmlManifest(""" 907 manifest = self.getXmlManifest(
908 """
788<manifest> 909<manifest>
789 <remote name="default-remote" fetch="http://localhost" /> 910 <remote name="default-remote" fetch="http://localhost" />
790 <default remote="default-remote" revision="refs/heads/main" /> 911 <default remote="default-remote" revision="refs/heads/main" />
791 <project name="myproject" /> 912 <project name="myproject" />
792 <remove-project name="myproject" /> 913 <remove-project name="myproject" />
793</manifest> 914</manifest>
794""") 915"""
795 self.assertEqual(manifest.projects, []) 916 )
917 self.assertEqual(manifest.projects, [])
796 918
797 def test_remove_one_project_one_remains(self): 919 def test_remove_one_project_one_remains(self):
798 manifest = self.getXmlManifest(""" 920 manifest = self.getXmlManifest(
921 """
799<manifest> 922<manifest>
800 <remote name="default-remote" fetch="http://localhost" /> 923 <remote name="default-remote" fetch="http://localhost" />
801 <default remote="default-remote" revision="refs/heads/main" /> 924 <default remote="default-remote" revision="refs/heads/main" />
@@ -803,51 +926,59 @@ class RemoveProjectElementTests(ManifestParseTestCase):
803 <project name="yourproject" /> 926 <project name="yourproject" />
804 <remove-project name="myproject" /> 927 <remove-project name="myproject" />
805</manifest> 928</manifest>
806""") 929"""
930 )
807 931
808 self.assertEqual(len(manifest.projects), 1) 932 self.assertEqual(len(manifest.projects), 1)
809 self.assertEqual(manifest.projects[0].name, 'yourproject') 933 self.assertEqual(manifest.projects[0].name, "yourproject")
810 934
811 def test_remove_one_project_doesnt_exist(self): 935 def test_remove_one_project_doesnt_exist(self):
812 with self.assertRaises(manifest_xml.ManifestParseError): 936 with self.assertRaises(manifest_xml.ManifestParseError):
813 manifest = self.getXmlManifest(""" 937 manifest = self.getXmlManifest(
938 """
814<manifest> 939<manifest>
815 <remote name="default-remote" fetch="http://localhost" /> 940 <remote name="default-remote" fetch="http://localhost" />
816 <default remote="default-remote" revision="refs/heads/main" /> 941 <default remote="default-remote" revision="refs/heads/main" />
817 <remove-project name="myproject" /> 942 <remove-project name="myproject" />
818</manifest> 943</manifest>
819""") 944"""
820 manifest.projects 945 )
946 manifest.projects
821 947
822 def test_remove_one_optional_project_doesnt_exist(self): 948 def test_remove_one_optional_project_doesnt_exist(self):
823 manifest = self.getXmlManifest(""" 949 manifest = self.getXmlManifest(
950 """
824<manifest> 951<manifest>
825 <remote name="default-remote" fetch="http://localhost" /> 952 <remote name="default-remote" fetch="http://localhost" />
826 <default remote="default-remote" revision="refs/heads/main" /> 953 <default remote="default-remote" revision="refs/heads/main" />
827 <remove-project name="myproject" optional="true" /> 954 <remove-project name="myproject" optional="true" />
828</manifest> 955</manifest>
829""") 956"""
830 self.assertEqual(manifest.projects, []) 957 )
958 self.assertEqual(manifest.projects, [])
831 959
832 960
833class ExtendProjectElementTests(ManifestParseTestCase): 961class ExtendProjectElementTests(ManifestParseTestCase):
834 """Tests for <extend-project>.""" 962 """Tests for <extend-project>."""
835 963
836 def test_extend_project_dest_path_single_match(self): 964 def test_extend_project_dest_path_single_match(self):
837 manifest = self.getXmlManifest(""" 965 manifest = self.getXmlManifest(
966 """
838<manifest> 967<manifest>
839 <remote name="default-remote" fetch="http://localhost" /> 968 <remote name="default-remote" fetch="http://localhost" />
840 <default remote="default-remote" revision="refs/heads/main" /> 969 <default remote="default-remote" revision="refs/heads/main" />
841 <project name="myproject" /> 970 <project name="myproject" />
842 <extend-project name="myproject" dest-path="bar" /> 971 <extend-project name="myproject" dest-path="bar" />
843</manifest> 972</manifest>
844""") 973"""
845 self.assertEqual(len(manifest.projects), 1) 974 )
846 self.assertEqual(manifest.projects[0].relpath, 'bar') 975 self.assertEqual(len(manifest.projects), 1)
847 976 self.assertEqual(manifest.projects[0].relpath, "bar")
848 def test_extend_project_dest_path_multi_match(self): 977
849 with self.assertRaises(manifest_xml.ManifestParseError): 978 def test_extend_project_dest_path_multi_match(self):
850 manifest = self.getXmlManifest(""" 979 with self.assertRaises(manifest_xml.ManifestParseError):
980 manifest = self.getXmlManifest(
981 """
851<manifest> 982<manifest>
852 <remote name="default-remote" fetch="http://localhost" /> 983 <remote name="default-remote" fetch="http://localhost" />
853 <default remote="default-remote" revision="refs/heads/main" /> 984 <default remote="default-remote" revision="refs/heads/main" />
@@ -855,11 +986,13 @@ class ExtendProjectElementTests(ManifestParseTestCase):
855 <project name="myproject" path="y" /> 986 <project name="myproject" path="y" />
856 <extend-project name="myproject" dest-path="bar" /> 987 <extend-project name="myproject" dest-path="bar" />
857</manifest> 988</manifest>
858""") 989"""
859 manifest.projects 990 )
991 manifest.projects
860 992
861 def test_extend_project_dest_path_multi_match_path_specified(self): 993 def test_extend_project_dest_path_multi_match_path_specified(self):
862 manifest = self.getXmlManifest(""" 994 manifest = self.getXmlManifest(
995 """
863<manifest> 996<manifest>
864 <remote name="default-remote" fetch="http://localhost" /> 997 <remote name="default-remote" fetch="http://localhost" />
865 <default remote="default-remote" revision="refs/heads/main" /> 998 <default remote="default-remote" revision="refs/heads/main" />
@@ -867,34 +1000,39 @@ class ExtendProjectElementTests(ManifestParseTestCase):
867 <project name="myproject" path="y" /> 1000 <project name="myproject" path="y" />
868 <extend-project name="myproject" path="x" dest-path="bar" /> 1001 <extend-project name="myproject" path="x" dest-path="bar" />
869</manifest> 1002</manifest>
870""") 1003"""
871 self.assertEqual(len(manifest.projects), 2) 1004 )
872 if manifest.projects[0].relpath == 'y': 1005 self.assertEqual(len(manifest.projects), 2)
873 self.assertEqual(manifest.projects[1].relpath, 'bar') 1006 if manifest.projects[0].relpath == "y":
874 else: 1007 self.assertEqual(manifest.projects[1].relpath, "bar")
875 self.assertEqual(manifest.projects[0].relpath, 'bar') 1008 else:
876 self.assertEqual(manifest.projects[1].relpath, 'y') 1009 self.assertEqual(manifest.projects[0].relpath, "bar")
877 1010 self.assertEqual(manifest.projects[1].relpath, "y")
878 def test_extend_project_dest_branch(self): 1011
879 manifest = self.getXmlManifest(""" 1012 def test_extend_project_dest_branch(self):
1013 manifest = self.getXmlManifest(
1014 """
880<manifest> 1015<manifest>
881 <remote name="default-remote" fetch="http://localhost" /> 1016 <remote name="default-remote" fetch="http://localhost" />
882 <default remote="default-remote" revision="refs/heads/main" dest-branch="foo" /> 1017 <default remote="default-remote" revision="refs/heads/main" dest-branch="foo" />
883 <project name="myproject" /> 1018 <project name="myproject" />
884 <extend-project name="myproject" dest-branch="bar" /> 1019 <extend-project name="myproject" dest-branch="bar" />
885</manifest> 1020</manifest>
886""") 1021""" # noqa: E501
887 self.assertEqual(len(manifest.projects), 1) 1022 )
888 self.assertEqual(manifest.projects[0].dest_branch, 'bar') 1023 self.assertEqual(len(manifest.projects), 1)
889 1024 self.assertEqual(manifest.projects[0].dest_branch, "bar")
890 def test_extend_project_upstream(self): 1025
891 manifest = self.getXmlManifest(""" 1026 def test_extend_project_upstream(self):
1027 manifest = self.getXmlManifest(
1028 """
892<manifest> 1029<manifest>
893 <remote name="default-remote" fetch="http://localhost" /> 1030 <remote name="default-remote" fetch="http://localhost" />
894 <default remote="default-remote" revision="refs/heads/main" /> 1031 <default remote="default-remote" revision="refs/heads/main" />
895 <project name="myproject" /> 1032 <project name="myproject" />
896 <extend-project name="myproject" upstream="bar" /> 1033 <extend-project name="myproject" upstream="bar" />
897</manifest> 1034</manifest>
898""") 1035"""
899 self.assertEqual(len(manifest.projects), 1) 1036 )
900 self.assertEqual(manifest.projects[0].upstream, 'bar') 1037 self.assertEqual(len(manifest.projects), 1)
1038 self.assertEqual(manifest.projects[0].upstream, "bar")
diff --git a/tests/test_platform_utils.py b/tests/test_platform_utils.py
index 55b7805c..7a42de01 100644
--- a/tests/test_platform_utils.py
+++ b/tests/test_platform_utils.py
@@ -22,29 +22,31 @@ import platform_utils
22 22
23 23
24class RemoveTests(unittest.TestCase): 24class RemoveTests(unittest.TestCase):
25 """Check remove() helper.""" 25 """Check remove() helper."""
26 26
27 def testMissingOk(self): 27 def testMissingOk(self):
28 """Check missing_ok handling.""" 28 """Check missing_ok handling."""
29 with tempfile.TemporaryDirectory() as tmpdir: 29 with tempfile.TemporaryDirectory() as tmpdir:
30 path = os.path.join(tmpdir, 'test') 30 path = os.path.join(tmpdir, "test")
31 31
32 # Should not fail. 32 # Should not fail.
33 platform_utils.remove(path, missing_ok=True) 33 platform_utils.remove(path, missing_ok=True)
34 34
35 # Should fail. 35 # Should fail.
36 self.assertRaises(OSError, platform_utils.remove, path) 36 self.assertRaises(OSError, platform_utils.remove, path)
37 self.assertRaises(OSError, platform_utils.remove, path, missing_ok=False) 37 self.assertRaises(
38 38 OSError, platform_utils.remove, path, missing_ok=False
39 # Should not fail if it exists. 39 )
40 open(path, 'w').close() 40
41 platform_utils.remove(path, missing_ok=True) 41 # Should not fail if it exists.
42 self.assertFalse(os.path.exists(path)) 42 open(path, "w").close()
43 43 platform_utils.remove(path, missing_ok=True)
44 open(path, 'w').close() 44 self.assertFalse(os.path.exists(path))
45 platform_utils.remove(path) 45
46 self.assertFalse(os.path.exists(path)) 46 open(path, "w").close()
47 47 platform_utils.remove(path)
48 open(path, 'w').close() 48 self.assertFalse(os.path.exists(path))
49 platform_utils.remove(path, missing_ok=False) 49
50 self.assertFalse(os.path.exists(path)) 50 open(path, "w").close()
51 platform_utils.remove(path, missing_ok=False)
52 self.assertFalse(os.path.exists(path))
diff --git a/tests/test_project.py b/tests/test_project.py
index c50d9940..bc8330b2 100644
--- a/tests/test_project.py
+++ b/tests/test_project.py
@@ -31,452 +31,493 @@ import project
31 31
32@contextlib.contextmanager 32@contextlib.contextmanager
33def TempGitTree(): 33def TempGitTree():
34 """Create a new empty git checkout for testing.""" 34 """Create a new empty git checkout for testing."""
35 with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir: 35 with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
36 # Tests need to assume, that main is default branch at init, 36 # Tests need to assume, that main is default branch at init,
37 # which is not supported in config until 2.28. 37 # which is not supported in config until 2.28.
38 cmd = ['git', 'init'] 38 cmd = ["git", "init"]
39 if git_command.git_require((2, 28, 0)): 39 if git_command.git_require((2, 28, 0)):
40 cmd += ['--initial-branch=main'] 40 cmd += ["--initial-branch=main"]
41 else: 41 else:
42 # Use template dir for init. 42 # Use template dir for init.
43 templatedir = tempfile.mkdtemp(prefix='.test-template') 43 templatedir = tempfile.mkdtemp(prefix=".test-template")
44 with open(os.path.join(templatedir, 'HEAD'), 'w') as fp: 44 with open(os.path.join(templatedir, "HEAD"), "w") as fp:
45 fp.write('ref: refs/heads/main\n') 45 fp.write("ref: refs/heads/main\n")
46 cmd += ['--template', templatedir] 46 cmd += ["--template", templatedir]
47 subprocess.check_call(cmd, cwd=tempdir) 47 subprocess.check_call(cmd, cwd=tempdir)
48 yield tempdir 48 yield tempdir
49 49
50 50
51class FakeProject(object): 51class FakeProject(object):
52 """A fake for Project for basic functionality.""" 52 """A fake for Project for basic functionality."""
53 53
54 def __init__(self, worktree): 54 def __init__(self, worktree):
55 self.worktree = worktree 55 self.worktree = worktree
56 self.gitdir = os.path.join(worktree, '.git') 56 self.gitdir = os.path.join(worktree, ".git")
57 self.name = 'fakeproject' 57 self.name = "fakeproject"
58 self.work_git = project.Project._GitGetByExec( 58 self.work_git = project.Project._GitGetByExec(
59 self, bare=False, gitdir=self.gitdir) 59 self, bare=False, gitdir=self.gitdir
60 self.bare_git = project.Project._GitGetByExec( 60 )
61 self, bare=True, gitdir=self.gitdir) 61 self.bare_git = project.Project._GitGetByExec(
62 self.config = git_config.GitConfig.ForRepository(gitdir=self.gitdir) 62 self, bare=True, gitdir=self.gitdir
63 )
64 self.config = git_config.GitConfig.ForRepository(gitdir=self.gitdir)
63 65
64 66
65class ReviewableBranchTests(unittest.TestCase): 67class ReviewableBranchTests(unittest.TestCase):
66 """Check ReviewableBranch behavior.""" 68 """Check ReviewableBranch behavior."""
67 69
68 def test_smoke(self): 70 def test_smoke(self):
69 """A quick run through everything.""" 71 """A quick run through everything."""
70 with TempGitTree() as tempdir: 72 with TempGitTree() as tempdir:
71 fakeproj = FakeProject(tempdir) 73 fakeproj = FakeProject(tempdir)
72 74
73 # Generate some commits. 75 # Generate some commits.
74 with open(os.path.join(tempdir, 'readme'), 'w') as fp: 76 with open(os.path.join(tempdir, "readme"), "w") as fp:
75 fp.write('txt') 77 fp.write("txt")
76 fakeproj.work_git.add('readme') 78 fakeproj.work_git.add("readme")
77 fakeproj.work_git.commit('-mAdd file') 79 fakeproj.work_git.commit("-mAdd file")
78 fakeproj.work_git.checkout('-b', 'work') 80 fakeproj.work_git.checkout("-b", "work")
79 fakeproj.work_git.rm('-f', 'readme') 81 fakeproj.work_git.rm("-f", "readme")
80 fakeproj.work_git.commit('-mDel file') 82 fakeproj.work_git.commit("-mDel file")
81 83
82 # Start off with the normal details. 84 # Start off with the normal details.
83 rb = project.ReviewableBranch( 85 rb = project.ReviewableBranch(
84 fakeproj, fakeproj.config.GetBranch('work'), 'main') 86 fakeproj, fakeproj.config.GetBranch("work"), "main"
85 self.assertEqual('work', rb.name) 87 )
86 self.assertEqual(1, len(rb.commits)) 88 self.assertEqual("work", rb.name)
87 self.assertIn('Del file', rb.commits[0]) 89 self.assertEqual(1, len(rb.commits))
88 d = rb.unabbrev_commits 90 self.assertIn("Del file", rb.commits[0])
89 self.assertEqual(1, len(d)) 91 d = rb.unabbrev_commits
90 short, long = next(iter(d.items())) 92 self.assertEqual(1, len(d))
91 self.assertTrue(long.startswith(short)) 93 short, long = next(iter(d.items()))
92 self.assertTrue(rb.base_exists) 94 self.assertTrue(long.startswith(short))
93 # Hard to assert anything useful about this. 95 self.assertTrue(rb.base_exists)
94 self.assertTrue(rb.date) 96 # Hard to assert anything useful about this.
95 97 self.assertTrue(rb.date)
96 # Now delete the tracking branch! 98
97 fakeproj.work_git.branch('-D', 'main') 99 # Now delete the tracking branch!
98 rb = project.ReviewableBranch( 100 fakeproj.work_git.branch("-D", "main")
99 fakeproj, fakeproj.config.GetBranch('work'), 'main') 101 rb = project.ReviewableBranch(
100 self.assertEqual(0, len(rb.commits)) 102 fakeproj, fakeproj.config.GetBranch("work"), "main"
101 self.assertFalse(rb.base_exists) 103 )
102 # Hard to assert anything useful about this. 104 self.assertEqual(0, len(rb.commits))
103 self.assertTrue(rb.date) 105 self.assertFalse(rb.base_exists)
106 # Hard to assert anything useful about this.
107 self.assertTrue(rb.date)
104 108
105 109
106class CopyLinkTestCase(unittest.TestCase): 110class CopyLinkTestCase(unittest.TestCase):
107 """TestCase for stub repo client checkouts. 111 """TestCase for stub repo client checkouts.
108 112
109 It'll have a layout like this: 113 It'll have a layout like this:
110 tempdir/ # self.tempdir 114 tempdir/ # self.tempdir
111 checkout/ # self.topdir 115 checkout/ # self.topdir
112 git-project/ # self.worktree 116 git-project/ # self.worktree
113 117
114 Attributes: 118 Attributes:
115 tempdir: A dedicated temporary directory. 119 tempdir: A dedicated temporary directory.
116 worktree: The top of the repo client checkout. 120 worktree: The top of the repo client checkout.
117 topdir: The top of a project checkout. 121 topdir: The top of a project checkout.
118 """ 122 """
119 123
120 def setUp(self): 124 def setUp(self):
121 self.tempdirobj = tempfile.TemporaryDirectory(prefix='repo_tests') 125 self.tempdirobj = tempfile.TemporaryDirectory(prefix="repo_tests")
122 self.tempdir = self.tempdirobj.name 126 self.tempdir = self.tempdirobj.name
123 self.topdir = os.path.join(self.tempdir, 'checkout') 127 self.topdir = os.path.join(self.tempdir, "checkout")
124 self.worktree = os.path.join(self.topdir, 'git-project') 128 self.worktree = os.path.join(self.topdir, "git-project")
125 os.makedirs(self.topdir) 129 os.makedirs(self.topdir)
126 os.makedirs(self.worktree) 130 os.makedirs(self.worktree)
127 131
128 def tearDown(self): 132 def tearDown(self):
129 self.tempdirobj.cleanup() 133 self.tempdirobj.cleanup()
130 134
131 @staticmethod 135 @staticmethod
132 def touch(path): 136 def touch(path):
133 with open(path, 'w'): 137 with open(path, "w"):
134 pass 138 pass
135 139
136 def assertExists(self, path, msg=None): 140 def assertExists(self, path, msg=None):
137 """Make sure |path| exists.""" 141 """Make sure |path| exists."""
138 if os.path.exists(path): 142 if os.path.exists(path):
139 return 143 return
140 144
141 if msg is None: 145 if msg is None:
142 msg = ['path is missing: %s' % path] 146 msg = ["path is missing: %s" % path]
143 while path != '/': 147 while path != "/":
144 path = os.path.dirname(path) 148 path = os.path.dirname(path)
145 if not path: 149 if not path:
146 # If we're given something like "foo", abort once we get to "". 150 # If we're given something like "foo", abort once we get to
147 break 151 # "".
148 result = os.path.exists(path) 152 break
149 msg.append('\tos.path.exists(%s): %s' % (path, result)) 153 result = os.path.exists(path)
150 if result: 154 msg.append("\tos.path.exists(%s): %s" % (path, result))
151 msg.append('\tcontents: %r' % os.listdir(path)) 155 if result:
152 break 156 msg.append("\tcontents: %r" % os.listdir(path))
153 msg = '\n'.join(msg) 157 break
154 158 msg = "\n".join(msg)
155 raise self.failureException(msg) 159
160 raise self.failureException(msg)
156 161
157 162
158class CopyFile(CopyLinkTestCase): 163class CopyFile(CopyLinkTestCase):
159 """Check _CopyFile handling.""" 164 """Check _CopyFile handling."""
160 165
161 def CopyFile(self, src, dest): 166 def CopyFile(self, src, dest):
162 return project._CopyFile(self.worktree, src, self.topdir, dest) 167 return project._CopyFile(self.worktree, src, self.topdir, dest)
163 168
164 def test_basic(self): 169 def test_basic(self):
165 """Basic test of copying a file from a project to the toplevel.""" 170 """Basic test of copying a file from a project to the toplevel."""
166 src = os.path.join(self.worktree, 'foo.txt') 171 src = os.path.join(self.worktree, "foo.txt")
167 self.touch(src) 172 self.touch(src)
168 cf = self.CopyFile('foo.txt', 'foo') 173 cf = self.CopyFile("foo.txt", "foo")
169 cf._Copy() 174 cf._Copy()
170 self.assertExists(os.path.join(self.topdir, 'foo')) 175 self.assertExists(os.path.join(self.topdir, "foo"))
171 176
172 def test_src_subdir(self): 177 def test_src_subdir(self):
173 """Copy a file from a subdir of a project.""" 178 """Copy a file from a subdir of a project."""
174 src = os.path.join(self.worktree, 'bar', 'foo.txt') 179 src = os.path.join(self.worktree, "bar", "foo.txt")
175 os.makedirs(os.path.dirname(src)) 180 os.makedirs(os.path.dirname(src))
176 self.touch(src) 181 self.touch(src)
177 cf = self.CopyFile('bar/foo.txt', 'new.txt') 182 cf = self.CopyFile("bar/foo.txt", "new.txt")
178 cf._Copy() 183 cf._Copy()
179 self.assertExists(os.path.join(self.topdir, 'new.txt')) 184 self.assertExists(os.path.join(self.topdir, "new.txt"))
180 185
181 def test_dest_subdir(self): 186 def test_dest_subdir(self):
182 """Copy a file to a subdir of a checkout.""" 187 """Copy a file to a subdir of a checkout."""
183 src = os.path.join(self.worktree, 'foo.txt') 188 src = os.path.join(self.worktree, "foo.txt")
184 self.touch(src) 189 self.touch(src)
185 cf = self.CopyFile('foo.txt', 'sub/dir/new.txt') 190 cf = self.CopyFile("foo.txt", "sub/dir/new.txt")
186 self.assertFalse(os.path.exists(os.path.join(self.topdir, 'sub'))) 191 self.assertFalse(os.path.exists(os.path.join(self.topdir, "sub")))
187 cf._Copy() 192 cf._Copy()
188 self.assertExists(os.path.join(self.topdir, 'sub', 'dir', 'new.txt')) 193 self.assertExists(os.path.join(self.topdir, "sub", "dir", "new.txt"))
189 194
190 def test_update(self): 195 def test_update(self):
191 """Make sure changed files get copied again.""" 196 """Make sure changed files get copied again."""
192 src = os.path.join(self.worktree, 'foo.txt') 197 src = os.path.join(self.worktree, "foo.txt")
193 dest = os.path.join(self.topdir, 'bar') 198 dest = os.path.join(self.topdir, "bar")
194 with open(src, 'w') as f: 199 with open(src, "w") as f:
195 f.write('1st') 200 f.write("1st")
196 cf = self.CopyFile('foo.txt', 'bar') 201 cf = self.CopyFile("foo.txt", "bar")
197 cf._Copy() 202 cf._Copy()
198 self.assertExists(dest) 203 self.assertExists(dest)
199 with open(dest) as f: 204 with open(dest) as f:
200 self.assertEqual(f.read(), '1st') 205 self.assertEqual(f.read(), "1st")
201 206
202 with open(src, 'w') as f: 207 with open(src, "w") as f:
203 f.write('2nd!') 208 f.write("2nd!")
204 cf._Copy() 209 cf._Copy()
205 with open(dest) as f: 210 with open(dest) as f:
206 self.assertEqual(f.read(), '2nd!') 211 self.assertEqual(f.read(), "2nd!")
207 212
208 def test_src_block_symlink(self): 213 def test_src_block_symlink(self):
209 """Do not allow reading from a symlinked path.""" 214 """Do not allow reading from a symlinked path."""
210 src = os.path.join(self.worktree, 'foo.txt') 215 src = os.path.join(self.worktree, "foo.txt")
211 sym = os.path.join(self.worktree, 'sym') 216 sym = os.path.join(self.worktree, "sym")
212 self.touch(src) 217 self.touch(src)
213 platform_utils.symlink('foo.txt', sym) 218 platform_utils.symlink("foo.txt", sym)
214 self.assertExists(sym) 219 self.assertExists(sym)
215 cf = self.CopyFile('sym', 'foo') 220 cf = self.CopyFile("sym", "foo")
216 self.assertRaises(error.ManifestInvalidPathError, cf._Copy) 221 self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
217 222
218 def test_src_block_symlink_traversal(self): 223 def test_src_block_symlink_traversal(self):
219 """Do not allow reading through a symlink dir.""" 224 """Do not allow reading through a symlink dir."""
220 realfile = os.path.join(self.tempdir, 'file.txt') 225 realfile = os.path.join(self.tempdir, "file.txt")
221 self.touch(realfile) 226 self.touch(realfile)
222 src = os.path.join(self.worktree, 'bar', 'file.txt') 227 src = os.path.join(self.worktree, "bar", "file.txt")
223 platform_utils.symlink(self.tempdir, os.path.join(self.worktree, 'bar')) 228 platform_utils.symlink(self.tempdir, os.path.join(self.worktree, "bar"))
224 self.assertExists(src) 229 self.assertExists(src)
225 cf = self.CopyFile('bar/file.txt', 'foo') 230 cf = self.CopyFile("bar/file.txt", "foo")
226 self.assertRaises(error.ManifestInvalidPathError, cf._Copy) 231 self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
227 232
228 def test_src_block_copy_from_dir(self): 233 def test_src_block_copy_from_dir(self):
229 """Do not allow copying from a directory.""" 234 """Do not allow copying from a directory."""
230 src = os.path.join(self.worktree, 'dir') 235 src = os.path.join(self.worktree, "dir")
231 os.makedirs(src) 236 os.makedirs(src)
232 cf = self.CopyFile('dir', 'foo') 237 cf = self.CopyFile("dir", "foo")
233 self.assertRaises(error.ManifestInvalidPathError, cf._Copy) 238 self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
234 239
235 def test_dest_block_symlink(self): 240 def test_dest_block_symlink(self):
236 """Do not allow writing to a symlink.""" 241 """Do not allow writing to a symlink."""
237 src = os.path.join(self.worktree, 'foo.txt') 242 src = os.path.join(self.worktree, "foo.txt")
238 self.touch(src) 243 self.touch(src)
239 platform_utils.symlink('dest', os.path.join(self.topdir, 'sym')) 244 platform_utils.symlink("dest", os.path.join(self.topdir, "sym"))
240 cf = self.CopyFile('foo.txt', 'sym') 245 cf = self.CopyFile("foo.txt", "sym")
241 self.assertRaises(error.ManifestInvalidPathError, cf._Copy) 246 self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
242 247
243 def test_dest_block_symlink_traversal(self): 248 def test_dest_block_symlink_traversal(self):
244 """Do not allow writing through a symlink dir.""" 249 """Do not allow writing through a symlink dir."""
245 src = os.path.join(self.worktree, 'foo.txt') 250 src = os.path.join(self.worktree, "foo.txt")
246 self.touch(src) 251 self.touch(src)
247 platform_utils.symlink(tempfile.gettempdir(), 252 platform_utils.symlink(
248 os.path.join(self.topdir, 'sym')) 253 tempfile.gettempdir(), os.path.join(self.topdir, "sym")
249 cf = self.CopyFile('foo.txt', 'sym/foo.txt') 254 )
250 self.assertRaises(error.ManifestInvalidPathError, cf._Copy) 255 cf = self.CopyFile("foo.txt", "sym/foo.txt")
251 256 self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
252 def test_src_block_copy_to_dir(self): 257
253 """Do not allow copying to a directory.""" 258 def test_src_block_copy_to_dir(self):
254 src = os.path.join(self.worktree, 'foo.txt') 259 """Do not allow copying to a directory."""
255 self.touch(src) 260 src = os.path.join(self.worktree, "foo.txt")
256 os.makedirs(os.path.join(self.topdir, 'dir')) 261 self.touch(src)
257 cf = self.CopyFile('foo.txt', 'dir') 262 os.makedirs(os.path.join(self.topdir, "dir"))
258 self.assertRaises(error.ManifestInvalidPathError, cf._Copy) 263 cf = self.CopyFile("foo.txt", "dir")
264 self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
259 265
260 266
261class LinkFile(CopyLinkTestCase): 267class LinkFile(CopyLinkTestCase):
262 """Check _LinkFile handling.""" 268 """Check _LinkFile handling."""
263 269
264 def LinkFile(self, src, dest): 270 def LinkFile(self, src, dest):
265 return project._LinkFile(self.worktree, src, self.topdir, dest) 271 return project._LinkFile(self.worktree, src, self.topdir, dest)
266 272
267 def test_basic(self): 273 def test_basic(self):
268 """Basic test of linking a file from a project into the toplevel.""" 274 """Basic test of linking a file from a project into the toplevel."""
269 src = os.path.join(self.worktree, 'foo.txt') 275 src = os.path.join(self.worktree, "foo.txt")
270 self.touch(src) 276 self.touch(src)
271 lf = self.LinkFile('foo.txt', 'foo') 277 lf = self.LinkFile("foo.txt", "foo")
272 lf._Link() 278 lf._Link()
273 dest = os.path.join(self.topdir, 'foo') 279 dest = os.path.join(self.topdir, "foo")
274 self.assertExists(dest) 280 self.assertExists(dest)
275 self.assertTrue(os.path.islink(dest)) 281 self.assertTrue(os.path.islink(dest))
276 self.assertEqual(os.path.join('git-project', 'foo.txt'), os.readlink(dest)) 282 self.assertEqual(
277 283 os.path.join("git-project", "foo.txt"), os.readlink(dest)
278 def test_src_subdir(self): 284 )
279 """Link to a file in a subdir of a project.""" 285
280 src = os.path.join(self.worktree, 'bar', 'foo.txt') 286 def test_src_subdir(self):
281 os.makedirs(os.path.dirname(src)) 287 """Link to a file in a subdir of a project."""
282 self.touch(src) 288 src = os.path.join(self.worktree, "bar", "foo.txt")
283 lf = self.LinkFile('bar/foo.txt', 'foo') 289 os.makedirs(os.path.dirname(src))
284 lf._Link() 290 self.touch(src)
285 self.assertExists(os.path.join(self.topdir, 'foo')) 291 lf = self.LinkFile("bar/foo.txt", "foo")
286 292 lf._Link()
287 def test_src_self(self): 293 self.assertExists(os.path.join(self.topdir, "foo"))
288 """Link to the project itself.""" 294
289 dest = os.path.join(self.topdir, 'foo', 'bar') 295 def test_src_self(self):
290 lf = self.LinkFile('.', 'foo/bar') 296 """Link to the project itself."""
291 lf._Link() 297 dest = os.path.join(self.topdir, "foo", "bar")
292 self.assertExists(dest) 298 lf = self.LinkFile(".", "foo/bar")
293 self.assertEqual(os.path.join('..', 'git-project'), os.readlink(dest)) 299 lf._Link()
294 300 self.assertExists(dest)
295 def test_dest_subdir(self): 301 self.assertEqual(os.path.join("..", "git-project"), os.readlink(dest))
296 """Link a file to a subdir of a checkout.""" 302
297 src = os.path.join(self.worktree, 'foo.txt') 303 def test_dest_subdir(self):
298 self.touch(src) 304 """Link a file to a subdir of a checkout."""
299 lf = self.LinkFile('foo.txt', 'sub/dir/foo/bar') 305 src = os.path.join(self.worktree, "foo.txt")
300 self.assertFalse(os.path.exists(os.path.join(self.topdir, 'sub'))) 306 self.touch(src)
301 lf._Link() 307 lf = self.LinkFile("foo.txt", "sub/dir/foo/bar")
302 self.assertExists(os.path.join(self.topdir, 'sub', 'dir', 'foo', 'bar')) 308 self.assertFalse(os.path.exists(os.path.join(self.topdir, "sub")))
303 309 lf._Link()
304 def test_src_block_relative(self): 310 self.assertExists(os.path.join(self.topdir, "sub", "dir", "foo", "bar"))
305 """Do not allow relative symlinks.""" 311
306 BAD_SOURCES = ( 312 def test_src_block_relative(self):
307 './', 313 """Do not allow relative symlinks."""
308 '..', 314 BAD_SOURCES = (
309 '../', 315 "./",
310 'foo/.', 316 "..",
311 'foo/./bar', 317 "../",
312 'foo/..', 318 "foo/.",
313 'foo/../foo', 319 "foo/./bar",
314 ) 320 "foo/..",
315 for src in BAD_SOURCES: 321 "foo/../foo",
316 lf = self.LinkFile(src, 'foo') 322 )
317 self.assertRaises(error.ManifestInvalidPathError, lf._Link) 323 for src in BAD_SOURCES:
318 324 lf = self.LinkFile(src, "foo")
319 def test_update(self): 325 self.assertRaises(error.ManifestInvalidPathError, lf._Link)
320 """Make sure changed targets get updated.""" 326
321 dest = os.path.join(self.topdir, 'sym') 327 def test_update(self):
322 328 """Make sure changed targets get updated."""
323 src = os.path.join(self.worktree, 'foo.txt') 329 dest = os.path.join(self.topdir, "sym")
324 self.touch(src) 330
325 lf = self.LinkFile('foo.txt', 'sym') 331 src = os.path.join(self.worktree, "foo.txt")
326 lf._Link() 332 self.touch(src)
327 self.assertEqual(os.path.join('git-project', 'foo.txt'), os.readlink(dest)) 333 lf = self.LinkFile("foo.txt", "sym")
328 334 lf._Link()
329 # Point the symlink somewhere else. 335 self.assertEqual(
330 os.unlink(dest) 336 os.path.join("git-project", "foo.txt"), os.readlink(dest)
331 platform_utils.symlink(self.tempdir, dest) 337 )
332 lf._Link() 338
333 self.assertEqual(os.path.join('git-project', 'foo.txt'), os.readlink(dest)) 339 # Point the symlink somewhere else.
340 os.unlink(dest)
341 platform_utils.symlink(self.tempdir, dest)
342 lf._Link()
343 self.assertEqual(
344 os.path.join("git-project", "foo.txt"), os.readlink(dest)
345 )
334 346
335 347
336class MigrateWorkTreeTests(unittest.TestCase): 348class MigrateWorkTreeTests(unittest.TestCase):
337 """Check _MigrateOldWorkTreeGitDir handling.""" 349 """Check _MigrateOldWorkTreeGitDir handling."""
338 350
339 _SYMLINKS = { 351 _SYMLINKS = {
340 'config', 'description', 'hooks', 'info', 'logs', 'objects', 352 "config",
341 'packed-refs', 'refs', 'rr-cache', 'shallow', 'svn', 353 "description",
342 } 354 "hooks",
343 _FILES = { 355 "info",
344 'COMMIT_EDITMSG', 'FETCH_HEAD', 'HEAD', 'index', 'ORIG_HEAD', 356 "logs",
345 'unknown-file-should-be-migrated', 357 "objects",
346 } 358 "packed-refs",
347 _CLEAN_FILES = { 359 "refs",
348 'a-vim-temp-file~', '#an-emacs-temp-file#', 360 "rr-cache",
349 } 361 "shallow",
350 362 "svn",
351 @classmethod 363 }
352 @contextlib.contextmanager 364 _FILES = {
353 def _simple_layout(cls): 365 "COMMIT_EDITMSG",
354 """Create a simple repo client checkout to test against.""" 366 "FETCH_HEAD",
355 with tempfile.TemporaryDirectory() as tempdir: 367 "HEAD",
356 tempdir = Path(tempdir) 368 "index",
357 369 "ORIG_HEAD",
358 gitdir = tempdir / '.repo/projects/src/test.git' 370 "unknown-file-should-be-migrated",
359 gitdir.mkdir(parents=True) 371 }
360 cmd = ['git', 'init', '--bare', str(gitdir)] 372 _CLEAN_FILES = {
361 subprocess.check_call(cmd) 373 "a-vim-temp-file~",
362 374 "#an-emacs-temp-file#",
363 dotgit = tempdir / 'src/test/.git' 375 }
364 dotgit.mkdir(parents=True) 376
365 for name in cls._SYMLINKS: 377 @classmethod
366 (dotgit / name).symlink_to(f'../../../.repo/projects/src/test.git/{name}') 378 @contextlib.contextmanager
367 for name in cls._FILES | cls._CLEAN_FILES: 379 def _simple_layout(cls):
368 (dotgit / name).write_text(name) 380 """Create a simple repo client checkout to test against."""
369 381 with tempfile.TemporaryDirectory() as tempdir:
370 yield tempdir 382 tempdir = Path(tempdir)
371 383
372 def test_standard(self): 384 gitdir = tempdir / ".repo/projects/src/test.git"
373 """Migrate a standard checkout that we expect.""" 385 gitdir.mkdir(parents=True)
374 with self._simple_layout() as tempdir: 386 cmd = ["git", "init", "--bare", str(gitdir)]
375 dotgit = tempdir / 'src/test/.git' 387 subprocess.check_call(cmd)
376 project.Project._MigrateOldWorkTreeGitDir(str(dotgit)) 388
377 389 dotgit = tempdir / "src/test/.git"
378 # Make sure the dir was transformed into a symlink. 390 dotgit.mkdir(parents=True)
379 self.assertTrue(dotgit.is_symlink()) 391 for name in cls._SYMLINKS:
380 self.assertEqual(os.readlink(dotgit), os.path.normpath('../../.repo/projects/src/test.git')) 392 (dotgit / name).symlink_to(
381 393 f"../../../.repo/projects/src/test.git/{name}"
382 # Make sure files were moved over. 394 )
383 gitdir = tempdir / '.repo/projects/src/test.git' 395 for name in cls._FILES | cls._CLEAN_FILES:
384 for name in self._FILES: 396 (dotgit / name).write_text(name)
385 self.assertEqual(name, (gitdir / name).read_text()) 397
386 # Make sure files were removed. 398 yield tempdir
387 for name in self._CLEAN_FILES: 399
388 self.assertFalse((gitdir / name).exists()) 400 def test_standard(self):
389 401 """Migrate a standard checkout that we expect."""
390 def test_unknown(self): 402 with self._simple_layout() as tempdir:
391 """A checkout with unknown files should abort.""" 403 dotgit = tempdir / "src/test/.git"
392 with self._simple_layout() as tempdir: 404 project.Project._MigrateOldWorkTreeGitDir(str(dotgit))
393 dotgit = tempdir / 'src/test/.git' 405
394 (tempdir / '.repo/projects/src/test.git/random-file').write_text('one') 406 # Make sure the dir was transformed into a symlink.
395 (dotgit / 'random-file').write_text('two') 407 self.assertTrue(dotgit.is_symlink())
396 with self.assertRaises(error.GitError): 408 self.assertEqual(
397 project.Project._MigrateOldWorkTreeGitDir(str(dotgit)) 409 os.readlink(dotgit),
398 410 os.path.normpath("../../.repo/projects/src/test.git"),
399 # Make sure no content was actually changed. 411 )
400 self.assertTrue(dotgit.is_dir()) 412
401 for name in self._FILES: 413 # Make sure files were moved over.
402 self.assertTrue((dotgit / name).is_file()) 414 gitdir = tempdir / ".repo/projects/src/test.git"
403 for name in self._CLEAN_FILES: 415 for name in self._FILES:
404 self.assertTrue((dotgit / name).is_file()) 416 self.assertEqual(name, (gitdir / name).read_text())
405 for name in self._SYMLINKS: 417 # Make sure files were removed.
406 self.assertTrue((dotgit / name).is_symlink()) 418 for name in self._CLEAN_FILES:
419 self.assertFalse((gitdir / name).exists())
420
421 def test_unknown(self):
422 """A checkout with unknown files should abort."""
423 with self._simple_layout() as tempdir:
424 dotgit = tempdir / "src/test/.git"
425 (tempdir / ".repo/projects/src/test.git/random-file").write_text(
426 "one"
427 )
428 (dotgit / "random-file").write_text("two")
429 with self.assertRaises(error.GitError):
430 project.Project._MigrateOldWorkTreeGitDir(str(dotgit))
431
432 # Make sure no content was actually changed.
433 self.assertTrue(dotgit.is_dir())
434 for name in self._FILES:
435 self.assertTrue((dotgit / name).is_file())
436 for name in self._CLEAN_FILES:
437 self.assertTrue((dotgit / name).is_file())
438 for name in self._SYMLINKS:
439 self.assertTrue((dotgit / name).is_symlink())
407 440
408 441
409class ManifestPropertiesFetchedCorrectly(unittest.TestCase): 442class ManifestPropertiesFetchedCorrectly(unittest.TestCase):
410 """Ensure properties are fetched properly.""" 443 """Ensure properties are fetched properly."""
411 444
412 def setUpManifest(self, tempdir): 445 def setUpManifest(self, tempdir):
413 repodir = os.path.join(tempdir, '.repo') 446 repodir = os.path.join(tempdir, ".repo")
414 manifest_dir = os.path.join(repodir, 'manifests') 447 manifest_dir = os.path.join(repodir, "manifests")
415 manifest_file = os.path.join( 448 manifest_file = os.path.join(repodir, manifest_xml.MANIFEST_FILE_NAME)
416 repodir, manifest_xml.MANIFEST_FILE_NAME) 449 os.mkdir(repodir)
417 local_manifest_dir = os.path.join( 450 os.mkdir(manifest_dir)
418 repodir, manifest_xml.LOCAL_MANIFESTS_DIR_NAME) 451 manifest = manifest_xml.XmlManifest(repodir, manifest_file)
419 os.mkdir(repodir)
420 os.mkdir(manifest_dir)
421 manifest = manifest_xml.XmlManifest(repodir, manifest_file)
422 452
423 return project.ManifestProject( 453 return project.ManifestProject(
424 manifest, 'test/manifest', os.path.join(tempdir, '.git'), tempdir) 454 manifest, "test/manifest", os.path.join(tempdir, ".git"), tempdir
455 )
425 456
426 def test_manifest_config_properties(self): 457 def test_manifest_config_properties(self):
427 """Test we are fetching the manifest config properties correctly.""" 458 """Test we are fetching the manifest config properties correctly."""
428 459
429 with TempGitTree() as tempdir: 460 with TempGitTree() as tempdir:
430 fakeproj = self.setUpManifest(tempdir) 461 fakeproj = self.setUpManifest(tempdir)
431 462
432 # Set property using the expected Set method, then ensure 463 # Set property using the expected Set method, then ensure
433 # the porperty functions are using the correct Get methods. 464 # the porperty functions are using the correct Get methods.
434 fakeproj.config.SetString( 465 fakeproj.config.SetString(
435 'manifest.standalone', 'https://chicken/manifest.git') 466 "manifest.standalone", "https://chicken/manifest.git"
436 self.assertEqual( 467 )
437 fakeproj.standalone_manifest_url, 'https://chicken/manifest.git') 468 self.assertEqual(
469 fakeproj.standalone_manifest_url, "https://chicken/manifest.git"
470 )
438 471
439 fakeproj.config.SetString('manifest.groups', 'test-group, admin-group') 472 fakeproj.config.SetString(
440 self.assertEqual(fakeproj.manifest_groups, 'test-group, admin-group') 473 "manifest.groups", "test-group, admin-group"
474 )
475 self.assertEqual(
476 fakeproj.manifest_groups, "test-group, admin-group"
477 )
441 478
442 fakeproj.config.SetString('repo.reference', 'mirror/ref') 479 fakeproj.config.SetString("repo.reference", "mirror/ref")
443 self.assertEqual(fakeproj.reference, 'mirror/ref') 480 self.assertEqual(fakeproj.reference, "mirror/ref")
444 481
445 fakeproj.config.SetBoolean('repo.dissociate', False) 482 fakeproj.config.SetBoolean("repo.dissociate", False)
446 self.assertFalse(fakeproj.dissociate) 483 self.assertFalse(fakeproj.dissociate)
447 484
448 fakeproj.config.SetBoolean('repo.archive', False) 485 fakeproj.config.SetBoolean("repo.archive", False)
449 self.assertFalse(fakeproj.archive) 486 self.assertFalse(fakeproj.archive)
450 487
451 fakeproj.config.SetBoolean('repo.mirror', False) 488 fakeproj.config.SetBoolean("repo.mirror", False)
452 self.assertFalse(fakeproj.mirror) 489 self.assertFalse(fakeproj.mirror)
453 490
454 fakeproj.config.SetBoolean('repo.worktree', False) 491 fakeproj.config.SetBoolean("repo.worktree", False)
455 self.assertFalse(fakeproj.use_worktree) 492 self.assertFalse(fakeproj.use_worktree)
456 493
457 fakeproj.config.SetBoolean('repo.clonebundle', False) 494 fakeproj.config.SetBoolean("repo.clonebundle", False)
458 self.assertFalse(fakeproj.clone_bundle) 495 self.assertFalse(fakeproj.clone_bundle)
459 496
460 fakeproj.config.SetBoolean('repo.submodules', False) 497 fakeproj.config.SetBoolean("repo.submodules", False)
461 self.assertFalse(fakeproj.submodules) 498 self.assertFalse(fakeproj.submodules)
462 499
463 fakeproj.config.SetBoolean('repo.git-lfs', False) 500 fakeproj.config.SetBoolean("repo.git-lfs", False)
464 self.assertFalse(fakeproj.git_lfs) 501 self.assertFalse(fakeproj.git_lfs)
465 502
466 fakeproj.config.SetBoolean('repo.superproject', False) 503 fakeproj.config.SetBoolean("repo.superproject", False)
467 self.assertFalse(fakeproj.use_superproject) 504 self.assertFalse(fakeproj.use_superproject)
468 505
469 fakeproj.config.SetBoolean('repo.partialclone', False) 506 fakeproj.config.SetBoolean("repo.partialclone", False)
470 self.assertFalse(fakeproj.partial_clone) 507 self.assertFalse(fakeproj.partial_clone)
471 508
472 fakeproj.config.SetString('repo.depth', '48') 509 fakeproj.config.SetString("repo.depth", "48")
473 self.assertEqual(fakeproj.depth, '48') 510 self.assertEqual(fakeproj.depth, "48")
474 511
475 fakeproj.config.SetString('repo.clonefilter', 'blob:limit=10M') 512 fakeproj.config.SetString("repo.clonefilter", "blob:limit=10M")
476 self.assertEqual(fakeproj.clone_filter, 'blob:limit=10M') 513 self.assertEqual(fakeproj.clone_filter, "blob:limit=10M")
477 514
478 fakeproj.config.SetString('repo.partialcloneexclude', 'third_party/big_repo') 515 fakeproj.config.SetString(
479 self.assertEqual(fakeproj.partial_clone_exclude, 'third_party/big_repo') 516 "repo.partialcloneexclude", "third_party/big_repo"
517 )
518 self.assertEqual(
519 fakeproj.partial_clone_exclude, "third_party/big_repo"
520 )
480 521
481 fakeproj.config.SetString('manifest.platform', 'auto') 522 fakeproj.config.SetString("manifest.platform", "auto")
482 self.assertEqual(fakeproj.manifest_platform, 'auto') 523 self.assertEqual(fakeproj.manifest_platform, "auto")
diff --git a/tests/test_repo_trace.py b/tests/test_repo_trace.py
index 5faf2938..e4aeb5de 100644
--- a/tests/test_repo_trace.py
+++ b/tests/test_repo_trace.py
@@ -22,35 +22,39 @@ import repo_trace
22 22
23 23
24class TraceTests(unittest.TestCase): 24class TraceTests(unittest.TestCase):
25 """Check Trace behavior.""" 25 """Check Trace behavior."""
26 26
27 def testTrace_MaxSizeEnforced(self): 27 def testTrace_MaxSizeEnforced(self):
28 content = 'git chicken' 28 content = "git chicken"
29 29
30 with repo_trace.Trace(content, first_trace=True): 30 with repo_trace.Trace(content, first_trace=True):
31 pass 31 pass
32 first_trace_size = os.path.getsize(repo_trace._TRACE_FILE) 32 first_trace_size = os.path.getsize(repo_trace._TRACE_FILE)
33 33
34 with repo_trace.Trace(content): 34 with repo_trace.Trace(content):
35 pass 35 pass
36 self.assertGreater( 36 self.assertGreater(
37 os.path.getsize(repo_trace._TRACE_FILE), first_trace_size) 37 os.path.getsize(repo_trace._TRACE_FILE), first_trace_size
38 38 )
39 # Check we clear everything is the last chunk is larger than _MAX_SIZE. 39
40 with mock.patch('repo_trace._MAX_SIZE', 0): 40 # Check we clear everything is the last chunk is larger than _MAX_SIZE.
41 with repo_trace.Trace(content, first_trace=True): 41 with mock.patch("repo_trace._MAX_SIZE", 0):
42 pass 42 with repo_trace.Trace(content, first_trace=True):
43 self.assertEqual(first_trace_size, 43 pass
44 os.path.getsize(repo_trace._TRACE_FILE)) 44 self.assertEqual(
45 45 first_trace_size, os.path.getsize(repo_trace._TRACE_FILE)
46 # Check we only clear the chunks we need to. 46 )
47 repo_trace._MAX_SIZE = (first_trace_size + 1) / (1024 * 1024) 47
48 with repo_trace.Trace(content, first_trace=True): 48 # Check we only clear the chunks we need to.
49 pass 49 repo_trace._MAX_SIZE = (first_trace_size + 1) / (1024 * 1024)
50 self.assertEqual(first_trace_size * 2, 50 with repo_trace.Trace(content, first_trace=True):
51 os.path.getsize(repo_trace._TRACE_FILE)) 51 pass
52 52 self.assertEqual(
53 with repo_trace.Trace(content, first_trace=True): 53 first_trace_size * 2, os.path.getsize(repo_trace._TRACE_FILE)
54 pass 54 )
55 self.assertEqual(first_trace_size * 2, 55
56 os.path.getsize(repo_trace._TRACE_FILE)) 56 with repo_trace.Trace(content, first_trace=True):
57 pass
58 self.assertEqual(
59 first_trace_size * 2, os.path.getsize(repo_trace._TRACE_FILE)
60 )
diff --git a/tests/test_ssh.py b/tests/test_ssh.py
index ffb5cb94..a9c1be7f 100644
--- a/tests/test_ssh.py
+++ b/tests/test_ssh.py
@@ -23,52 +23,56 @@ import ssh
23 23
24 24
25class SshTests(unittest.TestCase): 25class SshTests(unittest.TestCase):
26 """Tests the ssh functions.""" 26 """Tests the ssh functions."""
27 27
28 def test_parse_ssh_version(self): 28 def test_parse_ssh_version(self):
29 """Check _parse_ssh_version() handling.""" 29 """Check _parse_ssh_version() handling."""
30 ver = ssh._parse_ssh_version('Unknown\n') 30 ver = ssh._parse_ssh_version("Unknown\n")
31 self.assertEqual(ver, ()) 31 self.assertEqual(ver, ())
32 ver = ssh._parse_ssh_version('OpenSSH_1.0\n') 32 ver = ssh._parse_ssh_version("OpenSSH_1.0\n")
33 self.assertEqual(ver, (1, 0)) 33 self.assertEqual(ver, (1, 0))
34 ver = ssh._parse_ssh_version('OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f 6 Jan 2014\n') 34 ver = ssh._parse_ssh_version(
35 self.assertEqual(ver, (6, 6, 1)) 35 "OpenSSH_6.6.1p1 Ubuntu-2ubuntu2.13, OpenSSL 1.0.1f 6 Jan 2014\n"
36 ver = ssh._parse_ssh_version('OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n 7 Dec 2017\n') 36 )
37 self.assertEqual(ver, (7, 6)) 37 self.assertEqual(ver, (6, 6, 1))
38 ver = ssh._parse_ssh_version(
39 "OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n 7 Dec 2017\n"
40 )
41 self.assertEqual(ver, (7, 6))
38 42
39 def test_version(self): 43 def test_version(self):
40 """Check version() handling.""" 44 """Check version() handling."""
41 with mock.patch('ssh._run_ssh_version', return_value='OpenSSH_1.2\n'): 45 with mock.patch("ssh._run_ssh_version", return_value="OpenSSH_1.2\n"):
42 self.assertEqual(ssh.version(), (1, 2)) 46 self.assertEqual(ssh.version(), (1, 2))
43 47
44 def test_context_manager_empty(self): 48 def test_context_manager_empty(self):
45 """Verify context manager with no clients works correctly.""" 49 """Verify context manager with no clients works correctly."""
46 with multiprocessing.Manager() as manager: 50 with multiprocessing.Manager() as manager:
47 with ssh.ProxyManager(manager): 51 with ssh.ProxyManager(manager):
48 pass 52 pass
49 53
50 def test_context_manager_child_cleanup(self): 54 def test_context_manager_child_cleanup(self):
51 """Verify orphaned clients & masters get cleaned up.""" 55 """Verify orphaned clients & masters get cleaned up."""
52 with multiprocessing.Manager() as manager: 56 with multiprocessing.Manager() as manager:
53 with ssh.ProxyManager(manager) as ssh_proxy: 57 with ssh.ProxyManager(manager) as ssh_proxy:
54 client = subprocess.Popen(['sleep', '964853320']) 58 client = subprocess.Popen(["sleep", "964853320"])
55 ssh_proxy.add_client(client) 59 ssh_proxy.add_client(client)
56 master = subprocess.Popen(['sleep', '964853321']) 60 master = subprocess.Popen(["sleep", "964853321"])
57 ssh_proxy.add_master(master) 61 ssh_proxy.add_master(master)
58 # If the process still exists, these will throw timeout errors. 62 # If the process still exists, these will throw timeout errors.
59 client.wait(0) 63 client.wait(0)
60 master.wait(0) 64 master.wait(0)
61 65
62 def test_ssh_sock(self): 66 def test_ssh_sock(self):
63 """Check sock() function.""" 67 """Check sock() function."""
64 manager = multiprocessing.Manager() 68 manager = multiprocessing.Manager()
65 proxy = ssh.ProxyManager(manager) 69 proxy = ssh.ProxyManager(manager)
66 with mock.patch('tempfile.mkdtemp', return_value='/tmp/foo'): 70 with mock.patch("tempfile.mkdtemp", return_value="/tmp/foo"):
67 # old ssh version uses port 71 # Old ssh version uses port.
68 with mock.patch('ssh.version', return_value=(6, 6)): 72 with mock.patch("ssh.version", return_value=(6, 6)):
69 self.assertTrue(proxy.sock().endswith('%p')) 73 self.assertTrue(proxy.sock().endswith("%p"))
70 74
71 proxy._sock_path = None 75 proxy._sock_path = None
72 # new ssh version uses hash 76 # New ssh version uses hash.
73 with mock.patch('ssh.version', return_value=(6, 7)): 77 with mock.patch("ssh.version", return_value=(6, 7)):
74 self.assertTrue(proxy.sock().endswith('%C')) 78 self.assertTrue(proxy.sock().endswith("%C"))
diff --git a/tests/test_subcmds.py b/tests/test_subcmds.py
index bc53051a..73b66e3f 100644
--- a/tests/test_subcmds.py
+++ b/tests/test_subcmds.py
@@ -21,53 +21,57 @@ import subcmds
21 21
22 22
23class AllCommands(unittest.TestCase): 23class AllCommands(unittest.TestCase):
24 """Check registered all_commands.""" 24 """Check registered all_commands."""
25 25
26 def test_required_basic(self): 26 def test_required_basic(self):
27 """Basic checking of registered commands.""" 27 """Basic checking of registered commands."""
28 # NB: We don't test all subcommands as we want to avoid "change detection" 28 # NB: We don't test all subcommands as we want to avoid "change
29 # tests, so we just look for the most common/important ones here that are 29 # detection" tests, so we just look for the most common/important ones
30 # unlikely to ever change. 30 # here that are unlikely to ever change.
31 for cmd in {'cherry-pick', 'help', 'init', 'start', 'sync', 'upload'}: 31 for cmd in {"cherry-pick", "help", "init", "start", "sync", "upload"}:
32 self.assertIn(cmd, subcmds.all_commands) 32 self.assertIn(cmd, subcmds.all_commands)
33 33
34 def test_naming(self): 34 def test_naming(self):
35 """Verify we don't add things that we shouldn't.""" 35 """Verify we don't add things that we shouldn't."""
36 for cmd in subcmds.all_commands: 36 for cmd in subcmds.all_commands:
37 # Reject filename suffixes like "help.py". 37 # Reject filename suffixes like "help.py".
38 self.assertNotIn('.', cmd) 38 self.assertNotIn(".", cmd)
39 39
40 # Make sure all '_' were converted to '-'. 40 # Make sure all '_' were converted to '-'.
41 self.assertNotIn('_', cmd) 41 self.assertNotIn("_", cmd)
42 42
43 # Reject internal python paths like "__init__". 43 # Reject internal python paths like "__init__".
44 self.assertFalse(cmd.startswith('__')) 44 self.assertFalse(cmd.startswith("__"))
45 45
46 def test_help_desc_style(self): 46 def test_help_desc_style(self):
47 """Force some consistency in option descriptions. 47 """Force some consistency in option descriptions.
48 48
49 Python's optparse & argparse has a few default options like --help. Their 49 Python's optparse & argparse has a few default options like --help.
50 option description text uses lowercase sentence fragments, so enforce our 50 Their option description text uses lowercase sentence fragments, so
51 options follow the same style so UI is consistent. 51 enforce our options follow the same style so UI is consistent.
52 52
53 We enforce: 53 We enforce:
54 * Text starts with lowercase. 54 * Text starts with lowercase.
55 * Text doesn't end with period. 55 * Text doesn't end with period.
56 """ 56 """
57 for name, cls in subcmds.all_commands.items(): 57 for name, cls in subcmds.all_commands.items():
58 cmd = cls() 58 cmd = cls()
59 parser = cmd.OptionParser 59 parser = cmd.OptionParser
60 for option in parser.option_list: 60 for option in parser.option_list:
61 if option.help == optparse.SUPPRESS_HELP: 61 if option.help == optparse.SUPPRESS_HELP:
62 continue 62 continue
63 63
64 c = option.help[0] 64 c = option.help[0]
65 self.assertEqual( 65 self.assertEqual(
66 c.lower(), c, 66 c.lower(),
67 msg=f'subcmds/{name}.py: {option.get_opt_string()}: help text ' 67 c,
68 f'should start with lowercase: "{option.help}"') 68 msg=f"subcmds/{name}.py: {option.get_opt_string()}: "
69 f'help text should start with lowercase: "{option.help}"',
70 )
69 71
70 self.assertNotEqual( 72 self.assertNotEqual(
71 option.help[-1], '.', 73 option.help[-1],
72 msg=f'subcmds/{name}.py: {option.get_opt_string()}: help text ' 74 ".",
73 f'should not end in a period: "{option.help}"') 75 msg=f"subcmds/{name}.py: {option.get_opt_string()}: "
76 f'help text should not end in a period: "{option.help}"',
77 )
diff --git a/tests/test_subcmds_init.py b/tests/test_subcmds_init.py
index af4346de..25e5be56 100644
--- a/tests/test_subcmds_init.py
+++ b/tests/test_subcmds_init.py
@@ -20,30 +20,27 @@ from subcmds import init
20 20
21 21
22class InitCommand(unittest.TestCase): 22class InitCommand(unittest.TestCase):
23 """Check registered all_commands.""" 23 """Check registered all_commands."""
24 24
25 def setUp(self): 25 def setUp(self):
26 self.cmd = init.Init() 26 self.cmd = init.Init()
27 27
28 def test_cli_parser_good(self): 28 def test_cli_parser_good(self):
29 """Check valid command line options.""" 29 """Check valid command line options."""
30 ARGV = ( 30 ARGV = ([],)
31 [], 31 for argv in ARGV:
32 ) 32 opts, args = self.cmd.OptionParser.parse_args(argv)
33 for argv in ARGV: 33 self.cmd.ValidateOptions(opts, args)
34 opts, args = self.cmd.OptionParser.parse_args(argv) 34
35 self.cmd.ValidateOptions(opts, args) 35 def test_cli_parser_bad(self):
36 36 """Check invalid command line options."""
37 def test_cli_parser_bad(self): 37 ARGV = (
38 """Check invalid command line options.""" 38 # Too many arguments.
39 ARGV = ( 39 ["url", "asdf"],
40 # Too many arguments. 40 # Conflicting options.
41 ['url', 'asdf'], 41 ["--mirror", "--archive"],
42 42 )
43 # Conflicting options. 43 for argv in ARGV:
44 ['--mirror', '--archive'], 44 opts, args = self.cmd.OptionParser.parse_args(argv)
45 ) 45 with self.assertRaises(SystemExit):
46 for argv in ARGV: 46 self.cmd.ValidateOptions(opts, args)
47 opts, args = self.cmd.OptionParser.parse_args(argv)
48 with self.assertRaises(SystemExit):
49 self.cmd.ValidateOptions(opts, args)
diff --git a/tests/test_subcmds_sync.py b/tests/test_subcmds_sync.py
index 236d54e5..5c8e606e 100644
--- a/tests/test_subcmds_sync.py
+++ b/tests/test_subcmds_sync.py
@@ -23,111 +23,138 @@ import command
23from subcmds import sync 23from subcmds import sync
24 24
25 25
26@pytest.mark.parametrize('use_superproject, cli_args, result', [ 26@pytest.mark.parametrize(
27 (True, ['--current-branch'], True), 27 "use_superproject, cli_args, result",
28 (True, ['--no-current-branch'], True), 28 [
29 (True, [], True), 29 (True, ["--current-branch"], True),
30 (False, ['--current-branch'], True), 30 (True, ["--no-current-branch"], True),
31 (False, ['--no-current-branch'], False), 31 (True, [], True),
32 (False, [], None), 32 (False, ["--current-branch"], True),
33]) 33 (False, ["--no-current-branch"], False),
34 (False, [], None),
35 ],
36)
34def test_get_current_branch_only(use_superproject, cli_args, result): 37def test_get_current_branch_only(use_superproject, cli_args, result):
35 """Test Sync._GetCurrentBranchOnly logic. 38 """Test Sync._GetCurrentBranchOnly logic.
36 39
37 Sync._GetCurrentBranchOnly should return True if a superproject is requested, 40 Sync._GetCurrentBranchOnly should return True if a superproject is
38 and otherwise the value of the current_branch_only option. 41 requested, and otherwise the value of the current_branch_only option.
39 """ 42 """
40 cmd = sync.Sync() 43 cmd = sync.Sync()
41 opts, _ = cmd.OptionParser.parse_args(cli_args) 44 opts, _ = cmd.OptionParser.parse_args(cli_args)
42 45
43 with mock.patch('git_superproject.UseSuperproject', 46 with mock.patch(
44 return_value=use_superproject): 47 "git_superproject.UseSuperproject", return_value=use_superproject
45 assert cmd._GetCurrentBranchOnly(opts, cmd.manifest) == result 48 ):
49 assert cmd._GetCurrentBranchOnly(opts, cmd.manifest) == result
46 50
47 51
48# Used to patch os.cpu_count() for reliable results. 52# Used to patch os.cpu_count() for reliable results.
49OS_CPU_COUNT = 24 53OS_CPU_COUNT = 24
50 54
51@pytest.mark.parametrize('argv, jobs_manifest, jobs, jobs_net, jobs_check', [ 55
52 # No user or manifest settings. 56@pytest.mark.parametrize(
53 ([], None, OS_CPU_COUNT, 1, command.DEFAULT_LOCAL_JOBS), 57 "argv, jobs_manifest, jobs, jobs_net, jobs_check",
54 # No user settings, so manifest settings control. 58 [
55 ([], 3, 3, 3, 3), 59 # No user or manifest settings.
56 # User settings, but no manifest. 60 ([], None, OS_CPU_COUNT, 1, command.DEFAULT_LOCAL_JOBS),
57 (['--jobs=4'], None, 4, 4, 4), 61 # No user settings, so manifest settings control.
58 (['--jobs=4', '--jobs-network=5'], None, 4, 5, 4), 62 ([], 3, 3, 3, 3),
59 (['--jobs=4', '--jobs-checkout=6'], None, 4, 4, 6), 63 # User settings, but no manifest.
60 (['--jobs=4', '--jobs-network=5', '--jobs-checkout=6'], None, 4, 5, 6), 64 (["--jobs=4"], None, 4, 4, 4),
61 (['--jobs-network=5'], None, OS_CPU_COUNT, 5, command.DEFAULT_LOCAL_JOBS), 65 (["--jobs=4", "--jobs-network=5"], None, 4, 5, 4),
62 (['--jobs-checkout=6'], None, OS_CPU_COUNT, 1, 6), 66 (["--jobs=4", "--jobs-checkout=6"], None, 4, 4, 6),
63 (['--jobs-network=5', '--jobs-checkout=6'], None, OS_CPU_COUNT, 5, 6), 67 (["--jobs=4", "--jobs-network=5", "--jobs-checkout=6"], None, 4, 5, 6),
64 # User settings with manifest settings. 68 (
65 (['--jobs=4'], 3, 4, 4, 4), 69 ["--jobs-network=5"],
66 (['--jobs=4', '--jobs-network=5'], 3, 4, 5, 4), 70 None,
67 (['--jobs=4', '--jobs-checkout=6'], 3, 4, 4, 6), 71 OS_CPU_COUNT,
68 (['--jobs=4', '--jobs-network=5', '--jobs-checkout=6'], 3, 4, 5, 6), 72 5,
69 (['--jobs-network=5'], 3, 3, 5, 3), 73 command.DEFAULT_LOCAL_JOBS,
70 (['--jobs-checkout=6'], 3, 3, 3, 6), 74 ),
71 (['--jobs-network=5', '--jobs-checkout=6'], 3, 3, 5, 6), 75 (["--jobs-checkout=6"], None, OS_CPU_COUNT, 1, 6),
72 # Settings that exceed rlimits get capped. 76 (["--jobs-network=5", "--jobs-checkout=6"], None, OS_CPU_COUNT, 5, 6),
73 (['--jobs=1000000'], None, 83, 83, 83), 77 # User settings with manifest settings.
74 ([], 1000000, 83, 83, 83), 78 (["--jobs=4"], 3, 4, 4, 4),
75]) 79 (["--jobs=4", "--jobs-network=5"], 3, 4, 5, 4),
80 (["--jobs=4", "--jobs-checkout=6"], 3, 4, 4, 6),
81 (["--jobs=4", "--jobs-network=5", "--jobs-checkout=6"], 3, 4, 5, 6),
82 (["--jobs-network=5"], 3, 3, 5, 3),
83 (["--jobs-checkout=6"], 3, 3, 3, 6),
84 (["--jobs-network=5", "--jobs-checkout=6"], 3, 3, 5, 6),
85 # Settings that exceed rlimits get capped.
86 (["--jobs=1000000"], None, 83, 83, 83),
87 ([], 1000000, 83, 83, 83),
88 ],
89)
76def test_cli_jobs(argv, jobs_manifest, jobs, jobs_net, jobs_check): 90def test_cli_jobs(argv, jobs_manifest, jobs, jobs_net, jobs_check):
77 """Tests --jobs option behavior.""" 91 """Tests --jobs option behavior."""
78 mp = mock.MagicMock() 92 mp = mock.MagicMock()
79 mp.manifest.default.sync_j = jobs_manifest 93 mp.manifest.default.sync_j = jobs_manifest
80 94
81 cmd = sync.Sync() 95 cmd = sync.Sync()
82 opts, args = cmd.OptionParser.parse_args(argv) 96 opts, args = cmd.OptionParser.parse_args(argv)
83 cmd.ValidateOptions(opts, args) 97 cmd.ValidateOptions(opts, args)
84 98
85 with mock.patch.object(sync, '_rlimit_nofile', return_value=(256, 256)): 99 with mock.patch.object(sync, "_rlimit_nofile", return_value=(256, 256)):
86 with mock.patch.object(os, 'cpu_count', return_value=OS_CPU_COUNT): 100 with mock.patch.object(os, "cpu_count", return_value=OS_CPU_COUNT):
87 cmd._ValidateOptionsWithManifest(opts, mp) 101 cmd._ValidateOptionsWithManifest(opts, mp)
88 assert opts.jobs == jobs 102 assert opts.jobs == jobs
89 assert opts.jobs_network == jobs_net 103 assert opts.jobs_network == jobs_net
90 assert opts.jobs_checkout == jobs_check 104 assert opts.jobs_checkout == jobs_check
91 105
92 106
93class GetPreciousObjectsState(unittest.TestCase): 107class GetPreciousObjectsState(unittest.TestCase):
94 """Tests for _GetPreciousObjectsState.""" 108 """Tests for _GetPreciousObjectsState."""
95 109
96 def setUp(self): 110 def setUp(self):
97 """Common setup.""" 111 """Common setup."""
98 self.cmd = sync.Sync() 112 self.cmd = sync.Sync()
99 self.project = p = mock.MagicMock(use_git_worktrees=False, 113 self.project = p = mock.MagicMock(
100 UseAlternates=False) 114 use_git_worktrees=False, UseAlternates=False
101 p.manifest.GetProjectsWithName.return_value = [p] 115 )
102 116 p.manifest.GetProjectsWithName.return_value = [p]
103 self.opt = mock.Mock(spec_set=['this_manifest_only']) 117
104 self.opt.this_manifest_only = False 118 self.opt = mock.Mock(spec_set=["this_manifest_only"])
105 119 self.opt.this_manifest_only = False
106 def test_worktrees(self): 120
107 """False for worktrees.""" 121 def test_worktrees(self):
108 self.project.use_git_worktrees = True 122 """False for worktrees."""
109 self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt)) 123 self.project.use_git_worktrees = True
110 124 self.assertFalse(
111 def test_not_shared(self): 125 self.cmd._GetPreciousObjectsState(self.project, self.opt)
112 """Singleton project.""" 126 )
113 self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt)) 127
114 128 def test_not_shared(self):
115 def test_shared(self): 129 """Singleton project."""
116 """Shared project.""" 130 self.assertFalse(
117 self.project.manifest.GetProjectsWithName.return_value = [ 131 self.cmd._GetPreciousObjectsState(self.project, self.opt)
118 self.project, self.project 132 )
119 ] 133
120 self.assertTrue(self.cmd._GetPreciousObjectsState(self.project, self.opt)) 134 def test_shared(self):
121 135 """Shared project."""
122 def test_shared_with_alternates(self): 136 self.project.manifest.GetProjectsWithName.return_value = [
123 """Shared project, with alternates.""" 137 self.project,
124 self.project.manifest.GetProjectsWithName.return_value = [ 138 self.project,
125 self.project, self.project 139 ]
126 ] 140 self.assertTrue(
127 self.project.UseAlternates = True 141 self.cmd._GetPreciousObjectsState(self.project, self.opt)
128 self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt)) 142 )
129 143
130 def test_not_found(self): 144 def test_shared_with_alternates(self):
131 """Project not found in manifest.""" 145 """Shared project, with alternates."""
132 self.project.manifest.GetProjectsWithName.return_value = [] 146 self.project.manifest.GetProjectsWithName.return_value = [
133 self.assertFalse(self.cmd._GetPreciousObjectsState(self.project, self.opt)) 147 self.project,
148 self.project,
149 ]
150 self.project.UseAlternates = True
151 self.assertFalse(
152 self.cmd._GetPreciousObjectsState(self.project, self.opt)
153 )
154
155 def test_not_found(self):
156 """Project not found in manifest."""
157 self.project.manifest.GetProjectsWithName.return_value = []
158 self.assertFalse(
159 self.cmd._GetPreciousObjectsState(self.project, self.opt)
160 )
diff --git a/tests/test_update_manpages.py b/tests/test_update_manpages.py
index 0de85be9..12b19ec4 100644
--- a/tests/test_update_manpages.py
+++ b/tests/test_update_manpages.py
@@ -20,9 +20,9 @@ from release import update_manpages
20 20
21 21
22class UpdateManpagesTest(unittest.TestCase): 22class UpdateManpagesTest(unittest.TestCase):
23 """Tests the update-manpages code.""" 23 """Tests the update-manpages code."""
24 24
25 def test_replace_regex(self): 25 def test_replace_regex(self):
26 """Check that replace_regex works.""" 26 """Check that replace_regex works."""
27 data = '\n\033[1mSummary\033[m\n' 27 data = "\n\033[1mSummary\033[m\n"
28 self.assertEqual(update_manpages.replace_regex(data),'\nSummary\n') 28 self.assertEqual(update_manpages.replace_regex(data), "\nSummary\n")
diff --git a/tests/test_wrapper.py b/tests/test_wrapper.py
index ef879a5d..21fa094d 100644
--- a/tests/test_wrapper.py
+++ b/tests/test_wrapper.py
@@ -28,528 +28,615 @@ import wrapper
28 28
29 29
30def fixture(*paths): 30def fixture(*paths):
31 """Return a path relative to tests/fixtures. 31 """Return a path relative to tests/fixtures."""
32 """ 32 return os.path.join(os.path.dirname(__file__), "fixtures", *paths)
33 return os.path.join(os.path.dirname(__file__), 'fixtures', *paths)
34 33
35 34
36class RepoWrapperTestCase(unittest.TestCase): 35class RepoWrapperTestCase(unittest.TestCase):
37 """TestCase for the wrapper module.""" 36 """TestCase for the wrapper module."""
38 37
39 def setUp(self): 38 def setUp(self):
40 """Load the wrapper module every time.""" 39 """Load the wrapper module every time."""
41 wrapper.Wrapper.cache_clear() 40 wrapper.Wrapper.cache_clear()
42 self.wrapper = wrapper.Wrapper() 41 self.wrapper = wrapper.Wrapper()
43 42
44 43
45class RepoWrapperUnitTest(RepoWrapperTestCase): 44class RepoWrapperUnitTest(RepoWrapperTestCase):
46 """Tests helper functions in the repo wrapper 45 """Tests helper functions in the repo wrapper"""
47 """ 46
48 47 def test_version(self):
49 def test_version(self): 48 """Make sure _Version works."""
50 """Make sure _Version works.""" 49 with self.assertRaises(SystemExit) as e:
51 with self.assertRaises(SystemExit) as e: 50 with mock.patch("sys.stdout", new_callable=StringIO) as stdout:
52 with mock.patch('sys.stdout', new_callable=StringIO) as stdout: 51 with mock.patch("sys.stderr", new_callable=StringIO) as stderr:
53 with mock.patch('sys.stderr', new_callable=StringIO) as stderr: 52 self.wrapper._Version()
54 self.wrapper._Version() 53 self.assertEqual(0, e.exception.code)
55 self.assertEqual(0, e.exception.code) 54 self.assertEqual("", stderr.getvalue())
56 self.assertEqual('', stderr.getvalue()) 55 self.assertIn("repo launcher version", stdout.getvalue())
57 self.assertIn('repo launcher version', stdout.getvalue()) 56
58 57 def test_python_constraints(self):
59 def test_python_constraints(self): 58 """The launcher should never require newer than main.py."""
60 """The launcher should never require newer than main.py.""" 59 self.assertGreaterEqual(
61 self.assertGreaterEqual(main.MIN_PYTHON_VERSION_HARD, 60 main.MIN_PYTHON_VERSION_HARD, self.wrapper.MIN_PYTHON_VERSION_HARD
62 self.wrapper.MIN_PYTHON_VERSION_HARD) 61 )
63 self.assertGreaterEqual(main.MIN_PYTHON_VERSION_SOFT, 62 self.assertGreaterEqual(
64 self.wrapper.MIN_PYTHON_VERSION_SOFT) 63 main.MIN_PYTHON_VERSION_SOFT, self.wrapper.MIN_PYTHON_VERSION_SOFT
65 # Make sure the versions are themselves in sync. 64 )
66 self.assertGreaterEqual(self.wrapper.MIN_PYTHON_VERSION_SOFT, 65 # Make sure the versions are themselves in sync.
67 self.wrapper.MIN_PYTHON_VERSION_HARD) 66 self.assertGreaterEqual(
68 67 self.wrapper.MIN_PYTHON_VERSION_SOFT,
69 def test_init_parser(self): 68 self.wrapper.MIN_PYTHON_VERSION_HARD,
70 """Make sure 'init' GetParser works.""" 69 )
71 parser = self.wrapper.GetParser(gitc_init=False) 70
72 opts, args = parser.parse_args([]) 71 def test_init_parser(self):
73 self.assertEqual([], args) 72 """Make sure 'init' GetParser works."""
74 self.assertIsNone(opts.manifest_url) 73 parser = self.wrapper.GetParser(gitc_init=False)
75 74 opts, args = parser.parse_args([])
76 def test_gitc_init_parser(self): 75 self.assertEqual([], args)
77 """Make sure 'gitc-init' GetParser works.""" 76 self.assertIsNone(opts.manifest_url)
78 parser = self.wrapper.GetParser(gitc_init=True) 77
79 opts, args = parser.parse_args([]) 78 def test_gitc_init_parser(self):
80 self.assertEqual([], args) 79 """Make sure 'gitc-init' GetParser works."""
81 self.assertIsNone(opts.manifest_file) 80 parser = self.wrapper.GetParser(gitc_init=True)
82 81 opts, args = parser.parse_args([])
83 def test_get_gitc_manifest_dir_no_gitc(self): 82 self.assertEqual([], args)
84 """ 83 self.assertIsNone(opts.manifest_file)
85 Test reading a missing gitc config file 84
86 """ 85 def test_get_gitc_manifest_dir_no_gitc(self):
87 self.wrapper.GITC_CONFIG_FILE = fixture('missing_gitc_config') 86 """
88 val = self.wrapper.get_gitc_manifest_dir() 87 Test reading a missing gitc config file
89 self.assertEqual(val, '') 88 """
90 89 self.wrapper.GITC_CONFIG_FILE = fixture("missing_gitc_config")
91 def test_get_gitc_manifest_dir(self): 90 val = self.wrapper.get_gitc_manifest_dir()
92 """ 91 self.assertEqual(val, "")
93 Test reading the gitc config file and parsing the directory 92
94 """ 93 def test_get_gitc_manifest_dir(self):
95 self.wrapper.GITC_CONFIG_FILE = fixture('gitc_config') 94 """
96 val = self.wrapper.get_gitc_manifest_dir() 95 Test reading the gitc config file and parsing the directory
97 self.assertEqual(val, '/test/usr/local/google/gitc') 96 """
98 97 self.wrapper.GITC_CONFIG_FILE = fixture("gitc_config")
99 def test_gitc_parse_clientdir_no_gitc(self): 98 val = self.wrapper.get_gitc_manifest_dir()
100 """ 99 self.assertEqual(val, "/test/usr/local/google/gitc")
101 Test parsing the gitc clientdir without gitc running 100
102 """ 101 def test_gitc_parse_clientdir_no_gitc(self):
103 self.wrapper.GITC_CONFIG_FILE = fixture('missing_gitc_config') 102 """
104 self.assertEqual(self.wrapper.gitc_parse_clientdir('/something'), None) 103 Test parsing the gitc clientdir without gitc running
105 self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/test'), 'test') 104 """
106 105 self.wrapper.GITC_CONFIG_FILE = fixture("missing_gitc_config")
107 def test_gitc_parse_clientdir(self): 106 self.assertEqual(self.wrapper.gitc_parse_clientdir("/something"), None)
108 """ 107 self.assertEqual(
109 Test parsing the gitc clientdir 108 self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test"), "test"
110 """ 109 )
111 self.wrapper.GITC_CONFIG_FILE = fixture('gitc_config') 110
112 self.assertEqual(self.wrapper.gitc_parse_clientdir('/something'), None) 111 def test_gitc_parse_clientdir(self):
113 self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/test'), 'test') 112 """
114 self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/test/'), 'test') 113 Test parsing the gitc clientdir
115 self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/test/extra'), 'test') 114 """
116 self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/test'), 'test') 115 self.wrapper.GITC_CONFIG_FILE = fixture("gitc_config")
117 self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/test/'), 'test') 116 self.assertEqual(self.wrapper.gitc_parse_clientdir("/something"), None)
118 self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/test/extra'), 117 self.assertEqual(
119 'test') 118 self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test"), "test"
120 self.assertEqual(self.wrapper.gitc_parse_clientdir('/gitc/manifest-rw/'), None) 119 )
121 self.assertEqual(self.wrapper.gitc_parse_clientdir('/test/usr/local/google/gitc/'), None) 120 self.assertEqual(
121 self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test/"), "test"
122 )
123 self.assertEqual(
124 self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/test/extra"),
125 "test",
126 )
127 self.assertEqual(
128 self.wrapper.gitc_parse_clientdir(
129 "/test/usr/local/google/gitc/test"
130 ),
131 "test",
132 )
133 self.assertEqual(
134 self.wrapper.gitc_parse_clientdir(
135 "/test/usr/local/google/gitc/test/"
136 ),
137 "test",
138 )
139 self.assertEqual(
140 self.wrapper.gitc_parse_clientdir(
141 "/test/usr/local/google/gitc/test/extra"
142 ),
143 "test",
144 )
145 self.assertEqual(
146 self.wrapper.gitc_parse_clientdir("/gitc/manifest-rw/"), None
147 )
148 self.assertEqual(
149 self.wrapper.gitc_parse_clientdir("/test/usr/local/google/gitc/"),
150 None,
151 )
122 152
123 153
124class SetGitTrace2ParentSid(RepoWrapperTestCase): 154class SetGitTrace2ParentSid(RepoWrapperTestCase):
125 """Check SetGitTrace2ParentSid behavior.""" 155 """Check SetGitTrace2ParentSid behavior."""
126 156
127 KEY = 'GIT_TRACE2_PARENT_SID' 157 KEY = "GIT_TRACE2_PARENT_SID"
128 VALID_FORMAT = re.compile(r'^repo-[0-9]{8}T[0-9]{6}Z-P[0-9a-f]{8}$') 158 VALID_FORMAT = re.compile(r"^repo-[0-9]{8}T[0-9]{6}Z-P[0-9a-f]{8}$")
129 159
130 def test_first_set(self): 160 def test_first_set(self):
131 """Test env var not yet set.""" 161 """Test env var not yet set."""
132 env = {} 162 env = {}
133 self.wrapper.SetGitTrace2ParentSid(env) 163 self.wrapper.SetGitTrace2ParentSid(env)
134 self.assertIn(self.KEY, env) 164 self.assertIn(self.KEY, env)
135 value = env[self.KEY] 165 value = env[self.KEY]
136 self.assertRegex(value, self.VALID_FORMAT) 166 self.assertRegex(value, self.VALID_FORMAT)
137 167
138 def test_append(self): 168 def test_append(self):
139 """Test env var is appended.""" 169 """Test env var is appended."""
140 env = {self.KEY: 'pfx'} 170 env = {self.KEY: "pfx"}
141 self.wrapper.SetGitTrace2ParentSid(env) 171 self.wrapper.SetGitTrace2ParentSid(env)
142 self.assertIn(self.KEY, env) 172 self.assertIn(self.KEY, env)
143 value = env[self.KEY] 173 value = env[self.KEY]
144 self.assertTrue(value.startswith('pfx/')) 174 self.assertTrue(value.startswith("pfx/"))
145 self.assertRegex(value[4:], self.VALID_FORMAT) 175 self.assertRegex(value[4:], self.VALID_FORMAT)
146 176
147 def test_global_context(self): 177 def test_global_context(self):
148 """Check os.environ gets updated by default.""" 178 """Check os.environ gets updated by default."""
149 os.environ.pop(self.KEY, None) 179 os.environ.pop(self.KEY, None)
150 self.wrapper.SetGitTrace2ParentSid() 180 self.wrapper.SetGitTrace2ParentSid()
151 self.assertIn(self.KEY, os.environ) 181 self.assertIn(self.KEY, os.environ)
152 value = os.environ[self.KEY] 182 value = os.environ[self.KEY]
153 self.assertRegex(value, self.VALID_FORMAT) 183 self.assertRegex(value, self.VALID_FORMAT)
154 184
155 185
156class RunCommand(RepoWrapperTestCase): 186class RunCommand(RepoWrapperTestCase):
157 """Check run_command behavior.""" 187 """Check run_command behavior."""
158 188
159 def test_capture(self): 189 def test_capture(self):
160 """Check capture_output handling.""" 190 """Check capture_output handling."""
161 ret = self.wrapper.run_command(['echo', 'hi'], capture_output=True) 191 ret = self.wrapper.run_command(["echo", "hi"], capture_output=True)
162 # echo command appends OS specific linesep, but on Windows + Git Bash 192 # echo command appends OS specific linesep, but on Windows + Git Bash
163 # we get UNIX ending, so we allow both. 193 # we get UNIX ending, so we allow both.
164 self.assertIn(ret.stdout, ['hi' + os.linesep, 'hi\n']) 194 self.assertIn(ret.stdout, ["hi" + os.linesep, "hi\n"])
165 195
166 def test_check(self): 196 def test_check(self):
167 """Check check handling.""" 197 """Check check handling."""
168 self.wrapper.run_command(['true'], check=False) 198 self.wrapper.run_command(["true"], check=False)
169 self.wrapper.run_command(['true'], check=True) 199 self.wrapper.run_command(["true"], check=True)
170 self.wrapper.run_command(['false'], check=False) 200 self.wrapper.run_command(["false"], check=False)
171 with self.assertRaises(self.wrapper.RunError): 201 with self.assertRaises(self.wrapper.RunError):
172 self.wrapper.run_command(['false'], check=True) 202 self.wrapper.run_command(["false"], check=True)
173 203
174 204
175class RunGit(RepoWrapperTestCase): 205class RunGit(RepoWrapperTestCase):
176 """Check run_git behavior.""" 206 """Check run_git behavior."""
177 207
178 def test_capture(self): 208 def test_capture(self):
179 """Check capture_output handling.""" 209 """Check capture_output handling."""
180 ret = self.wrapper.run_git('--version') 210 ret = self.wrapper.run_git("--version")
181 self.assertIn('git', ret.stdout) 211 self.assertIn("git", ret.stdout)
182 212
183 def test_check(self): 213 def test_check(self):
184 """Check check handling.""" 214 """Check check handling."""
185 with self.assertRaises(self.wrapper.CloneFailure): 215 with self.assertRaises(self.wrapper.CloneFailure):
186 self.wrapper.run_git('--version-asdfasdf') 216 self.wrapper.run_git("--version-asdfasdf")
187 self.wrapper.run_git('--version-asdfasdf', check=False) 217 self.wrapper.run_git("--version-asdfasdf", check=False)
188 218
189 219
190class ParseGitVersion(RepoWrapperTestCase): 220class ParseGitVersion(RepoWrapperTestCase):
191 """Check ParseGitVersion behavior.""" 221 """Check ParseGitVersion behavior."""
192 222
193 def test_autoload(self): 223 def test_autoload(self):
194 """Check we can load the version from the live git.""" 224 """Check we can load the version from the live git."""
195 ret = self.wrapper.ParseGitVersion() 225 ret = self.wrapper.ParseGitVersion()
196 self.assertIsNotNone(ret) 226 self.assertIsNotNone(ret)
197 227
198 def test_bad_ver(self): 228 def test_bad_ver(self):
199 """Check handling of bad git versions.""" 229 """Check handling of bad git versions."""
200 ret = self.wrapper.ParseGitVersion(ver_str='asdf') 230 ret = self.wrapper.ParseGitVersion(ver_str="asdf")
201 self.assertIsNone(ret) 231 self.assertIsNone(ret)
202 232
203 def test_normal_ver(self): 233 def test_normal_ver(self):
204 """Check handling of normal git versions.""" 234 """Check handling of normal git versions."""
205 ret = self.wrapper.ParseGitVersion(ver_str='git version 2.25.1') 235 ret = self.wrapper.ParseGitVersion(ver_str="git version 2.25.1")
206 self.assertEqual(2, ret.major) 236 self.assertEqual(2, ret.major)
207 self.assertEqual(25, ret.minor) 237 self.assertEqual(25, ret.minor)
208 self.assertEqual(1, ret.micro) 238 self.assertEqual(1, ret.micro)
209 self.assertEqual('2.25.1', ret.full) 239 self.assertEqual("2.25.1", ret.full)
210 240
211 def test_extended_ver(self): 241 def test_extended_ver(self):
212 """Check handling of extended distro git versions.""" 242 """Check handling of extended distro git versions."""
213 ret = self.wrapper.ParseGitVersion( 243 ret = self.wrapper.ParseGitVersion(
214 ver_str='git version 1.30.50.696.g5e7596f4ac-goog') 244 ver_str="git version 1.30.50.696.g5e7596f4ac-goog"
215 self.assertEqual(1, ret.major) 245 )
216 self.assertEqual(30, ret.minor) 246 self.assertEqual(1, ret.major)
217 self.assertEqual(50, ret.micro) 247 self.assertEqual(30, ret.minor)
218 self.assertEqual('1.30.50.696.g5e7596f4ac-goog', ret.full) 248 self.assertEqual(50, ret.micro)
249 self.assertEqual("1.30.50.696.g5e7596f4ac-goog", ret.full)
219 250
220 251
221class CheckGitVersion(RepoWrapperTestCase): 252class CheckGitVersion(RepoWrapperTestCase):
222 """Check _CheckGitVersion behavior.""" 253 """Check _CheckGitVersion behavior."""
223 254
224 def test_unknown(self): 255 def test_unknown(self):
225 """Unknown versions should abort.""" 256 """Unknown versions should abort."""
226 with mock.patch.object(self.wrapper, 'ParseGitVersion', return_value=None): 257 with mock.patch.object(
227 with self.assertRaises(self.wrapper.CloneFailure): 258 self.wrapper, "ParseGitVersion", return_value=None
228 self.wrapper._CheckGitVersion() 259 ):
229 260 with self.assertRaises(self.wrapper.CloneFailure):
230 def test_old(self): 261 self.wrapper._CheckGitVersion()
231 """Old versions should abort.""" 262
232 with mock.patch.object( 263 def test_old(self):
233 self.wrapper, 'ParseGitVersion', 264 """Old versions should abort."""
234 return_value=self.wrapper.GitVersion(1, 0, 0, '1.0.0')): 265 with mock.patch.object(
235 with self.assertRaises(self.wrapper.CloneFailure): 266 self.wrapper,
236 self.wrapper._CheckGitVersion() 267 "ParseGitVersion",
237 268 return_value=self.wrapper.GitVersion(1, 0, 0, "1.0.0"),
238 def test_new(self): 269 ):
239 """Newer versions should run fine.""" 270 with self.assertRaises(self.wrapper.CloneFailure):
240 with mock.patch.object( 271 self.wrapper._CheckGitVersion()
241 self.wrapper, 'ParseGitVersion', 272
242 return_value=self.wrapper.GitVersion(100, 0, 0, '100.0.0')): 273 def test_new(self):
243 self.wrapper._CheckGitVersion() 274 """Newer versions should run fine."""
275 with mock.patch.object(
276 self.wrapper,
277 "ParseGitVersion",
278 return_value=self.wrapper.GitVersion(100, 0, 0, "100.0.0"),
279 ):
280 self.wrapper._CheckGitVersion()
244 281
245 282
246class Requirements(RepoWrapperTestCase): 283class Requirements(RepoWrapperTestCase):
247 """Check Requirements handling.""" 284 """Check Requirements handling."""
248 285
249 def test_missing_file(self): 286 def test_missing_file(self):
250 """Don't crash if the file is missing (old version).""" 287 """Don't crash if the file is missing (old version)."""
251 testdir = os.path.dirname(os.path.realpath(__file__)) 288 testdir = os.path.dirname(os.path.realpath(__file__))
252 self.assertIsNone(self.wrapper.Requirements.from_dir(testdir)) 289 self.assertIsNone(self.wrapper.Requirements.from_dir(testdir))
253 self.assertIsNone(self.wrapper.Requirements.from_file( 290 self.assertIsNone(
254 os.path.join(testdir, 'xxxxxxxxxxxxxxxxxxxxxxxx'))) 291 self.wrapper.Requirements.from_file(
255 292 os.path.join(testdir, "xxxxxxxxxxxxxxxxxxxxxxxx")
256 def test_corrupt_data(self): 293 )
257 """If the file can't be parsed, don't blow up.""" 294 )
258 self.assertIsNone(self.wrapper.Requirements.from_file(__file__)) 295
259 self.assertIsNone(self.wrapper.Requirements.from_data(b'x')) 296 def test_corrupt_data(self):
260 297 """If the file can't be parsed, don't blow up."""
261 def test_valid_data(self): 298 self.assertIsNone(self.wrapper.Requirements.from_file(__file__))
262 """Make sure we can parse the file we ship.""" 299 self.assertIsNone(self.wrapper.Requirements.from_data(b"x"))
263 self.assertIsNotNone(self.wrapper.Requirements.from_data(b'{}')) 300
264 rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) 301 def test_valid_data(self):
265 self.assertIsNotNone(self.wrapper.Requirements.from_dir(rootdir)) 302 """Make sure we can parse the file we ship."""
266 self.assertIsNotNone(self.wrapper.Requirements.from_file(os.path.join( 303 self.assertIsNotNone(self.wrapper.Requirements.from_data(b"{}"))
267 rootdir, 'requirements.json'))) 304 rootdir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
268 305 self.assertIsNotNone(self.wrapper.Requirements.from_dir(rootdir))
269 def test_format_ver(self): 306 self.assertIsNotNone(
270 """Check format_ver can format.""" 307 self.wrapper.Requirements.from_file(
271 self.assertEqual('1.2.3', self.wrapper.Requirements._format_ver((1, 2, 3))) 308 os.path.join(rootdir, "requirements.json")
272 self.assertEqual('1', self.wrapper.Requirements._format_ver([1])) 309 )
273 310 )
274 def test_assert_all_unknown(self): 311
275 """Check assert_all works with incompatible file.""" 312 def test_format_ver(self):
276 reqs = self.wrapper.Requirements({}) 313 """Check format_ver can format."""
277 reqs.assert_all() 314 self.assertEqual(
278 315 "1.2.3", self.wrapper.Requirements._format_ver((1, 2, 3))
279 def test_assert_all_new_repo(self): 316 )
280 """Check assert_all accepts new enough repo.""" 317 self.assertEqual("1", self.wrapper.Requirements._format_ver([1]))
281 reqs = self.wrapper.Requirements({'repo': {'hard': [1, 0]}}) 318
282 reqs.assert_all() 319 def test_assert_all_unknown(self):
283 320 """Check assert_all works with incompatible file."""
284 def test_assert_all_old_repo(self): 321 reqs = self.wrapper.Requirements({})
285 """Check assert_all rejects old repo.""" 322 reqs.assert_all()
286 reqs = self.wrapper.Requirements({'repo': {'hard': [99999, 0]}}) 323
287 with self.assertRaises(SystemExit): 324 def test_assert_all_new_repo(self):
288 reqs.assert_all() 325 """Check assert_all accepts new enough repo."""
289 326 reqs = self.wrapper.Requirements({"repo": {"hard": [1, 0]}})
290 def test_assert_all_new_python(self): 327 reqs.assert_all()
291 """Check assert_all accepts new enough python.""" 328
292 reqs = self.wrapper.Requirements({'python': {'hard': sys.version_info}}) 329 def test_assert_all_old_repo(self):
293 reqs.assert_all() 330 """Check assert_all rejects old repo."""
294 331 reqs = self.wrapper.Requirements({"repo": {"hard": [99999, 0]}})
295 def test_assert_all_old_python(self): 332 with self.assertRaises(SystemExit):
296 """Check assert_all rejects old python.""" 333 reqs.assert_all()
297 reqs = self.wrapper.Requirements({'python': {'hard': [99999, 0]}}) 334
298 with self.assertRaises(SystemExit): 335 def test_assert_all_new_python(self):
299 reqs.assert_all() 336 """Check assert_all accepts new enough python."""
300 337 reqs = self.wrapper.Requirements({"python": {"hard": sys.version_info}})
301 def test_assert_ver_unknown(self): 338 reqs.assert_all()
302 """Check assert_ver works with incompatible file.""" 339
303 reqs = self.wrapper.Requirements({}) 340 def test_assert_all_old_python(self):
304 reqs.assert_ver('xxx', (1, 0)) 341 """Check assert_all rejects old python."""
305 342 reqs = self.wrapper.Requirements({"python": {"hard": [99999, 0]}})
306 def test_assert_ver_new(self): 343 with self.assertRaises(SystemExit):
307 """Check assert_ver allows new enough versions.""" 344 reqs.assert_all()
308 reqs = self.wrapper.Requirements({'git': {'hard': [1, 0], 'soft': [2, 0]}}) 345
309 reqs.assert_ver('git', (1, 0)) 346 def test_assert_ver_unknown(self):
310 reqs.assert_ver('git', (1, 5)) 347 """Check assert_ver works with incompatible file."""
311 reqs.assert_ver('git', (2, 0)) 348 reqs = self.wrapper.Requirements({})
312 reqs.assert_ver('git', (2, 5)) 349 reqs.assert_ver("xxx", (1, 0))
313 350
314 def test_assert_ver_old(self): 351 def test_assert_ver_new(self):
315 """Check assert_ver rejects old versions.""" 352 """Check assert_ver allows new enough versions."""
316 reqs = self.wrapper.Requirements({'git': {'hard': [1, 0], 'soft': [2, 0]}}) 353 reqs = self.wrapper.Requirements(
317 with self.assertRaises(SystemExit): 354 {"git": {"hard": [1, 0], "soft": [2, 0]}}
318 reqs.assert_ver('git', (0, 5)) 355 )
356 reqs.assert_ver("git", (1, 0))
357 reqs.assert_ver("git", (1, 5))
358 reqs.assert_ver("git", (2, 0))
359 reqs.assert_ver("git", (2, 5))
360
361 def test_assert_ver_old(self):
362 """Check assert_ver rejects old versions."""
363 reqs = self.wrapper.Requirements(
364 {"git": {"hard": [1, 0], "soft": [2, 0]}}
365 )
366 with self.assertRaises(SystemExit):
367 reqs.assert_ver("git", (0, 5))
319 368
320 369
321class NeedSetupGnuPG(RepoWrapperTestCase): 370class NeedSetupGnuPG(RepoWrapperTestCase):
322 """Check NeedSetupGnuPG behavior.""" 371 """Check NeedSetupGnuPG behavior."""
323 372
324 def test_missing_dir(self): 373 def test_missing_dir(self):
325 """The ~/.repoconfig tree doesn't exist yet.""" 374 """The ~/.repoconfig tree doesn't exist yet."""
326 with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir: 375 with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
327 self.wrapper.home_dot_repo = os.path.join(tempdir, 'foo') 376 self.wrapper.home_dot_repo = os.path.join(tempdir, "foo")
328 self.assertTrue(self.wrapper.NeedSetupGnuPG()) 377 self.assertTrue(self.wrapper.NeedSetupGnuPG())
329 378
330 def test_missing_keyring(self): 379 def test_missing_keyring(self):
331 """The keyring-version file doesn't exist yet.""" 380 """The keyring-version file doesn't exist yet."""
332 with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir: 381 with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
333 self.wrapper.home_dot_repo = tempdir 382 self.wrapper.home_dot_repo = tempdir
334 self.assertTrue(self.wrapper.NeedSetupGnuPG()) 383 self.assertTrue(self.wrapper.NeedSetupGnuPG())
335 384
336 def test_empty_keyring(self): 385 def test_empty_keyring(self):
337 """The keyring-version file exists, but is empty.""" 386 """The keyring-version file exists, but is empty."""
338 with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir: 387 with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
339 self.wrapper.home_dot_repo = tempdir 388 self.wrapper.home_dot_repo = tempdir
340 with open(os.path.join(tempdir, 'keyring-version'), 'w'): 389 with open(os.path.join(tempdir, "keyring-version"), "w"):
341 pass 390 pass
342 self.assertTrue(self.wrapper.NeedSetupGnuPG()) 391 self.assertTrue(self.wrapper.NeedSetupGnuPG())
343 392
344 def test_old_keyring(self): 393 def test_old_keyring(self):
345 """The keyring-version file exists, but it's old.""" 394 """The keyring-version file exists, but it's old."""
346 with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir: 395 with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
347 self.wrapper.home_dot_repo = tempdir 396 self.wrapper.home_dot_repo = tempdir
348 with open(os.path.join(tempdir, 'keyring-version'), 'w') as fp: 397 with open(os.path.join(tempdir, "keyring-version"), "w") as fp:
349 fp.write('1.0\n') 398 fp.write("1.0\n")
350 self.assertTrue(self.wrapper.NeedSetupGnuPG()) 399 self.assertTrue(self.wrapper.NeedSetupGnuPG())
351 400
352 def test_new_keyring(self): 401 def test_new_keyring(self):
353 """The keyring-version file exists, and is up-to-date.""" 402 """The keyring-version file exists, and is up-to-date."""
354 with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir: 403 with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
355 self.wrapper.home_dot_repo = tempdir 404 self.wrapper.home_dot_repo = tempdir
356 with open(os.path.join(tempdir, 'keyring-version'), 'w') as fp: 405 with open(os.path.join(tempdir, "keyring-version"), "w") as fp:
357 fp.write('1000.0\n') 406 fp.write("1000.0\n")
358 self.assertFalse(self.wrapper.NeedSetupGnuPG()) 407 self.assertFalse(self.wrapper.NeedSetupGnuPG())
359 408
360 409
361class SetupGnuPG(RepoWrapperTestCase): 410class SetupGnuPG(RepoWrapperTestCase):
362 """Check SetupGnuPG behavior.""" 411 """Check SetupGnuPG behavior."""
363 412
364 def test_full(self): 413 def test_full(self):
365 """Make sure it works completely.""" 414 """Make sure it works completely."""
366 with tempfile.TemporaryDirectory(prefix='repo-tests') as tempdir: 415 with tempfile.TemporaryDirectory(prefix="repo-tests") as tempdir:
367 self.wrapper.home_dot_repo = tempdir 416 self.wrapper.home_dot_repo = tempdir
368 self.wrapper.gpg_dir = os.path.join(self.wrapper.home_dot_repo, 'gnupg') 417 self.wrapper.gpg_dir = os.path.join(
369 self.assertTrue(self.wrapper.SetupGnuPG(True)) 418 self.wrapper.home_dot_repo, "gnupg"
370 with open(os.path.join(tempdir, 'keyring-version'), 'r') as fp: 419 )
371 data = fp.read() 420 self.assertTrue(self.wrapper.SetupGnuPG(True))
372 self.assertEqual('.'.join(str(x) for x in self.wrapper.KEYRING_VERSION), 421 with open(os.path.join(tempdir, "keyring-version"), "r") as fp:
373 data.strip()) 422 data = fp.read()
423 self.assertEqual(
424 ".".join(str(x) for x in self.wrapper.KEYRING_VERSION),
425 data.strip(),
426 )
374 427
375 428
376class VerifyRev(RepoWrapperTestCase): 429class VerifyRev(RepoWrapperTestCase):
377 """Check verify_rev behavior.""" 430 """Check verify_rev behavior."""
378 431
379 def test_verify_passes(self): 432 def test_verify_passes(self):
380 """Check when we have a valid signed tag.""" 433 """Check when we have a valid signed tag."""
381 desc_result = self.wrapper.RunResult(0, 'v1.0\n', '') 434 desc_result = self.wrapper.RunResult(0, "v1.0\n", "")
382 gpg_result = self.wrapper.RunResult(0, '', '') 435 gpg_result = self.wrapper.RunResult(0, "", "")
383 with mock.patch.object(self.wrapper, 'run_git', 436 with mock.patch.object(
384 side_effect=(desc_result, gpg_result)): 437 self.wrapper, "run_git", side_effect=(desc_result, gpg_result)
385 ret = self.wrapper.verify_rev('/', 'refs/heads/stable', '1234', True) 438 ):
386 self.assertEqual('v1.0^0', ret) 439 ret = self.wrapper.verify_rev(
387 440 "/", "refs/heads/stable", "1234", True
388 def test_unsigned_commit(self): 441 )
389 """Check we fall back to signed tag when we have an unsigned commit.""" 442 self.assertEqual("v1.0^0", ret)
390 desc_result = self.wrapper.RunResult(0, 'v1.0-10-g1234\n', '') 443
391 gpg_result = self.wrapper.RunResult(0, '', '') 444 def test_unsigned_commit(self):
392 with mock.patch.object(self.wrapper, 'run_git', 445 """Check we fall back to signed tag when we have an unsigned commit."""
393 side_effect=(desc_result, gpg_result)): 446 desc_result = self.wrapper.RunResult(0, "v1.0-10-g1234\n", "")
394 ret = self.wrapper.verify_rev('/', 'refs/heads/stable', '1234', True) 447 gpg_result = self.wrapper.RunResult(0, "", "")
395 self.assertEqual('v1.0^0', ret) 448 with mock.patch.object(
396 449 self.wrapper, "run_git", side_effect=(desc_result, gpg_result)
397 def test_verify_fails(self): 450 ):
398 """Check we fall back to signed tag when we have an unsigned commit.""" 451 ret = self.wrapper.verify_rev(
399 desc_result = self.wrapper.RunResult(0, 'v1.0-10-g1234\n', '') 452 "/", "refs/heads/stable", "1234", True
400 gpg_result = Exception 453 )
401 with mock.patch.object(self.wrapper, 'run_git', 454 self.assertEqual("v1.0^0", ret)
402 side_effect=(desc_result, gpg_result)): 455
403 with self.assertRaises(Exception): 456 def test_verify_fails(self):
404 self.wrapper.verify_rev('/', 'refs/heads/stable', '1234', True) 457 """Check we fall back to signed tag when we have an unsigned commit."""
458 desc_result = self.wrapper.RunResult(0, "v1.0-10-g1234\n", "")
459 gpg_result = Exception
460 with mock.patch.object(
461 self.wrapper, "run_git", side_effect=(desc_result, gpg_result)
462 ):
463 with self.assertRaises(Exception):
464 self.wrapper.verify_rev("/", "refs/heads/stable", "1234", True)
405 465
406 466
407class GitCheckoutTestCase(RepoWrapperTestCase): 467class GitCheckoutTestCase(RepoWrapperTestCase):
408 """Tests that use a real/small git checkout.""" 468 """Tests that use a real/small git checkout."""
409 469
410 GIT_DIR = None 470 GIT_DIR = None
411 REV_LIST = None 471 REV_LIST = None
412 472
413 @classmethod 473 @classmethod
414 def setUpClass(cls): 474 def setUpClass(cls):
415 # Create a repo to operate on, but do it once per-class. 475 # Create a repo to operate on, but do it once per-class.
416 cls.tempdirobj = tempfile.TemporaryDirectory(prefix='repo-rev-tests') 476 cls.tempdirobj = tempfile.TemporaryDirectory(prefix="repo-rev-tests")
417 cls.GIT_DIR = cls.tempdirobj.name 477 cls.GIT_DIR = cls.tempdirobj.name
418 run_git = wrapper.Wrapper().run_git 478 run_git = wrapper.Wrapper().run_git
419 479
420 remote = os.path.join(cls.GIT_DIR, 'remote') 480 remote = os.path.join(cls.GIT_DIR, "remote")
421 os.mkdir(remote) 481 os.mkdir(remote)
422 482
423 # Tests need to assume, that main is default branch at init, 483 # Tests need to assume, that main is default branch at init,
424 # which is not supported in config until 2.28. 484 # which is not supported in config until 2.28.
425 if git_command.git_require((2, 28, 0)): 485 if git_command.git_require((2, 28, 0)):
426 initstr = '--initial-branch=main' 486 initstr = "--initial-branch=main"
427 else: 487 else:
428 # Use template dir for init. 488 # Use template dir for init.
429 templatedir = tempfile.mkdtemp(prefix='.test-template') 489 templatedir = tempfile.mkdtemp(prefix=".test-template")
430 with open(os.path.join(templatedir, 'HEAD'), 'w') as fp: 490 with open(os.path.join(templatedir, "HEAD"), "w") as fp:
431 fp.write('ref: refs/heads/main\n') 491 fp.write("ref: refs/heads/main\n")
432 initstr = '--template=' + templatedir 492 initstr = "--template=" + templatedir
433 493
434 run_git('init', initstr, cwd=remote) 494 run_git("init", initstr, cwd=remote)
435 run_git('commit', '--allow-empty', '-minit', cwd=remote) 495 run_git("commit", "--allow-empty", "-minit", cwd=remote)
436 run_git('branch', 'stable', cwd=remote) 496 run_git("branch", "stable", cwd=remote)
437 run_git('tag', 'v1.0', cwd=remote) 497 run_git("tag", "v1.0", cwd=remote)
438 run_git('commit', '--allow-empty', '-m2nd commit', cwd=remote) 498 run_git("commit", "--allow-empty", "-m2nd commit", cwd=remote)
439 cls.REV_LIST = run_git('rev-list', 'HEAD', cwd=remote).stdout.splitlines() 499 cls.REV_LIST = run_git(
440 500 "rev-list", "HEAD", cwd=remote
441 run_git('init', cwd=cls.GIT_DIR) 501 ).stdout.splitlines()
442 run_git('fetch', remote, '+refs/heads/*:refs/remotes/origin/*', cwd=cls.GIT_DIR) 502
443 503 run_git("init", cwd=cls.GIT_DIR)
444 @classmethod 504 run_git(
445 def tearDownClass(cls): 505 "fetch",
446 if not cls.tempdirobj: 506 remote,
447 return 507 "+refs/heads/*:refs/remotes/origin/*",
448 508 cwd=cls.GIT_DIR,
449 cls.tempdirobj.cleanup() 509 )
510
511 @classmethod
512 def tearDownClass(cls):
513 if not cls.tempdirobj:
514 return
515
516 cls.tempdirobj.cleanup()
450 517
451 518
452class ResolveRepoRev(GitCheckoutTestCase): 519class ResolveRepoRev(GitCheckoutTestCase):
453 """Check resolve_repo_rev behavior.""" 520 """Check resolve_repo_rev behavior."""
454 521
455 def test_explicit_branch(self): 522 def test_explicit_branch(self):
456 """Check refs/heads/branch argument.""" 523 """Check refs/heads/branch argument."""
457 rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/heads/stable') 524 rrev, lrev = self.wrapper.resolve_repo_rev(
458 self.assertEqual('refs/heads/stable', rrev) 525 self.GIT_DIR, "refs/heads/stable"
459 self.assertEqual(self.REV_LIST[1], lrev) 526 )
460 527 self.assertEqual("refs/heads/stable", rrev)
461 with self.assertRaises(self.wrapper.CloneFailure): 528 self.assertEqual(self.REV_LIST[1], lrev)
462 self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/heads/unknown') 529
463 530 with self.assertRaises(self.wrapper.CloneFailure):
464 def test_explicit_tag(self): 531 self.wrapper.resolve_repo_rev(self.GIT_DIR, "refs/heads/unknown")
465 """Check refs/tags/tag argument.""" 532
466 rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/tags/v1.0') 533 def test_explicit_tag(self):
467 self.assertEqual('refs/tags/v1.0', rrev) 534 """Check refs/tags/tag argument."""
468 self.assertEqual(self.REV_LIST[1], lrev) 535 rrev, lrev = self.wrapper.resolve_repo_rev(
469 536 self.GIT_DIR, "refs/tags/v1.0"
470 with self.assertRaises(self.wrapper.CloneFailure): 537 )
471 self.wrapper.resolve_repo_rev(self.GIT_DIR, 'refs/tags/unknown') 538 self.assertEqual("refs/tags/v1.0", rrev)
472 539 self.assertEqual(self.REV_LIST[1], lrev)
473 def test_branch_name(self): 540
474 """Check branch argument.""" 541 with self.assertRaises(self.wrapper.CloneFailure):
475 rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'stable') 542 self.wrapper.resolve_repo_rev(self.GIT_DIR, "refs/tags/unknown")
476 self.assertEqual('refs/heads/stable', rrev) 543
477 self.assertEqual(self.REV_LIST[1], lrev) 544 def test_branch_name(self):
478 545 """Check branch argument."""
479 rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'main') 546 rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, "stable")
480 self.assertEqual('refs/heads/main', rrev) 547 self.assertEqual("refs/heads/stable", rrev)
481 self.assertEqual(self.REV_LIST[0], lrev) 548 self.assertEqual(self.REV_LIST[1], lrev)
482 549
483 def test_tag_name(self): 550 rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, "main")
484 """Check tag argument.""" 551 self.assertEqual("refs/heads/main", rrev)
485 rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, 'v1.0') 552 self.assertEqual(self.REV_LIST[0], lrev)
486 self.assertEqual('refs/tags/v1.0', rrev) 553
487 self.assertEqual(self.REV_LIST[1], lrev) 554 def test_tag_name(self):
488 555 """Check tag argument."""
489 def test_full_commit(self): 556 rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, "v1.0")
490 """Check specific commit argument.""" 557 self.assertEqual("refs/tags/v1.0", rrev)
491 commit = self.REV_LIST[0] 558 self.assertEqual(self.REV_LIST[1], lrev)
492 rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, commit) 559
493 self.assertEqual(commit, rrev) 560 def test_full_commit(self):
494 self.assertEqual(commit, lrev) 561 """Check specific commit argument."""
495 562 commit = self.REV_LIST[0]
496 def test_partial_commit(self): 563 rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, commit)
497 """Check specific (partial) commit argument.""" 564 self.assertEqual(commit, rrev)
498 commit = self.REV_LIST[0][0:20] 565 self.assertEqual(commit, lrev)
499 rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, commit) 566
500 self.assertEqual(self.REV_LIST[0], rrev) 567 def test_partial_commit(self):
501 self.assertEqual(self.REV_LIST[0], lrev) 568 """Check specific (partial) commit argument."""
502 569 commit = self.REV_LIST[0][0:20]
503 def test_unknown(self): 570 rrev, lrev = self.wrapper.resolve_repo_rev(self.GIT_DIR, commit)
504 """Check unknown ref/commit argument.""" 571 self.assertEqual(self.REV_LIST[0], rrev)
505 with self.assertRaises(self.wrapper.CloneFailure): 572 self.assertEqual(self.REV_LIST[0], lrev)
506 self.wrapper.resolve_repo_rev(self.GIT_DIR, 'boooooooya') 573
574 def test_unknown(self):
575 """Check unknown ref/commit argument."""
576 with self.assertRaises(self.wrapper.CloneFailure):
577 self.wrapper.resolve_repo_rev(self.GIT_DIR, "boooooooya")
507 578
508 579
509class CheckRepoVerify(RepoWrapperTestCase): 580class CheckRepoVerify(RepoWrapperTestCase):
510 """Check check_repo_verify behavior.""" 581 """Check check_repo_verify behavior."""
511 582
512 def test_no_verify(self): 583 def test_no_verify(self):
513 """Always fail with --no-repo-verify.""" 584 """Always fail with --no-repo-verify."""
514 self.assertFalse(self.wrapper.check_repo_verify(False)) 585 self.assertFalse(self.wrapper.check_repo_verify(False))
515 586
516 def test_gpg_initialized(self): 587 def test_gpg_initialized(self):
517 """Should pass if gpg is setup already.""" 588 """Should pass if gpg is setup already."""
518 with mock.patch.object(self.wrapper, 'NeedSetupGnuPG', return_value=False): 589 with mock.patch.object(
519 self.assertTrue(self.wrapper.check_repo_verify(True)) 590 self.wrapper, "NeedSetupGnuPG", return_value=False
591 ):
592 self.assertTrue(self.wrapper.check_repo_verify(True))
520 593
521 def test_need_gpg_setup(self): 594 def test_need_gpg_setup(self):
522 """Should pass/fail based on gpg setup.""" 595 """Should pass/fail based on gpg setup."""
523 with mock.patch.object(self.wrapper, 'NeedSetupGnuPG', return_value=True): 596 with mock.patch.object(
524 with mock.patch.object(self.wrapper, 'SetupGnuPG') as m: 597 self.wrapper, "NeedSetupGnuPG", return_value=True
525 m.return_value = True 598 ):
526 self.assertTrue(self.wrapper.check_repo_verify(True)) 599 with mock.patch.object(self.wrapper, "SetupGnuPG") as m:
600 m.return_value = True
601 self.assertTrue(self.wrapper.check_repo_verify(True))
527 602
528 m.return_value = False 603 m.return_value = False
529 self.assertFalse(self.wrapper.check_repo_verify(True)) 604 self.assertFalse(self.wrapper.check_repo_verify(True))
530 605
531 606
532class CheckRepoRev(GitCheckoutTestCase): 607class CheckRepoRev(GitCheckoutTestCase):
533 """Check check_repo_rev behavior.""" 608 """Check check_repo_rev behavior."""
534 609
535 def test_verify_works(self): 610 def test_verify_works(self):
536 """Should pass when verification passes.""" 611 """Should pass when verification passes."""
537 with mock.patch.object(self.wrapper, 'check_repo_verify', return_value=True): 612 with mock.patch.object(
538 with mock.patch.object(self.wrapper, 'verify_rev', return_value='12345'): 613 self.wrapper, "check_repo_verify", return_value=True
539 rrev, lrev = self.wrapper.check_repo_rev(self.GIT_DIR, 'stable') 614 ):
540 self.assertEqual('refs/heads/stable', rrev) 615 with mock.patch.object(
541 self.assertEqual('12345', lrev) 616 self.wrapper, "verify_rev", return_value="12345"
542 617 ):
543 def test_verify_fails(self): 618 rrev, lrev = self.wrapper.check_repo_rev(self.GIT_DIR, "stable")
544 """Should fail when verification fails.""" 619 self.assertEqual("refs/heads/stable", rrev)
545 with mock.patch.object(self.wrapper, 'check_repo_verify', return_value=True): 620 self.assertEqual("12345", lrev)
546 with mock.patch.object(self.wrapper, 'verify_rev', side_effect=Exception): 621
547 with self.assertRaises(Exception): 622 def test_verify_fails(self):
548 self.wrapper.check_repo_rev(self.GIT_DIR, 'stable') 623 """Should fail when verification fails."""
549 624 with mock.patch.object(
550 def test_verify_ignore(self): 625 self.wrapper, "check_repo_verify", return_value=True
551 """Should pass when verification is disabled.""" 626 ):
552 with mock.patch.object(self.wrapper, 'verify_rev', side_effect=Exception): 627 with mock.patch.object(
553 rrev, lrev = self.wrapper.check_repo_rev(self.GIT_DIR, 'stable', repo_verify=False) 628 self.wrapper, "verify_rev", side_effect=Exception
554 self.assertEqual('refs/heads/stable', rrev) 629 ):
555 self.assertEqual(self.REV_LIST[1], lrev) 630 with self.assertRaises(Exception):
631 self.wrapper.check_repo_rev(self.GIT_DIR, "stable")
632
633 def test_verify_ignore(self):
634 """Should pass when verification is disabled."""
635 with mock.patch.object(
636 self.wrapper, "verify_rev", side_effect=Exception
637 ):
638 rrev, lrev = self.wrapper.check_repo_rev(
639 self.GIT_DIR, "stable", repo_verify=False
640 )
641 self.assertEqual("refs/heads/stable", rrev)
642 self.assertEqual(self.REV_LIST[1], lrev)
diff --git a/tox.ini b/tox.ini
index 8d3cc43c..2575a713 100644
--- a/tox.ini
+++ b/tox.ini
@@ -27,6 +27,7 @@ python =
27 27
28[testenv] 28[testenv]
29deps = 29deps =
30 black
30 pytest 31 pytest
31 pytest-timeout 32 pytest-timeout
32commands = {envpython} run_tests {posargs} 33commands = {envpython} run_tests {posargs}
diff --git a/wrapper.py b/wrapper.py
index 3099ad5d..d8823368 100644
--- a/wrapper.py
+++ b/wrapper.py
@@ -19,14 +19,14 @@ import os
19 19
20 20
21def WrapperPath(): 21def WrapperPath():
22 return os.path.join(os.path.dirname(__file__), 'repo') 22 return os.path.join(os.path.dirname(__file__), "repo")
23 23
24 24
25@functools.lru_cache(maxsize=None) 25@functools.lru_cache(maxsize=None)
26def Wrapper(): 26def Wrapper():
27 modname = 'wrapper' 27 modname = "wrapper"
28 loader = importlib.machinery.SourceFileLoader(modname, WrapperPath()) 28 loader = importlib.machinery.SourceFileLoader(modname, WrapperPath())
29 spec = importlib.util.spec_from_loader(modname, loader) 29 spec = importlib.util.spec_from_loader(modname, loader)
30 module = importlib.util.module_from_spec(spec) 30 module = importlib.util.module_from_spec(spec)
31 loader.exec_module(module) 31 loader.exec_module(module)
32 return module 32 return module